diff --git a/Makefile b/Makefile
index 5d5326c..7dc99fe 100644
--- a/Makefile
+++ b/Makefile
@@ -6,5 +6,5 @@ push:
 run:
 	dbt deps --profiles-dir=. --project-dir=.
 	#dbt run --profiles-dir=. --project-dir=. --full-refresh
-	dbt run --profiles-dir=. --project-dir=. --full-refresh --select orders
+	dbt run --profiles-dir=. --project-dir=. --full-refresh --select order_deliveries
diff --git a/dbt-env/bin/Activate.ps1 b/dbt-env/bin/Activate.ps1
new file mode 100644
index 0000000..2fb3852
--- /dev/null
+++ b/dbt-env/bin/Activate.ps1
@@ -0,0 +1,241 @@
+<#
+.Synopsis
+Activate a Python virtual environment for the current PowerShell session.
+
+.Description
+Pushes the python executable for a virtual environment to the front of the
+$Env:PATH environment variable and sets the prompt to signify that you are
+in a Python virtual environment. Makes use of the command line switches as
+well as the `pyvenv.cfg` file values present in the virtual environment.
+
+.Parameter VenvDir
+Path to the directory that contains the virtual environment to activate. The
+default value for this is the parent of the directory that the Activate.ps1
+script is located within.
+
+.Parameter Prompt
+The prompt prefix to display when this virtual environment is activated. By
+default, this prompt is the name of the virtual environment folder (VenvDir)
+surrounded by parentheses and followed by a single space (ie. '(.venv) ').
+
+.Example
+Activate.ps1
+Activates the Python virtual environment that contains the Activate.ps1 script.
+
+.Example
+Activate.ps1 -Verbose
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and shows extra information about the activation as it executes.
+
+.Example
+Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
+Activates the Python virtual environment located in the specified location.
+
+.Example
+Activate.ps1 -Prompt "MyPython"
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and prefixes the current prompt with the specified string (surrounded in
+parentheses) while the virtual environment is active.
+
+.Notes
+On Windows, it may be required to enable this Activate.ps1 script by setting the
+execution policy for the user. You can do this by issuing the following PowerShell
+command:
+
+PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
+
+For more information on Execution Policies:
+https://go.microsoft.com/fwlink/?LinkID=135170
+
+#>
+Param(
+    [Parameter(Mandatory = $false)]
+    [String]
+    $VenvDir,
+    [Parameter(Mandatory = $false)]
+    [String]
+    $Prompt
+)
+
+<# Function declarations --------------------------------------------------- #>
+
+<#
+.Synopsis
+Remove all shell session elements added by the Activate script, including the
+addition of the virtual environment's Python executable from the beginning of
+the PATH variable.
+
+.Parameter NonDestructive
+If present, do not remove this function from the global namespace for the
+session.
+
+#>
+function global:deactivate ([switch]$NonDestructive) {
+    # Revert to original values
+
+    # The prior prompt:
+    if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
+        Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
+        Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
+    }
+
+    # The prior PYTHONHOME:
+    if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
+        Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
+        Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
+    }
+
+    # The prior PATH:
+    if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
+        Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
+        Remove-Item -Path Env:_OLD_VIRTUAL_PATH
+    }
+
+    # Just remove the VIRTUAL_ENV altogether:
+    if (Test-Path -Path Env:VIRTUAL_ENV) {
+        Remove-Item -Path env:VIRTUAL_ENV
+    }
+
+    # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
+    if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
+        Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
+    }
+
+    # Leave deactivate function in the global namespace if requested:
+    if (-not $NonDestructive) {
+        Remove-Item -Path function:deactivate
+    }
+}
+
+<#
+.Description
+Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
+given folder, and returns them in a map.
+
+For each line in the pyvenv.cfg file, if that line can be parsed into exactly
+two strings separated by `=` (with any amount of whitespace surrounding the =)
+then it is considered a `key = value` line. The left hand string is the key,
+the right hand is the value.
+
+If the value starts with a `'` or a `"` then the first and last character is
+stripped from the value before being captured.
+
+.Parameter ConfigDir
+Path to the directory that contains the `pyvenv.cfg` file.
+#>
+function Get-PyVenvConfig(
+    [String]
+    $ConfigDir
+) {
+    Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
+
+    # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
+    $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
+
+    # An empty map will be returned if no config file is found.
+    $pyvenvConfig = @{ }
+
+    if ($pyvenvConfigPath) {
+
+        Write-Verbose "File exists, parse `key = value` lines"
+        $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
+
+        $pyvenvConfigContent | ForEach-Object {
+            $keyval = $PSItem -split "\s*=\s*", 2
+            if ($keyval[0] -and $keyval[1]) {
+                $val = $keyval[1]
+
+                # Remove extraneous quotations around a string value.
+                if ("'""".Contains($val.Substring(0, 1))) {
+                    $val = $val.Substring(1, $val.Length - 2)
+                }
+
+                $pyvenvConfig[$keyval[0]] = $val
+                Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
+            }
+        }
+    }
+    return $pyvenvConfig
+}
+
+
+<# Begin Activate script --------------------------------------------------- #>
+
+# Determine the containing directory of this script
+$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
+$VenvExecDir = Get-Item -Path $VenvExecPath
+
+Write-Verbose "Activation script is located in path: '$VenvExecPath'"
+Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
+Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
+
+# Set values required in priority: CmdLine, ConfigFile, Default
+# First, get the location of the virtual environment, it might not be
+# VenvExecDir if specified on the command line.
+if ($VenvDir) {
+    Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
+}
+else {
+    Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
+    $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
+    Write-Verbose "VenvDir=$VenvDir"
+}
+
+# Next, read the `pyvenv.cfg` file to determine any required value such
+# as `prompt`.
+$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
+
+# Next, set the prompt from the command line, or the config file, or
+# just use the name of the virtual environment folder.
+if ($Prompt) {
+    Write-Verbose "Prompt specified as argument, using '$Prompt'"
+}
+else {
+    Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
+    if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
+        Write-Verbose "  Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
+        $Prompt = $pyvenvCfg['prompt'];
+    }
+    else {
+        Write-Verbose "  Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
+        Write-Verbose "  Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
+        $Prompt = Split-Path -Path $venvDir -Leaf
+    }
+}
+
+Write-Verbose "Prompt = '$Prompt'"
+Write-Verbose "VenvDir='$VenvDir'"
+
+# Deactivate any currently active virtual environment, but leave the
+# deactivate function in place.
+deactivate -nondestructive
+
+# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
+# that there is an activated venv.
+$env:VIRTUAL_ENV = $VenvDir
+
+if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
+
+    Write-Verbose "Setting prompt to '$Prompt'"
+
+    # Set the prompt to include the env name
+    # Make sure _OLD_VIRTUAL_PROMPT is global
+    function global:_OLD_VIRTUAL_PROMPT { "" }
+    Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
+    New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
+
+    function global:prompt {
+        Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
+        _OLD_VIRTUAL_PROMPT
+    }
+}
+
+# Clear PYTHONHOME
+if (Test-Path -Path Env:PYTHONHOME) {
+    Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
+    Remove-Item -Path Env:PYTHONHOME
+}
+
+# Add the venv to the PATH
+Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
+$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
diff --git a/dbt-env/bin/activate b/dbt-env/bin/activate
new file mode 100644
index 0000000..27cf849
--- /dev/null
+++ b/dbt-env/bin/activate
@@ -0,0 +1,76 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+    # reset old environment variables
+    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
+        PATH="${_OLD_VIRTUAL_PATH:-}"
+        export PATH
+        unset _OLD_VIRTUAL_PATH
+    fi
+    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
+        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
+        export PYTHONHOME
+        unset _OLD_VIRTUAL_PYTHONHOME
+    fi
+
+    # This should detect bash and zsh, which have a hash command that must
+    # be called to get it to forget past commands. Without forgetting
+    # past commands the $PATH changes we made may not be respected
+    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+        hash -r
+    fi
+
+    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
+        PS1="${_OLD_VIRTUAL_PS1:-}"
+        export PS1
+        unset _OLD_VIRTUAL_PS1
+    fi
+
+    unset VIRTUAL_ENV
+    if [ !
"${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV="/home/ubuntu/workspace/dbt-selly/dbt-env" +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/bin:$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + if [ "x(dbt-env) " != x ] ; then + PS1="(dbt-env) ${PS1:-}" + else + if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then + # special case for Aspen magic directories + # see https://aspen.io/ + PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1" + else + PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1" + fi + fi + export PS1 +fi + +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r +fi diff --git a/dbt-env/bin/activate.csh b/dbt-env/bin/activate.csh new file mode 100644 index 0000000..d74f5ba --- /dev/null +++ b/dbt-env/bin/activate.csh @@ -0,0 +1,37 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . +# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV "/home/ubuntu/workspace/dbt-selly/dbt-env" + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/bin:$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + if ("dbt-env" != "") then + set env_name = "dbt-env" + else + if (`basename "VIRTUAL_ENV"` == "__") then + # special case for Aspen magic directories + # see https://aspen.io/ + set env_name = `basename \`dirname "$VIRTUAL_ENV"\`` + else + set env_name = `basename "$VIRTUAL_ENV"` + endif + endif + set prompt = "[$env_name] $prompt" + unset env_name +endif + +alias pydoc python -m pydoc + +rehash diff --git a/dbt-env/bin/activate.fish b/dbt-env/bin/activate.fish new file mode 100644 index 0000000..2fd841f --- /dev/null +++ b/dbt-env/bin/activate.fish @@ -0,0 +1,75 @@ +# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org) +# you cannot run it directly + +function deactivate -d "Exit virtualenv and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + functions -e fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + + set -e VIRTUAL_ENV + if test "$argv[1]" != "nondestructive" + # Self destruct! 
+ functions -e deactivate + end +end + +# unset irrelevant variables +deactivate nondestructive + +set -gx VIRTUAL_ENV "/home/ubuntu/workspace/dbt-selly/dbt-env" + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/bin" $PATH + +# unset PYTHONHOME if set +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # save the current fish_prompt function as the function _old_fish_prompt + functions -c fish_prompt _old_fish_prompt + + # with the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command + set -l old_status $status + + # Prompt override? + if test -n "(dbt-env) " + printf "%s%s" "(dbt-env) " (set_color normal) + else + # ...Otherwise, prepend env + set -l _checkbase (basename "$VIRTUAL_ENV") + if test $_checkbase = "__" + # special case for Aspen magic directories + # see https://aspen.io/ + printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) + else + printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) + end + end + + # Restore the return status of the previous command. + echo "exit $old_status" | . + _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/dbt-env/bin/dbt b/dbt-env/bin/dbt new file mode 100755 index 0000000..d7dfbbf --- /dev/null +++ b/dbt-env/bin/dbt @@ -0,0 +1,8 @@ +#!/home/ubuntu/workspace/dbt-selly/dbt-env/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from dbt.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/dbt-env/bin/easy_install b/dbt-env/bin/easy_install new file mode 100755 index 0000000..a77b39b --- /dev/null +++ b/dbt-env/bin/easy_install @@ -0,0 +1,8 @@ +#!/home/ubuntu/workspace/dbt-selly/dbt-env/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from setuptools.command.easy_install import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/dbt-env/bin/easy_install-3.8 b/dbt-env/bin/easy_install-3.8 new file mode 100755 index 0000000..a77b39b --- /dev/null +++ b/dbt-env/bin/easy_install-3.8 @@ -0,0 +1,8 @@ +#!/home/ubuntu/workspace/dbt-selly/dbt-env/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from setuptools.command.easy_install import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/dbt-env/bin/jsonschema b/dbt-env/bin/jsonschema new file mode 100755 index 0000000..c8cd69e --- /dev/null +++ b/dbt-env/bin/jsonschema @@ -0,0 +1,8 @@ +#!/home/ubuntu/workspace/dbt-selly/dbt-env/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from jsonschema.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/dbt-env/bin/normalizer b/dbt-env/bin/normalizer new file mode 100755 index 0000000..bc8aacd --- /dev/null +++ b/dbt-env/bin/normalizer @@ -0,0 +1,8 @@ +#!/home/ubuntu/workspace/dbt-selly/dbt-env/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from charset_normalizer.cli.normalizer import cli_detect +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + 
sys.exit(cli_detect()) diff --git a/dbt-env/bin/pip b/dbt-env/bin/pip new file mode 100755 index 0000000..b1304b6 --- /dev/null +++ b/dbt-env/bin/pip @@ -0,0 +1,8 @@ +#!/home/ubuntu/workspace/dbt-selly/dbt-env/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/dbt-env/bin/pip3 b/dbt-env/bin/pip3 new file mode 100755 index 0000000..b1304b6 --- /dev/null +++ b/dbt-env/bin/pip3 @@ -0,0 +1,8 @@ +#!/home/ubuntu/workspace/dbt-selly/dbt-env/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/dbt-env/bin/pip3.8 b/dbt-env/bin/pip3.8 new file mode 100755 index 0000000..b1304b6 --- /dev/null +++ b/dbt-env/bin/pip3.8 @@ -0,0 +1,8 @@ +#!/home/ubuntu/workspace/dbt-selly/dbt-env/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/dbt-env/bin/pybabel b/dbt-env/bin/pybabel new file mode 100755 index 0000000..e737015 --- /dev/null +++ b/dbt-env/bin/pybabel @@ -0,0 +1,8 @@ +#!/home/ubuntu/workspace/dbt-selly/dbt-env/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from babel.messages.frontend import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/dbt-env/bin/python b/dbt-env/bin/python new file mode 120000 index 0000000..b8a0adb --- /dev/null +++ b/dbt-env/bin/python @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/dbt-env/bin/python3 b/dbt-env/bin/python3 new file mode 120000 index 0000000..898ccd7 --- /dev/null +++ b/dbt-env/bin/python3 @@ -0,0 +1 @@ +/bin/python3 \ No newline at end of file diff --git a/dbt-env/bin/sqlformat b/dbt-env/bin/sqlformat new file mode 100755 index 0000000..c43c314 --- /dev/null +++ b/dbt-env/bin/sqlformat @@ -0,0 +1,8 @@ +#!/home/ubuntu/workspace/dbt-selly/dbt-env/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from sqlparse.__main__ import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/INSTALLER b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/LICENSE b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/LICENSE new file mode 100644 index 0000000..693e1a1 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/LICENSE @@ -0,0 +1,29 @@ +Copyright (c) 2013-2021 by the Babel Team, see AUTHORS for more information. + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. 
Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + 3. The name of the author may not be used to endorse or promote + products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER +IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/METADATA b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/METADATA new file mode 100644 index 0000000..addcc77 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/METADATA @@ -0,0 +1,31 @@ +Metadata-Version: 2.1 +Name: Babel +Version: 2.9.1 +Summary: Internationalization utilities +Home-page: http://babel.pocoo.org/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +License: BSD +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Requires-Dist: pytz (>=2015.7) + +A collection of tools for internationalizing Python applications. 
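Note on the vendored environment: every console script added under dbt-env/bin hard-codes the interpreter /home/ubuntu/workspace/dbt-selly/dbt-env/bin/python3, and dbt-env/bin/python3 is itself a symlink to /bin/python3, so the checked-in environment only resolves on a machine with that exact layout. A minimal sketch for verifying what the environment actually provides (paths are taken from this diff; the expected Babel version comes from the METADATA shown above, and the __version__ attribute is assumed to be exposed, as recent Babel releases do):

    $ ./dbt-env/bin/python3 --version
    $ ./dbt-env/bin/pip --version
    $ ./dbt-env/bin/python3 -c "import babel; print(babel.__version__)"   # expect 2.9.1 per Babel-2.9.1.dist-info/METADATA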
+ + diff --git a/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/RECORD b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/RECORD new file mode 100644 index 0000000..cbf187a --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/RECORD @@ -0,0 +1,847 @@ +../../../bin/pybabel,sha256=PmAnH8QKVadqLOOOb29xOzMIzzAz0RBNgKYo9W_Jdks,258 +Babel-2.9.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Babel-2.9.1.dist-info/LICENSE,sha256=KMl78z51BuJ3SHvao6abcPFw1q9agnhawKdMhCgELkA,1451 +Babel-2.9.1.dist-info/METADATA,sha256=mlkWT3NrQ45RhCJaK7b0eaREy0YsZfn0c4qGbvW8PWw,1223 +Babel-2.9.1.dist-info/RECORD,, +Babel-2.9.1.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 +Babel-2.9.1.dist-info/entry_points.txt,sha256=dyIkorJhQj3IvTvmMylr1wEzW7vfxTw5RTOWa8zoqh0,764 +Babel-2.9.1.dist-info/top_level.txt,sha256=mQO3vNkqlcYs_xRaL5EpRIy1IRjMp4N9_vdwmiemPXo,6 +babel/__init__.py,sha256=2isYTKcvIoYWhBj1H7DhXFGIVauRTrA5dbeC0I0px8k,714 +babel/__pycache__/__init__.cpython-38.pyc,, +babel/__pycache__/_compat.cpython-38.pyc,, +babel/__pycache__/core.cpython-38.pyc,, +babel/__pycache__/dates.cpython-38.pyc,, +babel/__pycache__/languages.cpython-38.pyc,, +babel/__pycache__/lists.cpython-38.pyc,, +babel/__pycache__/localedata.cpython-38.pyc,, +babel/__pycache__/numbers.cpython-38.pyc,, +babel/__pycache__/plural.cpython-38.pyc,, +babel/__pycache__/support.cpython-38.pyc,, +babel/__pycache__/units.cpython-38.pyc,, +babel/__pycache__/util.cpython-38.pyc,, +babel/_compat.py,sha256=DHx6vQR-LazZlNdeBE7wGTOBv1_1HWbRboKOkX76TiY,1685 +babel/core.py,sha256=5CmHhtC4GV_Nrd6oq63apsEO2nb3ZPdLsA3MYd9lzBI,36907 +babel/dates.py,sha256=cjONUhc89AMhtSpNk8jrtCGrtqwrPuS6rsXOzHZROpM,67709 +babel/global.dat,sha256=P2lM1DjyEx9SMpmXlgu4zNeodlF2Qp2hg9yEANLM3ps,254421 +babel/languages.py,sha256=UmLTj4Nai3kQrwHX6jptehVLeAw-KAdxcmcp2iDlgvI,2743 +babel/lists.py,sha256=yks1P7CNrIoicIptChdpwtDHN9eKVK1sfeDzhLgPVpM,2719 +babel/locale-data/af.dat,sha256=jjfQ33ml9BDllSSkfwcIQzzgohE645gzeehkj1KwTMw,171143 +babel/locale-data/af_NA.dat,sha256=6A8JdTF_O-lRxJZmjI6Lig4Se2aFbqWhdw34Ak4-67g,1407 +babel/locale-data/af_ZA.dat,sha256=ZaMuNgUHOKLle56lUxFOGAAk8QM-_41nuazbqNu5kgg,608 +babel/locale-data/agq.dat,sha256=gSE4vg5l5Yze-10YkHjiBaLVqC5fp0xgSr3nMuZB8lw,17339 +babel/locale-data/agq_CM.dat,sha256=AxBMSZitoHhaa6djMmFbp6iEPM8OUC04G77ZldkuHR8,609 +babel/locale-data/ak.dat,sha256=fk1o-LXIVWd8kObwIG98ltM0H9W68R1_alFWFl8U_2o,15859 +babel/locale-data/ak_GH.dat,sha256=tuOpoiGUJqZ_Dlr8xlp9oIqsFL-YQrBQpDUfG5z3YQk,589 +babel/locale-data/am.dat,sha256=qe-hJ0p-GstMzzmrdamvejKBIVwLBuJRPyfdEdUQLB0,200581 +babel/locale-data/am_ET.dat,sha256=JHNCRHUTq_8VPokwz2uGwCZPGxUR23nJdDIzi_HFIbA,608 +babel/locale-data/ar.dat,sha256=aCbEpK5EzE3tG7TDSCPJVF4srTWPDcTF7GIJjr_Z36o,345779 +babel/locale-data/ar_001.dat,sha256=Y2HUeGZn8VV7DCPv6nXTWmkqTtzGxZxdvXYYiS5mU-o,1680 +babel/locale-data/ar_AE.dat,sha256=UAOwSgM1tC_mFK1Gr8D1t_2HwylT_wv50l7LyDew6BI,1038 +babel/locale-data/ar_BH.dat,sha256=f1D15u-5clcZJvrqsnr2XShOLf6hw5hmR587r7hsKEs,651 +babel/locale-data/ar_DJ.dat,sha256=NWt7QxMxsj1-zAsJ8aXWGT1SwzKayjlbB8TxvY8P-iA,629 +babel/locale-data/ar_DZ.dat,sha256=1KdQGZH5ePC4W3nwrB1F2msNqVVnK0wYwfU-uqYAhXU,1713 +babel/locale-data/ar_EG.dat,sha256=EUDxLyjz-znR56wvAE-_IvJX-2ou_ubS3KVU83SHa5Q,688 +babel/locale-data/ar_EH.dat,sha256=OseNtW_AOGrIkKmxk2ufJXkpwOXsajJ4uyJAwUidL1g,589 +babel/locale-data/ar_ER.dat,sha256=FK8yF8SloHQoOXNpaKlnEMsS9oG0j9dhDhiSwk7euCw,610 
+babel/locale-data/ar_IL.dat,sha256=TzNjvKnJXxJBRXR55WZ9yVxpHnN-2subVXqjeS7CX4I,1195 +babel/locale-data/ar_IQ.dat,sha256=G4JR6ue3O7NNz7Hy2WKKiHg9lqhaHGjZA-UFq8N9TKs,2336 +babel/locale-data/ar_JO.dat,sha256=K9WKy9urKZA7k6MIuprjN2Yu-VgySZVzK46jui9NYnY,2335 +babel/locale-data/ar_KM.dat,sha256=e0ER6Yw-J2DOL7R8JKuTBpU0_46WK0jLBKpsG56UHgk,1161 +babel/locale-data/ar_KW.dat,sha256=hsa41dETEsjs8XPCZkryiCFbPBLrv6aF5ya-ImPAT3g,651 +babel/locale-data/ar_LB.dat,sha256=b4DWnwWjHST9MvfKnu6YaGhP2ghxjpZP5TQy7w_OGtU,2336 +babel/locale-data/ar_LY.dat,sha256=QyNUUgeaUdsvLQtwRLPQed-PRiDez_ekokUwpxT9jRw,1652 +babel/locale-data/ar_MA.dat,sha256=ufYSdBIy96F6zJND6kKZuilh00Lpu-z3txPDPhsgAIk,2007 +babel/locale-data/ar_MR.dat,sha256=wjI22LINnrT_HVHqm1ZmJ9Ndb6bFBKvAFkeyvQ8tBi8,2173 +babel/locale-data/ar_OM.dat,sha256=gsBDBEK35j4M3ualnSdL6tFD4-u3dSbJX1NtS7znQ_E,651 +babel/locale-data/ar_PS.dat,sha256=tTTcOqje-rppgz46-DrkawXtoc9jIQ2vwyA8rRU2uDM,2273 +babel/locale-data/ar_QA.dat,sha256=QI6pzsVjeY_nxkk8iKhy_DUzfBJrZFWoL9uYKM3L9CM,651 +babel/locale-data/ar_SA.dat,sha256=T6x2_p8wVOfcUx9RZMwa5yM1qxVnDoJCxXPzUcsQXuM,30599 +babel/locale-data/ar_SD.dat,sha256=gUbUGVDm-V2a9ViTVska833oAys0b6Au-IJcoMo5leM,651 +babel/locale-data/ar_SO.dat,sha256=fSiIj3lwo0FJU0dXbnFUNLPDvV_jejp86mHcLiGtiJ4,608 +babel/locale-data/ar_SS.dat,sha256=eAaVq86grUAnWNST_u32IrhbryyjRvUo27UG0js6zS8,631 +babel/locale-data/ar_SY.dat,sha256=4ge-CrqdFYfvu3VOdrvd7lRqhGDf2rPfTYkaNBaO4Ks,2335 +babel/locale-data/ar_TD.dat,sha256=yVONNY9PuoKVCLjT0w0qp0NLO4kShNT9j9eM_uKtQYY,589 +babel/locale-data/ar_TN.dat,sha256=J3SolHTZQ9_7HZGluj2jFu4odczgSZlOYU7spA670XY,1651 +babel/locale-data/ar_YE.dat,sha256=tsYZG7eRROrZJyzc_jUbyxbMRT157S3ocGMh7eIiqsI,651 +babel/locale-data/as.dat,sha256=sEo9KNUT_mxPE43dnY2qd5x5PS0YptFdgQB33rCj108,234427 +babel/locale-data/as_IN.dat,sha256=BfzVFooc90N8ufCwaNAuKzefwYLUen0-o4wUafoLYc0,631 +babel/locale-data/asa.dat,sha256=RTNazZw6IqoA6jO4o2v0jUI-x6rKsBHeCEEi9_eWGto,16187 +babel/locale-data/asa_TZ.dat,sha256=aSECPcjC7UM3Bb4Cy16Et-RHtNrhlzGM_v4zNMGsreU,590 +babel/locale-data/ast.dat,sha256=7s8-hdazQXPAA8cPPc2AymyA1vAI2P6MoHOjtcuDXaM,209617 +babel/locale-data/ast_ES.dat,sha256=x4OX34OA3ztvfnETdf49lKg6Gz2q6Lv17Lrf0G4EZ1Y,627 +babel/locale-data/az.dat,sha256=2FAR_E0DODOjfyi82cqli4S484eNi8IWcGOJdvaUMXA,194898 +babel/locale-data/az_Cyrl.dat,sha256=1AcCgkkW-Oz7hXWJVoUqe1BRwdirpoY32Ub53YIUJDQ,38880 +babel/locale-data/az_Cyrl_AZ.dat,sha256=yhmkFuYZLbGzV8Q155t3UrHn-rEqAR9LVmz1sQkKcSI,608 +babel/locale-data/az_Latn.dat,sha256=EkZYNfi4vipZ7wH0cvvd1yvqOJxwCNtYADX0SgJMnAE,2225 +babel/locale-data/az_Latn_AZ.dat,sha256=yhmkFuYZLbGzV8Q155t3UrHn-rEqAR9LVmz1sQkKcSI,608 +babel/locale-data/bas.dat,sha256=SkyEjzAws6O1b_g_mtVKcLFvR_00Z8HK1ubf7X_NRIk,17128 +babel/locale-data/bas_CM.dat,sha256=NlquEbS-6vOPdnyIC4r5KRnEbj4Y0oaO56i3IeH2MmI,609 +babel/locale-data/be.dat,sha256=R4OLJNVpSSFRz34CGEFpWedQosrBKf227FweNgrhQvU,271438 +babel/locale-data/be_BY.dat,sha256=0uCaBRRcbIS46dyiHK85UMi-k3b1b_uspOzBHMos2jM,608 +babel/locale-data/bem.dat,sha256=pqGkFsWL-tAPVJfouEiYifYERtShK-WpoABWUzJP0vA,6534 +babel/locale-data/bem_ZM.dat,sha256=VbAesm4_2wMn2EmWOW7etCOWlleqpkSAjjMOtKBlEcQ,590 +babel/locale-data/bez.dat,sha256=lOnVgx-EVzJ-TL9G6tbvC5qckxtoPwIG9UEQR8RLthY,16977 +babel/locale-data/bez_TZ.dat,sha256=PHbB6bmtC5Wn46cFkmE8mjwuSUTr3gCxXkmFzqpiDRQ,590 +babel/locale-data/bg.dat,sha256=pUVekrzzgzY-8cecdX7rcvQ9-LNteX4V3yzZJyGu_qM,232833 +babel/locale-data/bg_BG.dat,sha256=rHaVmPZT-2n7w37ORQM31-InIsuCBYpv6xWIAc0akkk,626 
+babel/locale-data/bm.dat,sha256=-E8armVyVSCPOqdRGc55rFLFYJcPPkvMUw5fCzRYAj8,15886 +babel/locale-data/bm_ML.dat,sha256=uOp8g5jSZ0dtvQRJ_GmriD3hafnqKpY-D8VhCY0lctk,589 +babel/locale-data/bn.dat,sha256=mLx95u-bxnHGpQ3f07-gBC6RYW658gmJzImHukcigK4,263365 +babel/locale-data/bn_BD.dat,sha256=J15p67iXhfwwTscvbnmaYH1jxiMf8n7kvXoaYiMDgCI,608 +babel/locale-data/bn_IN.dat,sha256=D5aBzoxDMmKe7Bnz25b_MTbqxm3ofuOqA73An-bd0Ic,866 +babel/locale-data/bo.dat,sha256=fG5-yWBu0eudG5gbVC5xJQF-C99BVk0zIgiYr-4Q4e4,22525 +babel/locale-data/bo_CN.dat,sha256=9gsaOvK_bYpPFDNXZ9lOj3Y-jgDTZlD6JHhlPLJ2Te4,608 +babel/locale-data/bo_IN.dat,sha256=EWXLEVA3oaBtJFiwQVxREJbR4fjpyaqQ1P1xVNsMowc,704 +babel/locale-data/br.dat,sha256=S6Rh6pNfXOlOKbqW0dpbTc05PfGfvWgfeL3eBZwk4hs,290897 +babel/locale-data/br_FR.dat,sha256=ioO-nP2x2c5STVkiH-RvhNxxq6giVfDejh4T-FoSjF8,626 +babel/locale-data/brx.dat,sha256=pQJJBfYcX8pxrFlEkfkwwprNf6c6AG3Bkvh-tNpp1YU,124289 +babel/locale-data/brx_IN.dat,sha256=9pIIjxmY4jmGi9LDci2mDhdHADN2oD3U53pBIRNNRc4,632 +babel/locale-data/bs.dat,sha256=wHmiOLJ8MBJyaM2xpKiuIrIDSn3KHu6kz33rYpzDJjQ,239326 +babel/locale-data/bs_Cyrl.dat,sha256=MBDuEafwSU1vWRHHor26JViQTwYsoyR_imtvVlp6N78,213685 +babel/locale-data/bs_Cyrl_BA.dat,sha256=49_6kE_iGTwxlkieBZvxsXvVPAUrQ3hlavg2hMkUzFQ,608 +babel/locale-data/bs_Latn.dat,sha256=jWzwo3YbhqMRkMWFgGPJ1SoBrHBL5OKB6yqVoJw3cKo,1957 +babel/locale-data/bs_Latn_BA.dat,sha256=49_6kE_iGTwxlkieBZvxsXvVPAUrQ3hlavg2hMkUzFQ,608 +babel/locale-data/ca.dat,sha256=mWNiuT8Rt5CV1YnXpjqiZP1HlvftCMz-DbaPLLbzEe4,208790 +babel/locale-data/ca_AD.dat,sha256=c0uaIsSk6XuXizMkzNYZcMzFcC9ItvvDTh9byxpdYV4,626 +babel/locale-data/ca_ES.dat,sha256=xAbDhTv0TIq0M66QrfejjMsbIIthq2CCXMr00ojFzoA,626 +babel/locale-data/ca_ES_VALENCIA.dat,sha256=i6YyGQHkRnxTuJLvjemc0tm1Iqqfm0v0dNZnLk_v7R8,3644 +babel/locale-data/ca_FR.dat,sha256=bm288L4_PW-Wgb5Ty-9jcQns0i9iVy4zQY8PV3IETHE,645 +babel/locale-data/ca_IT.dat,sha256=t21q_Bu_bo2FVP609O53DGAXPbAUGv5yZ-_m71S1bWU,626 +babel/locale-data/ccp.dat,sha256=nyA_rk-dZ4U4ATCfcCEQeAj4Df6igjI4XD7HP0j045w,275655 +babel/locale-data/ccp_BD.dat,sha256=Lns8llZesFHOnqgmiYntJhMufHDSmzHOL-sYczkVxCs,609 +babel/locale-data/ccp_IN.dat,sha256=zRmYBrG8ABv6_4YB7puTs2jsWSBBgqo0yBpvsBVHxZQ,632 +babel/locale-data/ce.dat,sha256=XtFwtrhjaJGFgmegHYgoVSqmZs_5o8S85XspljVTzSo,138778 +babel/locale-data/ce_RU.dat,sha256=rAfBzB42xX9qOXWNu1kMJ278N99ohijJoWO2XAskSkc,626 +babel/locale-data/ceb.dat,sha256=h8hImvVlmbhxs4ufAMKi_C3DExHORd8hrbdd2hQrXx8,103518 +babel/locale-data/ceb_PH.dat,sha256=nQM5vO0qo0sARNlhLIXlzxBCevhReUtYPios-RIcw8c,609 +babel/locale-data/cgg.dat,sha256=VfxK9EKS6ocduf7D3FKKHIXahll75kOwDuT6jrI6d-o,16228 +babel/locale-data/cgg_UG.dat,sha256=87p4kuJQ3R_CLR3yPT-oMPAT9idvdPVElVxpZC5A69s,613 +babel/locale-data/chr.dat,sha256=Nk70q12-m-cVNXDVMBrT1YTRtyS9t949i3oDVo6SyKk,200072 +babel/locale-data/chr_US.dat,sha256=BS6w-X9I6FuixJ2upUTEdFstYRKU7FCok38SRWahHWc,627 +babel/locale-data/ckb.dat,sha256=VfHXHCrZYFQJ0NxHji-px_rJ7gD15bVeVHEQDkjtrIc,41606 +babel/locale-data/ckb_IQ.dat,sha256=8qfAOdEjoncef0lQt6zsXiW0Nn9uY4Fb2jQMpgJSxd0,652 +babel/locale-data/ckb_IR.dat,sha256=cNWVEpmuwNkAyJm8qdtOho5FEIB-UF2dOHSXoDIdWj0,1204 +babel/locale-data/cs.dat,sha256=PzETNpoA6kgHESjeT01YFgtQd31qRnCbrfdWbZ8M7sM,297675 +babel/locale-data/cs_CZ.dat,sha256=IG8kxPUf2L53IheXDOJcGUHV5XPaEvrWlhznlScsZAw,626 +babel/locale-data/cu.dat,sha256=Ak6g9Gsf7fAe3EsC4ORWtTDTqfGjoHanPHd2VU7uqhY,20265 +babel/locale-data/cu_RU.dat,sha256=NHfB25KQf80ELweLH7Qe5OHUIC21BjJZsZvPUZ8wlN8,626 
+babel/locale-data/cy.dat,sha256=wy7mLRchz59usL93zfp-wZ3iyalLaEO6QHwW7c1L2Pg,315824 +babel/locale-data/cy_GB.dat,sha256=ZHRJBfOpeOVn8rfsdEhIF5mY01XFhStFmTVeOmklOAk,626 +babel/locale-data/da.dat,sha256=2sCrVwkFfF2RHbLoon79r_JwKXCqZ0oj1dbpe6hBDSY,199867 +babel/locale-data/da_DK.dat,sha256=OZkvaI7AQcocAo2rtHtZq3d6-P4mzR0MWWOQ8EJXjSo,626 +babel/locale-data/da_GL.dat,sha256=uyqYUJOO4nd3vn92yPsEVm6mYGXWCqSUTG4DtImKc0M,589 +babel/locale-data/dav.dat,sha256=nRUgF6cL7_RI2vbdVR407MYHpJ5ALeZtY4lrGcRlVSc,16271 +babel/locale-data/dav_KE.dat,sha256=FP78PK3asgspsW7QIhV0jrOHFmDG4iZiGjFBvPQF-6o,609 +babel/locale-data/de.dat,sha256=bcoZmi61_dSmFuJN9YA6n_llVQCfbeRtVlkYm8QWpvM,206762 +babel/locale-data/de_AT.dat,sha256=-UMZ_o8YNkI3mlluBgFiSXc6ttnqo_adzxsPffafPDw,2563 +babel/locale-data/de_BE.dat,sha256=cAHuCiE_b6CoIRItE8FQIpr7fR-gSsF66MtRGhU3xyk,626 +babel/locale-data/de_CH.dat,sha256=x8MlguM2lvZtFSB8R-BWNJ2wgy8vjGNRDUPcYO7YGRA,3938 +babel/locale-data/de_DE.dat,sha256=uGWbXF2bkaVQcgna0h59yJQJctOj88om0v2NVVCJvPw,626 +babel/locale-data/de_IT.dat,sha256=y1GIOM88fIEDhz5M_Seiy5sk47wTF2X5nRspsQJRWXc,1619 +babel/locale-data/de_LI.dat,sha256=bggRqr8i9UGeDEA1tqBgRv_lWAGGbb4voVc152sl8js,1321 +babel/locale-data/de_LU.dat,sha256=rMtYa8P0yOF9Asg8mMIiBaeFzg1imA6BOFB_1AV0H9g,1065 +babel/locale-data/dje.dat,sha256=0lCk_5ABMX3iiY0QcB6N6R1JH1B5db3KFxiu-eS2Umw,16192 +babel/locale-data/dje_NE.dat,sha256=YRn5qozp8AlljDhoIVdw1KfDjTwJuQSR1O6zcfh6Z54,590 +babel/locale-data/dsb.dat,sha256=GVknAiKWopJw22chqjBGEvoq7GN201Sw64vnh-kqPeA,179644 +babel/locale-data/dsb_DE.dat,sha256=UEem7x_iq5-p3Tpzjtja0ONC1qtHvG2MCsQMx8KvTmo,627 +babel/locale-data/dua.dat,sha256=GSB_OYiEmeJM7B9QAdgr2IV06ehJUpVdy-ffO5j9SDo,5355 +babel/locale-data/dua_CM.dat,sha256=fOhCBrVImDeIe_5GZrgYvvdY70mSsENjcTEzSld5FYs,609 +babel/locale-data/dyo.dat,sha256=KJhfyqJEf8mhA9wV3xbD0VOcUm2JT0LQ8xJtHgqMVPM,10541 +babel/locale-data/dyo_SN.dat,sha256=coM-rzzSZhBVSz9gchqxki0QnX4zhOD-Lk1nt6sPCxE,590 +babel/locale-data/dz.dat,sha256=DG9YNNdDUCJkqwNdcstRrO8BgU9zqe1PTD4Em3dSOak,89908 +babel/locale-data/dz_BT.dat,sha256=__K3dx97Ynlcn2gcV40OXMmd-G1YchvffwSv4old74k,608 +babel/locale-data/ebu.dat,sha256=j6vvoYG_wPxra74yw8ldqZp_oD25AJFbUvysJkUtClY,16243 +babel/locale-data/ebu_KE.dat,sha256=8py7zy73R_NJUahomYGMJF7at0vD_TMjbQy8AT7OgO8,609 +babel/locale-data/ee.dat,sha256=EG6FqmRvtVrJdzN3p8QxU35xqBQzvhB2lbaLz-NKN-U,142527 +babel/locale-data/ee_GH.dat,sha256=B_aqQhHklFcblsk2BJmIZgwpAX17_pZENzO1Xoo1LpU,589 +babel/locale-data/ee_TG.dat,sha256=MG63DGqUVcZLwabu4o_BezzGovDY8g30wKLHkrbEU8o,1141 +babel/locale-data/el.dat,sha256=M-uy_xte0SOlEQtwdOMM92GLjpvx09PgWGO3bYsBMI0,244752 +babel/locale-data/el_CY.dat,sha256=07pyrGXJzOGhfxhX--19MAVJlDzuBGe1AfsnkOiHBeA,608 +babel/locale-data/el_GR.dat,sha256=WqPmX-_0xIHS9A8kTDJbVCJXhqr52NEiW0CyLdGm6yo,626 +babel/locale-data/en.dat,sha256=WFZgaqLZ9ZYKZdU8A5K2ZMepmtkc8-ve3vC0VA1dzqM,194682 +babel/locale-data/en_001.dat,sha256=Wsq2--QwmhqwjXx3jpyXs4SNShvNFtnQfH74-9RmU5M,27063 +babel/locale-data/en_150.dat,sha256=rbNu79tGtS6MsRHloWtHrmdPuZCmwWRThAQ8M7C_5sU,1765 +babel/locale-data/en_AE.dat,sha256=EhFOQNZpti_Z7dPcFMbQbuFA5r4LwCwGKigJgowZ5Is,4111 +babel/locale-data/en_AG.dat,sha256=AKSzhMbGCUU-P3phlu6HupABdKSEys-M96wBGbNwgBc,627 +babel/locale-data/en_AI.dat,sha256=6ihO9cDogLMcSHgmWHgJDTg0OCTfEC4fVftMAbv3-zo,1179 +babel/locale-data/en_AS.dat,sha256=XkSzLYRfcXzQ5S8kaps6cLh8lVLcEtpS7v5GVYJ0oxA,608 +babel/locale-data/en_AT.dat,sha256=jKCuj9au7zT3QiZd6mRgjRutjPTd3rW7k_aSCopd3lU,1200 +babel/locale-data/en_AU.dat,sha256=GgFVbFesUa_9FbZYqtHtgW3fDyUvQCxBMO1KPcH3wl0,23385 
+babel/locale-data/en_BB.dat,sha256=qYW0ov3TXlP5U49PsDwBoGuNqydFwIDvAzb9g_PU4BQ,608 +babel/locale-data/en_BE.dat,sha256=sLyuId-scSREYGWTIRr-jmU6cuaSkgQbNc_jd5ij3Rc,1493 +babel/locale-data/en_BI.dat,sha256=FPbgGK-AHgRjz__MFHCzzOUIGhAmhx662sqL-CZWyXk,1162 +babel/locale-data/en_BM.dat,sha256=xkpTQIMT4L5hhYXe9ANWZkxgyS2H6wsMdG1qtacDJH8,627 +babel/locale-data/en_BS.dat,sha256=zHTFOmXUvagki_mtGdj0lht3V82InuI8-9cTPhvQ8UA,811 +babel/locale-data/en_BW.dat,sha256=R88jGKkwpex2QzxjAVOj-tu5HreJy6DlO3ZqqcD7sKw,2771 +babel/locale-data/en_BZ.dat,sha256=0gOIW5WU8lL15-_mWj9Xg0uDjtQiQHsWAl34U2sN3lI,2950 +babel/locale-data/en_CA.dat,sha256=QiSNMW4tsb8v4mvvKSJ0fjYx9wKvFyOnqZv6G3ih9xQ,25449 +babel/locale-data/en_CC.dat,sha256=n1D9R3tb0Kbc3Umv3cS_2IfDv6gXJDkauUmLRQ85stk,1160 +babel/locale-data/en_CH.dat,sha256=hU3jzJbghrq6qpPAcRCsN3_UzYw2XNJCvnC0KScoOMk,1100 +babel/locale-data/en_CK.dat,sha256=Te6ZAYFDOE6v9domOnOXCxMrCpBSt4y8i6xxX8zplZM,1160 +babel/locale-data/en_CM.dat,sha256=vogNfPzFKhhNfFR3TzWWE-P8wKdkeGujj--c757Iy-M,1410 +babel/locale-data/en_CX.dat,sha256=CkFYiGUksAivYRZlmH-sfvwQ8HUgJ5Iqx1LD0UXbcCg,1160 +babel/locale-data/en_CY.dat,sha256=Vl4Oa2RNsxbDmyq7cJaN9UbtK3KSvSpBQZ3ouieaDXc,608 +babel/locale-data/en_DE.dat,sha256=yW9FX1tuZCAANFhM095trr-p-k39XN6VwN5wYMEDS6A,952 +babel/locale-data/en_DG.dat,sha256=db0CYrcJAtcIl9MFAGUuzVUfUS4x-I0ppd4nNP_igGs,1141 +babel/locale-data/en_DK.dat,sha256=YFMkEHF_fAzY7w7CsIFe7eDhRuw31mfH-oLlniAkqhc,2350 +babel/locale-data/en_DM.dat,sha256=d1muNBxiDWlN78TXhEJiANVe_0UxZGjJ96NoIzmPQH0,627 +babel/locale-data/en_ER.dat,sha256=ZCZFGjPQWwc95TKwRCihywYh1yj-TaRmnmJqj26-dqE,860 +babel/locale-data/en_FI.dat,sha256=D8c6wssToJyrWdn61FXBz0h35HW3BZC_iGA0_MF4npA,2282 +babel/locale-data/en_FJ.dat,sha256=4dotX9Otp56WxZz2vqeHMl-FVcBKiT8BlqGktxoWKFM,645 +babel/locale-data/en_FK.dat,sha256=VC91FNLl2QBhA5qkxhodFR6TWRlIus-UHJ4dzJNtebk,1183 +babel/locale-data/en_FM.dat,sha256=kwQ5xP5wBGKlO-Mb2iqvsStkTC2JfMc46gBnlFTiI3M,589 +babel/locale-data/en_GB.dat,sha256=1rovCqWVd8rHtaNZiudq9tj9j1ZSdQpj85HcXMXRCNM,25821 +babel/locale-data/en_GD.dat,sha256=MbV-yK2BeGhQSajMlcL9TvEWWmch0zGbfBFcKj5eBzs,608 +babel/locale-data/en_GG.dat,sha256=qW--gYp58HZQysSEvEs2e-PzujR6nxJw76OKtml_R9g,1246 +babel/locale-data/en_GH.dat,sha256=WfV3h6HkT4PhJVSqNqIcrYOA-eWWywIMUCk6GX2DGkc,862 +babel/locale-data/en_GI.dat,sha256=Pcy6njgp-RTvil94c1DoD8gPEJfDqJxSa6Ap3hz4hLQ,1201 +babel/locale-data/en_GM.dat,sha256=1-9El_7BaSwYnE2NvFGES1XWvUOA3h31FOz48SyWhGw,858 +babel/locale-data/en_GU.dat,sha256=Msmmrz-7nw0Mm50eo0Df8mPsJFRBBbbDtCgjNN8K4Do,688 +babel/locale-data/en_GY.dat,sha256=U8FjrK6RICyHeGXrXRziQ-x5MYJv-67aiNk870rRU3U,667 +babel/locale-data/en_HK.dat,sha256=-or_oYC7dQt4KP_QTE8085lo6lNr9Sxd15MGoKBpMOg,2008 +babel/locale-data/en_IE.dat,sha256=N2NXGWi7XPi2TcOw4lC3Q7YVGESIhF3LoxX2WPor8e4,2043 +babel/locale-data/en_IL.dat,sha256=KmYFTJlvN4yPA1h81uPevqnBJbEMySjB6BwcgvtARKY,1397 +babel/locale-data/en_IM.dat,sha256=PYHp1IMNWDea7-pfiNMq0lMSwlw6jrd1W8PijFVp_-c,1246 +babel/locale-data/en_IN.dat,sha256=coaqyAiTCcprH5Un1jvvgWZCylIgGkVoKVWryR-RtiU,3767 +babel/locale-data/en_IO.dat,sha256=KdFDEZ-ATOiBnVWa2p-QJovncI2Voygei_GH8ole4vM,1141 +babel/locale-data/en_JE.dat,sha256=7Cmj2eNyJOMB2YSUAkna9CGLn9cHuAMoQedkWzTJtZ8,1246 +babel/locale-data/en_JM.dat,sha256=4EK16ZNGL65bz9xtQrucMOxSITM8wVwqhdKEG04s154,1599 +babel/locale-data/en_KE.dat,sha256=KFU4dptHt5iSqN9zSPGv5_HkLkN8tcXdMoZZlSaK3OE,1431 +babel/locale-data/en_KI.dat,sha256=O13XFTeRaltrxnCjO4PA2NvM_dw-ye0xpJZeEnF0UAI,608 
+babel/locale-data/en_KN.dat,sha256=G3xPxRBLVKbANLsnKR7b_rFGy6gYCC4YLzRI5gT8i4Y,608 +babel/locale-data/en_KY.dat,sha256=TV5QaWQr32vWrQWyBUskv1f3oq2TJVYqQPGEGJyh5lQ,792 +babel/locale-data/en_LC.dat,sha256=C_KqmNUBK_9-uE4_kYbUwNIOBeO9VJ9fpLOcaWwWDjM,608 +babel/locale-data/en_LR.dat,sha256=768u6chWYyWCDWWpSWkm8CFsSskf4e4-aayUrLDppFI,858 +babel/locale-data/en_LS.dat,sha256=K_G56Rgw6R7d6pMU5_KfwOAUvJk_hwdZe9GqU3NNfCI,858 +babel/locale-data/en_MG.dat,sha256=HA_OJPZu4eEyZP07J0MtTm8dAz4c5cXns2d5EjLArwc,1411 +babel/locale-data/en_MH.dat,sha256=lWjdFRFi5Cw9EBkpRifvGV93XeQke2MudP1tv7MXV6I,1341 +babel/locale-data/en_MO.dat,sha256=oal8-XgFkxo3F77Z5wKqP16pocMuo77-Ac9v6doamvY,803 +babel/locale-data/en_MP.dat,sha256=4ES9-ArZ1PI5CbAQ3LLDb8sLM6LVHhAnX6KgAz0VSoQ,1322 +babel/locale-data/en_MS.dat,sha256=HMWyIEh0-s1zUWHDC6XnKM8inpIDA36BSA_bN2spR0w,1160 +babel/locale-data/en_MT.dat,sha256=4LAEeC9KAdPb17kLcMe_p6U1bBuiySoOGQpdVphMNv0,1927 +babel/locale-data/en_MU.dat,sha256=Bq5ftR9nbRzJOacnOFQ7qluvifHCFAU81X4SsWWMHVM,1411 +babel/locale-data/en_MW.dat,sha256=1-D7UAzwljnuUlgPKs2HNP0ubNQ9HGEKgIUdpkxwc4Y,859 +babel/locale-data/en_MY.dat,sha256=koZHcYmaYIjYT6OANOlHdFfPuF-RmF5iyjVbkbtb1pg,689 +babel/locale-data/en_NA.dat,sha256=384TeL01HX5bShF-vJgFfy5m65jRjC_SfITw9K852BI,858 +babel/locale-data/en_NF.dat,sha256=rEdi2JCWTfieeeS2G0OCnKMblzSSc6NsoiEg0-JO-3c,1160 +babel/locale-data/en_NG.dat,sha256=KnyRrrpnzpV97teswZmDpq3eolhm_geKohcIrryBZEA,1412 +babel/locale-data/en_NL.dat,sha256=nWoZ94n1gMwXFwzvMAFraJLYNjiXkZMx5vzAM029PRI,1097 +babel/locale-data/en_NR.dat,sha256=SVPL_wXvdKEYdWqUYhkRrgwOMc-f0YP1Uaznxqv4NP4,1160 +babel/locale-data/en_NU.dat,sha256=0cg8LGLPIboWlBVxtmd4c10rEjqPvUUz2tyyi7kUksY,1160 +babel/locale-data/en_NZ.dat,sha256=x-zR1SoiPc9fpAvtblZLyzHZXrW-Np6ydJdqyXQrPnQ,2234 +babel/locale-data/en_PG.dat,sha256=Cq0NrSqmEseFhwCIo6noFliCfKnx3wAOenRn3VkED_Y,608 +babel/locale-data/en_PH.dat,sha256=W7ezPkuNS7JqciskJ3G25Ic0SbHZTmmmmenv0a39NgI,629 +babel/locale-data/en_PK.dat,sha256=0AD-WPif80PqSYV67bzVTvlj_h074ham3WETqh3NoDk,1959 +babel/locale-data/en_PN.dat,sha256=zxKpA6olu6dMYMtZpzaq35mSoMKh6AttZc6wSprPtxM,1160 +babel/locale-data/en_PR.dat,sha256=GbsBjcumdJ8L-rzRYD1cXU2uzmIUYHQX4chTgkJw02Q,608 +babel/locale-data/en_PW.dat,sha256=LH6T7NOgz_1iwCBhMne8ZH2hjPh-JHL2MOY3xktPyho,773 +babel/locale-data/en_RW.dat,sha256=RdqSwsBE4s_LG92OJvPPTxK3BoC-qzltS8PFWM2xogQ,1411 +babel/locale-data/en_SB.dat,sha256=cW7aw5w5Wos4_O_aRX1Xj4IXuEIq7eQpF50vnCEHKjw,608 +babel/locale-data/en_SC.dat,sha256=uVgNnrmBfJL7Jlv_wpfDtbdk5yFjpsTeyCTBKhiSmig,1161 +babel/locale-data/en_SD.dat,sha256=5JQaakikEVwEQ0YJm2AdZ2Zgg44mDPfl3ZKEatwChCI,901 +babel/locale-data/en_SE.dat,sha256=gpyVY45RU4nB4BKswErRq6UvnyFU6mQqzxzEBR0tDfQ,1427 +babel/locale-data/en_SG.dat,sha256=Yky7Bpen7HGGG4IpzKRb3folOUIfQ9nh0YiPqj2THv8,2017 +babel/locale-data/en_SH.dat,sha256=slAAeHdppQ8lHrVY8Pat5VFVwP-imbX9RbClTrFJkbE,1183 +babel/locale-data/en_SI.dat,sha256=TJey3lYp_l99RHcPcbxFkJ1u4tyP0Yb7TcY-JYAdehw,968 +babel/locale-data/en_SL.dat,sha256=daqNUYE7AgFpgo8PjtGYKm1YKqW8WgVQE4ViUnvh-_g,859 +babel/locale-data/en_SS.dat,sha256=2e53Ov3bAoJClI2KxnghO_q68wsvBYm5y69cFpvZpGM,881 +babel/locale-data/en_SX.dat,sha256=Ldsv42f1G7kgTFRcGdbyL_RnXUj2_whkfivt9xCS9oQ,1163 +babel/locale-data/en_SZ.dat,sha256=qidm3zACYSmI6TgdvkJ-URbDk7BdHg1JmENh3jFUsm8,858 +babel/locale-data/en_TC.dat,sha256=BqCmasVKStg1Mia681u6ZqtglR5TxC0QgCD2j1XqAwM,589 +babel/locale-data/en_TK.dat,sha256=KmgyiXJLdOlswDEemXHOLEuZb5de5ZM0YmdDxNnAHog,1160 +babel/locale-data/en_TO.dat,sha256=wOZyazP1wVbQhv9Y_H_NDHb0ldHsMPdZPN8O-O1c5ZE,609 
+babel/locale-data/en_TT.dat,sha256=UwplYXlbOs4hLPyDovdYDv6yz8KGChSZ6ufJ5htjfQo,627 +babel/locale-data/en_TV.dat,sha256=Z_vPwZ0HZB4aDDibrbzesmYFzgKRqk22hS2ZKqhq3_E,1160 +babel/locale-data/en_TZ.dat,sha256=syqDYFfemhw375IYXAM-F03S4gxAe7EbaJcYVbjt834,1412 +babel/locale-data/en_UG.dat,sha256=yczBoonl1zmDZpeNyAHAKvQ_OArvhP7AWVLOtKv9Jkg,1435 +babel/locale-data/en_UM.dat,sha256=QpePixV3RZ9RiqrYuz49bkN6Xeg-UG2y0Po_yaLbSOQ,626 +babel/locale-data/en_US.dat,sha256=JU7XRlKyRBNlDNbGDabuIBWP_tfuFahFBUNDL16cE8I,626 +babel/locale-data/en_US_POSIX.dat,sha256=22rJAk0xIO2lY6r_nfKPBUtruaYmgtYeTjq9nz4RN0g,1204 +babel/locale-data/en_VC.dat,sha256=udrNbZKYSjk5vRAlIMd_k7255C5GUjBmQjOVm_GSshk,608 +babel/locale-data/en_VG.dat,sha256=_MFcYRjyNuFMVYGYjGBQMC3C2_IZjcSXGLxNFUt15z4,589 +babel/locale-data/en_VI.dat,sha256=ptodXPLBh9jA4d91bhhHarqlw8t0BuiigzyLPxAX3Vw,626 +babel/locale-data/en_VU.dat,sha256=OKNpgxA_p9zCpKhmDA-r2qAUHQmeEY-ITSvz6Hqlp8U,609 +babel/locale-data/en_WS.dat,sha256=_qLMqdSB0O18FukP062U6fiMk7uFaNUp-u8qjJXB3SU,629 +babel/locale-data/en_ZA.dat,sha256=MYqHr53tM6t70LgMH7_wdkltFYoUWLEG-u9T8PK8bbU,3269 +babel/locale-data/en_ZM.dat,sha256=Zphtlz3AeWJ4xZaeDtX29_xuSN-aHrmFX8-dg4j9ePs,858 +babel/locale-data/en_ZW.dat,sha256=cgE3J3Wk8y0K6uki7a377LFVPQFmDFPCCn5dngZQxMI,3199 +babel/locale-data/eo.dat,sha256=8RBQNNdJRH9CcD1IPXEl4pl9Z8U2OBio9vmJg8Z6f2E,40689 +babel/locale-data/eo_001.dat,sha256=Mlc_rMI2tpO1RL6ZJcuMDcO5ou3NuFOxZ16TomDvwrs,823 +babel/locale-data/es.dat,sha256=Xj1wyewmRlgXf3qDEsDQpFFAovauJMn7tL9VLlOoOFg,199638 +babel/locale-data/es_419.dat,sha256=dtalpa188kCJUE70v4IywI7YGXtRKWv61kCBoyEd0gQ,23299 +babel/locale-data/es_AR.dat,sha256=fHbtdPaXDRqdHGGB7NNOn_e2aNSX7OZQ1BiysR9PZo0,8828 +babel/locale-data/es_BO.dat,sha256=Ks0Vs-LYS9eSPux9FeV6HZnhdWDgSzPB8RWuDLglzkM,1888 +babel/locale-data/es_BR.dat,sha256=CQBretr3RreqaOiCUo5-cmR3EIlSUiQVJMd_fhPktIw,628 +babel/locale-data/es_BZ.dat,sha256=whnvRybQayDyZH7OFfVQHMR3aHYLZhpDU2gY-j_PbYo,627 +babel/locale-data/es_CL.dat,sha256=HPawCg0SjTK7JIBSvE0JJY7gxeirRz5DdkcXUgY9eH8,5128 +babel/locale-data/es_CO.dat,sha256=B7TtJd_eH_7KNClQGfsFbFNwwLmL72t305awqTOMIK8,8665 +babel/locale-data/es_CR.dat,sha256=Vh36FfFDvJeTDnmjYlOTd2ruW-5I2Wkv34BqALvJHyc,1719 +babel/locale-data/es_CU.dat,sha256=hjouuq52dJG7NJR3ypl_nAHpBIOYgV88AYWnR_i0378,629 +babel/locale-data/es_DO.dat,sha256=q_tRNydo2SG6AsDS0CJb7FPQvPHuIYi6hjJpBbGmWk4,4020 +babel/locale-data/es_EA.dat,sha256=vYi5_0kkB6gIqCXTdtHnD755-gzO2y_Gd-lAy8ahpMU,589 +babel/locale-data/es_EC.dat,sha256=VBBKHoCazqVryt0jzkeu2RadhDAityjxxkSXfsBYIq8,3294 +babel/locale-data/es_ES.dat,sha256=VCWxAz9w1EHetI06vwya_gkk7vDXGGSXJumViKKb4Ww,626 +babel/locale-data/es_GQ.dat,sha256=v1NY_AGhDyOAq1jIFJIQ9FWosDL_RHNvIIufvaEYdWM,872 +babel/locale-data/es_GT.dat,sha256=VjbMTOJSdaBbIhZfbeLcF5KTNtCDNN5Q-Y8mYIrICTM,4896 +babel/locale-data/es_HN.dat,sha256=_cSRFPkE1DrBlZFmQFO415ymztO6xQo7zZxHaixq9CU,3476 +babel/locale-data/es_IC.dat,sha256=ap4rHt9hZKqaWzlIZXgs92V3EWTmkCULMIY6Hf91T3k,589 +babel/locale-data/es_MX.dat,sha256=QGf8oLoSurcTEe41nR0B6mFVMQyfqEXttL-tLL0dsPg,24708 +babel/locale-data/es_NI.dat,sha256=2C0CW32DPFe5bwQFn0-yGXup7iVdYncLI7ByU-yGSVY,1653 +babel/locale-data/es_PA.dat,sha256=TvrAZFMO0Auv5Et41DcMNQKASC3ZrGCalqbxyqVEReA,3884 +babel/locale-data/es_PE.dat,sha256=dYZY6-VdFeUIYLzm9wyWQ7fpDc2_prQh_Nm5UsOoOC8,4848 +babel/locale-data/es_PH.dat,sha256=az00tcCjyCZo-DL4wkdCwFGwuue6M3KwlrCvT6UJCI4,1205 +babel/locale-data/es_PR.dat,sha256=D-Qc8ImING-Wu5LZpSvgfb5tTgKdqtLE6B-IyvJnXck,3798 
+babel/locale-data/es_PY.dat,sha256=UCgFkmRSNtsFZAZ1QbauP8511OKtbj0OAOdkJJ-0N3M,5319 +babel/locale-data/es_SV.dat,sha256=DZ5zGJp04BA99FVtOQIpROt0Mei6Esua2Nw0m48rViI,1402 +babel/locale-data/es_US.dat,sha256=8_RWSFk2Vj3v0dHc7iX6Ge20ZWX-M7_wYxH1BtTfv9c,25343 +babel/locale-data/es_UY.dat,sha256=-Jw0bOdLf3LWpBykOF6dZ0nIy0I4alpSH5oTo1AXAWM,2540 +babel/locale-data/es_VE.dat,sha256=DuMDeGoUvTE_qUGtOUnS7eIpbImIP5x2YeR5KxFRmnA,3752 +babel/locale-data/et.dat,sha256=pGdCv2OqghUpVp6vOMfldspch91G98i6kIm6eji2FKE,200192 +babel/locale-data/et_EE.dat,sha256=xpoZTzSn447SsQklqYnKUUL27DkZuNsRpQxTn87Tmlc,626 +babel/locale-data/eu.dat,sha256=gq-LZDCqk_bt-R29APS4rVYl-K_D3d5sIb5nhjzdZnE,176850 +babel/locale-data/eu_ES.dat,sha256=xvTtCQUjEml-wPLAZR4kU_hhXZ-j5oIE5MO577tCdFg,626 +babel/locale-data/ewo.dat,sha256=kZQmUNXQ8t64WMf8lNY9_i03WVdbmJA92_wAMK8p1jc,17595 +babel/locale-data/ewo_CM.dat,sha256=NirWcwhJ0SSdkIex0afT1IDuwyfJBBcfX7CGnJNIOAQ,609 +babel/locale-data/fa.dat,sha256=qX7byceg8QHSgGdOQuJlOPvw1G7B1V7Ge5d_cieSwnI,217724 +babel/locale-data/fa_AF.dat,sha256=donKj3WGTXbFV1upg-DOI-7fZgrLoUZjL9mQrj58NDo,11247 +babel/locale-data/fa_IR.dat,sha256=ZnDClkeVc1IPiEGa63b7BhvnhklUhgige3sTjeEK6mU,651 +babel/locale-data/ff.dat,sha256=fFDGP7W7WqzDGCEe5jMrIWsD9ODm2h_4iZobPVhcRRc,16084 +babel/locale-data/ff_Adlm.dat,sha256=ZuHELP4Qnng1f83guWn-4zQ2QsIvvJJfQh_yT-VDgv8,174659 +babel/locale-data/ff_Adlm_BF.dat,sha256=7b8PdK1LA0V-odNH3iwuNDDR1vQYQhXvHp-5bB5ZwLc,610 +babel/locale-data/ff_Adlm_CM.dat,sha256=32kFf1KDw82I2SKzaVB4P8dBfmkw_mmG6fYAuThS99g,629 +babel/locale-data/ff_Adlm_GH.dat,sha256=90UIh5AUwO8eqvY2d7MzCmPwJ2XNFfAMfHqqEr-QZio,1209 +babel/locale-data/ff_Adlm_GM.dat,sha256=NqlOMO7KDanw-Z-dnG4jSX1SUESFQrNG1MVCMutQs0w,1205 +babel/locale-data/ff_Adlm_GN.dat,sha256=VAK9og8mz1WVXD9RnykNOiKkC69nIF_gGgkwKensMX0,589 +babel/locale-data/ff_Adlm_GW.dat,sha256=_BVL7y6irTvBSRhVMmICwv4uNllP5nxIqPGpU5i9sCs,610 +babel/locale-data/ff_Adlm_LR.dat,sha256=UYThYdKlKFe4XX52x7WO2xmkiHA9heV9E3K2Mg0RP6o,1205 +babel/locale-data/ff_Adlm_MR.dat,sha256=anYa5CmU8BiiYRz2nL12UDCwLJIsUIbZqajTFSYmvd8,1206 +babel/locale-data/ff_Adlm_NE.dat,sha256=EmZR_KWVdW7b5TxkRsivHLoYKwHU029v-R0k7zieWQs,610 +babel/locale-data/ff_Adlm_NG.dat,sha256=OLPxRiTM2InmMtH2gCRJRhbmwhawtdSR--6001ckT5k,631 +babel/locale-data/ff_Adlm_SL.dat,sha256=DE0siwIkfETd-Pd5nvDRWK5F3_55bSYpJFBZfYKHquE,1206 +babel/locale-data/ff_Adlm_SN.dat,sha256=9USLkiIrnIVKikQHcPqyF0bwUqc4OiAm9vDisk9boyA,610 +babel/locale-data/ff_Latn.dat,sha256=byAYS1KDI0NXZf0r5uEtjiW_dvH3S7nniynJX6jR30w,839 +babel/locale-data/ff_Latn_BF.dat,sha256=NNCmS9PhIhnRzZlE6Zn7Sjt560T_zY0oAGvs-wkJQjo,589 +babel/locale-data/ff_Latn_CM.dat,sha256=-vhCSM41OmNfJwpW2ArLlVSOBAmbxI4qkdGrOuG7jxw,608 +babel/locale-data/ff_Latn_GH.dat,sha256=rAV1pDUEzSqrxYJi7c_bB9eVweCRaIzw1qGZkBw9HB4,1188 +babel/locale-data/ff_Latn_GM.dat,sha256=_xfcaqyGrO0UdRy19lGMjo3X_jk_MGCMSvgQjfyL0p4,1184 +babel/locale-data/ff_Latn_GN.dat,sha256=cdoXME19UJX1H7WkepaiJEiUql4zOY7h5uO8GKQoZ_4,609 +babel/locale-data/ff_Latn_GW.dat,sha256=lIrg2frFHCvM8guhuR5OmGU9Np_yUTIcORKQITZSFYs,589 +babel/locale-data/ff_Latn_LR.dat,sha256=Lg_t_ANpKoAXV_TeCGXpGFZOnmMmYIdS9GfylT6Y6h8,1184 +babel/locale-data/ff_Latn_MR.dat,sha256=qhCUin06n7Ow841YyfQ_aYJnDB1gCwbh3A-pozV-75s,1185 +babel/locale-data/ff_Latn_NE.dat,sha256=vYqMUR9LCykf0H_rTE_oeS9fYK7t-ajKpbK1IpF9-Cs,589 +babel/locale-data/ff_Latn_NG.dat,sha256=NAMpFyNWE3dSzIwJTRBwH2SUhoJlu_AzinAtCByfyJA,610 +babel/locale-data/ff_Latn_SL.dat,sha256=AMk0G4KKcrT9Yh1902tRDC8JBwXHRDqRgOcw6W8Ne4o,1185 
+babel/locale-data/ff_Latn_SN.dat,sha256=Idf9JfDjAfWlKouD9gYB6m6_qg9P9xUIRAgMQ5O1-Lg,589 +babel/locale-data/fi.dat,sha256=mQkPOSvYmGw1T0FjSrtJ0vgGOZYYTV9Ixz8oTvfFoPQ,227195 +babel/locale-data/fi_FI.dat,sha256=CqHzlsNe9s14gZWMaUn9abl4FmLAZknXRX1lP5Pdrc4,626 +babel/locale-data/fil.dat,sha256=xZADFjoyLySL2eXDQqLicMAFPoaRfdLo6KX-DGk69hY,179405 +babel/locale-data/fil_PH.dat,sha256=U-tLxLn0lDKKpUqEQPLTumOuJOIYi80HvPnUk2SsObY,609 +babel/locale-data/fo.dat,sha256=c3gHwJxgpzyEo5QTL_VGVE14Ledk5QJdkedX9a2LIIk,165657 +babel/locale-data/fo_DK.dat,sha256=V7Kq03gQkns2EDztSyIiRLr80EtZsGZnmoYPsChW__w,647 +babel/locale-data/fo_FO.dat,sha256=WZJB7n6uQpGsPNWVXqP851OGZd5RmfSMbQ-s_C-00tQ,626 +babel/locale-data/fr.dat,sha256=PuqjeWNTBK-3hLx2xl3WMzDRjOsHSvq3s9pi9OVRYPk,225342 +babel/locale-data/fr_BE.dat,sha256=QW5XQfg_MDjZQazIkAJTp19ZeRRYLEwTP2Q6ix3W3C0,1254 +babel/locale-data/fr_BF.dat,sha256=gVdej2f-lrFMVDjQNCma1-odtbYzezlFw8YR16ZuMas,589 +babel/locale-data/fr_BI.dat,sha256=hCmnp8GizMpXqkYPSnRFUMb1ltL9xT1aIHUOt8uzR5s,610 +babel/locale-data/fr_BJ.dat,sha256=CZgeRt0F7zcnCbuwouXBxTg1Ht6M4UpS1JYNgdnGZOk,589 +babel/locale-data/fr_BL.dat,sha256=mN3e240_oM-t97i3jZ33ptBFR3XJFtq2519QXQskeDw,589 +babel/locale-data/fr_CA.dat,sha256=hUfSdAwXBvfuhc3dFBmHdmgllVG8dmH62gsD5jYn80U,65858 +babel/locale-data/fr_CD.dat,sha256=KqBJ-62QyyMteiPxaihTP7AZAbjuoZothXcV7YBv9cA,1106 +babel/locale-data/fr_CF.dat,sha256=zElh_1aCiSapkL83eytl19hKu0R6lrE3xmb_a2lf_cM,589 +babel/locale-data/fr_CG.dat,sha256=XqZxi9XodrhYnQqagiCv8zc51Aqv8S_E3AKgZxPO6QA,589 +babel/locale-data/fr_CH.dat,sha256=MHWdTsv0DY1KNjVQCSHuEOa1PQ_Mtbm0b3mThVRXkzg,2970 +babel/locale-data/fr_CI.dat,sha256=PULA-d30mWwNN2Zsg_58tbUde8ljeA7ej6_bQSvyngM,589 +babel/locale-data/fr_CM.dat,sha256=sA21F_q-PMRWez-fKSoxNz8yyaTtxBg78qRRukmbMR0,2083 +babel/locale-data/fr_DJ.dat,sha256=retYu_VKqCAemcxMH2ieVzYXLQDnM6-FkxJLfRksBMg,1205 +babel/locale-data/fr_DZ.dat,sha256=lT9Bd_4OA78pLByWz-ub9pRT4pfKANUsAG9Gk2NhTtU,1247 +babel/locale-data/fr_FR.dat,sha256=oucSQVTi8gnvWNQ07WJPnYh1YQRxbYR6afhy8Wd2YgI,626 +babel/locale-data/fr_GA.dat,sha256=hjGGeVpmPCTxP7LEMxE_iUUS-uSfRnY3unJ-800ilGk,589 +babel/locale-data/fr_GF.dat,sha256=FwIBhmnYvA-VIAgc_n9JLiENGTZMXFANqyFFpeNjNYc,692 +babel/locale-data/fr_GN.dat,sha256=BIJ_Gl1Yp5fVwQNISO_f4o5U3vgOWPKB-4UWMjp_SMw,609 +babel/locale-data/fr_GP.dat,sha256=7IjXNU_xYD73C0EaZ2IWMxZ8kzIFRLWgrE07-xHFB8s,626 +babel/locale-data/fr_GQ.dat,sha256=zMNFOsgv_5JFDvnqB6AovINlxEdr_QYBGw2Rl6LsdGM,589 +babel/locale-data/fr_HT.dat,sha256=FIn4weL4_b_phmnIIQFDdwhqY3UFaLITSGKAZh_sIJw,1873 +babel/locale-data/fr_KM.dat,sha256=SaUGzyArQSXa_pwsb9hw0_fs1TjcZq7o2CFW1mAfvQk,609 +babel/locale-data/fr_LU.dat,sha256=OZ6lHBdT7fHpiMgMaIEJhNB4ohZVMZRQiJQT98n2gLE,687 +babel/locale-data/fr_MA.dat,sha256=7-FeaIFIZGfbunjR-M-lTr0WkTGljmC354Iadk3_S-I,1277 +babel/locale-data/fr_MC.dat,sha256=se81gvlIKgey2DgfCYayuXiwV0Wykw-QM4otwduegCQ,626 +babel/locale-data/fr_MF.dat,sha256=asOP0aSNk9sx2Jx3R5QigjvOQmgEzRP0atpznWTZEII,589 +babel/locale-data/fr_MG.dat,sha256=GL58hdcr_DZePturTSmv-8WScEg60WajuKuijeBs5hQ,609 +babel/locale-data/fr_ML.dat,sha256=x_UkTI0saDvoYCiYdNF9CWoyc1VvMAQFBw8APjCEL78,1126 +babel/locale-data/fr_MQ.dat,sha256=v3tmYxQ45BkuVen2er9vMsxTceL196E98XYPsGWKXTM,626 +babel/locale-data/fr_MR.dat,sha256=0qY-kxib1lGMlxGNcyjEZwj7BV8Da3CZq8DGyiRbcrM,1185 +babel/locale-data/fr_MU.dat,sha256=UVc2y4RDe6Jy6_48f043AXBUqWLvktTjnj2osTeAJO0,609 +babel/locale-data/fr_NC.dat,sha256=Liy4q5CQx43KEep69njgxfUENHEJRfXaZJlsK_UcIbw,589 +babel/locale-data/fr_NE.dat,sha256=beqoAaHiYhcvUeABHOBD_9cJQ01DQzo5nbAZb5JZb88,589 
+babel/locale-data/fr_PF.dat,sha256=mSlv8dzrvNyo9XfC8yczKIKGaEPGTIpf71Oi1IH_f78,589 +babel/locale-data/fr_PM.dat,sha256=yukgtaee7ROFoFHM7dUO9CSYlCmipm1i5ZEIsbvvP0o,589 +babel/locale-data/fr_RE.dat,sha256=IN73Uw9cZdifS4rK4SfWiecLcAX0R2F4j1aV_DusCUQ,1142 +babel/locale-data/fr_RW.dat,sha256=b6cY_0EAjkJjlLAjdYr7o8vkdzB0sQbIgwgWsFlaO1M,609 +babel/locale-data/fr_SC.dat,sha256=ejzZtxh5_XDx1B0oZFQx7oDpuuxAsmNp1mYxGtnRs34,609 +babel/locale-data/fr_SN.dat,sha256=AzbXwg-QV0b_-M08HrFFVoi0CvQSW9tK-rNHQ-N-9d0,1277 +babel/locale-data/fr_SY.dat,sha256=nc1VVE9d1H23_Kppl-NQLCOyQHhUc3hVpFvIHKFwZ1Q,1247 +babel/locale-data/fr_TD.dat,sha256=sUvrTreI6gzMwlLxXI-uLQmmO3bSFSa7zgdrxQqH1_w,1165 +babel/locale-data/fr_TG.dat,sha256=GWo6BaOsi8-YZfuWYIEDMyodmtktbkK54R3fNEwvsNY,589 +babel/locale-data/fr_TN.dat,sha256=0OMk3LdpZvhd0ZFbYRh2ioViNakNsBe8DQIbGnkCeQo,1185 +babel/locale-data/fr_VU.dat,sha256=yly1_BvKQwMXPMXZSc-ZRbAZ04qjatwiNPrglkTge_M,1185 +babel/locale-data/fr_WF.dat,sha256=_LOp9Pd-uVPzUqTOLmdKAVmqdhJSc9TQxN-q0AvFZMA,589 +babel/locale-data/fr_YT.dat,sha256=M-OawlEGCEqzxMFehDb_3Ofb76HA6nwXEBFBKEi4qMw,589 +babel/locale-data/fur.dat,sha256=kBxHiHga0PnTMJY38zYNV29C6Y1hw44T2qazrXRDBQE,35027 +babel/locale-data/fur_IT.dat,sha256=-jYgvCMGoV9qmo1lNuPbfFhw2RiwM9-XrMAaisqk3ck,627 +babel/locale-data/fy.dat,sha256=cNAuTsUGZ03hww_0oz20axWibvGvrSc_qtjNwX6qT30,110221 +babel/locale-data/fy_NL.dat,sha256=6kDdfEWgZuA0BeiB9BM8qhtPVPqUB4zBkpQBmEulOpU,626 +babel/locale-data/ga.dat,sha256=BWeLf2Ib-AwjhXVaP8Tj1Np23kyKwpOBuNTOIjueIAg,316865 +babel/locale-data/ga_GB.dat,sha256=DVKT5Ub0mvXWADwJua35XUCwxPrRj8olUR-xGv9x07A,626 +babel/locale-data/ga_IE.dat,sha256=cCW_n5mlSTNu6JzFj5bZMiJbEXFiOHH8BrCB4MnAi5Y,626 +babel/locale-data/gd.dat,sha256=11GEkGyo2nBzMBCd7vfzaif9d_xgR8KIEvJmPrAZ_Lw,302010 +babel/locale-data/gd_GB.dat,sha256=6VHHQkNfDnsLrshZ5VM0AvbuOmAkVWFf6KIBK6dXxhk,626 +babel/locale-data/gl.dat,sha256=VdJBH1gVlN8SYNhZOZ8-h6svbEAMapF4HVOtgqFXQ44,176367 +babel/locale-data/gl_ES.dat,sha256=taQiFoK4jse9XR19xt95xT_BXnzftMPMJgKk_ZIh1xg,626 +babel/locale-data/gsw.dat,sha256=BZgIpYmp-FUl1rjUf0EG_pUQERu1aQb9kvqIOG_LD-k,108049 +babel/locale-data/gsw_CH.dat,sha256=oNDsu5FZKmaMx0q94MsggWUHYobgGv6lNNwqRbm6mes,627 +babel/locale-data/gsw_FR.dat,sha256=4rf2w5Q1j3qezQ5Jf1-0qkY3K2Yk-ArQBBFCciWNfiU,627 +babel/locale-data/gsw_LI.dat,sha256=4aFdXjXWs0W3IE-cqe06PKFdB1l1MbQre8NY2sm3lWM,627 +babel/locale-data/gu.dat,sha256=Bn86qx8SplDJcC2V5jLIOeAavwh7EWkDZEAKz2nY92Y,246649 +babel/locale-data/gu_IN.dat,sha256=4mup-pKABihWun3Ougbz8HiGoXtPDPdAqAKMBma7Gvg,631 +babel/locale-data/guz.dat,sha256=bzP0aY17_xjZaiVCMTACE8ThMfl9HcD-ICcMFLjN7B8,16016 +babel/locale-data/guz_KE.dat,sha256=S-xrYnojrbkIXv_AMMaLR-0Et8CtW6Ty3FDkikLYqS0,609 +babel/locale-data/gv.dat,sha256=lWBEcD66RJTHCg0L0uUEk3twf9iApg0VHCUvCmUGNjw,4146 +babel/locale-data/gv_IM.dat,sha256=32eF8Qm1U-NzDs6CsC1a5G40zereETci2vy066Dq9m8,607 +babel/locale-data/ha.dat,sha256=YZzU6hOiWG1eQjBfS8xKzG8FLFYroDh0wMIQnp1xnV8,78016 +babel/locale-data/ha_GH.dat,sha256=WAVJ2CogGv-7o3KHNfd9YIThisXhPcKkOCStIkCJYOo,1188 +babel/locale-data/ha_NE.dat,sha256=4LR2guLb66Q9ptMpxPP1o9RoCbMUvWh0xdbfOtOg92Y,1248 +babel/locale-data/ha_NG.dat,sha256=7ArPguvlMmsMd_KuhyAy5K0PTuvdzDgbCrmY5c3hyKk,589 +babel/locale-data/haw.dat,sha256=daT0jGNbH-c8-byXW8Ev8k7x6FxWzWzM4kwyGowZrgY,16106 +babel/locale-data/haw_US.dat,sha256=0npKxik41EG4w134GeOKBCqQiyn4W_4RU9Xol9An9vI,627 +babel/locale-data/he.dat,sha256=B0Bypmykcuj6aTi6m1EOjpPExBZ_wQewVW3AYFRYaiY,271008 +babel/locale-data/he_IL.dat,sha256=tv1zu6LbE2qFr5KkxegGM6sl5YjsHeOohznihTWqul4,651 
+babel/locale-data/hi.dat,sha256=R8gru5lLOJotWFqfLJm7RLkQ9F_k82wCa4KWk0XRnXo,244568 +babel/locale-data/hi_IN.dat,sha256=laF8SEGi7j2jIwdbvx9jumoN_ZSlsmM2qct5Qpdzy8g,631 +babel/locale-data/hr.dat,sha256=_Bnj62FHBemVoPsm6v2atZTwG_4RgIvKCN-UOxpyMj4,247517 +babel/locale-data/hr_BA.dat,sha256=cb0WcMYeSmL0iyadbeYGokENF3IdPgPG8Ir3pt2QWhI,1161 +babel/locale-data/hr_HR.dat,sha256=FBTFejY7tzVjCu1TCX5PtUIXfhL2iN0DukagJf7dS6E,608 +babel/locale-data/hsb.dat,sha256=7JQcT8q6VIK3yOG2Jx41ksSJj2G_oqcrZH3UXHS0jvo,179178 +babel/locale-data/hsb_DE.dat,sha256=mJpofwRoSQoD4uMNdi0xcLP0qyq0IysbE2VkXNYniug,627 +babel/locale-data/hu.dat,sha256=0Gvk8Gl8uJtKpPezPTWfR3XX2wkDKIRQsPuTTAYcahg,192963 +babel/locale-data/hu_HU.dat,sha256=KndrzgNop55GlUso27Dfpf6rW3RA7YhQibwBFTzufk4,626 +babel/locale-data/hy.dat,sha256=cx7Ap0eLvzTGjVGdK30x_8TLZ5QLsFgbCnbqrYu4s8I,214313 +babel/locale-data/hy_AM.dat,sha256=4HM865GP-TvuBp3XjB41rgc1QuXLLITSt3skVtB0QHA,608 +babel/locale-data/ia.dat,sha256=kIIuryj5kmGxdG2NWtOdPOezBSO3NQGbCVI7dl1Bndk,112822 +babel/locale-data/ia_001.dat,sha256=onWUTi-JeTzCyFGYj9VWyvYFnE-0w59rnsDeP3WZfCY,914 +babel/locale-data/id.dat,sha256=_jTwyiD3gWQWvaU0Re3tJ6nQI0BEmeQzc6AL2PTBZXg,163265 +babel/locale-data/id_ID.dat,sha256=Q10Fz7etpCtqpGmEOzhzIAluNUViuPV6fJ8Ibp4TMDw,608 +babel/locale-data/ig.dat,sha256=-aYZuS7od7VW9iec3iOwyWRKdHtdxhISdJIIkeLLgB8,52036 +babel/locale-data/ig_NG.dat,sha256=qvl7ZtJLFgRf6Jbys3rPutuwKL0nImrLIyI2KxDJNMY,589 +babel/locale-data/ii.dat,sha256=crBpYsdPGu1KoOSjCWHUy67ssD13AfvmK261h-VRMhk,12588 +babel/locale-data/ii_CN.dat,sha256=NptEx8Tehw_ZnxhDc7qbpFsDxF_2Dxjx403D2fg2kKA,608 +babel/locale-data/is.dat,sha256=ltT4Iwckwve2QSs3Z0MzA1x4KXXK66cFUOcRnlLDG_g,188046 +babel/locale-data/is_IS.dat,sha256=vkGTcivdc7UMK2uv1QCKnJkoGh1cFUyK877xmLKNWfQ,626 +babel/locale-data/it.dat,sha256=7JMCTHE2jVXjGEkfgmmFdwlQbKCwRo1PfTecsagqfVg,188460 +babel/locale-data/it_CH.dat,sha256=aBS-dqAT_jYf8DFLYCQJ4A4MfLi9u_rAekJ5fU8eTLs,2776 +babel/locale-data/it_IT.dat,sha256=EPq-ZuC7fD9sVQNf7K7dGReI7pvxix55SFteRJlEheo,626 +babel/locale-data/it_SM.dat,sha256=gpwEWv5kVspUSBElJAoge82X76LK1lsgxShdk_BdxwY,626 +babel/locale-data/it_VA.dat,sha256=drgEDlw3k2saTMXzEz5-BkkHgCCdnXVQ-aiCHUMYAUk,626 +babel/locale-data/ja.dat,sha256=m8MJqRkZ-VMiUTOqi0zTmQe7CMDT5pT_Vj8Ek5nbyG0,200287 +babel/locale-data/ja_JP.dat,sha256=fqV-tzCjVKeIhB1TH9R2vsz_kpEwD2KSdYUMOL2zVQY,608 +babel/locale-data/jgo.dat,sha256=eEw-8GRRYT4SB36fdEfaLJX76RKeEhJomcYa7wl3WvA,12628 +babel/locale-data/jgo_CM.dat,sha256=4EKGSIDydn6xezIwTpMhOZFnheOhhMWfwadqa9rRRpg,609 +babel/locale-data/jmc.dat,sha256=q2xbrOlR1h4dKkAb5f-FFeG1yY1zAO6vZ2PqN9tgTLs,16068 +babel/locale-data/jmc_TZ.dat,sha256=bpvlP-1bAXEnvIRsPxFHel5X-8eLxF8dUOlkJctN78k,590 +babel/locale-data/jv.dat,sha256=LaOeOox55UzDPRCZzpKBtS91S8visMnBQiwFsKvcuYE,129461 +babel/locale-data/jv_ID.dat,sha256=H5wi4GL8eID9c2QUxpz6qpFn5ORgdpE2mjYxdkozJiQ,608 +babel/locale-data/ka.dat,sha256=isStz_8QWtbWBJidLjlvr1wqrm3DJHYSLoeZkpzJAy4,260705 +babel/locale-data/ka_GE.dat,sha256=4G3wWIQOIZM5Z8r1Px0d4DvTOMwbR4Ubvq4expe_gY0,608 +babel/locale-data/kab.dat,sha256=-rQbUS5U939yjCgkQd9aA4PFolU4h3cZnP1brEqMLRA,135263 +babel/locale-data/kab_DZ.dat,sha256=KbrMqfLO_TlWJlCQVmK5IjqCwVppZUryATx2Rq5YTaE,652 +babel/locale-data/kam.dat,sha256=lr-AmdEqLfe0Asb3wwfFYGjzMMqobCr9-ZyDb8Cg-YQ,16175 +babel/locale-data/kam_KE.dat,sha256=vfQX-o-otm5WDwv2qrHY1lesX-AQ9cX_2HW-nO820AM,609 +babel/locale-data/kde.dat,sha256=iV4F5CHX9rYrt0R_iioeDnTv21snL0t8hZ95QNcGMBE,16475 +babel/locale-data/kde_TZ.dat,sha256=RdJ-ovdj55xBfaOc5mE41fqsNdLN_7oSIOcyq7-aApQ,590 
+babel/locale-data/kea.dat,sha256=Wf1pAdN0uMYvlUpOiQdLgMvvS03rkpkdDwU-P8XzvWA,85757 +babel/locale-data/kea_CV.dat,sha256=7lbONkE-y9_doiZJxjyGYM50Yz41JNGwy7dV53vvhEs,590 +babel/locale-data/khq.dat,sha256=jmjjC1r6-Cxur_0Uh_WXk92MBgw4fR41PzwlhVnr3qc,15939 +babel/locale-data/khq_ML.dat,sha256=CbrIcKwmxw5THhW-nZ-sPFZjsfgpHZUDl-hhWH2toDQ,590 +babel/locale-data/ki.dat,sha256=abQFXbMy9mdR96-Nebj3PL4_frRUGAEcsk3_-DQ0TYM,16123 +babel/locale-data/ki_KE.dat,sha256=-fcVNnw6zrxr3Bw7mi-vpkzP0v4v9t2hkj5ZEuG_5Zw,608 +babel/locale-data/kk.dat,sha256=V4lqjiOozZQv3Zwukg1UlrXhsuDx5u2aA_k--eEsVAM,210242 +babel/locale-data/kk_KZ.dat,sha256=DhjfmmZRmr-w8q98Mqr8h_v2hosJAjTtTFo53E3QGQY,608 +babel/locale-data/kkj.dat,sha256=466fsXBV_YSBSPGZx0z2FYvYrU8Ql_QIIZjXfNGv4KM,4888 +babel/locale-data/kkj_CM.dat,sha256=KY8LZH7ZqifH7BTxFz4ylu4d1LAAxMAD8W-a0gYsjZo,609 +babel/locale-data/kl.dat,sha256=gHhmdk-As6QszK0V703UfVG9XcDZo-zA-AKfX3YboWs,58200 +babel/locale-data/kl_GL.dat,sha256=RojeiBiofKUHvk9f_ITt44uxy9gGhxz-sVy6sBn4Zcg,589 +babel/locale-data/kln.dat,sha256=RvTuZqeDDB_BKquwaUuXRjL89K0d8kcpj5rvD8oaIpc,18003 +babel/locale-data/kln_KE.dat,sha256=RydM7LQVL9u4kqeFACUbNwf4M8vQQhP0qkKM_oL2oGM,609 +babel/locale-data/km.dat,sha256=wMIgFZSCkKBp1QkkhFPVEgAof5LuMFxz0jEzIWRdMQY,202267 +babel/locale-data/km_KH.dat,sha256=xVjkyVxn_hrWpEp6JOzSZKxZFDZ_3UQjRQsVPvBy0CM,608 +babel/locale-data/kn.dat,sha256=ZnwTJdH60MjTwgKjjgLR_vEUIHFRreimD4eMv1rAA7A,263580 +babel/locale-data/kn_IN.dat,sha256=Kzg5Bayf-ACbA0Nun8rTGYcbi5r2PmghFxlbyQIiKV8,631 +babel/locale-data/ko.dat,sha256=5disPatHT9WJa6UOmZXl6iuL0PWplfHyQMjlNzFqki8,175158 +babel/locale-data/ko_KP.dat,sha256=2Z1Rbojo6MHJGQdInFOjfZHbpRdwvZfM-FU_08oFGng,789 +babel/locale-data/ko_KR.dat,sha256=y-3hO1aBM61NXG2L4o41zAPNlUvfA3RE14q_8SdarcM,608 +babel/locale-data/kok.dat,sha256=Ot2FE1ar2aHOhP6L29tqcxHSYoTc46KoQffIDHGkNQM,182871 +babel/locale-data/kok_IN.dat,sha256=e5cBMZY7kU_9H69kTorB93cUe7xFASM-2hUfGY3A-ec,632 +babel/locale-data/ks.dat,sha256=0DI2B4YMdtXgQZMQKTKL7FrEH8ntI8NMZuK0wMP35x4,102570 +babel/locale-data/ks_Arab.dat,sha256=kfXVFhHX_NrcA7tZO6yYXym5wsDvpjma53urJPVeGJg,823 +babel/locale-data/ks_Arab_IN.dat,sha256=_fjJMmIU0OJMR66po8yq9ByOzZZ3tomRqVt6RM4BJFw,631 +babel/locale-data/ksb.dat,sha256=SxZYlQUncKnA8ljqLb7k84GzFoGzaV1mhC9b8y03mEs,16043 +babel/locale-data/ksb_TZ.dat,sha256=2Wzocj-_i0wMu7oi3-8ynqirioha6nG9PPI1-5EMbnY,590 +babel/locale-data/ksf.dat,sha256=fE1YgWMw1RpxNYkJyieh2Cwm4NCE9FQbM9P4he1cviw,16515 +babel/locale-data/ksf_CM.dat,sha256=1CFxJU8F4NverN5cPa9xvVI-we8x7wbZgP3UfXVnL0o,609 +babel/locale-data/ksh.dat,sha256=6ttxGMPLhYNiad_7avsUIF2B7rFuLLxKw3Bv3nB1rsI,88937 +babel/locale-data/ksh_DE.dat,sha256=vTdZCAi8MAGFb4wE_zjnNTREloPZHNGc38eXQ0uwtPE,627 +babel/locale-data/ku.dat,sha256=s-_gQdcuJLmxKWYuUftIvv4TSjViNOeLQWiQp4Q38zk,28771 +babel/locale-data/ku_TR.dat,sha256=EsO9U5XN30PqoR6A-7q72uLJ6An2BMuGbrh6sYrZoFU,608 +babel/locale-data/kw.dat,sha256=A-aMyXGee3cHKH3Vrd1rP11oz8Mzvrm05B6DB_c7E9s,7242 +babel/locale-data/kw_GB.dat,sha256=nvzq6ku288buMZIPacs8IGm5rrD0LdzYFZQxBe9a_jw,626 +babel/locale-data/ky.dat,sha256=5xH7IcsPFiBp80H8j6Se0o2imNAELzqORSyNHqGrm3k,202058 +babel/locale-data/ky_KG.dat,sha256=I9WGUgCDXB09jIycutdV0trTbhHFKvbM8Cr4_eTvHmk,608 +babel/locale-data/lag.dat,sha256=r2Ms6wyrXqZ_u1eGycN4gIHvoDmq3LHisiMzAVvoLP4,17141 +babel/locale-data/lag_TZ.dat,sha256=gB3iS13Egu-2TLYBYwM2xbve9HxMHCQwgoxELuIuxTI,590 +babel/locale-data/lb.dat,sha256=4jKUN0pdyWrEMhXiWo8iLEKsx_NQppk41f_21Asp7IA,164597 +babel/locale-data/lb_LU.dat,sha256=oksbNG3kkuxhnrv6goNlYcBj0Oejpr9-ptrmWHF6EW4,626 
+babel/locale-data/lg.dat,sha256=vV3xAg1xWrx9LbkPFxvjeZgQ0w26fXZFPhf6lwBDM00,16434 +babel/locale-data/lg_UG.dat,sha256=1HeWA7IllosO0gp2T_nevwD0f2CSAx9lMfQYK-JpafA,612 +babel/locale-data/lkt.dat,sha256=NdUx996UtMv1MiuP3iqWSl8T5QDZV8cfVGxpfgzc41s,12766 +babel/locale-data/lkt_US.dat,sha256=KoED03rqibBCmXUUHPR12gR0xc9ox489Wxavkf3hJl4,627 +babel/locale-data/ln.dat,sha256=hXo1sqtv_f1zrGVd9DAyMJPChHy4oxSAxN2yM8WOeiU,25891 +babel/locale-data/ln_AO.dat,sha256=Df8fip-BEQDkkdNenJMZYVEwNEFpJU3e7TBDFk1GCFw,609 +babel/locale-data/ln_CD.dat,sha256=cya8q___2YF--XiQKag0Z2j67S_3MXvGMkqjjvao8Js,589 +babel/locale-data/ln_CF.dat,sha256=GI1_WE8tFKny1XT5c7Vdr1xpgTYtA20qoi-LbfXcNmA,589 +babel/locale-data/ln_CG.dat,sha256=gR1qJakj6apKRWJfeXchgBbbmOYiZJs-sWBiOVC4giI,589 +babel/locale-data/lo.dat,sha256=wFheTwOpB9qwdWoEHy0qyUpEPf8zMbKw8tvCc2zpLK8,220343 +babel/locale-data/lo_LA.dat,sha256=Le3oiQxOdMmSpTmRje_U-gYUmJIgsRuIrll3v9Gvn9U,608 +babel/locale-data/lrc.dat,sha256=FxK5wDraPcV82EnRK7L8Lc6pOpd9DlOz93hqbBlvp4Q,19021 +babel/locale-data/lrc_IQ.dat,sha256=IulRzfQLGQNFx-m2GA1E-996l3AmXam6Kb2yxEU7Pzs,1228 +babel/locale-data/lrc_IR.dat,sha256=Xwe6srYtOSobQ5_3dgtaFUJPpdCzPfqdMdFw5u3h7iE,652 +babel/locale-data/lt.dat,sha256=eQLIpUDmVcOlDL3RWzcvVlUdsaquYCLxuxbJ8MjTWEA,284211 +babel/locale-data/lt_LT.dat,sha256=xpcc0-LW9jbhEMcG4_YJ_1Zh8gjMuO_pFWRRl71WVUI,626 +babel/locale-data/lu.dat,sha256=39-6RP_EimDpNfh7e9Rfh24evWwSEnaSoXTxU4Xus0g,15888 +babel/locale-data/lu_CD.dat,sha256=NLQ9XNdydBzo-3BIWY7FrESS7yLG1BFyU8wsX_QclOw,589 +babel/locale-data/luo.dat,sha256=dmxwIb10UL-N2Gx5ymbXU55MakSpMFCGneBfBu5k6f4,15885 +babel/locale-data/luo_KE.dat,sha256=NEKNpjQX9ul04z2QZGvlKaYQEpG7qpLnz0fraetUD2w,609 +babel/locale-data/luy.dat,sha256=zIdy-rfqrSkpjJVvZEWb5Eh-kvFHltSyVR4Rr_d-jHk,15859 +babel/locale-data/luy_KE.dat,sha256=3uCT5nrrTWh8Qcd2-x0vAMbsqdBfLbVNllWdTBPXVk0,609 +babel/locale-data/lv.dat,sha256=AxTgcKRkQs4L0G18VPpwyKW2f0pgFl1aB3e0AAIBVDM,214988 +babel/locale-data/lv_LV.dat,sha256=DVQGeBkn2bfyW4oBFSk-FG5YDgYoPrcy4P1i2msqbKw,608 +babel/locale-data/mai.dat,sha256=TGGQKi7YoVCYMbQcgUkYvWVeHeqPda-wkHv1tdh9ElY,14710 +babel/locale-data/mai_IN.dat,sha256=lZ93VuH0KWuLZAwFYQOlGidLcq19FwAh5FcTkbmWHIQ,632 +babel/locale-data/mas.dat,sha256=VAYNJuZSIHxhdo-sjuiIa8lLzVywzE6Pt8WnKsmkjmw,17297 +babel/locale-data/mas_KE.dat,sha256=H37wvJs04-E8bNfKwZQhqeDajPo6SvpdVwuo3PyJ1AY,609 +babel/locale-data/mas_TZ.dat,sha256=9JwDj_IR1VKGVgWxstev2mrAXxIodBYOH4UDM6Q3q1o,611 +babel/locale-data/mer.dat,sha256=OLCqftXJD2C30EM8YVyuLO6qPnMb1PSzlwTYmmpc6wM,16088 +babel/locale-data/mer_KE.dat,sha256=99Q8eh6kJb1wkSHx_J0OroOC7WZ23Gp5IGAFc-NBQpc,609 +babel/locale-data/mfe.dat,sha256=W4iO82UorOHb30ajoWPGnDjhjIWUIIr6HEbcWAlg8yo,15117 +babel/locale-data/mfe_MU.dat,sha256=TFnNwSIFyDkJUAVbw4Y2RyGH5uG4nvbKg8uNubPWXpA,590 +babel/locale-data/mg.dat,sha256=N388wocbzVzKtwYxGmUZRQ_Xfqdl_8pfhN2EVs191kU,23550 +babel/locale-data/mg_MG.dat,sha256=cwl6h3aaMkDtvF9Ei8qvlnO4N1FTSI_VOEVa54g3eHs,589 +babel/locale-data/mgh.dat,sha256=rXDO2pNXJITPslfwWEAO_8eTuME2YyKX7EBsBAIVBb8,10479 +babel/locale-data/mgh_MZ.dat,sha256=uJyr7jkKxWqYOJ7CmhjAs8AKMOz_cWlojWjFXRj_jPc,609 +babel/locale-data/mgo.dat,sha256=C512Q78AJ4yDUul2Wf0liRWjQh-GNv3XDENtmeCq58M,8206 +babel/locale-data/mgo_CM.dat,sha256=T5kZuEQ7hzI616QF05Grrv-RZb59B86medbIafdhrtU,609 +babel/locale-data/mi.dat,sha256=Nr8tmNRL5KurZuLLC9lojHHjScK_s2V-gCNZINEYmtY,20727 +babel/locale-data/mi_NZ.dat,sha256=7o2jTlC9sR5dX2mxLI4qjVIr897Xd5keBTxs7a-_DYU,608 +babel/locale-data/mk.dat,sha256=GIgzd7ykCmMFSFI0nEWeXGuQQYf-0CWvfI8cnLSgAQk,234490 
+babel/locale-data/mk_MK.dat,sha256=DtPgHruh_KrDRllM_vDipwCsbMWzk2bua0lfFsstTus,608 +babel/locale-data/ml.dat,sha256=YRd4HGpa2b-k6G8EETwwIc_-klPwXhu5MIc9odvPtqg,285214 +babel/locale-data/ml_IN.dat,sha256=_vPZnTZA2VgZoDi31tfu-tR4uRzfj-cFFVMmcB8XZgI,631 +babel/locale-data/mn.dat,sha256=Iw1P5tKBu0KAl2JgsbS72Vq99Ye-t9_c5d2oZcoD3Bg,202642 +babel/locale-data/mn_MN.dat,sha256=gne5zuFemBThyeemcmnNvI751-rsRwCrCBUQ6uvuK4c,608 +babel/locale-data/mni.dat,sha256=GvLzWZHUo046TkH_05BSwDHFtkPTeSJVEaLG_nvWhUE,14622 +babel/locale-data/mni_Beng.dat,sha256=NiCHewI8Yl4k7ylwMAZVB7mtk6TZboLcvZl22n9uG9M,666 +babel/locale-data/mni_Beng_IN.dat,sha256=Lx0qjRdIKxv05uZIp8e9W-Onti_kqLE-bZiu4yEgCOU,632 +babel/locale-data/mr.dat,sha256=CCiBBjuwlhte5k72NPs7U_fpEqRMcI5PPOZDoO6BedM,246797 +babel/locale-data/mr_IN.dat,sha256=RkYd5HIWUeP8yK3zFCRWvxvu3FzaK0ta7H1HTQQMUdY,631 +babel/locale-data/ms.dat,sha256=KFfk5lBMcmtQcWGOBAyTMqEo4Ghf5S8wg3f1MbZt-W8,152238 +babel/locale-data/ms_BN.dat,sha256=CecewZU-8cYw5zaaIteTNbPOE7FqS_IGj6emNKzy3TM,1257 +babel/locale-data/ms_ID.dat,sha256=WcRbSIc4fsZMR56iv_9PE24j_rQLUCYPwU7NdEDO7gw,3272 +babel/locale-data/ms_MY.dat,sha256=8RsVjifl5WL8sXV_aNTdgjqquxny2SsSBXc4KqJuqlY,608 +babel/locale-data/ms_SG.dat,sha256=_nfWkz663QdJKVxb2AQQQUt_Hhl9bMk7hIQcqpVSPbU,627 +babel/locale-data/mt.dat,sha256=tnI8T6enmDliuHbzDfAq7Z97GDcdQ1MOfprVbqHVbRQ,78243 +babel/locale-data/mt_MT.dat,sha256=2vQihLOtB1H_Ca6e9ZvGCgZ73gVlB_RBsBMFOw22cDs,608 +babel/locale-data/mua.dat,sha256=k5P4AS4jugmg9aNeL2jUK6xoTlRW1DvPz2XMtumjFT0,16547 +babel/locale-data/mua_CM.dat,sha256=fQm0rv5p23ity5H_pu8jhbHVdaWDpqITuEPRev9q44I,609 +babel/locale-data/my.dat,sha256=M0Y81MUWIrt-aQVRHA04KZ73wPMvhd7DihayFm9pVmY,210242 +babel/locale-data/my_MM.dat,sha256=9DsxnFuIB4ImQJmOXpJ0Ujt1zMSUin-KV_a7R-irE-w,608 +babel/locale-data/mzn.dat,sha256=op9NwjJ4msszz0JO60nljP2t8pWh1jidWAAr9yONCrE,65499 +babel/locale-data/mzn_IR.dat,sha256=nw-iEKlN_b_C0VzjCY1SCElyqMgg3jQDZ4whD-lJrpg,652 +babel/locale-data/naq.dat,sha256=sfE12PpbssOaqdt_XWnykuk7rTArRVzg2DHBcAdHblY,16617 +babel/locale-data/naq_NA.dat,sha256=1Mh98XoWsJgytl-IvQuMXYJlLkYQvvoQ5_rptyu5ZME,590 +babel/locale-data/nb.dat,sha256=QaUKfFqj6AfimZOOiGI9DOxReUm-Jv6RysMhTdQ6XV0,210593 +babel/locale-data/nb_NO.dat,sha256=bXb8K_1FTSDsqIXeZu2C0oYPCrTUpX0Y5GxKs3vXwBc,626 +babel/locale-data/nb_SJ.dat,sha256=kCfUzsB6JilDQnWsVF1CFyg-7Ewq5VOdM-EWC7PJKP4,607 +babel/locale-data/nd.dat,sha256=_qfCNG-XGpi-qBzGvXKmKHxVu2WspuqdH_8ECspD1xY,16312 +babel/locale-data/nd_ZW.dat,sha256=szM5GcRhUeN0V1SGix3RkcgDkRNJF7D3BWJMYMOkNlY,608 +babel/locale-data/nds.dat,sha256=yjSK-CUxMes8T-cNuxynP6Av2MYxnNmn23PsGguUTA0,50632 +babel/locale-data/nds_DE.dat,sha256=wQlAFyMOkjMYQd9LVFTqLFt5GuntavA1RWhBf6E3DpM,627 +babel/locale-data/nds_NL.dat,sha256=VPodVrFivmTiPf4v5OZ3Foc0_FaQwgRBuK7QiD8xmhU,627 +babel/locale-data/ne.dat,sha256=oha7M56GETdAdz6rLbS65qD1oo_w8ab60lDltNGouec,248497 +babel/locale-data/ne_IN.dat,sha256=Pc3G-flVbWniVZRu4RzMVYB099rVPhUvxUC1TiTCr8U,1265 +babel/locale-data/ne_NP.dat,sha256=lhhB2jPqSBwBrxK6piIkUD0YHwGUNYdlmqlPBOJhb0o,608 +babel/locale-data/nl.dat,sha256=Cz1A8HnVqxHnoUgZIy8Ob0Kb_R8a8RbA5jfgO5oLleI,215801 +babel/locale-data/nl_AW.dat,sha256=6gmsswLqSrJ0XRfJJguCU0QFSU_dTpUOlPanbq5KGpM,611 +babel/locale-data/nl_BE.dat,sha256=IZZy26NCmKbOgbU-ZHHeviTctmUMFxmBeOmloDOCN6c,1835 +babel/locale-data/nl_BQ.dat,sha256=E39EYJYegrYGpAdLuSfkizgwgoBtfyRp-1Crb_I5PkI,608 +babel/locale-data/nl_CW.dat,sha256=Ho1si5eWdnrkT1_OA7ZWxarnzgfNdUmTlJLUkynzNck,611 +babel/locale-data/nl_NL.dat,sha256=kLT_7mliQl_5XhGi5lU_guTBSD6V-DUK92xhdWQxzjk,626 
+babel/locale-data/nl_SR.dat,sha256=VVLYKCz48vdDn000ZzlokZnD1Qr1T7Tmn47j2wPG9fQ,669 +babel/locale-data/nl_SX.dat,sha256=FTjEPrmwtpu8IQVixzrdl0dEyAH524Ml8cWUxd0pvjE,611 +babel/locale-data/nmg.dat,sha256=CTnRxMH09auUbe_gky2WT9aYX7OiS_3FEXvxzVDH2-U,16189 +babel/locale-data/nmg_CM.dat,sha256=4wv7ftQl9xu_DkfdjxoJ97gcm-pMhM51OCXYX3CQ6gU,609 +babel/locale-data/nn.dat,sha256=5rU8EBZu8-b1HBGXSeaeEiPItaYqKWupiG06aTbIBgE,179884 +babel/locale-data/nn_NO.dat,sha256=yc4l2fwSD9fD1-sCQirXzrAkfxIqD_garBegapCzWs8,626 +babel/locale-data/nnh.dat,sha256=jO3oG64qBHal9MpjumyI90ETg9LoNJ8CuKliElSLnHk,6766 +babel/locale-data/nnh_CM.dat,sha256=azweVaEFbSCMHLptoZQ46yKcr_antYfr2pRmxIuZQCk,609 +babel/locale-data/nus.dat,sha256=x_agBjiW-ZVuFQenH9AT5Rdn4TbFCJoL8n_N1AbmXBE,9153 +babel/locale-data/nus_SS.dat,sha256=XQQtE8pKShDclBfN4yU1Rh_zEqrYFVndB-t2ScdGGUs,590 +babel/locale-data/nyn.dat,sha256=PIizZE2pabGtsXhhg6TFiv0OlM352lMKqO2Fb-Wrizo,16275 +babel/locale-data/nyn_UG.dat,sha256=i2Qcu0KO73WK35o2BvnFV9yd6dLK_p69_LtbVTMkCJA,613 +babel/locale-data/om.dat,sha256=agj_n2fMWPhqLD3cZ3Pkux0y6b9r64pXXBQcl1nB5Bs,16588 +babel/locale-data/om_ET.dat,sha256=MhyQf6WK9JWcW9TuiTrQwo2C8tKIELtGKBiJ5Scrt1A,608 +babel/locale-data/om_KE.dat,sha256=A-EqNdXkq-j_TZK182yI6rnZYaFGGgAMyM76Q68FdG0,1566 +babel/locale-data/or.dat,sha256=-VmSkqqCokDThjp6Jhc0ENRKPAU1Gp3W0NkZfbiOwUA,241213 +babel/locale-data/or_IN.dat,sha256=tUmTnuoY49gDDU8WbdUCLyhv_2Yo-JJc_iTZlOJrH2Q,631 +babel/locale-data/os.dat,sha256=VRCyHyBI0alMKelyuYpaQw5Z3WEEZxkN0-GC039oqf8,17627 +babel/locale-data/os_GE.dat,sha256=bGm3R8Bz7k8wmb2GK_Ol83ud254rlJMbs26c1zN0w4Y,608 +babel/locale-data/os_RU.dat,sha256=A2armkX5bdC0hKe6ie0WxB1IB0exTMHAZWk_0PNjFD4,668 +babel/locale-data/pa.dat,sha256=oKoR7WJ7Ii8MNYowoXnZ8Lsgbgw25NPBbDYkgVEMPkc,244684 +babel/locale-data/pa_Arab.dat,sha256=T7-060vg7GELKQvqGl2tfZSn5_eN4dqGS_gBwymurvE,3984 +babel/locale-data/pa_Arab_PK.dat,sha256=-x9ycmOmzJ0ZIzy8J1ryO7nhRuBceY7ravgHBLEgyDY,608 +babel/locale-data/pa_Guru.dat,sha256=risWFebHubet9zREjGQ-AIrrtBdOtKXo0yafgX6FhJU,1249 +babel/locale-data/pa_Guru_IN.dat,sha256=tyUJVyrhCWckcBP10pfvLg2Xgv9shPpvWBaSiXg-G9c,631 +babel/locale-data/pcm.dat,sha256=e7YrXsfssCZkE5qlRy1BRYA4CPZOUiWsrSoYchqqpYE,174311 +babel/locale-data/pcm_NG.dat,sha256=E8wPtqkOJFSjTkU6T9V4RCZcer0Vc52PBvZ_LioI3S4,590 +babel/locale-data/pl.dat,sha256=OP4mD1-bddsjgwLV2FSJgpTAnlX-2rATEv4qHqdxnQA,236139 +babel/locale-data/pl_PL.dat,sha256=V62k9TTJ4FpN6KYLXzlsBBAMjHXnlNFuBnGoY9TBdDc,626 +babel/locale-data/prg.dat,sha256=1lqaf6Rcgbak-QLb0gFocafAWd3MXC8rM6Zcgn5ct00,20167 +babel/locale-data/prg_001.dat,sha256=m1nhpZ2Lh8TiVNCuKOVyBElL3M0Q9YIRZSC19jqZymE,1567 +babel/locale-data/ps.dat,sha256=ln_T0uNZqoPpDfA0obDhJAG6L6YADt_o66ZyFxdx4l0,180248 +babel/locale-data/ps_AF.dat,sha256=goJChlJTUKnh7pZiMUkZiRMMth5lshKHVDZFwKq3Iwg,651 +babel/locale-data/ps_PK.dat,sha256=_MSs-UxrpD1DJvl72MouRRhRmYbE8F6-MQ6q7ansCVw,7954 +babel/locale-data/pt.dat,sha256=xNKXRfbr-VRWZ1xT_p_gx99OW9I16x9Wgf2NyvP7RXk,195079 +babel/locale-data/pt_AO.dat,sha256=cTQSDeLXeH4NvRK6Kydc1LB3QyQO9qOSwh4UE7Ngga0,995 +babel/locale-data/pt_BR.dat,sha256=PoV5yebMbOhPwgtPQJ4qoKxOhQd3E5NCYcjjgOsvqu4,608 +babel/locale-data/pt_CH.dat,sha256=aFs_w4Xa1ZxFfw0GnV7IAj92XFa0xpK9mN4uY4ynDho,626 +babel/locale-data/pt_CV.dat,sha256=G5LMZLQqplotaUIvlv1iR3hgaIkwsYhMK0TXzjrmIzI,1015 +babel/locale-data/pt_GQ.dat,sha256=mQbJaJxvrVnC7MaTHD8r36VIe7vTfJfudKJc5XjzFg0,589 +babel/locale-data/pt_GW.dat,sha256=DTJrtZaU3wXwYHJvKkJK8BAZCcT9fSnsspJ25xuM4Po,975 +babel/locale-data/pt_LU.dat,sha256=rgoGwpHgeahu8nLnT-uyVsrkD4CrWWwTCqDct0NBJmw,645 
+babel/locale-data/pt_MO.dat,sha256=gkvSsQQWHZFLT7SALzaYT0inBlWKH2tTt3R1KDMXc5g,1592 +babel/locale-data/pt_MZ.dat,sha256=hmhU3Y7HgDXrsyTUootEEdjCO4dy8wxGRvZRezeWq_Y,1015 +babel/locale-data/pt_PT.dat,sha256=C7vXAMw_2sCavBd1R62VD0n3r08CZAA49hFMT_ti9cY,99144 +babel/locale-data/pt_ST.dat,sha256=YBm07Nws76viG-9eMRgf3fn-n2B0jCptD5N5s6OWySA,995 +babel/locale-data/pt_TL.dat,sha256=qG2kU_auBSaMJNnYn6fYwxspLJ3OK0EpL6Qd9-Qtzi4,975 +babel/locale-data/qu.dat,sha256=ghWjIAkE8YxHAfaLXgXiAKedoohrLikSTsUBiM3MnYU,107964 +babel/locale-data/qu_BO.dat,sha256=CGWYNs1_AuQG3T-fYwe-q2AwDl9LAEITGRRYzc_MdKQ,836 +babel/locale-data/qu_EC.dat,sha256=WewzwnSQA5llc9gb5UYy2ue5Y8_HRb1HnddOVIXcf6Q,810 +babel/locale-data/qu_PE.dat,sha256=gT0fXlP3-rFSzIy6SdYVt-6viGPP79ukYHbBynqU4Bk,608 +babel/locale-data/rm.dat,sha256=V12vyZyv0wD6Nu84Gl5V33yohhcm-dpGSJ2I9Ot1GwY,67934 +babel/locale-data/rm_CH.dat,sha256=atueKnCga8bqm2GrXOwBjQf1ypER1IAjcv4RX6Oz0Sk,626 +babel/locale-data/rn.dat,sha256=Dk-PN1zn4fjGlXZXwhEOfUmpJBu_2DlHZaDI_ins0Q0,16781 +babel/locale-data/rn_BI.dat,sha256=II-eZWKAf73Hh0aGZifK2NLJvvXWws8a7Uv_2TUZ2VA,589 +babel/locale-data/ro.dat,sha256=4B1VWnRbEV3KkNT3hbnkbtq2bAZdNqZIDV4DdD2AEZE,225782 +babel/locale-data/ro_MD.dat,sha256=tkVS0RiHpOA2ZjCZbufnznw8aVIFj-XLoWr8whLVN3w,3215 +babel/locale-data/ro_RO.dat,sha256=rFeMpPnG0zRek72AxhPuZAuJFKAuA-UL5fAyLAnPiQ8,608 +babel/locale-data/rof.dat,sha256=jx81EEsVRwsEnaROsGiG4vQ1jm1wFW_V3yNpPsYZwmM,16170 +babel/locale-data/rof_TZ.dat,sha256=6mZ6eFqNAqwuWCZuT7oZClLSv9eWSdGH0efVoQqxj40,590 +babel/locale-data/root.dat,sha256=W5zvjlEJQD3MWdvhMHuW72ERorcD51LDhqtyXmmYARk,42432 +babel/locale-data/ru.dat,sha256=TM12Pev9K3Ccwr5Z_Wj6hClv_qY_Hqk91f9nD6YSt8k,305212 +babel/locale-data/ru_BY.dat,sha256=Pb4BHcT6RF6ONtgLhPcGQXQHVGj9dPrrodoI4ihsTSk,649 +babel/locale-data/ru_KG.dat,sha256=iQapNW3xr7lH-HEbM7CIbdQwmUjm2Tgq3iJAMFUC7zc,632 +babel/locale-data/ru_KZ.dat,sha256=OnFw_fadGFUzN0KL3WKvL0ekAwCCv5NOIhz2IFbHK0g,629 +babel/locale-data/ru_MD.dat,sha256=vZr7Dz0UZlMeWWSisvdMuyOcLyreeihFbILXdQemOXM,627 +babel/locale-data/ru_RU.dat,sha256=QhIIdAW2iPQ6LcErVIuxwvaBi1ku8V5-zsy1MZV1YU8,626 +babel/locale-data/ru_UA.dat,sha256=ZALhQpV7aWxa78cxM54zedaE6-0YNGkB7NeL5BAeHOs,1747 +babel/locale-data/rw.dat,sha256=OJ5hT_uLwSOdlkjZkrP5WmriCJTqE48b2bJTS4cRV6g,16215 +babel/locale-data/rw_RW.dat,sha256=G6ta2DtZdiILzTdyZlXTCzoL-oRoF1gekRFCmN_dEyg,589 +babel/locale-data/rwk.dat,sha256=uZEibGF11hKUyRFeC3IcSTrxMuWJsK9Z0c2VtykOGu0,16057 +babel/locale-data/rwk_TZ.dat,sha256=RtQRCyT2DbqRtc4uzP8Nna10O8KouCqtbtSxCJ-PukI,590 +babel/locale-data/sah.dat,sha256=eXy9sdtMoVJcQM3C1-m9w8iGOzgqVguyHD1b_J_Xg78,48180 +babel/locale-data/sah_RU.dat,sha256=-Hi7VNsxTYaC-4G8qYQsnSPVMc5jXBYQJBvd5UeC-lo,627 +babel/locale-data/saq.dat,sha256=T_ODjjLim-CJKf2XIGBOfuEGtEqeN4I4VbyY6N_Rcl8,16455 +babel/locale-data/saq_KE.dat,sha256=uHKDZR4LUK5CGvcVC-dIpdYM3uY1KXVh6vkAOnOrc-w,609 +babel/locale-data/sat.dat,sha256=FRyhxW7p0BSz3oG0GLb96hg4e49rufWbNgOX5HifceM,12597 +babel/locale-data/sat_Olck.dat,sha256=XNqbalNvgWolPY1M9vZXPpLzFkzYzjSAvKtoP1x5oBs,878 +babel/locale-data/sat_Olck_IN.dat,sha256=Rx6KNBVSK2m0PvmKzotwxqBIp30-b5dCQU5-hqSB8tQ,632 +babel/locale-data/sbp.dat,sha256=N_zsOwROvCa1Nd1geNViDAkUsWkkPLDrpfXFmjJBwII,16479 +babel/locale-data/sbp_TZ.dat,sha256=myr2BmLmSpSCCyRFCjm70nQfdeUAopZ29zxfemg6F8c,590 +babel/locale-data/sd.dat,sha256=QoU7MJunjYKrj_TjLuMS6x0BuFxAsE4UcXnJw5XCrXk,194219 +babel/locale-data/sd_Arab.dat,sha256=EOWPc5-ACgE6NQEHILMBA_BP6mK35sTdUEEuEFCv748,852 +babel/locale-data/sd_Arab_PK.dat,sha256=pNtPPmwu0jQK9V31EOv-lVoFiYwf1iHDxJmB5NNIZzU,608 
+babel/locale-data/sd_Deva.dat,sha256=0Imih19CK7Tq6YdIazJNgJJMZPwPjKXSo0xFXWnliTA,15216 +babel/locale-data/sd_Deva_IN.dat,sha256=Uei2PSaYXixwn6VPwb7xeFMXt8I_jyM_myr-8lADGVs,631 +babel/locale-data/se.dat,sha256=cGes_DRLmVtVV0bjj37-QC9s_zALE7zEfrseAwvUhU4,72353 +babel/locale-data/se_FI.dat,sha256=klpIv_TDIAH88KnWG6g9AUWvdhmwMSKaSfCC0qPu0tY,46574 +babel/locale-data/se_NO.dat,sha256=k-BEm9_tnVXbt-H7pAtUU78dZfuiq7-cTcbsgsqISlg,626 +babel/locale-data/se_SE.dat,sha256=BxFV9gNTLfUF3ibsRvgeuRnuDo99396qMA-89tpdEFY,667 +babel/locale-data/seh.dat,sha256=0qAVFIAc1GZkYBCp4DP5DM1G2QfcfI0EvPQeP7Uci7c,15910 +babel/locale-data/seh_MZ.dat,sha256=feukobIWsGC_o5s_qb0UgFI7gzVCrNSydoRaXs0NUZ0,609 +babel/locale-data/ses.dat,sha256=X5NMBmnvudU3XqhKZWH8l36b4RFIf1vXuhLSIqUbgfI,15998 +babel/locale-data/ses_ML.dat,sha256=O7stcUKOw8ZkGmgnPqSWBCxPFA3_aDIcHZGAT9yRrtw,590 +babel/locale-data/sg.dat,sha256=GQNOEIiWV9wnrISbr5uFtFK9gvWoNNF6G5gUta9V1Io,16635 +babel/locale-data/sg_CF.dat,sha256=dDZMdfhJBfy2ShSVhAopU2nIEEBnTssu3Vji2v9SpHg,589 +babel/locale-data/shi.dat,sha256=yI6ilP0U3O4u2IGEJcHnEpFxPPqVIuZJRSAOyr-axtA,22036 +babel/locale-data/shi_Latn.dat,sha256=rqL_ruNuCpetsrf_JIL02XNbqkoBDDZCC-VjchfA8mY,15618 +babel/locale-data/shi_Latn_MA.dat,sha256=blTyj-JXuFz7wgjLjUC19rH4Emj7_-TOtMvBKb7qAus,590 +babel/locale-data/shi_Tfng.dat,sha256=PmToPVEqibydgF2nxMw21pujbbqf4odWn7GlEqQL2u0,947 +babel/locale-data/shi_Tfng_MA.dat,sha256=blTyj-JXuFz7wgjLjUC19rH4Emj7_-TOtMvBKb7qAus,590 +babel/locale-data/si.dat,sha256=zxzkUZZtc4uVe-G1jMp1pAtkW726ZJ872nLo6ZTSaZE,244850 +babel/locale-data/si_LK.dat,sha256=2k1GulXssuQkuKMmj4J74iAYHlfh507gp6l75lKDJwg,608 +babel/locale-data/sk.dat,sha256=BrxSaUkE6hCEqX1etX6_C6dfuCmSNAJvtH8IFqGzHAE,256860 +babel/locale-data/sk_SK.dat,sha256=b8ugTdqk71Ge03FdSEhnOmH0wP5XeDSI40ys2hGovNQ,626 +babel/locale-data/sl.dat,sha256=n8vsb9jixRFE_J7WJYW4w1GehxoGJnNHgGjFZFlmpEM,241874 +babel/locale-data/sl_SI.dat,sha256=V1oy5BlaUDjrX8b0jv9LK7Mgg4Yx84u-Iry4j3M8LYc,608 +babel/locale-data/smn.dat,sha256=iTOwk1p0Y-9jOGDIEuTz3-jwEbC3neN1n4xTKSIfyyY,42674 +babel/locale-data/smn_FI.dat,sha256=3FaHTO42uw4LsL2o9CnKHuk86tRNGaorVeYh7SPONWY,627 +babel/locale-data/sn.dat,sha256=pfT3kmp47TnfQGNvcsWF43YQaoHOtkXbRFvt3leN80Q,23252 +babel/locale-data/sn_ZW.dat,sha256=R48ZM21PI5bjz154uuK-wccs9d-M9YMdiQLtX-rbG5k,608 +babel/locale-data/so.dat,sha256=6KnXbRvPY84Ag4eBiPzLfjRvHuuuiu-ZJo5bA7qL7cU,153105 +babel/locale-data/so_DJ.dat,sha256=CWxbbQZ8iogPci77q4bpbWHOFBOKISwLOkqixb-TqRA,629 +babel/locale-data/so_ET.dat,sha256=JKebALALzWT58gltRAjWVKl3LqFGiy1iD-nbFFsvfZ8,628 +babel/locale-data/so_KE.dat,sha256=Ooas5zUI6mtrajAybdc2YQyFlS4RFNUxeXqT0pUQ2fo,1181 +babel/locale-data/so_SO.dat,sha256=WtbiqTAukA-EouWNpajrPLIyRqUNmh0A6bcbXQywwqc,589 +babel/locale-data/sq.dat,sha256=Gj2LUjCjKeRcNUF86vv8cbX1j_VPUmSBjQxHP8HETHQ,175659 +babel/locale-data/sq_AL.dat,sha256=061xsDws549-glqPY5No20svTFqs0pO6Cn9oUtwquVA,608 +babel/locale-data/sq_MK.dat,sha256=8D6OAIV9T29UVST-ZD0zjtgYgrScEKaeZDaf8fJBk4E,1181 +babel/locale-data/sq_XK.dat,sha256=nFWTBnEjpW6ux-oMbSov3ta96NrvwhjtQ80viYXFfKY,1160 +babel/locale-data/sr.dat,sha256=PtgWUhY4n2x_GzfWOjZbYvPrliijqBNBO0KBJmk5v0s,277763 +babel/locale-data/sr_Cyrl.dat,sha256=PqtXXFd7yu1Bweljv2UkBHFUYVPlIFY2abO3lfl8t4Y,1957 +babel/locale-data/sr_Cyrl_BA.dat,sha256=dShsmp6LRaSkv8ASEvrPf6KeovR0BHLg7DUu-CJp8_8,4710 +babel/locale-data/sr_Cyrl_ME.dat,sha256=kT_iSmJ3fWaM4XDKSSCWgfT-zdxMxm8Mg3hISHZC9m8,3873 +babel/locale-data/sr_Cyrl_RS.dat,sha256=AAJ6aaa8D73J6na-RIjuqhEfLWKReHLwzYavdFaobhs,608 
+babel/locale-data/sr_Cyrl_XK.dat,sha256=hXdgzDWE5gDdWBbiNBaEHTl1hjEqqtd9dmnFXPFRkfc,2756 +babel/locale-data/sr_Latn.dat,sha256=uGrDTKlSnMMQg7XORMw8Lg5v17HB-mNAFlD0Kczta1I,230252 +babel/locale-data/sr_Latn_BA.dat,sha256=ohHi1ZC9VzvqIIRkemStv_FVS9U5CPiLr8I2QUCt0ag,3994 +babel/locale-data/sr_Latn_ME.dat,sha256=gPaIdFD34NWU-69gmIOuCw6b8klXWs6RJUeU8ihcSqQ,3074 +babel/locale-data/sr_Latn_RS.dat,sha256=AAJ6aaa8D73J6na-RIjuqhEfLWKReHLwzYavdFaobhs,608 +babel/locale-data/sr_Latn_XK.dat,sha256=_BATf9SVmbg1Knn5nGG5srTFj0suBkMuTG5gcF46MO8,2194 +babel/locale-data/su.dat,sha256=tjQYu8CY2Y_Hz12pRczCaxX7LrRJK4xOvYthqwFgVBg,12452 +babel/locale-data/su_Latn.dat,sha256=AoqRqUqiJYE1G-ZRCIIhkYSRQ8s71qDefLwv70XrgZA,718 +babel/locale-data/su_Latn_ID.dat,sha256=Hi1QalxGc49vElzHunyzz1Mfc6_1KgzXkGjcj04mq8c,608 +babel/locale-data/sv.dat,sha256=DLFxWbJiUU2870aLFBVVDW3_OEWpE2yL8p-Q1h0J9aw,221078 +babel/locale-data/sv_AX.dat,sha256=4LqqSZFfMV2iRBS5TyTdWWKfdoN_ahxotRUbyuoaX8g,626 +babel/locale-data/sv_FI.dat,sha256=WDEfn4f8iqbtpxYsGA_6mDW_jk4gFolp7gU58aRKt1w,2584 +babel/locale-data/sv_SE.dat,sha256=OtNxgFxUFAH5mYJ4yGqp_wLMmnMu9mwVnvJ57BKUOKs,626 +babel/locale-data/sw.dat,sha256=LxA1OYCK0j4dbhSCT8KgSwrsF_wA94vHs30XAP-vQ5A,179207 +babel/locale-data/sw_CD.dat,sha256=6HlO0ltwzAj1j8ns4jxCI0P36r7MauG7h5EQqpQhFkY,2660 +babel/locale-data/sw_KE.dat,sha256=GKdU3qzg_MR2lQE7gRkAZqHA8gD0nBUhgSqNOkS0fbY,35934 +babel/locale-data/sw_TZ.dat,sha256=rRGQVQ_Vp0bQ6_KnZTZDn7YDY5HDNiIsUMFLSnKD6nA,589 +babel/locale-data/sw_UG.dat,sha256=GN6zqbWL7dor3RJWaJ7EwhMX8BIA6HVUzN7xWswPgbU,633 +babel/locale-data/ta.dat,sha256=ytGi4rbhhefQHdKzbgWOCheyBDU6H6JJGOYHWJvuMxE,263466 +babel/locale-data/ta_IN.dat,sha256=1D_ISTGx78nHVtYBoccQpH04o6CCL5dIIGRm4RWkMDg,631 +babel/locale-data/ta_LK.dat,sha256=sHPH1bp1O7mbhnp0WgIyHfKckEIATHbtNtSGQPdbVSU,1181 +babel/locale-data/ta_MY.dat,sha256=SVPpzoaW_lDTyUaLLP05lUjx5bnkv5G0QFrnMC3WRGs,1238 +babel/locale-data/ta_SG.dat,sha256=9e057sNK17LS7GQfZ74fzkPmHndampHvierKZNX6vT4,1257 +babel/locale-data/te.dat,sha256=QZ7c7f3RbUpaJfZ7fr4tWqrUowkXNnCBOlGGAfH9y4I,262280 +babel/locale-data/te_IN.dat,sha256=uRakP3CRkBJKNlCxMpMDOGOi5aeiq5OiLxBbUWn2NZk,631 +babel/locale-data/teo.dat,sha256=67b09CdiUgLq-6n63Ox1CvkkxExMIWAJh82HVSoyah8,16671 +babel/locale-data/teo_KE.dat,sha256=yZVnSmYqZ77jAPlyuyY_yivRqbj4dwbb99MW52EJNvU,630 +babel/locale-data/teo_UG.dat,sha256=o5PkAO5zi67Lxbh5vKaOJF6qerkAFvJu3RrQ0iHlmwA,613 +babel/locale-data/tg.dat,sha256=D0Rhojw2aiooD1a0COKgtkD3nvxhPwkvOaBJuqoQgRQ,36303 +babel/locale-data/tg_TJ.dat,sha256=ge5GlIElwu5VzdLeWCCEz5A2-F9ihtOH9Ic9k5ii4wY,608 +babel/locale-data/th.dat,sha256=qvpR1DSefc-FR-cvQXpDw_sADav-_oRt0RLnBBZd_eU,235280 +babel/locale-data/th_TH.dat,sha256=aQd6NJ_y5ObTF2koe4cI_s_mVdG0c7JfaiivpLN8M50,608 +babel/locale-data/ti.dat,sha256=wLPieG3yJtUVM8VrrwEjhqnO8El_xTQaTlgsTL2qFP4,73036 +babel/locale-data/ti_ER.dat,sha256=c0K7KojJP6dMJAGn1SRfi6lmNy8BvijIUNC4m9gqc2I,958 +babel/locale-data/ti_ET.dat,sha256=vT6Tl0BBaMupoRD4fb3kmM_Mufx2EGuqP460HO_Bh7Q,608 +babel/locale-data/tk.dat,sha256=CJVPDLCt8C3T8GhrgEsnnWTnwoWm2RcNDfvQ4bjDZSE,167927 +babel/locale-data/tk_TM.dat,sha256=06szpphDghkTBac9nMge32AzZKeQdxXeZjsXF0X1-Nk,608 +babel/locale-data/to.dat,sha256=ZSHEXy-JD4xKehjprq07mYbMgXKEy0j3kA0P6RKiClw,166458 +babel/locale-data/to_TO.dat,sha256=UaceT8b6KsoNoQd68finqhXDLVr_4GtcGFsgTwJaYTc,589 +babel/locale-data/tr.dat,sha256=xeuwfXivD-H9zszsBigvaEqV57cp7jkngzv8es_trSQ,209226 +babel/locale-data/tr_CY.dat,sha256=sHH4XyRX7XNKfRzQPuDEVNlfBCdyg7jy-bvyP7yPTBw,1184 
+babel/locale-data/tr_TR.dat,sha256=lUyZY1ya9qqjmhdYhAkP6j4V953QWx_cC16GrVZCaYM,608 +babel/locale-data/tt.dat,sha256=szBjiq0ZOV4gwKairWY_axw1_Q2WuV9CO9XotiJtI9A,33588 +babel/locale-data/tt_RU.dat,sha256=MqxY1dPvSLsO7huGCYIhaTqwfGw9qHzq-oUt3VwVPyU,626 +babel/locale-data/twq.dat,sha256=AW_qiBsx_oBOrFVQ9yvnHEms3jqkxNFRY0hYF8ErVrs,16171 +babel/locale-data/twq_NE.dat,sha256=yv89EP--ZBgtvC_Vfy7UN37T7OktzntjGpRKfo89AW4,590 +babel/locale-data/tzm.dat,sha256=oVsKo32vfNoupn0mwkP0QmlYCh0irKQvAoIhauxhD1c,16149 +babel/locale-data/tzm_MA.dat,sha256=jOAK87vFm-WpZK130vSvCLE8fWxfjeyPs_G1elc5TMk,590 +babel/locale-data/ug.dat,sha256=N05TG_7CnVabGWTTHoVWlxPQ6MseNb_y3_YI2_pLiRw,128534 +babel/locale-data/ug_CN.dat,sha256=EMQBXnc07gL0zsdKDerIo0Sl6DtZVJaapUCoOL9V22k,608 +babel/locale-data/uk.dat,sha256=GfQIacOGkBh-WmYgbPOlmiu6mLYADuzVi919Lk2p9Ig,315834 +babel/locale-data/uk_UA.dat,sha256=YE9ivxv_h3qwHTeFyWTV4gt5JaYgYdKQTLHHadAQQT8,608 +babel/locale-data/ur.dat,sha256=q4apEHeXrfxzm0HJrKkdHJd6Y54ZUwFC_6lZzzkxvjU,197918 +babel/locale-data/ur_IN.dat,sha256=YSaoN2o4C1InByihAFCDOBE_HlCt7xkRl9lyOrfoCTk,12595 +babel/locale-data/ur_PK.dat,sha256=VXzQfSYJAIlzcDjPva4UM67jhqIwDUqAVNRGB2YPcfI,608 +babel/locale-data/uz.dat,sha256=KvGhlHDdpSZtTcqMENPN-zCjmsIIIwu8o1Oc2MTHjDA,173590 +babel/locale-data/uz_Arab.dat,sha256=WrXXAaoKSVPU_PeGQIGU0jmdaVzPtkZQxzhtOlH7VjM,4111 +babel/locale-data/uz_Arab_AF.dat,sha256=ONnsHyim0Q-GRD6BAHPTj2Ri4aR41EB5HWhJQrKKXAU,651 +babel/locale-data/uz_Cyrl.dat,sha256=_q-bVsz4aZwG1LVqQC5rN_aCn5o-ocAJ1zD8eqVOMj8,98924 +babel/locale-data/uz_Cyrl_UZ.dat,sha256=D2g0Iy4gME1-ZrXDPgcs1VlFNW1FWKRD607VKgUsFwA,608 +babel/locale-data/uz_Latn.dat,sha256=wkn_uCtrZQx7Ut7_pTVXVU9X956I30Nr4seILvPnZ_o,1265 +babel/locale-data/uz_Latn_UZ.dat,sha256=D2g0Iy4gME1-ZrXDPgcs1VlFNW1FWKRD607VKgUsFwA,608 +babel/locale-data/vai.dat,sha256=Y2F73JC5rT_LyIvVXYCJEMwnLE0YwmOTiEh-ZsUhx9A,18988 +babel/locale-data/vai_Latn.dat,sha256=qys9gT6Krcg_09r52_SMMrAEyI2XzngIuKlPKWpZnhg,14989 +babel/locale-data/vai_Latn_LR.dat,sha256=mFG8a5AB_Cnv2lwGAVg5SxhF0lgkrS4vB3UdqB1L8Y4,590 +babel/locale-data/vai_Vaii.dat,sha256=rZi5j11eMQeE9MzTRK4Gl5EhqEy_X6o3V06k_E4ioOY,666 +babel/locale-data/vai_Vaii_LR.dat,sha256=mFG8a5AB_Cnv2lwGAVg5SxhF0lgkrS4vB3UdqB1L8Y4,590 +babel/locale-data/vi.dat,sha256=pfW__H9jiCPiorXwYFxodzP4CjbHhGh6AwCye8RznAQ,162325 +babel/locale-data/vi_VN.dat,sha256=hn8-pr09TFtQwAvAau15ETGT4bmPay2o_LGOvLA6Bsk,608 +babel/locale-data/vo.dat,sha256=nmu1bYpYLcTQzd5w4nGKygzPddIlbRfHZIJCYPT8bIM,5225 +babel/locale-data/vo_001.dat,sha256=sQnDTedm-Ec9H8JaIqP3ZLpsAvJOr8GRr1BADz4lNRc,823 +babel/locale-data/vun.dat,sha256=_9UYsm8lOz6Q0DNjRQDTaZU8wR5sv5PS4y-oDn7siqE,16067 +babel/locale-data/vun_TZ.dat,sha256=1lrpmdkRCqdDzX4Cel249MWWRGVDmubt9OiMf6Qsrnk,590 +babel/locale-data/wae.dat,sha256=hLrXzFfFW-jPsSrA0ZFzjZdhV60X1etEZOKlb6f0Dbw,28660 +babel/locale-data/wae_CH.dat,sha256=5fOXgR-rrWoIYbf230jds8iOj1dsbvNW2Qv2XBNaCto,627 +babel/locale-data/wo.dat,sha256=btSGt-pyMUGCK1wCUkG_9dccsvl_w9qPmAv1OV9Q5Iw,25698 +babel/locale-data/wo_SN.dat,sha256=ryU-hZj708JELq9ldfmRU8I2EDihWGM_6v8QQY4qRuE,589 +babel/locale-data/xh.dat,sha256=FILfYkTCJIqiqqcEo29ID_9AxcUGdWePyXJnbOI_kGM,15055 +babel/locale-data/xh_ZA.dat,sha256=LPaE6z0iRUaCSVd2CeuJzy_0GfhGCs2KgtYPHqLL18I,608 +babel/locale-data/xog.dat,sha256=TU2N1WKiMwbPTe7tF_Hj300hT6Hn-_f5H12h5HU1Xf8,16555 +babel/locale-data/xog_UG.dat,sha256=5B_ozUekB9sXcaT_-7brqH1nfv_XEP5CnB2PG84_JlM,613 +babel/locale-data/yav.dat,sha256=R_FIP-S4MiI3XULYddxjJtYhFPPwILvVDbHd3S6C1dc,15302 
+babel/locale-data/yav_CM.dat,sha256=y9SNKPJTMwmSdGc0MO-vn7hUeK4arphzRDp2zBFYigs,609 +babel/locale-data/yi.dat,sha256=XNca4NO0IFPfGXT-E7HlW-9BOGi9ZvJSxrx8O13tHUs,30314 +babel/locale-data/yi_001.dat,sha256=wdIcCz3ZcZHFJqT28rBWo4iYmRffPPVWpxod_13KIYY,885 +babel/locale-data/yo.dat,sha256=a1uZep-5fpoV77z6eDBYeaaWw3c8AMXswNgYdHqpyWs,68429 +babel/locale-data/yo_BJ.dat,sha256=oy0uMgotXzVGUeC1paiyc0YEoxsXHf1qQ5eYr9dl5TE,34481 +babel/locale-data/yo_NG.dat,sha256=vVCmItRDqtBEzSXYDsXGoiobciBukV84o_LpnAZRiDs,589 +babel/locale-data/yue.dat,sha256=kgkc69B6aeh8K1K-iNS1nVb_xi4IWHUcIG9CgIWmjz4,183333 +babel/locale-data/yue_Hans.dat,sha256=aXi0gwVQezQ6RZDRAd09wLWhwlKUTfrSyDF-ZjEIJvo,183225 +babel/locale-data/yue_Hans_CN.dat,sha256=0SEPKM5hD5K5TXbakL6_Q7mE-Te_ea6eOhSy1uwwJXA,609 +babel/locale-data/yue_Hant.dat,sha256=_BWk9N_79PzY7EPWu-O_M8j1ISYhkN29HEbuo-i0AoI,1279 +babel/locale-data/yue_Hant_HK.dat,sha256=8iSo-1wkebNgS7h3iCFLd9s-nW8TuQ3-4UFUMUEmbMM,609 +babel/locale-data/zgh.dat,sha256=52E-cKoUGWnMZMAeTavTmgj666axYtF6L9u5tHn1JYQ,30498 +babel/locale-data/zgh_MA.dat,sha256=sIGElmHSGowAduz_ykRA_-PotBTJaOqmBtRhXJ_swJc,590 +babel/locale-data/zh.dat,sha256=ffkBVzcOpHqTELueQekXu2xkPOiFnhAH88ImnVX7ZT8,181044 +babel/locale-data/zh_Hans.dat,sha256=-JH1KTn0ibMkBj6gw2V2Q6ixLBL5x0n7B7AkCpU0TRI,1278 +babel/locale-data/zh_Hans_CN.dat,sha256=sTrrw5ttuMLr70IDoBM02f7vGVzuB-0gQNQK0IDNyXA,608 +babel/locale-data/zh_Hans_HK.dat,sha256=gy8r4jqxvwhNf1BUPjnkRmJGzweemtJ1ylkumHBmUgg,3138 +babel/locale-data/zh_Hans_MO.dat,sha256=C1N9WHRhsqPiq-9su32Ar1ZcBpN5P0pTJapGAFRCOrM,3270 +babel/locale-data/zh_Hans_SG.dat,sha256=qCiYdP4MAwcVyJyJ-YJ9B_kvsNUxu9nWkeR3XwgNYpQ,3466 +babel/locale-data/zh_Hant.dat,sha256=dzsvtbyAGj7fzzCl2xBJ010IQzq9IJsi59vW7Qf2d9Y,185029 +babel/locale-data/zh_Hant_HK.dat,sha256=71VE9xyc0m5Gh4gJ1STha37iLm5lvpISG3fZsKFuQ9g,56895 +babel/locale-data/zh_Hant_MO.dat,sha256=gTS5IdcTsRoMwAnANseIbj2nyflLj0tBQn2-4AFAaf4,630 +babel/locale-data/zh_Hant_TW.dat,sha256=QYchBgi8JmeZgs7rQrUYn93CNNL9aavsOWYzAaTndwM,608 +babel/locale-data/zu.dat,sha256=8ONW9eqg4gnEisbcMQeDl71dCVY1ckM4nJOXat9rPuM,167684 +babel/locale-data/zu_ZA.dat,sha256=YO0tFWUiAdda0x3XEoL98oPNFGRLuk5OZ8DaR3lUX38,608 +babel/localedata.py,sha256=bp9ZCXKgvJ08oOwrOIeknZ3Ks7QVi03HS_IhIDOf_ow,7931 +babel/localtime/__init__.py,sha256=43mNcGAGljnTjngbhTMl6xhfeHMR1Sabby2O-Deh4gE,1721 +babel/localtime/__pycache__/__init__.cpython-38.pyc,, +babel/localtime/__pycache__/_unix.cpython-38.pyc,, +babel/localtime/__pycache__/_win32.cpython-38.pyc,, +babel/localtime/_unix.py,sha256=P66o3ErKXzhFvj3e3Qk6MBS7AR0qsDqSQclIAMHKp18,4801 +babel/localtime/_win32.py,sha256=dGzhQ8AlY5iItSd-i3Fi2O3YWuVJ83PFSWe7EG2BaBg,3086 +babel/messages/__init__.py,sha256=mYEtObYlyGT9zKJog4IjXFN-au3uxnc16wg89edsMxo,254 +babel/messages/__pycache__/__init__.cpython-38.pyc,, +babel/messages/__pycache__/catalog.cpython-38.pyc,, +babel/messages/__pycache__/checkers.cpython-38.pyc,, +babel/messages/__pycache__/extract.cpython-38.pyc,, +babel/messages/__pycache__/frontend.cpython-38.pyc,, +babel/messages/__pycache__/jslexer.cpython-38.pyc,, +babel/messages/__pycache__/mofile.cpython-38.pyc,, +babel/messages/__pycache__/plurals.cpython-38.pyc,, +babel/messages/__pycache__/pofile.cpython-38.pyc,, +babel/messages/catalog.py,sha256=lQUVsuoNKL8yy--XqrHWX2AEKk6_qj4vpTNwuJJEtOE,32291 +babel/messages/checkers.py,sha256=tCqwgZpzwJwhouwSIpKNHW0m48338W1omEllOFDxv9s,6085 +babel/messages/extract.py,sha256=V-_eVZzxtANiue1ST3Yp6Kv1N3z5Swy4eu2_GuEt2WU,26474 
+babel/messages/frontend.py,sha256=skgwlQry-3Sbf2v448eL3KxwUhd4pFRLa_5kkilKgvc,38729 +babel/messages/jslexer.py,sha256=NU2h-nZxoNnycllWViSv3X-jB_p1Vpw53uvqVklsMQ4,6334 +babel/messages/mofile.py,sha256=2JBYxNfuBqO35U-H_VqB1rgW2gocS0Has1YgGZwYxLI,7204 +babel/messages/plurals.py,sha256=GOD89ObneG4--qFbt_PQOvpEY-72o9LdghOsztNgRiw,7206 +babel/messages/pofile.py,sha256=fMUcLVi5-vIyKfDjkOde6He1QTSr0n4iYEis560zHSw,22146 +babel/numbers.py,sha256=-V_bQC6whhEalzEmYkFtvydZB0_knjLDgzu2C2SzGbM,39872 +babel/plural.py,sha256=bo-QkRGinKBBF58KgJoE7aUFKTG9PrFYtpTYvOSfYMw,21314 +babel/support.py,sha256=uO02VsCqKTcgBopi1RkQGhco2gRTZi4q3hlrbhgnxsw,22622 +babel/units.py,sha256=QPa8jqWKdS_dB4zTp60ffty2PClONBdlbKc885hb2RA,11291 +babel/util.py,sha256=o6JpnIY9-dhVdTgli0MAUknu4haiag_swAcLYdi8wVY,7576 diff --git a/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/WHEEL b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/WHEEL new file mode 100644 index 0000000..01b8fc7 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/entry_points.txt b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/entry_points.txt new file mode 100644 index 0000000..18c3a58 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/entry_points.txt @@ -0,0 +1,22 @@ + + [console_scripts] + pybabel = babel.messages.frontend:main + + [distutils.commands] + compile_catalog = babel.messages.frontend:compile_catalog + extract_messages = babel.messages.frontend:extract_messages + init_catalog = babel.messages.frontend:init_catalog + update_catalog = babel.messages.frontend:update_catalog + + [distutils.setup_keywords] + message_extractors = babel.messages.frontend:check_message_extractors + + [babel.checkers] + num_plurals = babel.messages.checkers:num_plurals + python_format = babel.messages.checkers:python_format + + [babel.extractors] + ignore = babel.messages.extract:extract_nothing + python = babel.messages.extract:extract_python + javascript = babel.messages.extract:extract_javascript + \ No newline at end of file diff --git a/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/top_level.txt b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/top_level.txt new file mode 100644 index 0000000..98f6593 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Babel-2.9.1.dist-info/top_level.txt @@ -0,0 +1 @@ +babel diff --git a/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/INSTALLER b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/LICENSE.rst b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/LICENSE.rst new file mode 100644 index 0000000..c37cae4 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. 
Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/METADATA b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/METADATA new file mode 100644 index 0000000..1af8df0 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/METADATA @@ -0,0 +1,106 @@ +Metadata-Version: 2.1 +Name: Jinja2 +Version: 2.11.3 +Summary: A very fast and expressive template engine. +Home-page: https://palletsprojects.com/p/jinja/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Documentation, https://jinja.palletsprojects.com/ +Project-URL: Code, https://github.com/pallets/jinja +Project-URL: Issue tracker, https://github.com/pallets/jinja/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* +Description-Content-Type: text/x-rst +Requires-Dist: MarkupSafe (>=0.23) +Provides-Extra: i18n +Requires-Dist: Babel (>=0.8) ; extra == 'i18n' + +Jinja +===== + +Jinja is a fast, expressive, extensible templating engine. Special +placeholders in the template allow writing code similar to Python +syntax. Then the template is passed data to render the final document. + +It includes: + +- Template inheritance and inclusion. +- Define and import macros within templates. 
+- HTML templates can use autoescaping to prevent XSS from untrusted + user input. +- A sandboxed environment can safely render untrusted templates. +- AsyncIO support for generating templates and calling async + functions. +- I18N support with Babel. +- Templates are compiled to optimized Python code just-in-time and + cached, or can be compiled ahead-of-time. +- Exceptions point to the correct line in templates to make debugging + easier. +- Extensible filters, tests, functions, and even syntax. + +Jinja's philosophy is that while application logic belongs in Python if +possible, it shouldn't make the template designer's job difficult by +restricting functionality too much. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U Jinja2 + +.. _pip: https://pip.pypa.io/en/stable/quickstart/ + + +In A Nutshell +------------- + +.. code-block:: jinja + + {% extends "base.html" %} + {% block title %}Members{% endblock %} + {% block content %} + <ul> + {% for user in users %} + <li><a href="{{ user.url }}">{{ user.username }}</a></li> + {% endfor %} + </ul> + {% endblock %} + + +Links +----- + +- Website: https://palletsprojects.com/p/jinja/ +- Documentation: https://jinja.palletsprojects.com/ +- Releases: https://pypi.org/project/Jinja2/ +- Code: https://github.com/pallets/jinja +- Issue tracker: https://github.com/pallets/jinja/issues +- Test status: https://dev.azure.com/pallets/jinja/_build +- Official chat: https://discord.gg/t6rrQZH + + diff --git a/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/RECORD b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/RECORD new file mode 100644 index 0000000..da92696 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/RECORD @@ -0,0 +1,61 @@ +Jinja2-2.11.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Jinja2-2.11.3.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 +Jinja2-2.11.3.dist-info/METADATA,sha256=PscpJ1C3RSp8xcjV3fAuTz13rKbGxmzJXnMQFH-WKhs,3535 +Jinja2-2.11.3.dist-info/RECORD,, +Jinja2-2.11.3.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 +Jinja2-2.11.3.dist-info/entry_points.txt,sha256=Qy_DkVo6Xj_zzOtmErrATe8lHZhOqdjpt3e4JJAGyi8,61 +Jinja2-2.11.3.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7 +jinja2/__init__.py,sha256=LZUXmxJc2GIchfSAeMWsxCWiQYO-w1-736f2Q3I8ms8,1549 +jinja2/__pycache__/__init__.cpython-38.pyc,, +jinja2/__pycache__/_compat.cpython-38.pyc,, +jinja2/__pycache__/_identifier.cpython-38.pyc,, +jinja2/__pycache__/asyncfilters.cpython-38.pyc,, +jinja2/__pycache__/asyncsupport.cpython-38.pyc,, +jinja2/__pycache__/bccache.cpython-38.pyc,, +jinja2/__pycache__/compiler.cpython-38.pyc,, +jinja2/__pycache__/constants.cpython-38.pyc,, +jinja2/__pycache__/debug.cpython-38.pyc,, +jinja2/__pycache__/defaults.cpython-38.pyc,, +jinja2/__pycache__/environment.cpython-38.pyc,, +jinja2/__pycache__/exceptions.cpython-38.pyc,, +jinja2/__pycache__/ext.cpython-38.pyc,, +jinja2/__pycache__/filters.cpython-38.pyc,, +jinja2/__pycache__/idtracking.cpython-38.pyc,, +jinja2/__pycache__/lexer.cpython-38.pyc,, +jinja2/__pycache__/loaders.cpython-38.pyc,, +jinja2/__pycache__/meta.cpython-38.pyc,, +jinja2/__pycache__/nativetypes.cpython-38.pyc,, +jinja2/__pycache__/nodes.cpython-38.pyc,, +jinja2/__pycache__/optimizer.cpython-38.pyc,, +jinja2/__pycache__/parser.cpython-38.pyc,, +jinja2/__pycache__/runtime.cpython-38.pyc,, +jinja2/__pycache__/sandbox.cpython-38.pyc,, +jinja2/__pycache__/tests.cpython-38.pyc,, +jinja2/__pycache__/utils.cpython-38.pyc,,
+jinja2/__pycache__/visitor.cpython-38.pyc,, +jinja2/_compat.py,sha256=B6Se8HjnXVpzz9-vfHejn-DV2NjaVK-Iewupc5kKlu8,3191 +jinja2/_identifier.py,sha256=EdgGJKi7O1yvr4yFlvqPNEqV6M1qHyQr8Gt8GmVTKVM,1775 +jinja2/asyncfilters.py,sha256=XJtYXTxFvcJ5xwk6SaDL4S0oNnT0wPYvXBCSzc482fI,4250 +jinja2/asyncsupport.py,sha256=ZBFsDLuq3Gtji3Ia87lcyuDbqaHZJRdtShZcqwpFnSQ,7209 +jinja2/bccache.py,sha256=3Pmp4jo65M9FQuIxdxoDBbEDFwe4acDMQf77nEJfrHA,12139 +jinja2/compiler.py,sha256=Ta9W1Lit542wItAHXlDcg0sEOsFDMirCdlFPHAurg4o,66284 +jinja2/constants.py,sha256=RR1sTzNzUmKco6aZicw4JpQpJGCuPuqm1h1YmCNUEFY,1458 +jinja2/debug.py,sha256=neR7GIGGjZH3_ILJGVUYy3eLQCCaWJMXOb7o0kGInWc,8529 +jinja2/defaults.py,sha256=85B6YUUCyWPSdrSeVhcqFVuu_bHUAQXeey--FIwSeVQ,1126 +jinja2/environment.py,sha256=XDSLKc4SqNLMOwTSq3TbWEyA5WyXfuLuVD0wAVjEFwM,50629 +jinja2/exceptions.py,sha256=VjNLawcmf2ODffqVMCQK1cRmvFaUfQWF4u8ouP3QPcE,5425 +jinja2/ext.py,sha256=AtwL5O5enT_L3HR9-oBvhGyUTdGoyaqG_ICtnR_EVd4,26441 +jinja2/filters.py,sha256=9ORilsZrUoydSI9upz8_qGy7gozDWLYoFmlIBFSVRnQ,41439 +jinja2/idtracking.py,sha256=J3O4VHsrbf3wzwiBc7Cro26kHb6_5kbULeIOzocchIU,9211 +jinja2/lexer.py,sha256=nUFLRKhhKmmEWkLI65nQePgcQs7qsRdjVYZETMt_v0g,30331 +jinja2/loaders.py,sha256=C-fST_dmFjgWkp0ZuCkrgICAoOsoSIF28wfAFink0oU,17666 +jinja2/meta.py,sha256=QjyYhfNRD3QCXjBJpiPl9KgkEkGXJbAkCUq4-Ur10EQ,4131 +jinja2/nativetypes.py,sha256=Ul__gtVw4xH-0qvUvnCNHedQeNDwmEuyLJztzzSPeRg,2753 +jinja2/nodes.py,sha256=Mk1oJPVgIjnQw9WOqILvcu3rLepcFZ0ahxQm2mbwDwc,31095 +jinja2/optimizer.py,sha256=gQLlMYzvQhluhzmAIFA1tXS0cwgWYOjprN-gTRcHVsc,1457 +jinja2/parser.py,sha256=fcfdqePNTNyvosIvczbytVA332qpsURvYnCGcjDHSkA,35660 +jinja2/runtime.py,sha256=0y-BRyIEZ9ltByL2Id6GpHe1oDRQAwNeQvI0SKobNMw,30618 +jinja2/sandbox.py,sha256=knayyUvXsZ-F0mk15mO2-ehK9gsw04UhB8td-iUOtLc,17127 +jinja2/tests.py,sha256=iO_Y-9Vo60zrVe1lMpSl5sKHqAxe2leZHC08OoZ8K24,4799 +jinja2/utils.py,sha256=Wy4yC3IByqUWwnKln6SdaixdzgK74P6F5nf-gQZrYnU,22436 +jinja2/visitor.py,sha256=DUHupl0a4PGp7nxRtZFttUzAi1ccxzqc2hzetPYUz8U,3240 diff --git a/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/WHEEL b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/WHEEL new file mode 100644 index 0000000..01b8fc7 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/entry_points.txt b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/entry_points.txt new file mode 100644 index 0000000..3619483 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[babel.extractors] +jinja2 = jinja2.ext:babel_extract [i18n] + diff --git a/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/top_level.txt b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/top_level.txt new file mode 100644 index 0000000..7f7afbf --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Jinja2-2.11.3.dist-info/top_level.txt @@ -0,0 +1 @@ +jinja2 diff --git a/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/INSTALLER b/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/LICENSE 
b/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/LICENSE new file mode 100644 index 0000000..72aaa87 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/LICENSE @@ -0,0 +1,31 @@ +Copyright (c) 2010 by the Logbook Team, see AUTHORS for more details. + +Some rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * The names of the contributors may not be used to endorse or + promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/METADATA b/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/METADATA new file mode 100644 index 0000000..20a09eb --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/METADATA @@ -0,0 +1,97 @@ +Metadata-Version: 2.1 +Name: Logbook +Version: 1.5.3 +Summary: A logging replacement for Python +Home-page: http://logbook.pocoo.org/ +Author: Armin Ronacher, Georg Brandl +Author-email: armin.ronacher@active-4.com +License: BSD +Platform: any +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Provides-Extra: all +Requires-Dist: Jinja2 ; extra == 'all' +Requires-Dist: brotli ; extra == 'all' +Requires-Dist: cython ; extra == 'all' +Requires-Dist: execnet (>=1.0.9) ; extra == 'all' +Requires-Dist: pytest-cov (>=2.6) ; extra == 'all' +Requires-Dist: pytest (>4.0) ; extra == 'all' +Requires-Dist: pyzmq ; extra == 'all' +Requires-Dist: redis ; extra == 'all' +Requires-Dist: sqlalchemy ; extra == 'all' +Provides-Extra: compression +Requires-Dist: brotli ; extra == 'compression' +Provides-Extra: dev +Requires-Dist: cython ; extra == 'dev' +Requires-Dist: pytest-cov (>=2.6) ; extra == 'dev' +Requires-Dist: pytest (>4.0) ; extra == 'dev' +Provides-Extra: execnet +Requires-Dist: execnet (>=1.0.9) ; extra == 'execnet' +Provides-Extra: jinja +Requires-Dist: Jinja2 ; extra == 'jinja' +Provides-Extra: redis +Requires-Dist: redis ; extra == 'redis' +Provides-Extra: sqlalchemy +Requires-Dist: sqlalchemy ; extra == 'sqlalchemy' +Provides-Extra: test +Requires-Dist: pytest-cov (>=2.6) ; extra == 'test' +Requires-Dist: pytest (>4.0) ; extra == 'test' 
+Provides-Extra: zmq +Requires-Dist: pyzmq ; extra == 'zmq' + + +Logbook +------- + +An awesome logging implementation that is fun to use. + +Quickstart +`````````` + +:: + + from logbook import Logger + log = Logger('A Fancy Name') + + log.warn('Logbook is too awesome for most applications') + log.error("Can't touch this") + +Works for web apps too +`````````````````````` + +:: + + from logbook import MailHandler, Processor + + mailhandler = MailHandler(from_addr='servererror@example.com', + recipients=['admin@example.com'], + level='ERROR', format_string=u'''\ + Subject: Application Error for {record.extra[path]} [{record.extra[method]}] + + Message type: {record.level_name} + Location: {record.filename}:{record.lineno} + Module: {record.module} + Function: {record.func_name} + Time: {record.time:%Y-%m-%d %H:%M:%S} + Remote IP: {record.extra[ip]} + Request: {record.extra[path]} [{record.extra[method]}] + + Message: + + {record.message} + ''') + + def handle_request(request): + def inject_extra(record, handler): + record.extra['ip'] = request.remote_addr + record.extra['method'] = request.method + record.extra['path'] = request.path + + with Processor(inject_extra): + with mailhandler: + # execute code that might fail in the context of the + # request. + + diff --git a/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/RECORD b/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/RECORD new file mode 100644 index 0000000..87f49e4 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/RECORD @@ -0,0 +1,34 @@ +Logbook-1.5.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Logbook-1.5.3.dist-info/LICENSE,sha256=LKJmM3zGlHV4ZUqw3WQ43k0B-niy5utVSwl1ffEXj5g,1556 +Logbook-1.5.3.dist-info/METADATA,sha256=oqrSnUqd8OwC2DMgum049AQtZc3cWAQOhvtQX1uurt4,2963 +Logbook-1.5.3.dist-info/RECORD,, +Logbook-1.5.3.dist-info/WHEEL,sha256=TpFVeXF_cAlV118WSIPWtjqW7nPvzoOw-49FmS3fDKQ,103 +Logbook-1.5.3.dist-info/top_level.txt,sha256=qzmH3hHLJn8FFss5841pprO6ussFm-fZ0JaV7hlPHXU,8 +logbook/__init__.py,sha256=ifiLon6CSGJc2Q368932_JDgv4qWycAafbFSIv-bHwE,1772 +logbook/__pycache__/__init__.cpython-38.pyc,, +logbook/__pycache__/__version__.cpython-38.pyc,, +logbook/__pycache__/_fallback.cpython-38.pyc,, +logbook/__pycache__/_termcolors.cpython-38.pyc,, +logbook/__pycache__/base.cpython-38.pyc,, +logbook/__pycache__/compat.cpython-38.pyc,, +logbook/__pycache__/concurrency.cpython-38.pyc,, +logbook/__pycache__/handlers.cpython-38.pyc,, +logbook/__pycache__/helpers.cpython-38.pyc,, +logbook/__pycache__/more.cpython-38.pyc,, +logbook/__pycache__/notifiers.cpython-38.pyc,, +logbook/__pycache__/queues.cpython-38.pyc,, +logbook/__pycache__/ticketing.cpython-38.pyc,, +logbook/__pycache__/utils.cpython-38.pyc,, +logbook/__version__.py,sha256=Zp7MROdsvpLQ5mrGb8Sw1eImWsL_gO5A9AX2_i9Wo5E,22 +logbook/_fallback.py,sha256=k88rfU0emQvIJxnQ4PCnjE3fu3aCnbt_pYo6qX6u1Yo,8112 +logbook/_termcolors.py,sha256=19Osp4BhM_sDnD8JUMxeN5pwKJrkiCi33wLAatSqJMo,1138 +logbook/base.py,sha256=UNz19VHMXegHXIVIzmdPFm6iq7G_cPt4kK_V6Gyq0Mg,41344 +logbook/compat.py,sha256=NqWB2oIN5G1OvVhB-cA3xWE6TTl1UF7B4SykSWoQSWY,10377 +logbook/concurrency.py,sha256=OQCi2dtXMIYrDgVsTX5rq93b3DhYKCIAwlo4VFtGDjo,6257 +logbook/handlers.py,sha256=Q7OSw8IAZcxEi2q5wjggafB-N8p3Us_P30WKoQj6N9M,71534 +logbook/helpers.py,sha256=arWukVOcbOKzY7eJAJusVHvQqqxMCd1FUwufwYZ-R0s,8384 +logbook/more.py,sha256=xL8BBu_ioBU_Ioop9-6pRcJyDnbGv33KmgK2D8qjTUM,19776 
+logbook/notifiers.py,sha256=cJ5OrnVs-idBL1QqFE5W-UUwaO3uljbcbntVC4Ca7HA,12035 +logbook/queues.py,sha256=KexTSCWueDNjwED-ili57F0u5W05rNulGKEhdY2J7AI,24663 +logbook/ticketing.py,sha256=NnbFdJvrOyfxo3ruWt--VRloU6YxfF0aAMaLbOkZKK8,19231 +logbook/utils.py,sha256=2DRvZ8Og8tfY1Od08XhV5bZLCxRa5efpYnDnTcKCHgQ,5783 diff --git a/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/WHEEL b/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/WHEEL new file mode 100644 index 0000000..d193dea --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: false +Tag: cp38-cp38-linux_x86_64 + diff --git a/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/top_level.txt b/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/top_level.txt new file mode 100644 index 0000000..4cb292f --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Logbook-1.5.3.dist-info/top_level.txt @@ -0,0 +1 @@ +logbook diff --git a/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER b/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst b/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst new file mode 100644 index 0000000..9d227a0 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/METADATA b/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/METADATA new file mode 100644 index 0000000..485a5e0 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/METADATA @@ -0,0 +1,101 @@ +Metadata-Version: 2.1 +Name: MarkupSafe +Version: 2.1.1 +Summary: Safely add untrusted strings to HTML/XML markup. +Home-page: https://palletsprojects.com/p/markupsafe/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://markupsafe.palletsprojects.com/ +Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/markupsafe/ +Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst + +MarkupSafe +========== + +MarkupSafe implements a text object that escapes characters so it is +safe to use in HTML and XML. Characters that have special meanings are +replaced so that they display as the actual characters. This mitigates +injection attacks, meaning untrusted user input can safely be displayed +on a page. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U MarkupSafe + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +Examples +-------- + +.. code-block:: pycon + + >>> from markupsafe import Markup, escape + + >>> # escape replaces special characters and wraps in Markup + >>> escape("<script>alert(document.cookie);</script>") + Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;') + + >>> # wrap in Markup to mark text "safe" and prevent escaping + >>> Markup("<strong>Hello</strong>") + Markup('<strong>hello</strong>') + + >>> escape(Markup("<strong>Hello</strong>")) + Markup('<strong>hello</strong>') + + >>> # Markup is a str subclass + >>> # methods and operators escape their arguments + >>> template = Markup("Hello <em>{name}</em>") + >>> template.format(name='"World"') + Markup('Hello <em>&#34;World&#34;</em>') + + +Donate +------ + +The Pallets organization develops and supports MarkupSafe and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +`please donate today`_. + +..
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://markupsafe.palletsprojects.com/ +- Changes: https://markupsafe.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/MarkupSafe/ +- Source Code: https://github.com/pallets/markupsafe/ +- Issue Tracker: https://github.com/pallets/markupsafe/issues/ +- Website: https://palletsprojects.com/p/markupsafe/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets + + diff --git a/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/RECORD b/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/RECORD new file mode 100644 index 0000000..6f34462 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/RECORD @@ -0,0 +1,14 @@ +MarkupSafe-2.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +MarkupSafe-2.1.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +MarkupSafe-2.1.1.dist-info/METADATA,sha256=DC93VszmzjLQcrVChRUjtW4XbUwjTdbaplpgdlbFdbs,3242 +MarkupSafe-2.1.1.dist-info/RECORD,, +MarkupSafe-2.1.1.dist-info/WHEEL,sha256=paN2rHE-sLfyg0Z4YvQnentMRWXxZnkclRDH8E5J6qk,148 +MarkupSafe-2.1.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 +markupsafe/__init__.py,sha256=xfaUQkKNRTdYWe6HnnJ2HjguFmS-C_0H6g8-Q9VAfkQ,9284 +markupsafe/__pycache__/__init__.cpython-38.pyc,, +markupsafe/__pycache__/_native.cpython-38.pyc,, +markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713 +markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083 +markupsafe/_speedups.cpython-38-x86_64-linux-gnu.so,sha256=gBmi2f9vNFVvJs2gdtjYKwK0tIgrxEqVUMbyL-1roRo,45008 +markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229 +markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL b/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL new file mode 100644 index 0000000..32bdea0 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: false +Tag: cp38-cp38-manylinux_2_17_x86_64 +Tag: cp38-cp38-manylinux2014_x86_64 + diff --git a/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt b/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt new file mode 100644 index 0000000..75bf729 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/MarkupSafe-2.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +markupsafe diff --git a/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/INSTALLER b/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/LICENSE b/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/LICENSE new file mode 100644 index 0000000..2f1b8e1 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2017-2021 Ingy döt Net +Copyright (c) 2006-2016 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software 
without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/METADATA b/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/METADATA new file mode 100644 index 0000000..9a91076 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/METADATA @@ -0,0 +1,46 @@ +Metadata-Version: 2.1 +Name: PyYAML +Version: 6.0 +Summary: YAML parser and emitter for Python +Home-page: https://pyyaml.org/ +Author: Kirill Simonov +Author-email: xi@resolvent.net +License: MIT +Download-URL: https://pypi.org/project/PyYAML/ +Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues +Project-URL: CI, https://github.com/yaml/pyyaml/actions +Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation +Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core +Project-URL: Source Code, https://github.com/yaml/pyyaml +Platform: Any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup +Requires-Python: >=3.6 +License-File: LICENSE + +YAML is a data serialization format designed for human readability +and interaction with scripting languages. PyYAML is a YAML parser +and emitter for Python. + +PyYAML features a complete YAML 1.1 parser, Unicode support, pickle +support, capable extension API, and sensible error messages. PyYAML +supports standard YAML tags and provides Python-specific tags that +allow to represent an arbitrary Python object. + +PyYAML is applicable for a broad range of tasks from complex +configuration files to object serialization and persistence. 
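For orientation, a minimal PyYAML round trip looks roughly like the sketch below. It only assumes the standard `safe_load`/`safe_dump` entry points; the document keys and values are invented for illustration.

.. code-block:: python

    import yaml

    # Parse YAML text into plain Python objects (dicts, lists, strings, numbers).
    doc = yaml.safe_load("""
    service: web
    replicas: 3
    tags: [blue, canary]
    """)
    assert doc["replicas"] == 3

    # Serialize Python objects back out as YAML text.
    print(yaml.safe_dump(doc, default_flow_style=False))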
+ diff --git a/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/RECORD b/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/RECORD new file mode 100644 index 0000000..7105b9f --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/RECORD @@ -0,0 +1,43 @@ +PyYAML-6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +PyYAML-6.0.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101 +PyYAML-6.0.dist-info/METADATA,sha256=QmHx9kGp_0yezQCXYaft4eEFeJ6W4oyFfYwHDLP1kdg,2006 +PyYAML-6.0.dist-info/RECORD,, +PyYAML-6.0.dist-info/WHEEL,sha256=RiwktpmF40OphKd3_aIG01PzIOQlJ7dpBn3cFSc9vak,217 +PyYAML-6.0.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11 +_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402 +_yaml/__pycache__/__init__.cpython-38.pyc,, +yaml/__init__.py,sha256=NDS7S8XgA72-hY6LRmGzUWTPvzGzjWVrWk-OGA-77AA,12309 +yaml/__pycache__/__init__.cpython-38.pyc,, +yaml/__pycache__/composer.cpython-38.pyc,, +yaml/__pycache__/constructor.cpython-38.pyc,, +yaml/__pycache__/cyaml.cpython-38.pyc,, +yaml/__pycache__/dumper.cpython-38.pyc,, +yaml/__pycache__/emitter.cpython-38.pyc,, +yaml/__pycache__/error.cpython-38.pyc,, +yaml/__pycache__/events.cpython-38.pyc,, +yaml/__pycache__/loader.cpython-38.pyc,, +yaml/__pycache__/nodes.cpython-38.pyc,, +yaml/__pycache__/parser.cpython-38.pyc,, +yaml/__pycache__/reader.cpython-38.pyc,, +yaml/__pycache__/representer.cpython-38.pyc,, +yaml/__pycache__/resolver.cpython-38.pyc,, +yaml/__pycache__/scanner.cpython-38.pyc,, +yaml/__pycache__/serializer.cpython-38.pyc,, +yaml/__pycache__/tokens.cpython-38.pyc,, +yaml/_yaml.cpython-38-x86_64-linux-gnu.so,sha256=lMaKSmQZy3WNZSmmU0Wg5Y5ZAs-HR5vItyGVUIsp8Rg,2847784 +yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883 +yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639 +yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851 +yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837 +yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006 +yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533 +yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445 +yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061 +yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440 +yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495 +yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794 +yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190 +yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004 +yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279 +yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165 +yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573 diff --git a/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/WHEEL b/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/WHEEL new file mode 100644 index 0000000..34ce4b8 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/WHEEL @@ -0,0 +1,8 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: false +Tag: cp38-cp38-manylinux_2_5_x86_64 +Tag: cp38-cp38-manylinux1_x86_64 +Tag: cp38-cp38-manylinux_2_12_x86_64 +Tag: cp38-cp38-manylinux2010_x86_64 + diff --git a/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/top_level.txt 
b/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/top_level.txt new file mode 100644 index 0000000..e6475e9 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/PyYAML-6.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_yaml +yaml diff --git a/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/INSTALLER b/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/LICENSE.rst b/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/LICENSE.rst new file mode 100644 index 0000000..c37cae4 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/METADATA b/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/METADATA new file mode 100644 index 0000000..551fa0b --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/METADATA @@ -0,0 +1,129 @@ +Metadata-Version: 2.1 +Name: Werkzeug +Version: 2.0.3 +Summary: The comprehensive WSGI web application library. 
+Home-page: https://palletsprojects.com/p/werkzeug/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://werkzeug.palletsprojects.com/ +Project-URL: Changes, https://werkzeug.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/werkzeug/ +Project-URL: Issue Tracker, https://github.com/pallets/werkzeug/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: dataclasses ; python_version < "3.7" +Provides-Extra: watchdog +Requires-Dist: watchdog ; extra == 'watchdog' + +Werkzeug +======== + +*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff") + +Werkzeug is a comprehensive `WSGI`_ web application library. It began as +a simple collection of various utilities for WSGI applications and has +become one of the most advanced WSGI utility libraries. + +It includes: + +- An interactive debugger that allows inspecting stack traces and + source code in the browser with an interactive interpreter for any + frame in the stack. +- A full-featured request object with objects to interact with + headers, query args, form data, files, and cookies. +- A response object that can wrap other WSGI applications and handle + streaming data. +- A routing system for matching URLs to endpoints and generating URLs + for endpoints, with an extensible system for capturing variables + from URLs. +- HTTP utilities to handle entity tags, cache control, dates, user + agents, cookies, files, and more. +- A threaded WSGI server for use while developing applications + locally. +- A test client for simulating HTTP requests during testing without + requiring running a server. + +Werkzeug doesn't enforce any dependencies. It is up to the developer to +choose a template engine, database adapter, and even how to handle +requests. It can be used to build all sorts of end user applications +such as blogs, wikis, or bulletin boards. + +`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while +providing more structure and patterns for defining powerful +applications. + +.. _WSGI: https://wsgi.readthedocs.io/en/latest/ +.. _Flask: https://www.palletsprojects.com/p/flask/ + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U Werkzeug + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. 
code-block:: python + + from werkzeug.wrappers import Request, Response + + @Request.application + def application(request): + return Response('Hello, World!') + + if __name__ == '__main__': + from werkzeug.serving import run_simple + run_simple('localhost', 4000, application) + + +Donate +------ + +The Pallets organization develops and supports Werkzeug and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +`please donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://werkzeug.palletsprojects.com/ +- Changes: https://werkzeug.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/Werkzeug/ +- Source Code: https://github.com/pallets/werkzeug/ +- Issue Tracker: https://github.com/pallets/werkzeug/issues/ +- Website: https://palletsprojects.com/p/werkzeug/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets + + diff --git a/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/RECORD b/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/RECORD new file mode 100644 index 0000000..94fed15 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/RECORD @@ -0,0 +1,111 @@ +Werkzeug-2.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Werkzeug-2.0.3.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 +Werkzeug-2.0.3.dist-info/METADATA,sha256=Rxzda7JFgpyr7oqR42Z57bNxRp-pjna_KYhcivqvXY4,4452 +Werkzeug-2.0.3.dist-info/RECORD,, +Werkzeug-2.0.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +Werkzeug-2.0.3.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9 +werkzeug/__init__.py,sha256=2frslFsD2EbmZUTfzZ5njDmic66S5f6XMdT24AOGYhk,188 +werkzeug/__pycache__/__init__.cpython-38.pyc,, +werkzeug/__pycache__/_internal.cpython-38.pyc,, +werkzeug/__pycache__/_reloader.cpython-38.pyc,, +werkzeug/__pycache__/datastructures.cpython-38.pyc,, +werkzeug/__pycache__/exceptions.cpython-38.pyc,, +werkzeug/__pycache__/filesystem.cpython-38.pyc,, +werkzeug/__pycache__/formparser.cpython-38.pyc,, +werkzeug/__pycache__/http.cpython-38.pyc,, +werkzeug/__pycache__/local.cpython-38.pyc,, +werkzeug/__pycache__/routing.cpython-38.pyc,, +werkzeug/__pycache__/security.cpython-38.pyc,, +werkzeug/__pycache__/serving.cpython-38.pyc,, +werkzeug/__pycache__/test.cpython-38.pyc,, +werkzeug/__pycache__/testapp.cpython-38.pyc,, +werkzeug/__pycache__/urls.cpython-38.pyc,, +werkzeug/__pycache__/user_agent.cpython-38.pyc,, +werkzeug/__pycache__/useragents.cpython-38.pyc,, +werkzeug/__pycache__/utils.cpython-38.pyc,, +werkzeug/__pycache__/wsgi.cpython-38.pyc,, +werkzeug/_internal.py,sha256=_0GZM3B6gE4eoRTp9K6T7spvY5qJQ9Od9GRIp4lZpzU,18572 +werkzeug/_reloader.py,sha256=B1hEfgsUOz2IginBQM5Zak_eaIF7gr3GS5-0x2OHvAE,13950 +werkzeug/datastructures.py,sha256=m79A8rHQEt5B7qVqyrjARXzHL66Katn8S92urGscTw4,97929 +werkzeug/datastructures.pyi,sha256=uFOqffFoaOEa-43IPlK9otu1X4lDOoqIgG4ULS0ObiE,34119 +werkzeug/debug/__init__.py,sha256=Vn0WQfD9w6DGg1j_2gWpSKKTaFlwxhbCBwi7QQMz1s8,17917 +werkzeug/debug/__pycache__/__init__.cpython-38.pyc,, +werkzeug/debug/__pycache__/console.cpython-38.pyc,, +werkzeug/debug/__pycache__/repr.cpython-38.pyc,, +werkzeug/debug/__pycache__/tbtools.cpython-38.pyc,, +werkzeug/debug/console.py,sha256=jJjid1dIlCNWbDHXTtjJW5XqNfPjSOKbtUmEX5weNdY,5976 
+werkzeug/debug/repr.py,sha256=QCSHENKsChEZDCIApkVi_UNjhJ77v8BMXK1OfxO189M,9483 +werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673 +werkzeug/debug/shared/ICON_LICENSE.md,sha256=DhA6Y1gUl5Jwfg0NFN9Rj4VWITt8tUx0IvdGf0ux9-s,222 +werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507 +werkzeug/debug/shared/debugger.js,sha256=tg42SZs1SVmYWZ-_Fj5ELK5-FLHnGNQrei0K2By8Bw8,10521 +werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 +werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 +werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818 +werkzeug/debug/shared/style.css,sha256=h1ZSUVaKNpfbfcYzRb513WAhPySGDQom1uih3uEDxPw,6704 +werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220 +werkzeug/debug/tbtools.py,sha256=khUCWQcpbxzeOs5NlT-E9n99BI-ELH9K9RY5exc-X_o,19362 +werkzeug/exceptions.py,sha256=WLCqXBEHm5Xj2d2sfON9XIneeRS3MlNXKH85k1AQIJU,28776 +werkzeug/filesystem.py,sha256=JS2Dv2QF98WILxY4_thHl-WMcUcwluF_4igkDPaP1l4,1956 +werkzeug/formparser.py,sha256=X-p3Ek4ji8XrKrbmaWxr8StLSc6iuksbpIeweaabs4s,17400 +werkzeug/http.py,sha256=Xm3WhYKRQKh_J12514F8y8prILldXceOceeO8EiQEZI,45222 +werkzeug/local.py,sha256=5HbGdD0vVNJgXH3SXfkMjdxIpzy7iqkHJMGCNjljFNo,23664 +werkzeug/middleware/__init__.py,sha256=qfqgdT5npwG9ses3-FXQJf3aB95JYP1zchetH_T3PUw,500 +werkzeug/middleware/__pycache__/__init__.cpython-38.pyc,, +werkzeug/middleware/__pycache__/dispatcher.cpython-38.pyc,, +werkzeug/middleware/__pycache__/http_proxy.cpython-38.pyc,, +werkzeug/middleware/__pycache__/lint.cpython-38.pyc,, +werkzeug/middleware/__pycache__/profiler.cpython-38.pyc,, +werkzeug/middleware/__pycache__/proxy_fix.cpython-38.pyc,, +werkzeug/middleware/__pycache__/shared_data.cpython-38.pyc,, +werkzeug/middleware/dispatcher.py,sha256=Fh_w-KyWnTSYF-Lfv5dimQ7THSS7afPAZMmvc4zF1gg,2580 +werkzeug/middleware/http_proxy.py,sha256=HE8VyhS7CR-E1O6_9b68huv8FLgGGR1DLYqkS3Xcp3Q,7558 +werkzeug/middleware/lint.py,sha256=sAg3GcOhICIkwYX5bJGG8n8iebX0Yipq_UH0HvrBvoU,13964 +werkzeug/middleware/profiler.py,sha256=QkXk7cqnaPnF8wQu-5SyPCIOT3_kdABUBorQOghVNOA,4899 +werkzeug/middleware/proxy_fix.py,sha256=l7LC_LDu0Yd4SvUxS5SFigAJMzcIOGm6LNKl9IXJBSU,6974 +werkzeug/middleware/shared_data.py,sha256=xydEqOhAGg0aQJEllPDVfz2-8jHwWvJpAxfPsfPCu7k,10960 +werkzeug/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +werkzeug/routing.py,sha256=rATL0ZkbTBgvdgJp6WgihuwKyivCF8K4a8kQ4hFgY6A,84581 +werkzeug/sansio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +werkzeug/sansio/__pycache__/__init__.cpython-38.pyc,, +werkzeug/sansio/__pycache__/multipart.cpython-38.pyc,, +werkzeug/sansio/__pycache__/request.cpython-38.pyc,, +werkzeug/sansio/__pycache__/response.cpython-38.pyc,, +werkzeug/sansio/__pycache__/utils.cpython-38.pyc,, +werkzeug/sansio/multipart.py,sha256=BRjBk_mCPjSJzwNVvBgmrJGk3QxA9pYfsgzFki28bxc,8751 +werkzeug/sansio/request.py,sha256=kt7fizz15HPuYKYU1_3TTEkNSuXeeaM4aLcjW84qvv4,20247 +werkzeug/sansio/response.py,sha256=zvCq9HSBBZGBd5Gg412BY9RZIwnKsJl5Kzfd3Kl9sSo,26098 +werkzeug/sansio/utils.py,sha256=V5v-UUnX8pm4RehP9Tt_NiUSOJGJGUvKjlW0eOIQldM,4164 +werkzeug/security.py,sha256=gPDRuCjkjWrcqj99tBMq8_nHFZLFQjgoW5Ga5XIw9jo,8158 +werkzeug/serving.py,sha256=6aV-RKbZm4rUHveQGuh4SY0wFZTmXyR43yD_kCQm8Wo,38287 +werkzeug/test.py,sha256=eUORFaeIDXcmncLdYxgFqYiVdolZkYRY67QV1_ATk20,48235 
+werkzeug/testapp.py,sha256=f48prWSGJhbSrvYb8e1fnAah4BkrLb0enHSdChgsjBY,9471 +werkzeug/urls.py,sha256=Du2lreBHvgBh5c2_bcx72g3hzV2ZabXYZsp-picUIJs,41023 +werkzeug/user_agent.py,sha256=WclZhpvgLurMF45hsioSbS75H1Zb4iMQGKN3_yZ2oKo,1420 +werkzeug/useragents.py,sha256=G8tmv_6vxJaPrLQH3eODNgIYe0_V6KETROQlJI-WxDE,7264 +werkzeug/utils.py,sha256=D_dnCLUfodQ4k0GRSpnI6qDoVoaX7-Dza57bx7sabG0,37101 +werkzeug/wrappers/__init__.py,sha256=-s75nPbyXHzU_rwmLPDhoMuGbEUk0jZT_n0ZQAOFGf8,654 +werkzeug/wrappers/__pycache__/__init__.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/accept.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/auth.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/base_request.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/base_response.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/common_descriptors.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/cors.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/etag.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/json.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/request.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/response.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/user_agent.cpython-38.pyc,, +werkzeug/wrappers/accept.py,sha256=NzyLfKH3qC5cSbkEc5azw5-lp_kU8JIrtc8AdGQ0HBs,413 +werkzeug/wrappers/auth.py,sha256=ArJiEn8HHzy1B7wUGuN7s3AHpnClKlaDY0F7N7QZSLA,824 +werkzeug/wrappers/base_request.py,sha256=saz9RyNQkvI_XLPYVm29KijNHmD1YzgxDqa0qHTbgss,1174 +werkzeug/wrappers/base_response.py,sha256=q_-TaYywT5G4zA-DWDRDJhJSat2_4O7gOPob6ye4_9A,1186 +werkzeug/wrappers/common_descriptors.py,sha256=aeVFTsTb0HJn5O8zF6WwELEDDULdOLFkWaUrvD1Huds,866 +werkzeug/wrappers/cors.py,sha256=9Ho7aXd64sB2Msz71jRXAdAI8UyqIJgv-CJsnlfUSzM,814 +werkzeug/wrappers/etag.py,sha256=7SI34rtlXJHyJlqe8B0dFu4ouo6L0DJmYyqwWoY79oc,814 +werkzeug/wrappers/json.py,sha256=h_XfBZV5ZETkHYgONuoSyB9KXR9W90mgBh_mFUysp6c,394 +werkzeug/wrappers/request.py,sha256=I77nwHgCzynmgwJVNw7bo7MfTU_CusNBO0b4TjpIRdQ,24790 +werkzeug/wrappers/response.py,sha256=c24tBeq8G5RwPCU5iCJvJPaKyUEIrfMiWO4yGtTOwmI,35214 +werkzeug/wrappers/user_agent.py,sha256=IMUJCFohZSMsBTmqyJZtjG5y4sB1zxQBE690bixb6uY,419 +werkzeug/wsgi.py,sha256=L7s5-Rlt7BRVEZ1m81MaenGfMDP7yL3p1Kxt9Yssqzg,33727 diff --git a/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/WHEEL b/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/WHEEL new file mode 100644 index 0000000..becc9a6 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/top_level.txt b/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/top_level.txt new file mode 100644 index 0000000..6fe8da8 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/Werkzeug-2.0.3.dist-info/top_level.txt @@ -0,0 +1 @@ +werkzeug diff --git a/dbt-env/lib/python3.8/site-packages/__pycache__/_pyrsistent_version.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/__pycache__/_pyrsistent_version.cpython-38.pyc new file mode 100644 index 0000000..f5e254f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/__pycache__/_pyrsistent_version.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/__pycache__/easy_install.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/__pycache__/easy_install.cpython-38.pyc new file mode 100644 index 0000000..dfd1ff6 Binary files /dev/null and 
b/dbt-env/lib/python3.8/site-packages/__pycache__/easy_install.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/__pycache__/six.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/__pycache__/six.cpython-38.pyc new file mode 100644 index 0000000..05f96a0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/__pycache__/six.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/__pycache__/typing_extensions.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/__pycache__/typing_extensions.cpython-38.pyc new file mode 100644 index 0000000..9e85532 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/__pycache__/typing_extensions.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/__pycache__/zipp.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/__pycache__/zipp.cpython-38.pyc new file mode 100644 index 0000000..70fcfe2 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/__pycache__/zipp.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/_cffi_backend.cpython-38-x86_64-linux-gnu.so b/dbt-env/lib/python3.8/site-packages/_cffi_backend.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000..5e1a0d7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/_cffi_backend.cpython-38-x86_64-linux-gnu.so differ diff --git a/dbt-env/lib/python3.8/site-packages/_pyrsistent_version.py b/dbt-env/lib/python3.8/site-packages/_pyrsistent_version.py new file mode 100644 index 0000000..5877c8d --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/_pyrsistent_version.py @@ -0,0 +1 @@ +__version__ = '0.18.1' diff --git a/dbt-env/lib/python3.8/site-packages/_yaml/__init__.py b/dbt-env/lib/python3.8/site-packages/_yaml/__init__.py new file mode 100644 index 0000000..7baa8c4 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/_yaml/__init__.py @@ -0,0 +1,33 @@ +# This is a stub package designed to roughly emulate the _yaml +# extension module, which previously existed as a standalone module +# and has been moved into the `yaml` package namespace. +# It does not perfectly mimic its old counterpart, but should get +# close enough for anyone who's relying on it even when they shouldn't. +import yaml + +# in some circumstances, the yaml module we imoprted may be from a different version, so we need +# to tread carefully when poking at it here (it may not have the attributes we expect) +if not getattr(yaml, '__with_libyaml__', False): + from sys import version_info + + exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError + raise exc("No module named '_yaml'") +else: + from yaml._yaml import * + import warnings + warnings.warn( + 'The _yaml extension module is now located at yaml._yaml' + ' and its location is subject to change. To use the' + ' LibYAML-based parser and emitter, import from `yaml`:' + ' `from yaml import CLoader as Loader, CDumper as Dumper`.', + DeprecationWarning + ) + del warnings + # Don't `del yaml` here because yaml is actually an existing + # namespace member of _yaml. + +__name__ = '_yaml' +# If the module is top-level (i.e. not a part of any specific package) +# then the attribute should be set to ''. 
+# https://docs.python.org/3.8/library/types.html +__package__ = '' diff --git a/dbt-env/lib/python3.8/site-packages/_yaml/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/_yaml/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..070ce29 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/_yaml/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/DESCRIPTION.rst b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..78513b3 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,27 @@ +.. image:: https://travis-ci.org/wireservice/agate.png + :target: https://travis-ci.org/wireservice/agate + :alt: Build status + +.. image:: https://img.shields.io/pypi/v/agate.svg + :target: https://pypi.python.org/pypi/agate + :alt: Version + +.. image:: https://img.shields.io/pypi/l/agate.svg + :target: https://pypi.python.org/pypi/agate + :alt: License + +.. image:: https://img.shields.io/pypi/pyversions/agate.svg + :target: https://pypi.python.org/pypi/agate + :alt: Support Python versions + +agate is a Python data analysis library that is optimized for humans instead of machines. It is an alternative to numpy and pandas that solves real-world problems with readable code. + +agate was previously known as journalism. + +Important links: + +* Documentation: http://agate.rtfd.org +* Repository: https://github.com/wireservice/agate +* Issues: https://github.com/wireservice/agate/issues + + diff --git a/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/INSTALLER b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/METADATA b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/METADATA new file mode 100644 index 0000000..8845201 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/METADATA @@ -0,0 +1,63 @@ +Metadata-Version: 2.0 +Name: agate +Version: 1.6.1 +Summary: A data analysis library that is optimized for humans instead of machines. 
+Home-page: http://agate.readthedocs.org/ +Author: Christopher Groskopf +Author-email: chrisgroskopf@gmail.com +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Framework :: IPython +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: MIT License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Scientific/Engineering :: Information Analysis +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Dist: six (>=1.9.0) +Requires-Dist: pytimeparse (>=1.1.5) +Requires-Dist: parsedatetime (>=2.1) +Requires-Dist: Babel (>=2.0) +Requires-Dist: isodate (>=0.5.4) +Requires-Dist: python-slugify (>=1.2.1) +Requires-Dist: leather (>=0.3.2) + +.. image:: https://travis-ci.org/wireservice/agate.png + :target: https://travis-ci.org/wireservice/agate + :alt: Build status + +.. image:: https://img.shields.io/pypi/v/agate.svg + :target: https://pypi.python.org/pypi/agate + :alt: Version + +.. image:: https://img.shields.io/pypi/l/agate.svg + :target: https://pypi.python.org/pypi/agate + :alt: License + +.. image:: https://img.shields.io/pypi/pyversions/agate.svg + :target: https://pypi.python.org/pypi/agate + :alt: Support Python versions + +agate is a Python data analysis library that is optimized for humans instead of machines. It is an alternative to numpy and pandas that solves real-world problems with readable code. + +agate was previously known as journalism. 
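To make the "readable code" claim concrete, a small illustrative sketch of an agate table and aggregation follows (the column names and values are invented; `Table`, `Text`, `Number`, and `Mean` are agate's documented building blocks).

.. code-block:: python

    import agate

    # Column types are declared explicitly here; agate.TypeTester can infer them instead.
    table = agate.Table(
        rows=[('north', 10), ('south', 25), ('east', 17)],
        column_names=['region', 'orders'],
        column_types=[agate.Text(), agate.Number()],
    )

    # Aggregations reduce a column to a single value.
    print(table.aggregate(agate.Mean('orders')))

    # Tables are immutable; methods like order_by return new tables.
    table.order_by('orders', reverse=True).limit(1).print_table()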
+ +Important links: + +* Documentation: http://agate.rtfd.org +* Repository: https://github.com/wireservice/agate +* Issues: https://github.com/wireservice/agate/issues + + diff --git a/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/RECORD b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/RECORD new file mode 100644 index 0000000..b05db1b --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/RECORD @@ -0,0 +1,209 @@ +agate-1.6.1.dist-info/DESCRIPTION.rst,sha256=E0eu6ACbmMnthR_jdRfoFYb54dulf9NL3Sn_cv2kHUQ,921 +agate-1.6.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +agate-1.6.1.dist-info/METADATA,sha256=gRTMsicSWAsdTwHWMVkqpxRknsuZ6XtB_-rzjAhkIrc,2423 +agate-1.6.1.dist-info/RECORD,, +agate-1.6.1.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 +agate-1.6.1.dist-info/metadata.json,sha256=ihw3SgKy7zWQsioNkHckBkqaVGjzaNDSJRFXrfY7Rz4,1498 +agate-1.6.1.dist-info/top_level.txt,sha256=bLl7krix7yVfT_t0Th4-iA81Qfb-IGyh-IXyvQ3UA24,6 +agate/__init__.py,sha256=A0owELRATd_fqMV5lWDGF-9RFL0nXA1LEbxixIPJ5fA,853 +agate/__pycache__/__init__.cpython-38.pyc,, +agate/__pycache__/columns.cpython-38.pyc,, +agate/__pycache__/config.cpython-38.pyc,, +agate/__pycache__/csv_py2.cpython-38.pyc,, +agate/__pycache__/csv_py3.cpython-38.pyc,, +agate/__pycache__/exceptions.cpython-38.pyc,, +agate/__pycache__/fixed.cpython-38.pyc,, +agate/__pycache__/mapped_sequence.cpython-38.pyc,, +agate/__pycache__/rows.cpython-38.pyc,, +agate/__pycache__/testcase.cpython-38.pyc,, +agate/__pycache__/type_tester.cpython-38.pyc,, +agate/__pycache__/utils.cpython-38.pyc,, +agate/__pycache__/warns.cpython-38.pyc,, +agate/aggregations/__init__.py,sha256=CnRs2A_6jexDwPGSVLv4skHlWmaI4sap7U0qLyDNeic,1980 +agate/aggregations/__pycache__/__init__.cpython-38.pyc,, +agate/aggregations/__pycache__/all.cpython-38.pyc,, +agate/aggregations/__pycache__/any.cpython-38.pyc,, +agate/aggregations/__pycache__/base.cpython-38.pyc,, +agate/aggregations/__pycache__/count.cpython-38.pyc,, +agate/aggregations/__pycache__/deciles.cpython-38.pyc,, +agate/aggregations/__pycache__/first.cpython-38.pyc,, +agate/aggregations/__pycache__/has_nulls.cpython-38.pyc,, +agate/aggregations/__pycache__/iqr.cpython-38.pyc,, +agate/aggregations/__pycache__/mad.cpython-38.pyc,, +agate/aggregations/__pycache__/max.cpython-38.pyc,, +agate/aggregations/__pycache__/max_length.cpython-38.pyc,, +agate/aggregations/__pycache__/max_precision.cpython-38.pyc,, +agate/aggregations/__pycache__/mean.cpython-38.pyc,, +agate/aggregations/__pycache__/median.cpython-38.pyc,, +agate/aggregations/__pycache__/min.cpython-38.pyc,, +agate/aggregations/__pycache__/mode.cpython-38.pyc,, +agate/aggregations/__pycache__/percentiles.cpython-38.pyc,, +agate/aggregations/__pycache__/quartiles.cpython-38.pyc,, +agate/aggregations/__pycache__/quintiles.cpython-38.pyc,, +agate/aggregations/__pycache__/stdev.cpython-38.pyc,, +agate/aggregations/__pycache__/sum.cpython-38.pyc,, +agate/aggregations/__pycache__/summary.cpython-38.pyc,, +agate/aggregations/__pycache__/variance.cpython-38.pyc,, +agate/aggregations/all.py,sha256=9StlCXa50wiqRIpbQSrrguX-K0iyjFCfcPHERAcsCC8,1044 +agate/aggregations/any.py,sha256=Xr692g7q-VCpLs99zDOMeNzdXrqc93i5SAEBqVU8UuE,977 +agate/aggregations/base.py,sha256=BdY6fJp5qux4kL3iSPcvPJYuP0BwgX7EdI8zOW9F8_I,1705 +agate/aggregations/count.py,sha256=7yZPWTamNLNjywbY9i4n685vb0jibfQNoTdquQSun1Q,1299 +agate/aggregations/deciles.py,sha256=oI0oMfYZDwvdfbnh-oH2pdazjS0I66TXaYrXEgEJ-sA,1535 
+agate/aggregations/first.py,sha256=9PZU43_YDNhGqYMbIjhP92EVWnAPWatg4_cjarbyWMc,1284 +agate/aggregations/has_nulls.py,sha256=Y2JFscncciK9zafQCO526B7LkqXuZXs1ZyiG3iZllXE,508 +agate/aggregations/iqr.py,sha256=Mi2R-7f91tT4B66hz3F4AGlLNRam7r6MMS6zyMrk_MA,1172 +agate/aggregations/mad.py,sha256=Il7k9UvZquWOgG3Gc-XVmAPWwTzY_pcK6qRgPNjlJwc,1347 +agate/aggregations/max.py,sha256=xCQ79ksGvh_MD9fnvhNTlOkr6fHOXqkdaqUly7dQVMc,1270 +agate/aggregations/max_length.py,sha256=Q1aV6CRhT80rqBAx1hxOLaoxEGvicyaLOeuV8KTuMRU,1256 +agate/aggregations/max_precision.py,sha256=QwVXlgCNFswqce01WMblI8NpOQwlm_VTUB0mtxMQ12s,907 +agate/aggregations/mean.py,sha256=ZR6e2pwacrK_ImxcCZmAwIWb415dz-CTqlFkALySYDI,1181 +agate/aggregations/median.py,sha256=4kJs7cYuzmQfjwb2TGPWnEl27A8C4m0adZ0kwUX4qsg,1254 +agate/aggregations/min.py,sha256=SJ6Q_YQ3rB_LORLVMdCSjtY5XwbCLaTL8UmEZxCP0BU,1270 +agate/aggregations/mode.py,sha256=b2L6vpyKgB6C4glWKLw1Pcau0cM75TEEluxtTwXQ5Ic,1223 +agate/aggregations/percentiles.py,sha256=JvKo1vjuSQ6NT-adi9pCk2SCnqrUfVegr2N4gk_pAnc,2233 +agate/aggregations/quartiles.py,sha256=kbiA3lWVG-pnGAsU3NP2GnN_gsKN96_8A-ooQL9tmH4,1548 +agate/aggregations/quintiles.py,sha256=mNb5_YbuvX4lyG7rKazXplcvDOrmMvfkXwA4jODGKwo,1551 +agate/aggregations/stdev.py,sha256=d4oMM9FHsoGtoiFlQGcFHT4h9LwUxNarU_5OpZlI1nY,2150 +agate/aggregations/sum.py,sha256=scko-iRaqMwh9ERdmDMNnCSTRQpgK33rwtDmekd_EC0,821 +agate/aggregations/summary.py,sha256=fjp0GSiBylQ0Zax7omyqot20H2vlSF4uomuV_vFM_dw,1052 +agate/aggregations/variance.py,sha256=H_P_JiSSagO0ll_f49ScsUgJ4Sfb_Rov8G-iI_ahsds,2350 +agate/columns.py,sha256=8N8CxDdR-lw5DIADJY2m5HV16xsnPoS61M6kQBUM7kg,3731 +agate/computations/__init__.py,sha256=1nRQZDix03gkDFgbFikKxav0i5kzkWoL8Y3J3QDvZoc,1133 +agate/computations/__pycache__/__init__.cpython-38.pyc,, +agate/computations/__pycache__/base.cpython-38.pyc,, +agate/computations/__pycache__/change.cpython-38.pyc,, +agate/computations/__pycache__/formula.cpython-38.pyc,, +agate/computations/__pycache__/percent.cpython-38.pyc,, +agate/computations/__pycache__/percent_change.cpython-38.pyc,, +agate/computations/__pycache__/percentile_rank.cpython-38.pyc,, +agate/computations/__pycache__/rank.cpython-38.pyc,, +agate/computations/__pycache__/slug.cpython-38.pyc,, +agate/computations/base.py,sha256=ddBpx1IrGwe_WngXhtDVv6spx-aSFd41BO2lVE5G4pI,1378 +agate/computations/change.py,sha256=Gt5zKuUvpOEcdMEODrkeEvxVZSHWTQulS8-8GS0zer4,2638 +agate/computations/formula.py,sha256=bqmik7AQ6JUpbN4qGf67RcrUN9dPFKFBo18cgStIL-Q,1056 +agate/computations/percent.py,sha256=OBDTn_0K0tf5aKPH6URJkipBk734iXOB39S8basV6qc,2510 +agate/computations/percent_change.py,sha256=tEaXEBBQZT4c3EO8OH63U9uCMMhJCNoxm5p7IB1otwQ,1967 +agate/computations/percentile_rank.py,sha256=INC7ZpL-K_TfirpQREtLExRImdwr1AXh4LjxuJZbYRk,989 +agate/computations/rank.py,sha256=YzshsUcx8Sgx1Gp1PpT5exZiMvLJdWcwsz1s1yKydTM,1832 +agate/computations/slug.py,sha256=WiQDEPI9I1KbP8u3qmgXMnw5qwz1MLqNBgqvLToomdQ,2082 +agate/config.py,sha256=2AfycDim-EqTTokUVAow25XfWl8fcGDcycZ89fGIDVw,4038 +agate/csv_py2.py,sha256=hEuj5gEqSJ1BoJjhHfGhVMdGXOX4z2-cZ5nKCjACYdM,7711 +agate/csv_py3.py,sha256=E1ZRh3P8BUhuXby6NH_A2KGnmqA9qRBuhJ5XPlvkQNU,4556 +agate/data_types/__init__.py,sha256=yOXZVo5EY3ucCnhXOtRjlYDOh4rU9mDD6mbYKxhIo6A,786 +agate/data_types/__pycache__/__init__.cpython-38.pyc,, +agate/data_types/__pycache__/base.cpython-38.pyc,, +agate/data_types/__pycache__/boolean.cpython-38.pyc,, +agate/data_types/__pycache__/date.cpython-38.pyc,, +agate/data_types/__pycache__/date_time.cpython-38.pyc,, 
+agate/data_types/__pycache__/number.cpython-38.pyc,, +agate/data_types/__pycache__/text.cpython-38.pyc,, +agate/data_types/__pycache__/time_delta.cpython-38.pyc,, +agate/data_types/base.py,sha256=zw3-sQHsYUVpApJkLZ4AcizTLsfJwVX0OHzWZ4mCSHE,1431 +agate/data_types/boolean.py,sha256=vXT8V0vJZsqTCp5CgDmvYa3A6jJ_-0jru8lKcX3Lh0c,2121 +agate/data_types/date.py,sha256=jSdpiEFVqV7-GJbN40yXWJR_-TVfxxASF2lJJeNZlG0,2655 +agate/data_types/date_time.py,sha256=AWxbpaMhXdSSwJnGAuZ1oRCwlU5l8saRESiqzbwk5-s,3432 +agate/data_types/number.py,sha256=u1wzSEdto2JARnRpn1hfL3P2d3Vqd7s2b5QxKA9gHtE,3108 +agate/data_types/text.py,sha256=2VyHWRavz6rwFT-NUGpHBnVaMe2k-EipCEA3GD-eiJU,922 +agate/data_types/time_delta.py,sha256=JDNg2tOE8w_UhBxDdU4Hr1-k_JSsGq7RL9Toy8sruu0,1082 +agate/exceptions.py,sha256=VlG_SFKtDbblCOeM5fjf_ze3eARHyeFQhooRArwLJBA,1076 +agate/fixed.py,sha256=BAc16qyCJzNzkYl0ZwHeMepQ8WSNkbP8dGv-2S2iknA,2107 +agate/mapped_sequence.py,sha256=leqUEUY39U8lUS6v-2pfRcVj891uhY07WckR5qSkxoI,4458 +agate/rows.py,sha256=bx-sKAVByCqAisuaT7kweG9lImQ7dyi2lrJKT9CFxB0,757 +agate/table/__init__.py,sha256=OdJwBrWd6wp6IakleI33Da4ke05vjIXSPj1U6TUy3Qk,13078 +agate/table/__pycache__/__init__.cpython-38.pyc,, +agate/table/__pycache__/aggregate.cpython-38.pyc,, +agate/table/__pycache__/bar_chart.cpython-38.pyc,, +agate/table/__pycache__/bins.cpython-38.pyc,, +agate/table/__pycache__/column_chart.cpython-38.pyc,, +agate/table/__pycache__/compute.cpython-38.pyc,, +agate/table/__pycache__/denormalize.cpython-38.pyc,, +agate/table/__pycache__/distinct.cpython-38.pyc,, +agate/table/__pycache__/exclude.cpython-38.pyc,, +agate/table/__pycache__/find.cpython-38.pyc,, +agate/table/__pycache__/from_csv.cpython-38.pyc,, +agate/table/__pycache__/from_fixed.cpython-38.pyc,, +agate/table/__pycache__/from_json.cpython-38.pyc,, +agate/table/__pycache__/from_object.cpython-38.pyc,, +agate/table/__pycache__/group_by.cpython-38.pyc,, +agate/table/__pycache__/homogenize.cpython-38.pyc,, +agate/table/__pycache__/join.cpython-38.pyc,, +agate/table/__pycache__/limit.cpython-38.pyc,, +agate/table/__pycache__/line_chart.cpython-38.pyc,, +agate/table/__pycache__/merge.cpython-38.pyc,, +agate/table/__pycache__/normalize.cpython-38.pyc,, +agate/table/__pycache__/order_by.cpython-38.pyc,, +agate/table/__pycache__/pivot.cpython-38.pyc,, +agate/table/__pycache__/print_bars.cpython-38.pyc,, +agate/table/__pycache__/print_html.cpython-38.pyc,, +agate/table/__pycache__/print_structure.cpython-38.pyc,, +agate/table/__pycache__/print_table.cpython-38.pyc,, +agate/table/__pycache__/rename.cpython-38.pyc,, +agate/table/__pycache__/scatterplot.cpython-38.pyc,, +agate/table/__pycache__/select.cpython-38.pyc,, +agate/table/__pycache__/to_csv.cpython-38.pyc,, +agate/table/__pycache__/to_json.cpython-38.pyc,, +agate/table/__pycache__/where.cpython-38.pyc,, +agate/table/aggregate.py,sha256=glgQISZLCMnH-tY0jgtiC3iqaqyR8Imnttw7DzrPfRU,996 +agate/table/bar_chart.py,sha256=OsWsgr4vGZOf3dkA26sWN1RtBSqQ_tFsekwnHC_VGv8,1303 +agate/table/bins.py,sha256=5HFN3EoowETbvxIm2AfT4d9kXjJVgus9FpWJoj75LPc,3367 +agate/table/column_chart.py,sha256=pKI6J5lGVCFJXkaOjhDwImMoYHuDWacgL8zltPo4ejw,1312 +agate/table/compute.py,sha256=kVsk6Gcgon-X44z7jsb5-fesjPQ4R3d6vnFh-6R4Mv0,2064 +agate/table/denormalize.py,sha256=XlwCOWsl4Ag-Isbpr_jU942hxGS6qLjgs5-FFOsJiIw,4475 +agate/table/distinct.py,sha256=-k6BrLwKOt6E9NzoDWlVXeqaJ20Mc-TrxEfOFNOmOEs,1231 +agate/table/exclude.py,sha256=Q9ltSQvn-hh19S5Wm5We_J80ANjngf6HvoxmjDEUVHY,503 +agate/table/find.py,sha256=8G0yxkIysNVW0iCEnxnuUznQ3c5oyEQG3HnRpIq_Jd8,438 
+agate/table/from_csv.py,sha256=yjnaTfATacqnegsYop9vUdF93iwcTh5NdmMOsAPNZm0,2682 +agate/table/from_fixed.py,sha256=2mvsZnRHbpGEi3Ewq5hcljzifund49Cv0S8C5y2bWus,2021 +agate/table/from_json.py,sha256=NXXhmpGuX_EQUOQJ3ip6eeRJgVRNBC3bVf-AAqrsrw8,2105 +agate/table/from_object.py,sha256=DFdZq4HhbEGpsP8ajbRA1yyZ8gFly22i2FTdFr8MtuQ,1722 +agate/table/group_by.py,sha256=DSThErfzlZtYivjluOz8YIsdUxt2HXJSVgLz6aRWZ_Q,1963 +agate/table/homogenize.py,sha256=2qdQ8K7qPa3xzmSWk9SopDWFqFtgpfiiC5A1Kvxfd9M,2820 +agate/table/join.py,sha256=PUIz0d3L-nTsFArGg_VVhZOpd25mhaD1H8lNJ4T2lfU,8211 +agate/table/limit.py,sha256=C5FHCFJMbpz8ZBoeIpciHK9qzmSBQ3wlQSpOP90TCJw,879 +agate/table/line_chart.py,sha256=gx6ZHKzbfn3jzU3PT3wUTOXC8W_qId0zCpV2FuWeHEw,1207 +agate/table/merge.py,sha256=gcJz6kX_--mWgFkZ_dbUaGLkaqVtUVkygRgoG-7aFQU,2362 +agate/table/normalize.py,sha256=oBRKVtrn09ce3OSLbLOYTLwfEzA_Z2tAZIsmhgNZX70,3268 +agate/table/order_by.py,sha256=HkdFyiL86DtdkUq9uWiLFUcyW9QfJR5xHHMYkXh28Oo,1368 +agate/table/pivot.py,sha256=NVoOAB33ZesMAXW82Jf1Dd2hs8YhRWdmBrnDujxOh1k,4701 +agate/table/print_bars.py,sha256=I1LlytgH4DND_-BBoE7mA3Asv7ChvS2qPQHrV4liQzg,7790 +agate/table/print_html.py,sha256=ujL3fPwu2fCq714dvwsKc5hA4k7xTCBW2-KorVzDB-8,4004 +agate/table/print_structure.py,sha256=kwHzO4pEFKjoT2s5wr9r4-5t0JWqDtMGGBXM2vyvRUY,721 +agate/table/print_table.py,sha256=0pomXV5LEjh8PXbYtjYJVMvnbZhlO834TJaJEAuTUyQ,5123 +agate/table/rename.py,sha256=OKOzkxhvJ4NDdGqCTqBvd79OFoua-S2qPxyN45Hcpfo,2489 +agate/table/scatterplot.py,sha256=F-u1leWROTF0KE37wxAhwlY73HFoRoRpuVhuUhaRx9s,1209 +agate/table/select.py,sha256=vYP6dqK-kXyi6eEFyX-iwE4aSXSfllbd1bI_cQntKK0,689 +agate/table/to_csv.py,sha256=BycBZ2HukyKwqLQvV_slLkDsc8PkQe5u5WFIbfM4WTg,1109 +agate/table/to_json.py,sha256=VHyPyVJ3DZJv-nkOhhoac8CfWl2bhURLCfZ7vPfY5AQ,3089 +agate/table/where.py,sha256=VBfDfAlelmCn1DmAAPZMuT5u8NBbS9-XzcXHT_cdPmI,740 +agate/tableset/__init__.py,sha256=eWaJTkAOPHRBPoWlRxgcnVvNuC0MiP76mhqEqSFgnFw,7087 +agate/tableset/__pycache__/__init__.cpython-38.pyc,, +agate/tableset/__pycache__/aggregate.cpython-38.pyc,, +agate/tableset/__pycache__/bar_chart.cpython-38.pyc,, +agate/tableset/__pycache__/column_chart.cpython-38.pyc,, +agate/tableset/__pycache__/from_csv.cpython-38.pyc,, +agate/tableset/__pycache__/from_json.cpython-38.pyc,, +agate/tableset/__pycache__/having.cpython-38.pyc,, +agate/tableset/__pycache__/line_chart.cpython-38.pyc,, +agate/tableset/__pycache__/merge.cpython-38.pyc,, +agate/tableset/__pycache__/print_structure.cpython-38.pyc,, +agate/tableset/__pycache__/proxy_methods.cpython-38.pyc,, +agate/tableset/__pycache__/scatterplot.cpython-38.pyc,, +agate/tableset/__pycache__/to_csv.cpython-38.pyc,, +agate/tableset/__pycache__/to_json.cpython-38.pyc,, +agate/tableset/aggregate.py,sha256=D7D0URlxrE-d40ZmvzGcVF8oqVpFayNXx_MNV7pHb5s,2687 +agate/tableset/bar_chart.py,sha256=dTRAGKP976SJBWafAfVT7Al0j0hYPjK7FTuuyyFcwK0,1373 +agate/tableset/column_chart.py,sha256=voikbhmwr87Wi8QpJ9jGMfwdRM4DZwONZU6_S84DU-s,1382 +agate/tableset/from_csv.py,sha256=BKi84jo_rtMTUu8MglW0WM0BoLYZbwTwZ9FdHxP9HDE,1200 +agate/tableset/from_json.py,sha256=mjNJ-djFkdOV06SxPf7I61PkxTNFGwesCJHAzzV0Bwc,2213 +agate/tableset/having.py,sha256=Kdi_ztjnTG-0TbN6XYgyIdvxryHJyJkWJRw2UkuEBWg,1125 +agate/tableset/line_chart.py,sha256=Sw0LlU6LIfzkeIcs-YVrRaM_M_bLLtRCYVV39sWNHnw,1303 +agate/tableset/merge.py,sha256=P2nG2z4_QWh2irUpjHuz47c9ydx_5oT8qjyj_rA7x28,1852 +agate/tableset/print_structure.py,sha256=0lT4KUYY45gnS7hcBZf9R5kP8cHQ8uwJXieO0An4oYM,907 
+agate/tableset/proxy_methods.py,sha256=rpj-BhWcu-t0o0w9E5wxy30numvbfJi2l1DuY4nb1Hc,2531 +agate/tableset/scatterplot.py,sha256=mlKONZ3frPcOjOW0jYc4M88yaYQoNppbARdT3X5DNJQ,1305 +agate/tableset/to_csv.py,sha256=MwUbHDzuXWh6KA7Xuae5AWW1DTQSR_6szuYrF9RYD7U,498 +agate/tableset/to_json.py,sha256=J-S56CWm_qNzhkHf4AjUtTs0_TPm1Pwod0SNxj-uYNg,1877 +agate/testcase.py,sha256=wwpTNQ_QpcvwWGiEM7vZKY8XHTtBFQobYtwoWV_sZ9M,1970 +agate/type_tester.py,sha256=sGjRByV73KUpVlwG63SPW0m45MVp_urFpphWACP_RRY,4390 +agate/utils.py,sha256=QlYxsZ_xBhcbKEyloudZSx9nzCUerWD4iOSkajMFuRs,8810 +agate/warns.py,sha256=aSpSbMMLvYavJ0UrsSDbbojYzhbX-zMMN8-Qhq3WhzA,1348 diff --git a/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/WHEEL b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/WHEEL new file mode 100644 index 0000000..7332a41 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.30.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/metadata.json b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/metadata.json new file mode 100644 index 0000000..59028eb --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Framework :: IPython", "Intended Audience :: Developers", "Intended Audience :: Science/Research", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Scientific/Engineering :: Information Analysis", "Topic :: Software Development :: Libraries :: Python Modules"], "extensions": {"python.details": {"contacts": [{"email": "chrisgroskopf@gmail.com", "name": "Christopher Groskopf", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "http://agate.readthedocs.org/"}}}, "extras": [], "generator": "bdist_wheel (0.30.0)", "license": "MIT", "metadata_version": "2.0", "name": "agate", "run_requires": [{"requires": ["Babel (>=2.0)", "isodate (>=0.5.4)", "leather (>=0.3.2)", "parsedatetime (>=2.1)", "python-slugify (>=1.2.1)", "pytimeparse (>=1.1.5)", "six (>=1.9.0)"]}], "summary": "A data analysis library that is optimized for humans instead of machines.", "version": "1.6.1"} \ No newline at end of file diff --git a/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/top_level.txt b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/top_level.txt new file mode 100644 index 0000000..e80b22b --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate-1.6.1.dist-info/top_level.txt @@ -0,0 +1 @@ +agate diff --git a/dbt-env/lib/python3.8/site-packages/agate/__init__.py b/dbt-env/lib/python3.8/site-packages/agate/__init__.py new file mode 100644 index 0000000..cac0342 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/__init__.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python + +import six + +from agate.aggregations import * +from agate.data_types import 
* +from agate.columns import Column # noqa +from agate.computations import * +from agate.config import get_option, set_option, set_options # noqa +from agate.exceptions import * +# import agate.fixed as fixed # noqa +from agate.mapped_sequence import MappedSequence # noqa +from agate.rows import Row # noqa +from agate.table import Table # noqa +from agate.tableset import TableSet # noqa +from agate.testcase import AgateTestCase # noqa +from agate.type_tester import TypeTester # noqa +from agate.utils import * +from agate.warns import NullCalculationWarning, DuplicateColumnWarning, warn_null_calculation, warn_duplicate_column # noqa + +if six.PY2: # pragma: no cover + import agate.csv_py2 as csv # noqa +else: + import agate.csv_py3 as csv # noqa diff --git a/dbt-env/lib/python3.8/site-packages/agate/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..a320c0f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/__pycache__/columns.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/columns.cpython-38.pyc new file mode 100644 index 0000000..fab76e3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/columns.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/__pycache__/config.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/config.cpython-38.pyc new file mode 100644 index 0000000..dcc3e5e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/config.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/__pycache__/csv_py2.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/csv_py2.cpython-38.pyc new file mode 100644 index 0000000..816c31e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/csv_py2.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/__pycache__/csv_py3.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/csv_py3.cpython-38.pyc new file mode 100644 index 0000000..aa122e4 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/csv_py3.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/__pycache__/exceptions.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000..4a21cda Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/exceptions.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/__pycache__/fixed.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/fixed.cpython-38.pyc new file mode 100644 index 0000000..9167dea Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/fixed.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/__pycache__/mapped_sequence.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/mapped_sequence.cpython-38.pyc new file mode 100644 index 0000000..1c45fa7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/mapped_sequence.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/__pycache__/rows.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/rows.cpython-38.pyc 
new file mode 100644 index 0000000..ee96c2b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/rows.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/__pycache__/testcase.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/testcase.cpython-38.pyc new file mode 100644 index 0000000..4a61aaa Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/testcase.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/__pycache__/type_tester.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/type_tester.cpython-38.pyc new file mode 100644 index 0000000..11a9a2f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/type_tester.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/__pycache__/utils.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000..4b56a87 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/utils.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/__pycache__/warns.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/warns.cpython-38.pyc new file mode 100644 index 0000000..b799a1c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/__pycache__/warns.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__init__.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__init__.py new file mode 100644 index 0000000..cf82a30 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__init__.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +""" +Aggregations create a new value by summarizing a :class:`.Column`. For +example, :class:`.Mean`, when applied to a column containing :class:`.Number` +data, returns a single :class:`decimal.Decimal` value which is the average of +all values in that column. + +Aggregations can be applied to single columns using the :meth:`.Table.aggregate` +method. The result is a single value if a one aggregation was applied, or +a tuple of values if a sequence of aggregations was applied. + +Aggregations can be applied to instances of :class:`.TableSet` using the +:meth:`.TableSet.aggregate` method. The result is a new :class:`.Table` +with a column for each aggregation and a row for each table in the set. 
+""" + +from agate.aggregations.base import Aggregation # noqa + +from agate.aggregations.all import All # noqa +from agate.aggregations.any import Any # noqa +from agate.aggregations.count import Count # noqa +from agate.aggregations.deciles import Deciles # noqa +from agate.aggregations.first import First # noqa +from agate.aggregations.has_nulls import HasNulls # noqa +from agate.aggregations.iqr import IQR # noqa +from agate.aggregations.mad import MAD # noqa +from agate.aggregations.max_length import MaxLength # noqa +from agate.aggregations.max_precision import MaxPrecision # noqa +from agate.aggregations.max import Max # noqa +from agate.aggregations.mean import Mean # noqa +from agate.aggregations.median import Median # noqa +from agate.aggregations.min import Min # noqa +from agate.aggregations.mode import Mode # noqa +from agate.aggregations.percentiles import Percentiles # noqa +from agate.aggregations.quartiles import Quartiles # noqa +from agate.aggregations.quintiles import Quintiles # noqa +from agate.aggregations.stdev import StDev, PopulationStDev # noqa +from agate.aggregations.sum import Sum # noqa +from agate.aggregations.summary import Summary # noqa +from agate.aggregations.variance import Variance, PopulationVariance # noqa diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..4b18c81 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/all.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/all.cpython-38.pyc new file mode 100644 index 0000000..0c0cc73 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/all.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/any.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/any.cpython-38.pyc new file mode 100644 index 0000000..184800e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/any.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/base.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/base.cpython-38.pyc new file mode 100644 index 0000000..e0c11a3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/base.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/count.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/count.cpython-38.pyc new file mode 100644 index 0000000..8e082e5 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/count.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/deciles.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/deciles.cpython-38.pyc new file mode 100644 index 0000000..3e6f98f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/deciles.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/first.cpython-38.pyc 
b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/first.cpython-38.pyc new file mode 100644 index 0000000..4c14e8d Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/first.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/has_nulls.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/has_nulls.cpython-38.pyc new file mode 100644 index 0000000..be69e49 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/has_nulls.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/iqr.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/iqr.cpython-38.pyc new file mode 100644 index 0000000..755afa1 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/iqr.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/mad.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/mad.cpython-38.pyc new file mode 100644 index 0000000..31d4a67 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/mad.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/max.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/max.cpython-38.pyc new file mode 100644 index 0000000..a9320a9 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/max.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/max_length.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/max_length.cpython-38.pyc new file mode 100644 index 0000000..849c482 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/max_length.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/max_precision.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/max_precision.cpython-38.pyc new file mode 100644 index 0000000..083853a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/max_precision.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/mean.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/mean.cpython-38.pyc new file mode 100644 index 0000000..0d4a7af Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/mean.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/median.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/median.cpython-38.pyc new file mode 100644 index 0000000..9d6afd3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/median.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/min.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/min.cpython-38.pyc new file mode 100644 index 0000000..8d18f57 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/min.cpython-38.pyc differ diff --git 
a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/mode.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/mode.cpython-38.pyc new file mode 100644 index 0000000..cbccbf7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/mode.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/percentiles.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/percentiles.cpython-38.pyc new file mode 100644 index 0000000..5bfe229 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/percentiles.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/quartiles.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/quartiles.cpython-38.pyc new file mode 100644 index 0000000..ab7cbb1 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/quartiles.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/quintiles.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/quintiles.cpython-38.pyc new file mode 100644 index 0000000..684f551 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/quintiles.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/stdev.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/stdev.cpython-38.pyc new file mode 100644 index 0000000..ebe0727 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/stdev.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/sum.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/sum.cpython-38.pyc new file mode 100644 index 0000000..6fb37e5 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/sum.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/summary.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/summary.cpython-38.pyc new file mode 100644 index 0000000..4cbf20c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/summary.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/variance.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/variance.cpython-38.pyc new file mode 100644 index 0000000..9ee086f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/aggregations/__pycache__/variance.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/all.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/all.py new file mode 100644 index 0000000..2a7f929 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/all.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.data_types import Boolean + + +class All(Aggregation): + """ + Check if all values in a column pass a test. + + :param column_name: + The name of the column to check. 
+ :param test: + Either a single value that all values in the column are compared against + (for equality) or a function that takes a column value and returns + `True` or `False`. + """ + def __init__(self, column_name, test): + self._column_name = column_name + + if callable(test): + self._test = test + else: + self._test = lambda d: d == test + + def get_aggregate_data_type(self, table): + return Boolean() + + def validate(self, table): + column = table.columns[self._column_name] + + def run(self, table): + """ + :returns: + :class:`bool` + """ + column = table.columns[self._column_name] + data = column.values() + + return all(self._test(d) for d in data) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/any.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/any.py new file mode 100644 index 0000000..8cea92a --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/any.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.data_types import Boolean + + +class Any(Aggregation): + """ + Check if any value in a column passes a test. + + :param column_name: + The name of the column to check. + :param test: + Either a single value that all values in the column are compared against + (for equality) or a function that takes a column value and returns + `True` or `False`. + """ + def __init__(self, column_name, test): + self._column_name = column_name + + if callable(test): + self._test = test + else: + self._test = lambda d: d == test + + def get_aggregate_data_type(self, table): + return Boolean() + + def validate(self, table): + column = table.columns[self._column_name] + + def run(self, table): + column = table.columns[self._column_name] + data = column.values() + + return any(self._test(d) for d in data) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/base.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/base.py new file mode 100644 index 0000000..4173ac6 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/base.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python + +import six + +from agate.exceptions import UnsupportedAggregationError + + +@six.python_2_unicode_compatible +class Aggregation(object): # pragma: no cover + """ + Aggregations create a new value by summarizing a :class:`.Column`. + + Aggregations are applied with :meth:`.Table.aggregate` and + :meth:`.TableSet.aggregate`. + + When creating a custom aggregation, ensure that the values returned by + :meth:`.Aggregation.run` are of the type specified by + :meth:`.Aggregation.get_aggregate_data_type`. This can be ensured by using + the :meth:`.DataType.cast` method. See :class:`.Summary` for an example. + """ + def __str__(self): + """ + String representation of this column. May be used as a column name in + generated tables. + """ + return self.__class__.__name__ + + def get_aggregate_data_type(self, table): + """ + Get the data type that should be used when using this aggregation with + a :class:`.TableSet` to produce a new column. + + Should raise :class:`.UnsupportedAggregationError` if this column does + not support aggregation into a :class:`.TableSet`. (For example, if it + does not return a single value.) + """ + raise UnsupportedAggregationError() + + def validate(self, table): + """ + Perform any checks necessary to verify this aggregation can run on the + provided table without errors. This is called by + :meth:`.Table.aggregate` before :meth:`run`. 
+ """ + pass + + def run(self, table): + """ + Execute this aggregation on a given column and return the result. + """ + raise NotImplementedError() diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/count.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/count.py new file mode 100644 index 0000000..e7c8391 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/count.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.data_types import Number +from agate.utils import default + + +class Count(Aggregation): + """ + Count occurences of a value or values. + + This aggregation can be used in three ways: + + 1. If no arguments are specified, then it will count the number of rows in the table. + 2. If only :code:`column_name` is specified, then it will count the number of non-null values in that column. + 3. If both :code:`column_name` and :code:`value` are specified, then it will count occurrences of a specific value. + + :param column_name: + The column containing the values to be counted. + :param value: + Any value to be counted, including :code:`None`. + """ + def __init__(self, column_name=None, value=default): + self._column_name = column_name + self._value = value + + def get_aggregate_data_type(self, table): + return Number() + + def run(self, table): + if self._column_name is not None: + if self._value is not default: + return table.columns[self._column_name].values().count(self._value) + else: + return len(table.columns[self._column_name].values_without_nulls()) + else: + return len(table.rows) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/deciles.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/deciles.py new file mode 100644 index 0000000..188ee5f --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/deciles.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.aggregations.has_nulls import HasNulls +from agate.aggregations.percentiles import Percentiles +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.utils import Quantiles +from agate.warns import warn_null_calculation + + +class Deciles(Aggregation): + """ + Calculate the deciles of a column based on its percentiles. + + Deciles will be equivalent to the 10th, 20th ... 90th percentiles. + + "Zeroth" (min value) and "Tenth" (max value) deciles are included for + reference and intuitive indexing. + + See :class:`Percentiles` for implementation details. + + This aggregation can not be applied to a :class:`.TableSet`. + + :param column_name: + The name of a column containing :class:`.Number` data. + """ + def __init__(self, column_name): + self._column_name = column_name + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('Deciles can only be applied to columns containing Number data.') + + has_nulls = HasNulls(self._column_name).run(table) + + if has_nulls: + warn_null_calculation(self, column) + + def run(self, table): + """ + :returns: + An instance of :class:`Quantiles`. 
+ """ + percentiles = Percentiles(self._column_name).run(table) + + return Quantiles([percentiles[i] for i in range(0, 101, 10)]) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/first.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/first.py new file mode 100644 index 0000000..37e1695 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/first.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.data_types import Boolean + + +class First(Aggregation): + """ + Returns the first value that passes a test. + + If the test is omitted, the aggregation will return the first value in the column. + + If no values pass the test, the aggregation will raise an exception. + + :param column_name: + The name of the column to check. + :param test: + A function that takes a value and returns `True` or `False`. Test may be + omitted when checking :class:`.Boolean` data. + """ + def __init__(self, column_name, test=None): + self._column_name = column_name + self._test = test + + def get_aggregate_data_type(self, table): + return table.columns[self._column_name].data_type + + def validate(self, table): + column = table.columns[self._column_name] + data = column.values() + + if self._test is not None and len([d for d in data if self._test(d)]) == 0: + raise ValueError('No values pass the given test.') + + def run(self, table): + column = table.columns[self._column_name] + data = column.values() + + if self._test is None: + return data[0] + + return next((d for d in data if self._test(d))) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/has_nulls.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/has_nulls.py new file mode 100644 index 0000000..6f46493 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/has_nulls.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.data_types import Boolean + + +class HasNulls(Aggregation): + """ + Check if the column contains null values. + + :param column_name: + The name of the column to check. + """ + def __init__(self, column_name): + self._column_name = column_name + + def get_aggregate_data_type(self, table): + return Boolean() + + def run(self, table): + return None in table.columns[self._column_name].values() diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/iqr.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/iqr.py new file mode 100644 index 0000000..81d7c73 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/iqr.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.aggregations.has_nulls import HasNulls +from agate.aggregations.percentiles import Percentiles +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.warns import warn_null_calculation + + +class IQR(Aggregation): + """ + Calculate the interquartile range of a column. + + :param column_name: + The name of a column containing :class:`.Number` data. 
+ """ + def __init__(self, column_name): + self._column_name = column_name + self._percentiles = Percentiles(column_name) + + def get_aggregate_data_type(self, table): + return Number() + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('IQR can only be applied to columns containing Number data.') + + has_nulls = HasNulls(self._column_name).run(table) + + if has_nulls: + warn_null_calculation(self, column) + + def run(self, table): + percentiles = self._percentiles.run(table) + + return percentiles[75] - percentiles[25] diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/mad.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/mad.py new file mode 100644 index 0000000..122db1b --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/mad.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.aggregations.has_nulls import HasNulls +from agate.aggregations.median import Median +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.utils import median +from agate.warns import warn_null_calculation + + +class MAD(Aggregation): + """ + Calculate the `median absolute deviation `_ + of a column. + + :param column_name: + The name of a column containing :class:`.Number` data. + """ + def __init__(self, column_name): + self._column_name = column_name + self._median = Median(column_name) + + def get_aggregate_data_type(self, table): + return Number() + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('MAD can only be applied to columns containing Number data.') + + has_nulls = HasNulls(self._column_name).run(table) + + if has_nulls: + warn_null_calculation(self, column) + + def run(self, table): + column = table.columns[self._column_name] + + data = column.values_without_nulls_sorted() + m = self._median.run(table) + + return median(tuple(abs(n - m) for n in data)) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/max.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/max.py new file mode 100644 index 0000000..470fab7 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/max.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.data_types import Date, DateTime, Number +from agate.exceptions import DataTypeError + + +class Max(Aggregation): + """ + Find the maximum value in a column. + + This aggregation can be applied to columns containing :class:`.Date`, + :class:`.DateTime`, or :class:`.Number` data. + + :param column_name: + The name of the column to be searched. 
+ """ + def __init__(self, column_name): + self._column_name = column_name + + def get_aggregate_data_type(self, table): + column = table.columns[self._column_name] + + if (isinstance(column.data_type, Number) or + isinstance(column.data_type, Date) or + isinstance(column.data_type, DateTime)): + return column.data_type + + def validate(self, table): + column = table.columns[self._column_name] + + if not (isinstance(column.data_type, Number) or + isinstance(column.data_type, Date) or + isinstance(column.data_type, DateTime)): + raise DataTypeError('Min can only be applied to columns containing DateTime orNumber data.') + + def run(self, table): + column = table.columns[self._column_name] + + return max(column.values_without_nulls()) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/max_length.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/max_length.py new file mode 100644 index 0000000..ec9146b --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/max_length.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +from decimal import Decimal + +from agate.aggregations.base import Aggregation +from agate.data_types import Number, Text +from agate.exceptions import DataTypeError + + +class MaxLength(Aggregation): + """ + Find the length of the longest string in a column. + + Note: On Python 2.7 this function may miscalcuate the length of unicode + strings that contain "wide characters". For details see this StackOverflow + answer: http://stackoverflow.com/a/35462951 + + :param column_name: + The name of a column containing :class:`.Text` data. + """ + def __init__(self, column_name): + self._column_name = column_name + + def get_aggregate_data_type(self, table): + return Number() + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Text): + raise DataTypeError('MaxLength can only be applied to columns containing Text data.') + + def run(self, table): + """ + :returns: + :class:`int`. + """ + column = table.columns[self._column_name] + + lens = [len(d) for d in column.values_without_nulls()] + + if not lens: + return Decimal('0') + + return Decimal(max(lens)) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/max_precision.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/max_precision.py new file mode 100644 index 0000000..2aa6e5f --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/max_precision.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.utils import max_precision + + +class MaxPrecision(Aggregation): + """ + Find the most decimal places present for any value in this column. + + :param column_name: + The name of the column to be searched. 
+ """ + def __init__(self, column_name): + self._column_name = column_name + + def get_aggregate_data_type(self, table): + return Number() + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('MaxPrecision can only be applied to columns containing Number data.') + + def run(self, table): + column = table.columns[self._column_name] + + return max_precision(column.values_without_nulls()) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/mean.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/mean.py new file mode 100644 index 0000000..6a83c1a --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/mean.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.aggregations.has_nulls import HasNulls +from agate.aggregations.sum import Sum +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.warns import warn_null_calculation + + +class Mean(Aggregation): + """ + Calculate the mean of a column. + + :param column_name: + The name of a column containing :class:`.Number` data. + """ + def __init__(self, column_name): + self._column_name = column_name + self._sum = Sum(column_name) + + def get_aggregate_data_type(self, table): + return Number() + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('Mean can only be applied to columns containing Number data.') + + has_nulls = HasNulls(self._column_name).run(table) + + if has_nulls: + warn_null_calculation(self, column) + + def run(self, table): + column = table.columns[self._column_name] + + sum_total = self._sum.run(table) + + return sum_total / len(column.values_without_nulls()) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/median.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/median.py new file mode 100644 index 0000000..5abf4e0 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/median.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.aggregations.has_nulls import HasNulls +from agate.aggregations.percentiles import Percentiles +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.warns import warn_null_calculation + + +class Median(Aggregation): + """ + Calculate the median of a column. + + Median is equivalent to the 50th percentile. See :class:`Percentiles` + for implementation details. + + :param column_name: + The name of a column containing :class:`.Number` data. 
+ """ + def __init__(self, column_name): + self._column_name = column_name + self._percentiles = Percentiles(column_name) + + def get_aggregate_data_type(self, table): + return Number() + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('Median can only be applied to columns containing Number data.') + + has_nulls = HasNulls(self._column_name).run(table) + + if has_nulls: + warn_null_calculation(self, column) + + def run(self, table): + percentiles = self._percentiles.run(table) + + return percentiles[50] diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/min.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/min.py new file mode 100644 index 0000000..e74914d --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/min.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.data_types import Date, DateTime, Number +from agate.exceptions import DataTypeError + + +class Min(Aggregation): + """ + Find the minimum value in a column. + + This aggregation can be applied to columns containing :class:`.Date`, + :class:`.DateTime`, or :class:`.Number` data. + + :param column_name: + The name of the column to be searched. + """ + def __init__(self, column_name): + self._column_name = column_name + + def get_aggregate_data_type(self, table): + column = table.columns[self._column_name] + + if (isinstance(column.data_type, Number) or + isinstance(column.data_type, Date) or + isinstance(column.data_type, DateTime)): + return column.data_type + + def validate(self, table): + column = table.columns[self._column_name] + + if not (isinstance(column.data_type, Number) or + isinstance(column.data_type, Date) or + isinstance(column.data_type, DateTime)): + raise DataTypeError('Min can only be applied to columns containing DateTime orNumber data.') + + def run(self, table): + column = table.columns[self._column_name] + + return min(column.values_without_nulls()) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/mode.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/mode.py new file mode 100644 index 0000000..d9aa1ac --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/mode.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +from collections import defaultdict + +from agate.aggregations.base import Aggregation +from agate.aggregations.has_nulls import HasNulls +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.warns import warn_null_calculation + + +class Mode(Aggregation): + """ + Calculate the mode of a column. + + :param column_name: + The name of a column containing :class:`.Number` data. 
+ """ + def __init__(self, column_name): + self._column_name = column_name + + def get_aggregate_data_type(self, table): + return Number() + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('Sum can only be applied to columns containing Number data.') + + has_nulls = HasNulls(self._column_name).run(table) + + if has_nulls: + warn_null_calculation(self, column) + + def run(self, table): + column = table.columns[self._column_name] + + data = column.values_without_nulls() + state = defaultdict(int) + + for n in data: + state[n] += 1 + + return max(state.keys(), key=lambda x: state[x]) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/percentiles.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/percentiles.py new file mode 100644 index 0000000..56b6ec1 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/percentiles.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python + +import math + +from agate.aggregations.base import Aggregation +from agate.aggregations.has_nulls import HasNulls +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.utils import Quantiles +from agate.warns import warn_null_calculation + + +class Percentiles(Aggregation): + """ + Divide a column into 100 equal-size groups using the "CDF" method. + + See `this explanation `_ + of the various methods for computing percentiles. + + "Zeroth" (min value) and "Hundredth" (max value) percentiles are included + for reference and intuitive indexing. + + A reference implementation was provided by + `pycalcstats `_. + + This aggregation can not be applied to a :class:`.TableSet`. + + :param column_name: + The name of a column containing :class:`.Number` data. + """ + def __init__(self, column_name): + self._column_name = column_name + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('Percentiles can only be applied to columns containing Number data.') + + has_nulls = HasNulls(self._column_name).run(table) + + if has_nulls: + warn_null_calculation(self, column) + + def run(self, table): + """ + :returns: + An instance of :class:`Quantiles`. 
+ """ + column = table.columns[self._column_name] + + data = column.values_without_nulls_sorted() + + # Zeroth percentile is first datum + quantiles = [data[0]] + + for percentile in range(1, 100): + k = len(data) * (float(percentile) / 100) + + low = max(1, int(math.ceil(k))) + high = min(len(data), int(math.floor(k + 1))) + + # No remainder + if low == high: + value = data[low - 1] + # Remainder + else: + value = (data[low - 1] + data[high - 1]) / 2 + + quantiles.append(value) + + # Hundredth percentile is final datum + quantiles.append(data[-1]) + + return Quantiles(quantiles) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/quartiles.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/quartiles.py new file mode 100644 index 0000000..7025056 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/quartiles.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.aggregations.has_nulls import HasNulls +from agate.aggregations.percentiles import Percentiles +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.utils import Quantiles +from agate.warns import warn_null_calculation + + +class Quartiles(Aggregation): + """ + Calculate the quartiles of column based on its percentiles. + + Quartiles will be equivalent to the the 25th, 50th and 75th percentiles. + + "Zeroth" (min value) and "Fourth" (max value) quartiles are included for + reference and intuitive indexing. + + See :class:`Percentiles` for implementation details. + + This aggregation can not be applied to a :class:`.TableSet`. + + :param column_name: + The name of a column containing :class:`.Number` data. + """ + def __init__(self, column_name): + self._column_name = column_name + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('Quartiles can only be applied to columns containing Number data.') + + has_nulls = HasNulls(self._column_name).run(table) + + if has_nulls: + warn_null_calculation(self, column) + + def run(self, table): + """ + :returns: + An instance of :class:`Quantiles`. + """ + percentiles = Percentiles(self._column_name).run(table) + + return Quantiles([percentiles[i] for i in range(0, 101, 25)]) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/quintiles.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/quintiles.py new file mode 100644 index 0000000..05bed63 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/quintiles.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.aggregations.has_nulls import HasNulls +from agate.aggregations.percentiles import Percentiles +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.utils import Quantiles +from agate.warns import warn_null_calculation + + +class Quintiles(Aggregation): + """ + Calculate the quintiles of a column based on its percentiles. + + Quintiles will be equivalent to the 20th, 40th, 60th and 80th percentiles. + + "Zeroth" (min value) and "Fifth" (max value) quintiles are included for + reference and intuitive indexing. + + See :class:`Percentiles` for implementation details. + + This aggregation can not be applied to a :class:`.TableSet`. + + :param column_name: + The name of a column containing :class:`.Number` data. 
+ """ + def __init__(self, column_name): + self._column_name = column_name + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('Quintiles can only be applied to columns containing Number data.') + + has_nulls = HasNulls(self._column_name).run(table) + + if has_nulls: + warn_null_calculation(self, column) + + def run(self, table): + """ + :returns: + An instance of :class:`Quantiles`. + """ + percentiles = Percentiles(self._column_name).run(table) + + return Quantiles([percentiles[i] for i in range(0, 101, 20)]) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/stdev.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/stdev.py new file mode 100644 index 0000000..74f1886 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/stdev.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python + +from agate.aggregations import Aggregation +from agate.aggregations.has_nulls import HasNulls +from agate.aggregations.variance import Variance, PopulationVariance +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.warns import warn_null_calculation + + +class StDev(Aggregation): + """ + Calculate the sample standard of deviation of a column. + + For the population standard of deviation see :class:`.PopulationStDev`. + + :param column_name: + The name of a column containing :class:`.Number` data. + """ + def __init__(self, column_name): + self._column_name = column_name + self._variance = Variance(column_name) + + def get_aggregate_data_type(self, table): + return Number() + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('StDev can only be applied to columns containing Number data.') + + has_nulls = HasNulls(self._column_name).run(table) + + if has_nulls: + warn_null_calculation(self, column) + + def run(self, table): + return self._variance.run(table).sqrt() + + +class PopulationStDev(StDev): + """ + Calculate the population standard of deviation of a column. + + For the sample standard of deviation see :class:`.StDev`. + + :param column_name: + The name of a column containing :class:`.Number` data. + """ + def __init__(self, column_name): + self._column_name = column_name + self._population_variance = PopulationVariance(column_name) + + def get_aggregate_data_type(self, table): + return Number() + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('PopulationStDev can only be applied to columns containing Number data.') + + has_nulls = HasNulls(self._column_name).run(table) + + if has_nulls: + warn_null_calculation(self, column) + + def run(self, table): + return self._population_variance.run(table).sqrt() diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/sum.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/sum.py new file mode 100644 index 0000000..efe793d --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/sum.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.data_types import Number +from agate.exceptions import DataTypeError + + +class Sum(Aggregation): + """ + Calculate the sum of a column. + + :param column_name: + The name of a column containing :class:`.Number` data. 
+ """ + def __init__(self, column_name): + self._column_name = column_name + + def get_aggregate_data_type(self, table): + return Number() + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('Sum can only be applied to columns containing Number data.') + + def run(self, table): + column = table.columns[self._column_name] + + return sum(column.values_without_nulls()) diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/summary.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/summary.py new file mode 100644 index 0000000..1ae26f2 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/summary.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation + + +class Summary(Aggregation): + """ + Apply an arbitrary function to a column. + + :param column_name: + The name of a column to be summarized. + :param data_type: + The return type of this aggregation. + :param func: + A function which will be passed the column for processing. + :param cast: + If :code:`True`, each return value will be cast to the specified + :code:`data_type` to ensure it is valid. Only disable this if you are + certain your summary always returns the correct type. + """ + def __init__(self, column_name, data_type, func, cast=True): + self._column_name = column_name + self._data_type = data_type + self._func = func + self._cast = cast + + def get_aggregate_data_type(self, table): + return self._data_type + + def run(self, table): + v = self._func(table.columns[self._column_name]) + + if self._cast: + v = self._data_type.cast(v) + + return v diff --git a/dbt-env/lib/python3.8/site-packages/agate/aggregations/variance.py b/dbt-env/lib/python3.8/site-packages/agate/aggregations/variance.py new file mode 100644 index 0000000..0dbc4d0 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/aggregations/variance.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python + +from agate.aggregations.base import Aggregation +from agate.aggregations.has_nulls import HasNulls +from agate.aggregations.mean import Mean +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.warns import warn_null_calculation + + +class Variance(Aggregation): + """ + Calculate the sample variance of a column. + + For the population variance see :class:`.PopulationVariance`. + + :param column_name: + The name of a column containing :class:`.Number` data. + """ + def __init__(self, column_name): + self._column_name = column_name + self._mean = Mean(column_name) + + def get_aggregate_data_type(self, table): + return Number() + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('Variance can only be applied to columns containing Number data.') + + has_nulls = HasNulls(self._column_name).run(table) + + if has_nulls: + warn_null_calculation(self, column) + + def run(self, table): + column = table.columns[self._column_name] + + data = column.values_without_nulls() + mean = self._mean.run(table) + + return sum((n - mean) ** 2 for n in data) / (len(data) - 1) + + +class PopulationVariance(Variance): + """ + Calculate the population variance of a column. + + For the sample variance see :class:`.Variance`. + + :param column_name: + The name of a column containing :class:`.Number` data. 
+ """ + def __init__(self, column_name): + self._column_name = column_name + self._mean = Mean(column_name) + + def get_aggregate_data_type(self, table): + return Number() + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('PopulationVariance can only be applied to columns containing Number data.') + + has_nulls = HasNulls(self._column_name).run(table) + + if has_nulls: + warn_null_calculation(self, column) + + def run(self, table): + column = table.columns[self._column_name] + + data = column.values_without_nulls() + mean = self._mean.run(table) + + return sum((n - mean) ** 2 for n in data) / len(data) diff --git a/dbt-env/lib/python3.8/site-packages/agate/columns.py b/dbt-env/lib/python3.8/site-packages/agate/columns.py new file mode 100644 index 0000000..7e556c3 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/columns.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python + +""" +This module contains the :class:`Column` class, which defines a "vertical" +array of tabular data. Whereas :class:`.Row` instances are independent of their +parent :class:`.Table`, columns depend on knowledge of both their position in +the parent (column name, data type) as well as the rows that contain their data. +""" +import six + +from agate.mapped_sequence import MappedSequence +from agate.utils import NullOrder, memoize + +if six.PY3: # pragma: no cover + # pylint: disable=W0622 + xrange = range + + +def null_handler(k): + """ + Key method for sorting nulls correctly. + """ + if k is None: + return NullOrder() + + return k + + +class Column(MappedSequence): + """ + Proxy access to column data. Instances of :class:`Column` should + not be constructed directly. They are created by :class:`.Table` + instances and are unique to them. + + Columns are implemented as subclass of :class:`.MappedSequence`. They + deviate from the underlying implementation in that loading of their data + is deferred until it is needed. + + :param name: + The name of this column. + :param data_type: + An instance of :class:`.DataType`. + :param rows: + A :class:`.MappedSequence` that contains the :class:`.Row` instances + containing the data for this column. + :param row_names: + An optional list of row names (keys) for this column. + """ + __slots__ = ['_index', '_name', '_data_type', '_rows', '_row_names'] + + def __init__(self, index, name, data_type, rows, row_names=None): + self._index = index + self._name = name + self._data_type = data_type + self._rows = rows + self._keys = row_names + + def __getstate__(self): + """ + Return state values to be pickled. + + This is necessary on Python2.7 when using :code:`__slots__`. + """ + return { + '_index': self._index, + '_name': self._name, + '_data_type': self._data_type, + '_rows': self._rows, + '_keys': self._keys + } + + def __setstate__(self, data): + """ + Restore pickled state. + + This is necessary on Python2.7 when using :code:`__slots__`. + """ + self._index = data['_index'] + self._name = data['_name'] + self._data_type = data['_data_type'] + self._rows = data['_rows'] + self._keys = data['_keys'] + + @property + def index(self): + """ + This column's index. + """ + return self._index + + @property + def name(self): + """ + This column's name. + """ + return self._name + + @property + def data_type(self): + """ + This column's data type. + """ + return self._data_type + + @memoize + def values(self): + """ + Get the values in this column, as a tuple. 
+ """ + return tuple(row[self._index] for row in self._rows) + + @memoize + def values_distinct(self): + """ + Get the distinct values in this column, as a tuple. + """ + return tuple(set(self.values())) + + @memoize + def values_without_nulls(self): + """ + Get the values in this column with any null values removed. + """ + return tuple(d for d in self.values() if d is not None) + + @memoize + def values_sorted(self): + """ + Get the values in this column sorted. + """ + return sorted(self.values(), key=null_handler) + + @memoize + def values_without_nulls_sorted(self): + """ + Get the values in this column with any null values removed and sorted. + """ + return sorted(self.values_without_nulls(), key=null_handler) diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/__init__.py b/dbt-env/lib/python3.8/site-packages/agate/computations/__init__.py new file mode 100644 index 0000000..14bdf52 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/computations/__init__.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python + +""" +Computations create a new value for each :class:`.Row` in a :class:`.Table`. +When used with :meth:`.Table.compute` these new values become a new column. +For instance, the :class:`.PercentChange` computation takes two column names as +arguments and computes the percentage change between them for each row. + +There are a variety of basic computations, such as :class:`.Change` and +:class:`.Percent`. If none of these meet your needs you can use the +:class:`Formula` computation to apply an arbitrary function to the row. +If this still isn't flexible enough, it's simple to create a custom computation +class by inheriting from :class:`Computation`. +""" + +from agate.computations.base import Computation # noqa + +from agate.computations.formula import Formula # noqa +from agate.computations.change import Change # noqa +from agate.computations.percent import Percent # noqa +from agate.computations.percent_change import PercentChange # noqa +from agate.computations.rank import Rank # noqa +from agate.computations.percentile_rank import PercentileRank # noqa +from agate.computations.slug import Slug # noqa diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..8c9b59e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/base.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/base.cpython-38.pyc new file mode 100644 index 0000000..fca911c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/base.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/change.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/change.cpython-38.pyc new file mode 100644 index 0000000..4200e01 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/change.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/formula.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/formula.cpython-38.pyc new file mode 100644 index 0000000..7bc296b Binary files /dev/null and 
b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/formula.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/percent.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/percent.cpython-38.pyc new file mode 100644 index 0000000..80e5d9c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/percent.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/percent_change.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/percent_change.cpython-38.pyc new file mode 100644 index 0000000..2a27873 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/percent_change.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/percentile_rank.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/percentile_rank.cpython-38.pyc new file mode 100644 index 0000000..6cea4da Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/percentile_rank.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/rank.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/rank.cpython-38.pyc new file mode 100644 index 0000000..2e77d58 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/rank.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/slug.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/slug.cpython-38.pyc new file mode 100644 index 0000000..ff248b2 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/computations/__pycache__/slug.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/base.py b/dbt-env/lib/python3.8/site-packages/agate/computations/base.py new file mode 100644 index 0000000..6ea0518 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/computations/base.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +import six + + +@six.python_2_unicode_compatible +class Computation(object): # pragma: no cover + """ + Computations produce a new column by performing a calculation on each row. + + Computations are applied with :class:`.TableSet.compute`. + + When implementing a custom computation, ensure that the values returned by + :meth:`.Computation.run` are of the type specified by + :meth:`.Computation.get_computed_data_type`. This can be ensured by using + the :meth:`.DataType.cast` method. See :class:`.Formula` for an example. + """ + def __str__(self): + """ + String representation of this column. May be used as a column name in + generated tables. + """ + return self.__class__.__name__ + + def get_computed_data_type(self, table): + """ + Returns an instantiated :class:`.DataType` which will be appended to + the table. + """ + raise NotImplementedError() + + def validate(self, table): + """ + Perform any checks necessary to verify this computation can run on the + provided table without errors. This is called by :meth:`.Table.compute` + before :meth:`run`. + """ + pass + + def run(self, table): + """ + When invoked with a table, returns a sequence of new column values. 
+ """ + raise NotImplementedError() diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/change.py b/dbt-env/lib/python3.8/site-packages/agate/computations/change.py new file mode 100644 index 0000000..927bbeb --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/computations/change.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python + +from agate.aggregations.has_nulls import HasNulls +from agate.computations.base import Computation +from agate.data_types import Date, DateTime, Number, TimeDelta +from agate.exceptions import DataTypeError +from agate.warns import warn_null_calculation + + +class Change(Computation): + """ + Calculate the difference between two columns. + + This calculation can be applied to :class:`.Number` columns to calculate + numbers. It can also be applied to :class:`.Date`, :class:`.DateTime`, and + :class:`.TimeDelta` columns to calculate time deltas. + + :param before_column_name: + The name of a column containing the "before" values. + :param after_column_name: + The name of a column containing the "after" values. + """ + def __init__(self, before_column_name, after_column_name): + self._before_column_name = before_column_name + self._after_column_name = after_column_name + + def get_computed_data_type(self, table): + before_column = table.columns[self._before_column_name] + + if isinstance(before_column.data_type, Date): + return TimeDelta() + elif isinstance(before_column.data_type, DateTime): + return TimeDelta() + elif isinstance(before_column.data_type, TimeDelta): + return TimeDelta() + elif isinstance(before_column.data_type, Number): + return Number() + + def validate(self, table): + before_column = table.columns[self._before_column_name] + after_column = table.columns[self._after_column_name] + + for data_type in (Number, Date, DateTime, TimeDelta): + if isinstance(before_column.data_type, data_type): + if not isinstance(after_column.data_type, data_type): + raise DataTypeError('Specified columns must be of the same type') + + if HasNulls(self._before_column_name).run(table): + warn_null_calculation(self, before_column) + + if HasNulls(self._after_column_name).run(table): + warn_null_calculation(self, after_column) + + return + + raise DataTypeError('Change before and after columns must both contain data that is one of: Number, Date, DateTime or TimeDelta.') + + def run(self, table): + new_column = [] + + for row in table.rows: + before = row[self._before_column_name] + after = row[self._after_column_name] + + if before is not None and after is not None: + new_column.append(after - before) + else: + new_column.append(None) + + return new_column diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/formula.py b/dbt-env/lib/python3.8/site-packages/agate/computations/formula.py new file mode 100644 index 0000000..3a0f947 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/computations/formula.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python + +from agate.computations.base import Computation + + +class Formula(Computation): + """ + Apply an arbitrary function to each row. + + :param data_type: + The data type this formula will return. + :param func: + The function to be applied to each row. Must return a valid value for + the specified data type. + :param cast: + If :code:`True`, each return value will be cast to the specified + :code:`data_type` to ensure it is valid. Only disable this if you are + certain your formula always returns the correct type. 
+ """ + def __init__(self, data_type, func, cast=True): + self._data_type = data_type + self._func = func + self._cast = cast + + def get_computed_data_type(self, table): + return self._data_type + + def run(self, table): + new_column = [] + + for row in table.rows: + v = self._func(row) + + if self._cast: + v = self._data_type.cast(v) + + new_column.append(v) + + return new_column diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/percent.py b/dbt-env/lib/python3.8/site-packages/agate/computations/percent.py new file mode 100644 index 0000000..422ba4b --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/computations/percent.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python + + +from agate.aggregations.has_nulls import HasNulls +from agate.aggregations.sum import Sum +from agate.computations.base import Computation +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.warns import warn_null_calculation + + +class Percent(Computation): + """ + Calculate each values percentage of a total. + + :param column_name: + The name of a column containing the :class:`.Number` values. + :param total: + If specified, the total value for each number to be divided into. By + default, the :class:`.Sum` of the values in the column will be used. + """ + def __init__(self, column_name, total=None): + self._column_name = column_name + self._total = total + + def get_computed_data_type(self, table): + return Number() + + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('Percent column must contain Number data.') + if self._total is not None and self._total <= 0: + raise DataTypeError('The total must be a positive number') + + # Throw a warning if there are nulls in there + if HasNulls(self._column_name).run(table): + warn_null_calculation(self, column) + + def run(self, table): + """ + :returns: + :class:`decimal.Decimal` + """ + # If the user has provided a total, use that + if self._total is not None: + total = self._total + # Otherwise compute the sum of all the values in that column to + # act as our denominator + else: + total = table.aggregate(Sum(self._column_name)) + # Raise error if sum is less than or equal to zero + if total <= 0: + raise DataTypeError('The sum of column values must be a positive number') + + # Create a list new rows + new_column = [] + + # Loop through the existing rows + for row in table.rows: + # Pull the value + value = row[self._column_name] + if value is None: + new_column.append(None) + continue + # Try to divide it out of the total + percent = value / total + # And multiply it by 100 + percent = percent * 100 + # Append the value to the new list + new_column.append(percent) + + # Pass out the list + return new_column diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/percent_change.py b/dbt-env/lib/python3.8/site-packages/agate/computations/percent_change.py new file mode 100644 index 0000000..8c28794 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/computations/percent_change.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python + +from agate.aggregations.has_nulls import HasNulls +from agate.computations.base import Computation + +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate.warns import warn_null_calculation + + +class PercentChange(Computation): + """ + Calculate the percent difference between two columns. 
+ + :param before_column_name: + The name of a column containing the "before" :class:`.Number` values. + :param after_column_name: + The name of a column containing the "after" :class:`.Number` values. + """ + def __init__(self, before_column_name, after_column_name): + self._before_column_name = before_column_name + self._after_column_name = after_column_name + + def get_computed_data_type(self, table): + return Number() + + def validate(self, table): + before_column = table.columns[self._before_column_name] + after_column = table.columns[self._after_column_name] + + if not isinstance(before_column.data_type, Number): + raise DataTypeError('PercentChange before column must contain Number data.') + + if not isinstance(after_column.data_type, Number): + raise DataTypeError('PercentChange after column must contain Number data.') + + if HasNulls(self._before_column_name).run(table): + warn_null_calculation(self, before_column) + + if HasNulls(self._after_column_name).run(table): + warn_null_calculation(self, after_column) + + def run(self, table): + """ + :returns: + :class:`decimal.Decimal` + """ + new_column = [] + + for row in table.rows: + before = row[self._before_column_name] + after = row[self._after_column_name] + + if before is not None and after is not None: + new_column.append((after - before) / before * 100) + else: + new_column.append(None) + + return new_column diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/percentile_rank.py b/dbt-env/lib/python3.8/site-packages/agate/computations/percentile_rank.py new file mode 100644 index 0000000..e3c912e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/computations/percentile_rank.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python + +from agate.aggregations.percentiles import Percentiles +from agate.computations.rank import Rank +from agate.data_types import Number +from agate.exceptions import DataTypeError + + +class PercentileRank(Rank): + """ + Calculate the percentile into which each value falls. + + See :class:`.Percentiles` for implementation details. + + :param column_name: + The name of a column containing the :class:`.Number` values. + """ + def validate(self, table): + column = table.columns[self._column_name] + + if not isinstance(column.data_type, Number): + raise DataTypeError('PercentileRank column must contain Number data.') + + def run(self, table): + """ + :returns: + :class:`int` + """ + percentiles = Percentiles(self._column_name).run(table) + + new_column = [] + + for row in table.rows: + new_column.append(percentiles.locate(row[self._column_name])) + + return new_column diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/rank.py b/dbt-env/lib/python3.8/site-packages/agate/computations/rank.py new file mode 100644 index 0000000..46cec5c --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/computations/rank.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python + +from decimal import Decimal + +import six + +if six.PY3: + from functools import cmp_to_key + +from agate.computations.base import Computation +from agate.data_types import Number + + +class Rank(Computation): + """ + Calculate rank order of the values in a column. + + Uses the "competition" ranking method: if there are four values and the + middle two are tied, then the output will be `[1, 2, 2, 4]`. + + Null values will always be ranked last. + + :param column_name: + The name of the column to rank. + :param comparer: + An optional comparison function. 
If not specified ranking will be + ascending, with nulls ranked last. + :param reverse: + Reverse sort order before ranking. + """ + def __init__(self, column_name, comparer=None, reverse=None): + self._column_name = column_name + self._comparer = comparer + self._reverse = reverse + + def get_computed_data_type(self, table): + return Number() + + def run(self, table): + """ + :returns: + :class:`int` + """ + column = table.columns[self._column_name] + + if self._comparer: + if six.PY3: + data_sorted = sorted(column.values(), key=cmp_to_key(self._comparer)) + else: # pragma: no cover + data_sorted = sorted(column.values(), cmp=self._comparer) + else: + data_sorted = column.values_sorted() + + if self._reverse: + data_sorted.reverse() + + ranks = {} + rank = 0 + + for c in data_sorted: + rank += 1 + + if c in ranks: + continue + + ranks[c] = Decimal(rank) + + new_column = [] + + for row in table.rows: + new_column.append(ranks[row[self._column_name]]) + + return new_column diff --git a/dbt-env/lib/python3.8/site-packages/agate/computations/slug.py b/dbt-env/lib/python3.8/site-packages/agate/computations/slug.py new file mode 100644 index 0000000..b6780e6 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/computations/slug.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +from agate.aggregations.has_nulls import HasNulls +from agate.computations.base import Computation +from agate.data_types import Text +from agate.exceptions import DataTypeError +from agate.utils import slugify, issequence + + +class Slug(Computation): + """ + Convert text values from one or more columns into slugs. If multiple column + names are given, values from those columns will be appended in the given + order before standardizing. + + :param column_name: + The name of a column or a sequence of column names containing + :class:`.Text` values. + :param ensure_unique: + If True, any duplicate values will be appended with unique identifers. + Defaults to False. + """ + def __init__(self, column_name, ensure_unique=False, **kwargs): + self._column_name = column_name + self._ensure_unique = ensure_unique + self._slug_args = kwargs + + def get_computed_data_type(self, table): + return Text() + + def validate(self, table): + if issequence(self._column_name): + column_names = self._column_name + else: + column_names = [self._column_name] + + for column_name in column_names: + column = table.columns[column_name] + + if not isinstance(column.data_type, Text): + raise DataTypeError('Slug column must contain Text data.') + + if HasNulls(column_name).run(table): + raise ValueError('Slug column cannot contain `None`.') + + def run(self, table): + """ + :returns: + :class:`string` + """ + new_column = [] + + for row in table.rows: + if issequence(self._column_name): + column_value = '' + for column_name in self._column_name: + column_value = column_value + ' ' + row[column_name] + + new_column.append(column_value) + else: + new_column.append(row[self._column_name]) + + return slugify(new_column, ensure_unique=self._ensure_unique, **self._slug_args) diff --git a/dbt-env/lib/python3.8/site-packages/agate/config.py b/dbt-env/lib/python3.8/site-packages/agate/config.py new file mode 100644 index 0000000..f3c11db --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/config.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python +# -*- coding: utf8 -*- + +""" +This module contains the global configuration for agate. Users should use +:meth:`get_option` and :meth:`set_option` to modify the global +configuration. 
+ +**Available configuation options:** + ++-------------------------+------------------------------------------+-----------------------------------------+ +| Option | Description | Default value | ++=========================+==========================================+=========================================+ +| default_locale | Default locale for number formatting | default_locale('LC_NUMERIC') or 'en_US' | ++-------------------------+------------------------------------------+-----------------------------------------+ +| horizontal_line_char | Character to render for horizontal lines | u'-' | ++-------------------------+------------------------------------------+-----------------------------------------+ +| vertical_line_char | Character to render for vertical lines | u'|' | ++-------------------------+------------------------------------------+-----------------------------------------+ +| bar_char | Character to render for bar chart units | u'░' | ++-------------------------+------------------------------------------+-----------------------------------------+ +| printable_bar_char | Printable character for bar chart units | u':' | ++-------------------------+------------------------------------------+-----------------------------------------+ +| zero_line_char | Character to render for zero line units | u'▓' | ++-------------------------+------------------------------------------+-----------------------------------------+ +| printable_zero_line_char| Printable character for zero line units | u'|' | ++-------------------------+------------------------------------------+-----------------------------------------+ +| tick_char | Character to render for axis ticks | u'+' | ++-------------------------+------------------------------------------+-----------------------------------------+ +| ellipsis_chars | Characters to render for ellipsis | u'...' | ++-------------------------+------------------------------------------+-----------------------------------------+ + +""" + +from babel.core import default_locale + + +_options = { + #: Default locale for number formatting + 'default_locale': default_locale('LC_NUMERIC') or 'en_US', + #: Character to render for horizontal lines + 'horizontal_line_char': u'-', + #: Character to render for vertical lines + 'vertical_line_char': u'|', + #: Character to render for bar chart units + 'bar_char': u'░', + #: Printable character to render for bar chart units + 'printable_bar_char': u':', + #: Character to render for zero line units + 'zero_line_char': u'▓', + #: Printable character to render for zero line units + 'printable_zero_line_char': u'|', + #: Character to render for axis ticks + 'tick_char': u'+', + #: Characters to render for ellipsis + 'ellipsis_chars': u'...', +} + + +def get_option(key): + """ + Get a global configuration option for agate. + + :param key: + The name of the configuration option. + """ + return _options[key] + + +def set_option(key, value): + """ + Set a global configuration option for agate. + + :param key: + The name of the configuration option. + :param value: + The new value to set for the configuration option. + """ + _options[key] = value + + +def set_options(options): + """ + Set a dictionary of options simultaneously. + + :param hash: + A dictionary of option names and values. 
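# Illustrative sketch, not part of the vendored source: reading and overriding
# the rendering options defined in this config module.
from agate import config

current_ellipsis = config.get_option('ellipsis_chars')   # u'...' by default
config.set_option('bar_char', u'#')                      # switch to an ASCII bar glyph
config.set_options({
    'horizontal_line_char': u'=',
    'tick_char': u'*',
})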
+ """ + _options.update(options) diff --git a/dbt-env/lib/python3.8/site-packages/agate/csv_py2.py b/dbt-env/lib/python3.8/site-packages/agate/csv_py2.py new file mode 100644 index 0000000..1ee2054 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/csv_py2.py @@ -0,0 +1,270 @@ +#!/usr/bin/env python + +""" +This module contains the Python 2 replacement for :mod:`csv`. +""" + +import codecs +import csv + +import six + +from agate.exceptions import FieldSizeLimitError + +EIGHT_BIT_ENCODINGS = [ + 'utf-8', 'u8', 'utf', 'utf8', + 'latin-1', 'iso-8859-1', 'iso8859-1', '8859', 'cp819', 'latin', 'latin1', 'l1' +] + +POSSIBLE_DELIMITERS = [',', '\t', ';', ' ', ':', '|'] + + +class UTF8Recoder(six.Iterator): + """ + Iterator that reads an encoded stream and reencodes the input to UTF-8. + """ + def __init__(self, f, encoding): + self.reader = codecs.getreader(encoding)(f) + + def __iter__(self): + return self + + def __next__(self): + return next(self.reader).encode('utf-8') + + +class UnicodeReader(object): + """ + A CSV reader which will read rows from a file in a given encoding. + """ + def __init__(self, f, encoding='utf-8', field_size_limit=None, line_numbers=False, header=True, **kwargs): + self.line_numbers = line_numbers + self.header = header + + f = UTF8Recoder(f, encoding) + + self.reader = csv.reader(f, **kwargs) + + if field_size_limit: + csv.field_size_limit(field_size_limit) + + def next(self): + try: + row = next(self.reader) + except csv.Error as e: + # Terrible way to test for this exception, but there is no subclass + if 'field larger than field limit' in str(e): + raise FieldSizeLimitError(csv.field_size_limit()) + else: + raise e + + if self.line_numbers: + if self.header and self.line_num == 1: + row.insert(0, 'line_numbers') + else: + row.insert(0, str(self.line_num - 1 if self.header else self.line_num)) + + return [six.text_type(s, 'utf-8') for s in row] + + def __iter__(self): + return self + + @property + def dialect(self): + return self.reader.dialect + + @property + def line_num(self): + return self.reader.line_num + + +class UnicodeWriter(object): + """ + A CSV writer which will write rows to a file in the specified encoding. + + NB: Optimized so that eight-bit encodings skip re-encoding. See: + https://github.com/onyxfish/csvkit/issues/175 + """ + def __init__(self, f, encoding='utf-8', **kwargs): + self.encoding = encoding + self._eight_bit = (self.encoding.lower().replace('_', '-') in EIGHT_BIT_ENCODINGS) + + if self._eight_bit: + self.writer = csv.writer(f, **kwargs) + else: + # Redirect output to a queue for reencoding + self.queue = six.StringIO() + self.writer = csv.writer(self.queue, **kwargs) + self.stream = f + self.encoder = codecs.getincrementalencoder(encoding)() + + def writerow(self, row): + if self._eight_bit: + self.writer.writerow([six.text_type(s if s is not None else '').encode(self.encoding) for s in row]) + else: + self.writer.writerow([six.text_type(s if s is not None else '').encode('utf-8') for s in row]) + # Fetch UTF-8 output from the queue... + data = self.queue.getvalue() + data = data.decode('utf-8') + # ...and reencode it into the target encoding + data = self.encoder.encode(data) + # write to the file + self.stream.write(data) + # empty the queue + self.queue.truncate(0) + + def writerows(self, rows): + for row in rows: + self.writerow(row) + + +class UnicodeDictReader(csv.DictReader): + """ + Defer almost all implementation to :class:`csv.DictReader`, but wraps our + unicode reader instead of :func:`csv.reader`. 
+ """ + def __init__(self, f, fieldnames=None, restkey=None, restval=None, *args, **kwargs): + reader = UnicodeReader(f, *args, **kwargs) + + if 'encoding' in kwargs: + kwargs.pop('encoding') + + csv.DictReader.__init__(self, f, fieldnames, restkey, restval, *args, **kwargs) + + self.reader = reader + + +class UnicodeDictWriter(csv.DictWriter): + """ + Defer almost all implementation to :class:`csv.DictWriter`, but wraps our + unicode writer instead of :func:`csv.writer`. + """ + def __init__(self, f, fieldnames, restval='', extrasaction='raise', *args, **kwds): + self.fieldnames = fieldnames + self.restval = restval + + if extrasaction.lower() not in ('raise', 'ignore'): + raise ValueError('extrasaction (%s) must be "raise" or "ignore"' % extrasaction) + + self.extrasaction = extrasaction + + self.writer = UnicodeWriter(f, *args, **kwds) + + +class Reader(UnicodeReader): + """ + A unicode-aware CSV reader. + """ + pass + + +class Writer(UnicodeWriter): + """ + A unicode-aware CSV writer. + """ + def __init__(self, f, encoding='utf-8', line_numbers=False, **kwargs): + self.row_count = 0 + self.line_numbers = line_numbers + + if 'lineterminator' not in kwargs: + kwargs['lineterminator'] = '\n' + + UnicodeWriter.__init__(self, f, encoding, **kwargs) + + def _append_line_number(self, row): + if self.row_count == 0: + row.insert(0, 'line_number') + else: + row.insert(0, self.row_count) + + self.row_count += 1 + + def writerow(self, row): + if self.line_numbers: + row = list(row) + self._append_line_number(row) + + # Convert embedded Mac line endings to unix style line endings so they get quoted + row = [i.replace('\r', '\n') if isinstance(i, six.string_types) else i for i in row] + + UnicodeWriter.writerow(self, row) + + def writerows(self, rows): + for row in rows: + self.writerow(row) + + +class DictReader(UnicodeDictReader): + """ + A unicode-aware CSV DictReader. + """ + pass + + +class DictWriter(UnicodeDictWriter): + """ + A unicode-aware CSV DictWriter. + """ + def __init__(self, f, fieldnames, encoding='utf-8', line_numbers=False, **kwargs): + self.row_count = 0 + self.line_numbers = line_numbers + + if 'lineterminator' not in kwargs: + kwargs['lineterminator'] = '\n' + + UnicodeDictWriter.__init__(self, f, fieldnames, encoding=encoding, **kwargs) + + def _append_line_number(self, row): + if self.row_count == 0: + row['line_number'] = 0 + else: + row['line_number'] = self.row_count + + self.row_count += 1 + + def writerow(self, row): + if self.line_numbers: + row = list(row) + self._append_line_number(row) + + # Convert embedded Mac line endings to unix style line endings so they get quoted + row = dict([(k, v.replace('\r', '\n')) if isinstance(v, basestring) else (k, v) for k, v in row.items()]) + + UnicodeDictWriter.writerow(self, row) + + def writerows(self, rows): + for row in rows: + self.writerow(row) + + +class Sniffer(object): + """ + A functinonal wrapper of ``csv.Sniffer()``. + """ + def sniff(self, sample): + """ + A functional version of ``csv.Sniffer().sniff``, that extends the + list of possible delimiters to include some seen in the wild. + """ + try: + dialect = csv.Sniffer().sniff(sample, POSSIBLE_DELIMITERS) + except: + dialect = None + + return dialect + + +def reader(*args, **kwargs): + """ + A replacement for Python's :func:`csv.reader` that uses + :class:`.csv_py2.Reader`. + """ + return Reader(*args, **kwargs) + + +def writer(*args, **kwargs): + """ + A replacement for Python's :func:`csv.writer` that uses + :class:`.csv_py2.Writer`. 
+ """ + return Writer(*args, **kwargs) diff --git a/dbt-env/lib/python3.8/site-packages/agate/csv_py3.py b/dbt-env/lib/python3.8/site-packages/agate/csv_py3.py new file mode 100644 index 0000000..47eea8e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/csv_py3.py @@ -0,0 +1,173 @@ +#!/usr/bin/env python + +""" +This module contains the Python 3 replacement for :mod:`csv`. +""" + +import csv + +import six + +from agate.exceptions import FieldSizeLimitError + +POSSIBLE_DELIMITERS = [',', '\t', ';', ' ', ':', '|'] + + +class Reader(six.Iterator): + """ + A wrapper around Python 3's builtin :func:`csv.reader`. + """ + def __init__(self, f, field_size_limit=None, line_numbers=False, header=True, **kwargs): + self.line_numbers = line_numbers + self.header = header + + if field_size_limit: + csv.field_size_limit(field_size_limit) + + self.reader = csv.reader(f, **kwargs) + + def __iter__(self): + return self + + def __next__(self): + try: + row = next(self.reader) + except csv.Error as e: + # Terrible way to test for this exception, but there is no subclass + if 'field larger than field limit' in str(e): + raise FieldSizeLimitError(csv.field_size_limit()) + else: + raise e + + if not self.line_numbers: + return row + else: + if self.line_numbers: + if self.header and self.line_num == 1: + row.insert(0, 'line_numbers') + else: + row.insert(0, str(self.line_num - 1 if self.header else self.line_num)) + + return row + + @property + def dialect(self): + return self.reader.dialect + + @property + def line_num(self): + return self.reader.line_num + + +class Writer(object): + """ + A wrapper around Python 3's builtin :func:`csv.writer`. + """ + def __init__(self, f, line_numbers=False, **kwargs): + self.row_count = 0 + self.line_numbers = line_numbers + + if 'lineterminator' not in kwargs: + kwargs['lineterminator'] = '\n' + + self.writer = csv.writer(f, **kwargs) + + def _append_line_number(self, row): + if self.row_count == 0: + row.insert(0, 'line_number') + else: + row.insert(0, self.row_count) + + self.row_count += 1 + + def writerow(self, row): + if self.line_numbers: + row = list(row) + self._append_line_number(row) + + # Convert embedded Mac line endings to unix style line endings so they get quoted + row = [i.replace('\r', '\n') if isinstance(i, six.string_types) else i for i in row] + + self.writer.writerow(row) + + def writerows(self, rows): + for row in rows: + self.writerow(row) + + +class DictReader(csv.DictReader): + """ + A wrapper around Python 3's builtin :class:`csv.DictReader`. + """ + pass + + +class DictWriter(csv.DictWriter): + """ + A wrapper around Python 3's builtin :class:`csv.DictWriter`. 
+ """ + def __init__(self, f, fieldnames, line_numbers=False, **kwargs): + self.row_count = 0 + self.line_numbers = line_numbers + + if 'lineterminator' not in kwargs: + kwargs['lineterminator'] = '\n' + + if self.line_numbers: + fieldnames.insert(0, 'line_number') + + csv.DictWriter.__init__(self, f, fieldnames, **kwargs) + + def _append_line_number(self, row): + if self.row_count == 0: + row['line_number'] = 'line_number' + else: + row['line_number'] = self.row_count + + self.row_count += 1 + + def writerow(self, row): + # Convert embedded Mac line endings to unix style line endings so they get quoted + row = dict([(k, v.replace('\r', '\n')) if isinstance(v, six.string_types) else (k, v) for k, v in row.items()]) + + if self.line_numbers: + self._append_line_number(row) + + csv.DictWriter.writerow(self, row) + + def writerows(self, rows): + for row in rows: + self.writerow(row) + + +class Sniffer(object): + """ + A functinonal wrapper of ``csv.Sniffer()``. + """ + def sniff(self, sample): + """ + A functional version of ``csv.Sniffer().sniff``, that extends the + list of possible delimiters to include some seen in the wild. + """ + try: + dialect = csv.Sniffer().sniff(sample, POSSIBLE_DELIMITERS) + except: + dialect = None + + return dialect + + +def reader(*args, **kwargs): + """ + A replacement for Python's :func:`csv.reader` that uses + :class:`.csv_py3.Reader`. + """ + return Reader(*args, **kwargs) + + +def writer(*args, **kwargs): + """ + A replacement for Python's :func:`csv.writer` that uses + :class:`.csv_py3.Writer`. + """ + return Writer(*args, **kwargs) diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/__init__.py b/dbt-env/lib/python3.8/site-packages/agate/data_types/__init__.py new file mode 100644 index 0000000..1a9cef4 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/data_types/__init__.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python + +""" +Data types define how data should be imported during the creation of a +:class:`.Table`. + +If column types are not explicitly specified when a :class:`.Table` is created, +agate will attempt to guess them. The :class:`.TypeTester` class can be used to +control how types are guessed. 
+""" + +from agate.data_types.base import DEFAULT_NULL_VALUES, DataType # noqa +from agate.data_types.boolean import Boolean, DEFAULT_TRUE_VALUES, DEFAULT_FALSE_VALUES # noqa +from agate.data_types.date import Date # noqa +from agate.data_types.date_time import DateTime # noqa +from agate.data_types.number import Number # noqa +from agate.data_types.text import Text # noqa +from agate.data_types.time_delta import TimeDelta # noqa +from agate.exceptions import CastError # noqa diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..e8e0ec6 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/base.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/base.cpython-38.pyc new file mode 100644 index 0000000..61f53ca Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/base.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/boolean.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/boolean.cpython-38.pyc new file mode 100644 index 0000000..927589e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/boolean.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/date.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/date.cpython-38.pyc new file mode 100644 index 0000000..8195ebd Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/date.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/date_time.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/date_time.cpython-38.pyc new file mode 100644 index 0000000..7612bb3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/date_time.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/number.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/number.cpython-38.pyc new file mode 100644 index 0000000..67b8d15 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/number.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/text.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/text.cpython-38.pyc new file mode 100644 index 0000000..fdf191c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/text.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/time_delta.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/time_delta.cpython-38.pyc new file mode 100644 index 0000000..748eef5 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/data_types/__pycache__/time_delta.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/base.py b/dbt-env/lib/python3.8/site-packages/agate/data_types/base.py new file mode 100644 index 0000000..de7eeb5 --- /dev/null +++ 
b/dbt-env/lib/python3.8/site-packages/agate/data_types/base.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python + +import six + +from agate.exceptions import CastError + +#: Default values which will be automatically cast to :code:`None` +DEFAULT_NULL_VALUES = ('', 'na', 'n/a', 'none', 'null', '.') + + +class DataType(object): # pragma: no cover + """ + Specifies how values should be parsed when creating a :class:`.Table`. + + :param null_values: A sequence of values which should be cast to + :code:`None` when encountered by this data type. + """ + def __init__(self, null_values=DEFAULT_NULL_VALUES): + self.null_values = null_values + + def test(self, d): + """ + Test, for purposes of type inference, if a value could possibly be + coerced to this data type. + + This is really just a thin wrapper around :meth:`DataType.cast`. + """ + try: + self.cast(d) + except CastError: + return False + + return True + + def cast(self, d): + """ + Coerce a given string value into this column's data type. + """ + raise NotImplementedError + + def csvify(self, d): + """ + Format a given native value for CSV serialization. + """ + if d is None: + return None + + return six.text_type(d) + + def jsonify(self, d): + """ + Format a given native value for JSON serialization. + """ + if d is None: + return None + + return six.text_type(d) diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/boolean.py b/dbt-env/lib/python3.8/site-packages/agate/data_types/boolean.py new file mode 100644 index 0000000..ae890bd --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/data_types/boolean.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +try: + from cdecimal import Decimal +except ImportError: # pragma: no cover + from decimal import Decimal + +import six + +from agate.data_types.base import DataType, DEFAULT_NULL_VALUES +from agate.exceptions import CastError + +#: Default values which will be automatically cast to :code:`True`. +DEFAULT_TRUE_VALUES = ('yes', 'y', 'true', 't', '1') + +#: Default values which will be automatically cast to :code:`False`. +DEFAULT_FALSE_VALUES = ('no', 'n', 'false', 'f', '0') + + +class Boolean(DataType): + """ + Data representing true and false. + + Note that by default numerical `1` and `0` are considered valid boolean + values, but other numbers are not. + + :param true_values: A sequence of values which should be cast to + :code:`True` when encountered with this type. + :param false_values: A sequence of values which should be cast to + :code:`False` when encountered with this type. + """ + def __init__(self, true_values=DEFAULT_TRUE_VALUES, false_values=DEFAULT_FALSE_VALUES, null_values=DEFAULT_NULL_VALUES): + super(Boolean, self).__init__(null_values=null_values) + + self.true_values = true_values + self.false_values = false_values + + def cast(self, d): + """ + Cast a single value to :class:`bool`. + + :param d: A value to cast. + :returns: :class:`bool` or :code:`None`. + """ + if d is None: + return d + elif type(d) is bool and type(d) is not int: + return d + elif type(d) is int or isinstance(d, Decimal): + if d == 1: + return True + elif d == 0: + return False + elif isinstance(d, six.string_types): + d = d.replace(',', '').strip() + + d_lower = d.lower() + + if d_lower in self.null_values: + return None + elif d_lower in self.true_values: + return True + elif d_lower in self.false_values: + return False + + raise CastError('Can not convert value %s to bool.' 
% d) + + def jsonify(self, d): + return d diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/date.py b/dbt-env/lib/python3.8/site-packages/agate/data_types/date.py new file mode 100644 index 0000000..b8c15f3 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/data_types/date.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python + +from datetime import date, datetime, time + +import isodate +import parsedatetime +import six + +from agate.data_types.base import DataType +from agate.exceptions import CastError + + +ZERO_DT = datetime.combine(date.min, time.min) + + +class Date(DataType): + """ + Data representing dates alone. + + :param date_format: + A formatting string for :meth:`datetime.datetime.strptime` to use + instead of using regex-based parsing. + """ + def __init__(self, date_format=None, **kwargs): + super(Date, self).__init__(**kwargs) + + self.date_format = date_format + self.parser = parsedatetime.Calendar(version=parsedatetime.VERSION_CONTEXT_STYLE) + + def __getstate__(self): + """ + Return state values to be pickled. Exclude _parser because parsedatetime + cannot be pickled. + """ + odict = self.__dict__.copy() + del odict['parser'] + return odict + + def __setstate__(self, data): + """ + Restore state from the unpickled state values. Set _parser to an instance + of the parsedatetime Calendar class. + """ + self.__dict__.update(data) + self.parser = parsedatetime.Calendar(version=parsedatetime.VERSION_CONTEXT_STYLE) + + def cast(self, d): + """ + Cast a single value to a :class:`datetime.date`. + + :param date_format: + An optional :func:`datetime.strptime` format string for parsing + datetimes in this column. + :returns: + :class:`datetime.date` or :code:`None`. + """ + if type(d) is date or d is None: + return d + elif isinstance(d, six.string_types): + d = d.strip() + + if d.lower() in self.null_values: + return None + else: + raise CastError('Can not parse value "%s" as date.' % d) + + if self.date_format: + try: + dt = datetime.strptime(d, self.date_format) + except: + raise CastError('Value "%s" does not match date format.' % d) + + return dt.date() + + try: + (value, ctx, _, _, matched_text), = self.parser.nlp(d, sourceTime=ZERO_DT) + except (TypeError, ValueError): + raise CastError('Value "%s" does not match date format.' % d) + else: + if matched_text == d and ctx.hasDate and not ctx.hasTime: + return value.date() + + raise CastError('Can not parse value "%s" as date.' % d) + + def csvify(self, d): + if d is None: + return None + + return d.isoformat() + + def jsonify(self, d): + return self.csvify(d) diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/date_time.py b/dbt-env/lib/python3.8/site-packages/agate/data_types/date_time.py new file mode 100644 index 0000000..482f3e2 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/data_types/date_time.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python + +import datetime + +import isodate +import parsedatetime +import six + +from agate.data_types.base import DataType +from agate.exceptions import CastError + + +class DateTime(DataType): + """ + Data representing dates with times. + + :param datetime_format: + A formatting string for :meth:`datetime.datetime.strptime` to use + instead of using regex-based parsing. + :param timezone: + A `pytz `_ timezone to apply to each + parsed date. 
+ """ + def __init__(self, datetime_format=None, timezone=None, **kwargs): + super(DateTime, self).__init__(**kwargs) + + self.datetime_format = datetime_format + self.timezone = timezone + + now = datetime.datetime.now() + self._source_time = datetime.datetime( + now.year, now.month, now.day, 0, 0, 0, 0, None + ) + self._parser = parsedatetime.Calendar(version=parsedatetime.VERSION_CONTEXT_STYLE) + + def __getstate__(self): + """ + Return state values to be pickled. Exclude _parser because parsedatetime + cannot be pickled. + """ + odict = self.__dict__.copy() + del odict['_parser'] + return odict + + def __setstate__(self, dict): + """ + Restore state from the unpickled state values. Set _parser to an instance + of the parsedatetime Calendar class. + """ + self.__dict__.update(dict) + self._parser = parsedatetime.Calendar(version=parsedatetime.VERSION_CONTEXT_STYLE) + + def cast(self, d): + """ + Cast a single value to a :class:`datetime.datetime`. + + :param datetime_format: + An optional :func:`datetime.strptime` format string for parsing + datetimes in this column. + :returns: + :class:`datetime.datetime` or :code:`None`. + """ + if isinstance(d, datetime.datetime) or d is None: + return d + elif isinstance(d, datetime.date): + return datetime.datetime.combine(d, datetime.time(0, 0, 0)) + elif isinstance(d, six.string_types): + d = d.strip() + + if d.lower() in self.null_values: + return None + else: + raise CastError('Can not parse value "%s" as datetime.' % d) + + if self.datetime_format: + try: + return datetime.datetime.strptime(d, self.datetime_format) + except: + raise CastError('Value "%s" does not match date format.' % d) + + try: + (_, _, _, _, matched_text), = self._parser.nlp(d, sourceTime=self._source_time) + except: + matched_text = None + else: + value, ctx = self._parser.parseDT( + d, + sourceTime=self._source_time, + tzinfo=self.timezone + ) + + if matched_text == d and ctx.hasDate and ctx.hasTime: + return value + elif matched_text == d and ctx.hasDate and not ctx.hasTime: + return datetime.datetime.combine(value.date(), datetime.time.min) + + try: + dt = isodate.parse_datetime(d) + + return dt + except: + pass + + raise CastError('Can not parse value "%s" as datetime.' % d) + + def csvify(self, d): + if d is None: + return None + + return d.isoformat() + + def jsonify(self, d): + return self.csvify(d) diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/number.py b/dbt-env/lib/python3.8/site-packages/agate/data_types/number.py new file mode 100644 index 0000000..d5fdc15 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/data_types/number.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python +# -*- coding: utf8 -*- + +try: + from cdecimal import Decimal, InvalidOperation +except ImportError: # pragma: no cover + from decimal import Decimal, InvalidOperation + +import warnings + +from babel.core import Locale +import six + +from agate.data_types.base import DataType +from agate.exceptions import CastError + +#: A list of currency symbols sourced from `Xe `_. +DEFAULT_CURRENCY_SYMBOLS = [u'؋', u'$', u'ƒ', u'៛', u'¥', u'₡', u'₱', u'£', u'€', u'¢', u'﷼', u'₪', u'₩', u'₭', u'₮', u'₦', u'฿', u'₤', u'₫'] + +POSITIVE = Decimal('1') +NEGATIVE = Decimal('-1') + + +class Number(DataType): + """ + Data representing numbers. + + :param locale: + A locale specification such as :code:`en_US` or :code:`de_DE` to use + for parsing formatted numbers. + :param group_symbol: + A grouping symbol used in the numbers. 
Overrides the value provided by + the specified :code:`locale`. + :param decimal_symbol: + A decimal separate symbol used in the numbers. Overrides the value + provided by the specified :code:`locale`. + :param currency_symbols: + A sequence of currency symbols to strip from numbers. + """ + def __init__(self, locale='en_US', group_symbol=None, decimal_symbol=None, currency_symbols=DEFAULT_CURRENCY_SYMBOLS, **kwargs): + super(Number, self).__init__(**kwargs) + + self.locale = Locale.parse(locale) + + self.currency_symbols = currency_symbols + + # Suppress Babel warning on Python 3.6 + # See #665 + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + + self.group_symbol = group_symbol or self.locale.number_symbols.get('group', ',') + self.decimal_symbol = decimal_symbol or self.locale.number_symbols.get('decimal', '.') + + def cast(self, d): + """ + Cast a single value to a :class:`decimal.Decimal`. + + :returns: + :class:`decimal.Decimal` or :code:`None`. + """ + if isinstance(d, Decimal) or d is None: + return d + + t = type(d) + + if t is int: + return Decimal(d) + elif six.PY2 and t is long: + return Decimal(d) + elif t is float: + return Decimal(repr(d)) + elif not isinstance(d, six.string_types): + raise CastError('Can not parse value "%s" as Decimal.' % d) + + d = d.strip() + + if d.lower() in self.null_values: + return None + + d = d.strip('%') + + if len(d) > 0 and d[0] == '-': + d = d[1:] + sign = NEGATIVE + else: + sign = POSITIVE + + for symbol in self.currency_symbols: + d = d.strip(symbol) + + d = d.replace(self.group_symbol, '') + d = d.replace(self.decimal_symbol, '.') + + try: + return Decimal(d) * sign + except InvalidOperation: + pass + + raise CastError('Can not parse value "%s" as Decimal.' % d) + + def jsonify(self, d): + if d is None: + return d + + return float(d) diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/text.py b/dbt-env/lib/python3.8/site-packages/agate/data_types/text.py new file mode 100644 index 0000000..6bd210e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/data_types/text.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python + +import six + +from agate.data_types.base import DataType + + +class Text(DataType): + """ + Data representing text. + + :param cast_nulls: + If :code:`True`, values in :data:`.DEFAULT_NULL_VALUES` will be + converted to `None`. Disable to retain them as strings. + """ + def __init__(self, cast_nulls=True, **kwargs): + super(Text, self).__init__(**kwargs) + + self.cast_nulls = cast_nulls + + def cast(self, d): + """ + Cast a single value to :func:`unicode` (:func:`str` in Python 3). + + :param d: + A value to cast. + :returns: + :func:`unicode` (:func:`str` in Python 3) or :code:`None` + """ + if d is None: + return d + elif isinstance(d, six.string_types): + if self.cast_nulls and d.strip().lower() in self.null_values: + return None + + return six.text_type(d) diff --git a/dbt-env/lib/python3.8/site-packages/agate/data_types/time_delta.py b/dbt-env/lib/python3.8/site-packages/agate/data_types/time_delta.py new file mode 100644 index 0000000..a577a81 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/data_types/time_delta.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +import datetime + +import pytimeparse +import six + +from agate.data_types.base import DataType +from agate.exceptions import CastError + + +class TimeDelta(DataType): + """ + Data representing the interval between two dates and/or times. 
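# Illustrative sketch, not part of the vendored source: the data types above can
# be used directly; cast() applies the parsing rules documented in each class.
from agate.data_types import Boolean, Number, Text

Boolean().cast('yes')                 # True (see DEFAULT_TRUE_VALUES)
Number().cast('$1,234.56')            # Decimal('1234.56'); currency and group symbols stripped
Number().cast('n/a')                  # None, matched against DEFAULT_NULL_VALUES
Text(cast_nulls=False).cast('null')   # 'null' kept as a string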
+ """ + def cast(self, d): + """ + Cast a single value to :class:`datetime.timedelta`. + + :param d: + A value to cast. + :returns: + :class:`datetime.timedelta` or :code:`None` + """ + if isinstance(d, datetime.timedelta) or d is None: + return d + elif isinstance(d, six.string_types): + d = d.strip() + + if d.lower() in self.null_values: + return None + else: + raise CastError('Can not parse value "%s" as timedelta.' % d) + + try: + seconds = pytimeparse.parse(d) + except AttributeError: + seconds = None + + if seconds is None: + raise CastError('Can not parse value "%s" to as timedelta.' % d) + + return datetime.timedelta(seconds=seconds) diff --git a/dbt-env/lib/python3.8/site-packages/agate/exceptions.py b/dbt-env/lib/python3.8/site-packages/agate/exceptions.py new file mode 100644 index 0000000..7fbfcc2 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/exceptions.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +""" +This module contains various exceptions raised by agate. +""" + + +class DataTypeError(TypeError): # pragma: no cover + """ + A calculation was attempted with an invalid :class:`.DataType`. + """ + pass + + +class UnsupportedAggregationError(TypeError): # pragma: no cover + """ + An :class:`.Aggregation` was attempted which is not supported. + + For example, if a :class:`.Percentiles` is applied to a :class:`.TableSet`. + """ + pass + + +class CastError(Exception): # pragma: no cover + """ + A column value can not be cast to the correct type. + """ + pass + + +class FieldSizeLimitError(Exception): # pragma: no cover + """ + A field in a CSV file exceeds the maximum length. + + This length may be the default or one set by the user. + """ + def __init__(self, limit): + super(FieldSizeLimitError, self).__init__( + 'CSV contains fields longer than maximum length of %i characters. Try raising the maximum with the field_size_limit parameter, or try setting quoting=csv.QUOTE_NONE.' % limit + ) diff --git a/dbt-env/lib/python3.8/site-packages/agate/fixed.py b/dbt-env/lib/python3.8/site-packages/agate/fixed.py new file mode 100644 index 0000000..944e012 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/fixed.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python + +""" +This module contains a generic parser for fixed-width files. It operates +similar to Python's built-in CSV reader. +""" + +from collections import OrderedDict, namedtuple + +import six + + +Field = namedtuple('Field', ['name', 'start', 'length']) + + +class Reader(six.Iterator): + """ + Reads a fixed-width file using a column schema in CSV format. + + This works almost exactly like Python's built-in CSV reader. + + Schemas must be in the "ffs" format, with :code:`column`, :code:`start`, + and :code:`length` columns. There is a repository of such schemas + maintained at `wireservice/ffs `_. + """ + def __init__(self, f, schema_f): + from agate import csv + + self.file = f + self.fields = [] + + reader = csv.reader(schema_f) + header = next(reader) + + if header != ['column', 'start', 'length']: + raise ValueError('Schema must contain exactly three columns: "column", "start", and "length".') + + for row in reader: + self.fields.append(Field(row[0], int(row[1]), int(row[2]))) + + def __iter__(self): + return self + + def __next__(self): + line = next(self.file) + + values = [] + + for field in self.fields: + values.append(line[field.start:field.start + field.length].strip()) + + return values + + @property + def fieldnames(self): + """ + The names of the columns read from the schema. 
+ """ + return [field.name for field in self.fields] + + +class DictReader(Reader): + """ + A fixed-width reader that returns :class:`collections.OrderedDict` rather + than a list. + """ + def __next__(self): + line = next(self.file) + values = OrderedDict() + + for field in self.fields: + values[field.name] = line[field.start:field.start + field.length].strip() + + return values + + +def reader(*args, **kwargs): + """ + A wrapper around :class:`.fixed.Reader`, so that it can be used in the same + way as a normal CSV reader. + """ + return Reader(*args, **kwargs) diff --git a/dbt-env/lib/python3.8/site-packages/agate/mapped_sequence.py b/dbt-env/lib/python3.8/site-packages/agate/mapped_sequence.py new file mode 100644 index 0000000..2e6222d --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/mapped_sequence.py @@ -0,0 +1,170 @@ +#!/usr/bin/env python + +""" +This module contains the :class:`MappedSequence` class that forms the foundation +for agate's :class:`.Row` and :class:`.Column` as well as for named sequences of +rows and columns. +""" + +from collections import OrderedDict, Sequence + +import six +from six.moves import range # pylint: disable=W0622 + +from agate.utils import memoize + + +class MappedSequence(Sequence): + """ + A generic container for immutable data that can be accessed either by + numeric index or by key. This is similar to an + :class:`collections.OrderedDict` except that the keys are optional and + iteration over it returns the values instead of keys. + + This is the base class for both :class:`.Column` and :class:`.Row`. + + :param values: + A sequence of values. + :param keys: + A sequence of keys. + """ + __slots__ = ['_values', '_keys'] + + def __init__(self, values, keys=None): + self._values = tuple(values) + + if keys is not None: + self._keys = keys + else: + self._keys = None + + def __getstate__(self): + """ + Return state values to be pickled. + + This is necessary on Python2.7 when using :code:`__slots__`. + """ + return { + '_values': self._values, + '_keys': self._keys + } + + def __setstate__(self, data): + """ + Restore pickled state. + + This is necessary on Python2.7 when using :code:`__slots__`. + """ + self._values = data['_values'] + self._keys = data['_keys'] + + def __unicode__(self): + """ + Print a unicode sample of the contents of this sequence. + """ + sample = u', '.join(repr(d) for d in self.values()[:5]) + + if len(self) > 5: + sample = u'%s, ...' % sample + + return u'' % (type(self).__name__, sample) + + def __str__(self): + """ + Print an ascii sample of the contents of this sequence. + """ + if six.PY2: # pragma: no cover + return str(self.__unicode__().encode('utf8')) + + return str(self.__unicode__()) + + def __getitem__(self, key): + """ + Retrieve values from this array by index, slice or key. + """ + if isinstance(key, slice): + indices = range(*key.indices(len(self))) + values = self.values() + + return tuple(values[i] for i in indices) + # Note: can't use isinstance because bool is a subclass of int + elif type(key) is int: + return self.values()[key] + else: + return self.dict()[key] + + def __setitem__(self, key, value): + """ + Set values by index, which we want to fail loudly. + """ + raise TypeError('Rows and columns can not be modified directly. You probably need to compute a new column.') + + def __iter__(self): + """ + Iterate over values. + """ + return iter(self.values()) + + @memoize + def __len__(self): + return len(self.values()) + + def __eq__(self, other): + """ + Equality test with other sequences. 
+ """ + if not isinstance(other, Sequence): + return False + + return self.values() == tuple(other) + + def __ne__(self, other): + """ + Inequality test with other sequences. + """ + return not self.__eq__(other) + + def __contains__(self, value): + return self.values().__contains__(value) + + def keys(self): + """ + Equivalent to :meth:`collections.OrderedDict.keys`. + """ + return self._keys + + def values(self): + """ + Equivalent to :meth:`collections.OrderedDict.values`. + """ + return self._values + + @memoize + def items(self): + """ + Equivalent to :meth:`collections.OrderedDict.items`. + """ + return tuple(zip(self.keys(), self.values())) + + def get(self, key, default=None): + """ + Equivalent to :meth:`collections.OrderedDict.get`. + """ + try: + return self.dict()[key] + except KeyError: + if default: + return default + else: + return None + + @memoize + def dict(self): + """ + Retrieve the contents of this sequence as an + :class:`collections.OrderedDict`. + """ + if self.keys() is None: + raise KeyError + + return OrderedDict(self.items()) diff --git a/dbt-env/lib/python3.8/site-packages/agate/rows.py b/dbt-env/lib/python3.8/site-packages/agate/rows.py new file mode 100644 index 0000000..431118d --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/rows.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python + +""" +This module contains agate's :class:`Row` implementation. Rows are independent +of both the :class:`.Table` that contains them as well as the :class:`.Columns` +that access their data. This independence, combined with rows immutability +allows them to be safely shared between table instances. +""" + +from agate.mapped_sequence import MappedSequence + + +class Row(MappedSequence): + """ + A row of data. Values within a row can be accessed by column name or column + index. Row are immutable and may be shared between :class:`.Table` + instances. + + Currently row instances are a no-op subclass of :class:`MappedSequence`. + They are being maintained in this fashion in order to support future + features. + """ + pass diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__init__.py b/dbt-env/lib/python3.8/site-packages/agate/table/__init__.py new file mode 100644 index 0000000..150d9ee --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/__init__.py @@ -0,0 +1,361 @@ +#!/usr/bin/env python + +""" +The :class:`.Table` object is the most important class in agate. Tables are +created by supplying row data, column names and subclasses of :class:`.DataType` +to the constructor. Once created, the data in a table **can not be changed**. +This concept is central to agate. + +Instead of modifying the data, various methods can be used to create new, +derivative tables. For example, the :meth:`.Table.select` method creates a new +table with only the specified columns. The :meth:`.Table.where` method creates +a new table with only those rows that pass a test. And :meth:`.Table.order_by` +creates a sorted table. In all of these cases the output is a new :class:`.Table` +and the existing table remains unmodified. + +Tables are not themselves iterable, but the columns of the table can be +accessed via :attr:`.Table.columns` and the rows via :attr:`.Table.rows`. Both +sequences can be accessed either by numeric index or by name. (In the case of +rows, row names are optional.) 
+""" + +from itertools import chain +import sys +import warnings + +import six +from six.moves import range # pylint: disable=W0622 + +from agate.columns import Column +from agate.data_types import DataType +from agate.mapped_sequence import MappedSequence +from agate.rows import Row +from agate.type_tester import TypeTester +from agate import utils +from agate.exceptions import CastError +from agate.warns import warn_duplicate_column, warn_unnamed_column + + +@six.python_2_unicode_compatible +class Table(object): + """ + A dataset consisting of rows and columns. Columns refer to "vertical" slices + of data that must all be of the same type. Rows refer to "horizontal" slices + of data that may (and usually do) contain mixed types. + + The sequence of :class:`.Column` instances are retrieved via the + :attr:`.Table.columns` property. They may be accessed by either numeric + index or by unique column name. + + The sequence of :class:`.Row` instances are retrieved via the + :attr:`.Table.rows` property. They may be accessed by either numeric index + or, if specified, unique row names. + + :param rows: + The data as a sequence of any sequences: tuples, lists, etc. If + any row has fewer values than the number of columns, it will be filled + out with nulls. No row may have more values than the number of columns. + :param column_names: + A sequence of string names for each column or `None`, in which case + column names will be automatically assigned using :func:`.letter_name`. + :param column_types: + A sequence of instances of :class:`.DataType` or an instance of + :class:`.TypeTester` or `None` in which case a generic TypeTester will + be used. Alternatively, a dictionary with column names as keys and + instances of :class:`.DataType` as values to specify some types. + :param row_names: + Specifies unique names for each row. This parameter is + optional. If specified it may be 1) the name of a single column that + contains a unique identifier for each row, 2) a key function that takes + a :class:`.Row` and returns a unique identifier or 3) a sequence of + unique identifiers of the same length as the sequence of rows. The + uniqueness of resulting identifiers is not validated, so be certain + the values you provide are truly unique. + :param _is_fork: + Used internally to skip certain validation steps when data + is propagated from an existing table. When :code:`True`, rows are + assumed to be :class:`.Row` instances, rather than raw data. + """ + def __init__(self, rows, column_names=None, column_types=None, row_names=None, _is_fork=False): + if isinstance(rows, six.string_types): + raise ValueError('When created directly, the first argument to Table must be a sequence of rows. Did you want agate.Table.from_csv?') + + # Validate column names + if column_names: + self._column_names = utils.deduplicate(column_names, column_names=True) + elif rows: + self._column_names = tuple(utils.letter_name(i) for i in range(len(rows[0]))) + warnings.warn('Column names not specified. "%s" will be used as names.' 
% str(self._column_names), RuntimeWarning, stacklevel=2) + else: + self._column_names = tuple() + + len_column_names = len(self._column_names) + + # Validate column_types + if column_types is None: + column_types = TypeTester() + elif isinstance(column_types, dict): + for v in column_types.values(): + if not isinstance(v, DataType): + raise ValueError('Column types must be instances of DataType.') + + column_types = TypeTester(force=column_types) + elif not isinstance(column_types, TypeTester): + for column_type in column_types: + if not isinstance(column_type, DataType): + raise ValueError('Column types must be instances of DataType.') + + if isinstance(column_types, TypeTester): + self._column_types = column_types.run(rows, self._column_names) + else: + self._column_types = tuple(column_types) + + if len_column_names != len(self._column_types): + raise ValueError('column_names and column_types must be the same length.') + + if not _is_fork: + new_rows = [] + cast_funcs = [c.cast for c in self._column_types] + + for i, row in enumerate(rows): + len_row = len(row) + + if len_row > len_column_names: + raise ValueError('Row %i has %i values, but Table only has %i columns.' % (i, len_row, len_column_names)) + elif len(row) < len_column_names: + row = chain(row, [None] * (len_column_names - len_row)) + + row_values = [] + for j, d in enumerate(row): + try: + row_values.append(cast_funcs[j](d)) + except CastError as e: + raise CastError(str(e) + ' Error at row %s column %s.' % (i, self._column_names[j])) + + new_rows.append(Row(row_values, self._column_names)) + else: + new_rows = rows + + if row_names: + computed_row_names = [] + + if isinstance(row_names, six.string_types): + for row in new_rows: + name = row[row_names] + computed_row_names.append(name) + elif hasattr(row_names, '__call__'): + for row in new_rows: + name = row_names(row) + computed_row_names.append(name) + elif utils.issequence(row_names): + computed_row_names = row_names + else: + raise ValueError('row_names must be a column name, function or sequence') + + for row_name in computed_row_names: + if type(row_name) is int: + raise ValueError('Row names cannot be of type int. Use Decimal for numbered row names.') + + self._row_names = tuple(computed_row_names) + else: + self._row_names = None + + self._rows = MappedSequence(new_rows, self._row_names) + + # Build columns + new_columns = [] + + for i in range(len_column_names): + name = self._column_names[i] + data_type = self._column_types[i] + + column = Column(i, name, data_type, self._rows, row_names=self._row_names) + + new_columns.append(column) + + self._columns = MappedSequence(new_columns, self._column_names) + + def __str__(self): + """ + Print the table's structure using :meth:`.Table.print_structure`. + """ + structure = six.StringIO() + + self.print_structure(output=structure) + + return structure.getvalue() + + def __len__(self): + """ + Shorthand for :code:`len(table.rows)`. + """ + return self._rows.__len__() + + def __iter__(self): + """ + Shorthand for :code:`iter(table.rows)`. + """ + return self._rows.__iter__() + + def __getitem__(self, key): + """ + Shorthand for :code:`table.rows[foo]`. + """ + return self._rows.__getitem__(key) + + @property + def column_types(self): + """ + An tuple :class:`.DataType` instances. + """ + return self._column_types + + @property + def column_names(self): + """ + An tuple of strings. + """ + return self._column_names + + @property + def row_names(self): + """ + An tuple of strings, if this table has row names. 
+ + If this table does not have row names, then :code:`None`. + """ + return self._row_names + + @property + def columns(self): + """ + A :class:`.MappedSequence` with column names for keys and + :class:`.Column` instances for values. + """ + return self._columns + + @property + def rows(self): + """ + A :class:`.MappedSeqeuence` with row names for keys (if specified) and + :class:`.Row` instances for values. + """ + return self._rows + + def _fork(self, rows, column_names=None, column_types=None, row_names=None): + """ + Create a new table using the metadata from this one. + + This method is used internally by functions like + :meth:`.Table.order_by`. + + :param rows: + Row data for the forked table. + :param column_names: + Column names for the forked table. If not specified, fork will use + this table's column names. + :param column_types: + Column types for the forked table. If not specified, fork will use + this table's column names. + :param row_names: + Row names for the forked table. If not specified, fork will use + this table's row names. + """ + if column_names is None: + column_names = self._column_names + + if column_types is None: + column_types = self._column_types + + if row_names is None: + row_names = self._row_names + + return Table(rows, column_names, column_types, row_names=row_names, _is_fork=True) + + def print_csv(self, **kwargs): + """ + Print this table as a CSV. + + This is the same as passing :code:`sys.stdout` to :meth:`.Table.to_csv`. + + :code:`kwargs` will be passed on to :meth:`.Table.to_csv`. + """ + self.to_csv(sys.stdout, **kwargs) + + def print_json(self, **kwargs): + """ + Print this table as JSON. + + This is the same as passing :code:`sys.stdout` to + :meth:`.Table.to_json`. + + :code:`kwargs` will be passed on to :meth:`.Table.to_json`. 
+ """ + self.to_json(sys.stdout, **kwargs) + + +from agate.table.aggregate import aggregate +from agate.table.bar_chart import bar_chart +from agate.table.bins import bins +from agate.table.column_chart import column_chart +from agate.table.compute import compute +from agate.table.denormalize import denormalize +from agate.table.distinct import distinct +from agate.table.exclude import exclude +from agate.table.find import find +from agate.table.from_csv import from_csv +from agate.table.from_fixed import from_fixed +from agate.table.from_json import from_json +from agate.table.from_object import from_object +from agate.table.group_by import group_by +from agate.table.homogenize import homogenize +from agate.table.join import join +from agate.table.limit import limit +from agate.table.line_chart import line_chart +from agate.table.merge import merge +from agate.table.normalize import normalize +from agate.table.order_by import order_by +from agate.table.pivot import pivot +from agate.table.print_bars import print_bars +from agate.table.print_html import print_html +from agate.table.print_structure import print_structure +from agate.table.print_table import print_table +from agate.table.rename import rename +from agate.table.scatterplot import scatterplot +from agate.table.select import select +from agate.table.to_csv import to_csv +from agate.table.to_json import to_json +from agate.table.where import where + +Table.aggregate = aggregate +Table.bar_chart = bar_chart +Table.bins = bins +Table.column_chart = column_chart +Table.compute = compute +Table.denormalize = denormalize +Table.distinct = distinct +Table.exclude = exclude +Table.find = find +Table.from_csv = from_csv +Table.from_fixed = from_fixed +Table.from_json = from_json +Table.from_object = from_object +Table.group_by = group_by +Table.homogenize = homogenize +Table.join = join +Table.limit = limit +Table.line_chart = line_chart +Table.merge = merge +Table.normalize = normalize +Table.order_by = order_by +Table.pivot = pivot +Table.print_bars = print_bars +Table.print_html = print_html +Table.print_structure = print_structure +Table.print_table = print_table +Table.rename = rename +Table.scatterplot = scatterplot +Table.select = select +Table.to_csv = to_csv +Table.to_json = to_json +Table.where = where diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..64e9ab0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/aggregate.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/aggregate.cpython-38.pyc new file mode 100644 index 0000000..4689c8d Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/aggregate.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/bar_chart.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/bar_chart.cpython-38.pyc new file mode 100644 index 0000000..753e11d Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/bar_chart.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/bins.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/bins.cpython-38.pyc new file mode 100644 index 
0000000..fb18925 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/bins.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/column_chart.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/column_chart.cpython-38.pyc new file mode 100644 index 0000000..063f6eb Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/column_chart.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/compute.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/compute.cpython-38.pyc new file mode 100644 index 0000000..c1707ae Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/compute.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/denormalize.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/denormalize.cpython-38.pyc new file mode 100644 index 0000000..092598c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/denormalize.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/distinct.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/distinct.cpython-38.pyc new file mode 100644 index 0000000..ba5c2ba Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/distinct.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/exclude.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/exclude.cpython-38.pyc new file mode 100644 index 0000000..49dea8e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/exclude.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/find.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/find.cpython-38.pyc new file mode 100644 index 0000000..a05aa85 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/find.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/from_csv.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/from_csv.cpython-38.pyc new file mode 100644 index 0000000..4a52761 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/from_csv.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/from_fixed.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/from_fixed.cpython-38.pyc new file mode 100644 index 0000000..b9be608 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/from_fixed.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/from_json.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/from_json.cpython-38.pyc new file mode 100644 index 0000000..d76b532 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/from_json.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/from_object.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/from_object.cpython-38.pyc new file mode 100644 index 0000000..4f7c2c2 Binary files /dev/null and 
b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/from_object.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/group_by.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/group_by.cpython-38.pyc new file mode 100644 index 0000000..e7cc849 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/group_by.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/homogenize.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/homogenize.cpython-38.pyc new file mode 100644 index 0000000..9a6e193 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/homogenize.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/join.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/join.cpython-38.pyc new file mode 100644 index 0000000..3677cc7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/join.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/limit.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/limit.cpython-38.pyc new file mode 100644 index 0000000..cb07bda Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/limit.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/line_chart.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/line_chart.cpython-38.pyc new file mode 100644 index 0000000..1129a20 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/line_chart.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/merge.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/merge.cpython-38.pyc new file mode 100644 index 0000000..3fee300 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/merge.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/normalize.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/normalize.cpython-38.pyc new file mode 100644 index 0000000..2cf5073 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/normalize.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/order_by.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/order_by.cpython-38.pyc new file mode 100644 index 0000000..7db6978 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/order_by.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/pivot.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/pivot.cpython-38.pyc new file mode 100644 index 0000000..fc428e5 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/pivot.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/print_bars.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/print_bars.cpython-38.pyc new file mode 100644 index 0000000..40a54f0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/print_bars.cpython-38.pyc differ diff 
--git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/print_html.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/print_html.cpython-38.pyc new file mode 100644 index 0000000..698659b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/print_html.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/print_structure.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/print_structure.cpython-38.pyc new file mode 100644 index 0000000..ae7b594 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/print_structure.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/print_table.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/print_table.cpython-38.pyc new file mode 100644 index 0000000..2c3212a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/print_table.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/rename.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/rename.cpython-38.pyc new file mode 100644 index 0000000..28617c2 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/rename.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/scatterplot.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/scatterplot.cpython-38.pyc new file mode 100644 index 0000000..bad8b1b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/scatterplot.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/select.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/select.cpython-38.pyc new file mode 100644 index 0000000..542ead0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/select.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/to_csv.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/to_csv.cpython-38.pyc new file mode 100644 index 0000000..35ab368 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/to_csv.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/to_json.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/to_json.cpython-38.pyc new file mode 100644 index 0000000..f0b3631 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/to_json.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/where.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/where.cpython-38.pyc new file mode 100644 index 0000000..0fc12a6 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/table/__pycache__/where.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/aggregate.py b/dbt-env/lib/python3.8/site-packages/agate/table/aggregate.py new file mode 100644 index 0000000..f6c0437 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/aggregate.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from collections import OrderedDict + +from agate import utils + + +def aggregate(self, 
aggregations): + """ + Apply one or more :class:`.Aggregation` instances to this table. + + :param aggregations: + A single :class:`.Aggregation` instance or a sequence of tuples in the + format :code:`(name, aggregation)`, where each :code:`aggregation` is + an instance of :class:`.Aggregation`. + :returns: + If the input was a single :class:`Aggregation` then a single result + will be returned. If it was a sequence then an :class:`.OrderedDict` of + results will be returned. + """ + if utils.issequence(aggregations): + results = OrderedDict() + + for name, agg in aggregations: + agg.validate(self) + + for name, agg in aggregations: + results[name] = agg.run(self) + + return results + else: + aggregations.validate(self) + + return aggregations.run(self) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/bar_chart.py b/dbt-env/lib/python3.8/site-packages/agate/table/bar_chart.py new file mode 100644 index 0000000..9e3da51 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/bar_chart.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import leather + +from agate import utils + + +def bar_chart(self, label=0, value=1, path=None, width=None, height=None): + """ + Render a bar chart using :class:`leather.Chart`. + + :param label: + The name or index of a column to plot as the labels of the chart. + Defaults to the first column in the table. + :param value: + The name or index of a column to plot as the values of the chart. + Defaults to the second column in the table. + :param path: + If specified, the resulting SVG will be saved to this location. If + :code:`None` and running in IPython, then the SVG will be rendered + inline. Otherwise, the SVG data will be returned as a string. + :param width: + The width of the output SVG. + :param height: + The height of the output SVG. + """ + if type(label) is int: + label_name = self.column_names[label] + else: + label_name = label + + if type(value) is int: + value_name = self.column_names[value] + else: + value_name = value + + chart = leather.Chart() + chart.add_x_axis(name=value_name) + chart.add_y_axis(name=label_name) + chart.add_bars(self, x=value, y=label) + + return chart.to_svg(path=path, width=width, height=height) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/bins.py b/dbt-env/lib/python3.8/site-packages/agate/table/bins.py new file mode 100644 index 0000000..b4f3fe2 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/bins.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +try: + from cdecimal import Decimal +except ImportError: # pragma: no cover + from decimal import Decimal + +from babel.numbers import format_decimal + +from agate.aggregations import Min, Max +from agate import utils + + +def bins(self, column_name, count=10, start=None, end=None): + """ + Generates (approximately) evenly sized bins for the values in a column. + Bins may not be perfectly even if the spread of the data does not divide + evenly, but all values will always be included in some bin. + + The resulting table will have two columns. The first will have + the same name as the specified column, but will be type :class:`.Text`. + The second will be named :code:`count` and will be of type + :class:`.Number`. + + :param column_name: + The name of the column to bin. Must be of type :class:`.Number` + :param count: + The number of bins to create. If not specified then each value will + be counted as its own bin. + :param start: + The minimum value to start the bins at. 
If not specified the + minimum value in the column will be used. + :param end: + The maximum value to end the bins at. If not specified the maximum + value in the column will be used. + :returns: + A new :class:`Table`. + """ + minimum, maximum = utils.round_limits( + Min(column_name).run(self), + Max(column_name).run(self) + ) + # Infer bin start/end positions + start = minimum if not start else Decimal(start) + end = maximum if not end else Decimal(end) + + # Calculate bin size + spread = abs(end - start) + size = spread / count + + breaks = [start] + + # Calculate breakpoints + for i in range(1, count + 1): + top = start + (size * i) + + breaks.append(top) + + # Format bin names + decimal_places = utils.max_precision(breaks) + break_formatter = utils.make_number_formatter(decimal_places) + + def name_bin(i, j, first_exclusive=True, last_exclusive=False): + inclusive = format_decimal(i, format=break_formatter) + exclusive = format_decimal(j, format=break_formatter) + + output = u'[' if first_exclusive else u'(' + output += u'%s - %s' % (inclusive, exclusive) + output += u']' if last_exclusive else u')' + + return output + + # Generate bins + bin_names = [] + + for i in range(1, len(breaks)): + last_exclusive = (i == len(breaks) - 1) + + if i == 1 and minimum < start: + name = name_bin(minimum, breaks[i], last_exclusive=last_exclusive) + elif i == len(breaks) - 1 and maximum > end: + name = name_bin(breaks[i - 1], maximum, last_exclusive=last_exclusive) + else: + name = name_bin(breaks[i - 1], breaks[i], last_exclusive=last_exclusive) + + bin_names.append(name) + + bin_names.append(None) + + # Lambda method for actually assigning values to bins + def binner(row): + value = row[column_name] + + if value is None: + return None + + i = 1 + + try: + while value >= breaks[i]: + i += 1 + except IndexError: + i -= 1 + + return bin_names[i - 1] + + # Pivot by lambda + table = self.pivot(binner, key_name=column_name) + + # Sort by bin order + return table.order_by(lambda r: bin_names.index(r[column_name])) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/column_chart.py b/dbt-env/lib/python3.8/site-packages/agate/table/column_chart.py new file mode 100644 index 0000000..11bca44 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/column_chart.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import leather + +from agate import utils + + +def column_chart(self, label=0, value=1, path=None, width=None, height=None): + """ + Render a column chart using :class:`leather.Chart`. + + :param label: + The name or index of a column to plot as the labels of the chart. + Defaults to the first column in the table. + :param value: + The name or index of a column to plot as the values of the chart. + Defaults to the second column in the table. + :param path: + If specified, the resulting SVG will be saved to this location. If + :code:`None` and running in IPython, then the SVG will be rendered + inline. Otherwise, the SVG data will be returned as a string. + :param width: + The width of the output SVG. + :param height: + The height of the output SVG. 
+ """ + if type(label) is int: + label_name = self.column_names[label] + else: + label_name = label + + if type(value) is int: + value_name = self.column_names[value] + else: + value_name = value + + chart = leather.Chart() + chart.add_x_axis(name=label_name) + chart.add_y_axis(name=value_name) + chart.add_columns(self, x=label, y=value) + + return chart.to_svg(path=path, width=width, height=height) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/compute.py b/dbt-env/lib/python3.8/site-packages/agate/table/compute.py new file mode 100644 index 0000000..887fe88 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/compute.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from collections import OrderedDict +from copy import copy + +from agate.rows import Row + + +def compute(self, computations, replace=False): + """ + Create a new table by applying one or more :class:`.Computation` instances + to each row. + + :param computations: + A sequence of pairs of new column names and :class:`.Computation` + instances. + :param replace: + If :code:`True` then new column names can match existing names, and + those columns will be replaced with the computed data. + :returns: + A new :class:`.Table`. + """ + column_names = list(copy(self._column_names)) + column_types = list(copy(self._column_types)) + + for new_column_name, computation in computations: + new_column_type = computation.get_computed_data_type(self) + + if new_column_name in column_names: + if not replace: + raise ValueError('New column name "%s" already exists. Specify replace=True to replace with computed data.') + + i = column_names.index(new_column_name) + column_types[i] = new_column_type + else: + column_names.append(new_column_name) + column_types.append(new_column_type) + + computation.validate(self) + + new_columns = OrderedDict() + + for new_column_name, computation in computations: + new_columns[new_column_name] = computation.run(self) + + new_rows = [] + + for i, row in enumerate(self._rows): + # Slow version if using replace + if replace: + values = [] + + for j, column_name in enumerate(column_names): + if column_name in new_columns: + values.append(new_columns[column_name][i]) + else: + values.append(row[j]) + # Faster version if not using replace + else: + values = row.values() + tuple(c[i] for c in new_columns.values()) + + new_rows.append(Row(values, column_names)) + + return self._fork(new_rows, column_names, column_types) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/denormalize.py b/dbt-env/lib/python3.8/site-packages/agate/table/denormalize.py new file mode 100644 index 0000000..beec713 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/denormalize.py @@ -0,0 +1,140 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from collections import OrderedDict + +try: + from cdecimal import Decimal +except ImportError: # pragma: no cover + from decimal import Decimal + +import six + +from agate.data_types import Number +from agate.type_tester import TypeTester +from agate.rows import Row +from agate import utils + + +def denormalize(self, key=None, property_column='property', value_column='value', default_value=utils.default, column_types=None): + """ + Create a new table with row values converted into columns. 
+ + For example: + + +---------+-----------+---------+ + | name | property | value | + +=========+===========+=========+ + | Jane | gender | female | + +---------+-----------+---------+ + | Jane | race | black | + +---------+-----------+---------+ + | Jane | age | 24 | + +---------+-----------+---------+ + | ... | ... | ... | + +---------+-----------+---------+ + + Can be denormalized so that each unique value in `field` becomes a + column with `value` used for its values. + + +---------+----------+--------+-------+ + | name | gender | race | age | + +=========+==========+========+=======+ + | Jane | female | black | 24 | + +---------+----------+--------+-------+ + | Jack | male | white | 35 | + +---------+----------+--------+-------+ + | Joe | male | black | 28 | + +---------+----------+--------+-------+ + + If one or more keys are specified then the resulting table will + automatically have :code:`row_names` set to those keys. + + This is the opposite of :meth:`.Table.normalize`. + + :param key: + A column name or a sequence of column names that should be + maintained as they are in the normalized table. Typically these + are the tables unique identifiers and any metadata about them. Or, + :code:`None` if there are no key columns. + :param field_column: + The column whose values should become column names in the new table. + :param property_column: + The column whose values should become the values of the property + columns in the new table. + :param default_value: + Value to be used for missing values in the pivot table. If not + specified :code:`Decimal(0)` will be used for aggregations that + return :class:`.Number` data and :code:`None` will be used for + all others. + :param column_types: + A sequence of column types with length equal to number of unique + values in field_column or an instance of :class:`.TypeTester`. + Defaults to a generic :class:`.TypeTester`. + :returns: + A new :class:`.Table`. 
+ """ + from agate.table import Table + + if key is None: + key = [] + elif not utils.issequence(key): + key = [key] + + field_names = [] + row_data = OrderedDict() + + for row in self.rows: + row_key = tuple(row[k] for k in key) + + if row_key not in row_data: + row_data[row_key] = OrderedDict() + + f = six.text_type(row[property_column]) + v = row[value_column] + + if f not in field_names: + field_names.append(f) + + row_data[row_key][f] = v + + if default_value == utils.default: + if isinstance(self.columns[value_column].data_type, Number): + default_value = Decimal(0) + else: + default_value = None + + new_column_names = key + field_names + + new_rows = [] + row_names = [] + + for k, v in row_data.items(): + row = list(k) + + if len(k) == 1: + row_names.append(k[0]) + else: + row_names.append(k) + + for f in field_names: + if f in v: + row.append(v[f]) + else: + row.append(default_value) + + new_rows.append(Row(row, new_column_names)) + + key_column_types = [self.column_types[self.column_names.index(name)] for name in key] + + if column_types is None or isinstance(column_types, TypeTester): + tester = TypeTester() if column_types is None else column_types + force_update = dict(zip(key, key_column_types)) + force_update.update(tester._force) + tester._force = force_update + + new_column_types = tester.run(new_rows, new_column_names) + else: + new_column_types = key_column_types + list(column_types) + + return Table(new_rows, new_column_names, new_column_types, row_names=row_names) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/distinct.py b/dbt-env/lib/python3.8/site-packages/agate/table/distinct.py new file mode 100644 index 0000000..a991bc2 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/distinct.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from agate import utils + + +def distinct(self, key=None): + """ + Create a new table with only unique rows. + + :param key: + Either the name of a single column to use to identify unique rows, a + sequence of such column names, a :class:`function` that takes a + row and returns a value to identify unique rows, or `None`, in + which case the entire row will be checked for uniqueness. + :returns: + A new :class:`.Table`. + """ + key_is_row_function = hasattr(key, '__call__') + key_is_sequence = utils.issequence(key) + + uniques = [] + rows = [] + + if self._row_names is not None: + row_names = [] + else: + row_names = None + + for i, row in enumerate(self._rows): + if key_is_row_function: + k = key(row) + elif key_is_sequence: + k = (row[j] for j in key) + elif key is None: + k = tuple(row) + else: + k = row[key] + + if k not in uniques: + uniques.append(k) + rows.append(row) + + if self._row_names is not None: + row_names.append(self._row_names[i]) + + return self._fork(rows, row_names=row_names) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/exclude.py b/dbt-env/lib/python3.8/site-packages/agate/table/exclude.py new file mode 100644 index 0000000..1e71350 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/exclude.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from agate import utils + + +def exclude(self, key): + """ + Create a new table without the specified columns. + + :param key: + Either the name of a single column to exclude or a sequence of such + names. + :returns: + A new :class:`.Table`. 
+ """ + if not utils.issequence(key): + key = [key] + + selected_column_names = tuple(n for n in self._column_names if n not in key) + + return self.select(selected_column_names) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/find.py b/dbt-env/lib/python3.8/site-packages/agate/table/find.py new file mode 100644 index 0000000..d11ab1d --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/find.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + + +def find(self, test): + """ + Find the first row that passes a test. + + :param test: + A function that takes a :class:`.Row` and returns :code:`True` if + it matches. + :type test: + :class:`function` + :returns: + A single :class:`.Row` if found, or `None`. + """ + for row in self._rows: + if test(row): + return row + + return None diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/from_csv.py b/dbt-env/lib/python3.8/site-packages/agate/table/from_csv.py new file mode 100644 index 0000000..6c2f803 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/from_csv.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python + +import io + +import six + + +@classmethod +def from_csv(cls, path, column_names=None, column_types=None, row_names=None, skip_lines=0, header=True, sniff_limit=0, encoding='utf-8', **kwargs): + """ + Create a new table from a CSV. + + This method uses agate's builtin CSV reader, which supplies encoding + support for both Python 2 and Python 3. + + :code:`kwargs` will be passed through to the CSV reader. + + :param path: + Filepath or file-like object from which to read CSV data. If a file-like + object is specified, it must be seekable. If using Python 2, the file + should be opened in binary mode (`rb`). + :param column_names: + See :meth:`.Table.__init__`. + :param column_types: + See :meth:`.Table.__init__`. + :param row_names: + See :meth:`.Table.__init__`. + :param skip_lines: + The number of lines to skip from the top of the file. Note that skip + lines will not work with + :param header: + If :code:`True`, the first row of the CSV is assumed to contain column + names. If :code:`header` and :code:`column_names` are both specified + then a row will be skipped, but :code:`column_names` will be used. + :param sniff_limit: + Limit CSV dialect sniffing to the specified number of bytes. Set to + None to sniff the entire file. Defaults to 0 (no sniffing). + :param encoding: + Character encoding of the CSV file. Note: if passing in a file + handle it is assumed you have already opened it with the correct + encoding specified. 
+ """ + from agate import csv + from agate.table import Table + + close = False + + if hasattr(path, 'read'): + f = path + else: + if six.PY2: + f = open(path, 'Urb') + else: + f = io.open(path, encoding=encoding) + + close = True + + if isinstance(skip_lines, int): + while skip_lines > 0: + f.readline() + skip_lines -= 1 + else: + raise ValueError('skip_lines argument must be an int') + + contents = six.StringIO(f.read()) + + if sniff_limit is None: + kwargs['dialect'] = csv.Sniffer().sniff(contents.getvalue()) + elif sniff_limit > 0: + kwargs['dialect'] = csv.Sniffer().sniff(contents.getvalue()[:sniff_limit]) + + if six.PY2: + kwargs['encoding'] = encoding + + reader = csv.reader(contents, header=header, **kwargs) + + if header: + if column_names is None: + column_names = next(reader) + else: + next(reader) + + rows = tuple(reader) + + if close: + f.close() + + return Table(rows, column_names, column_types, row_names=row_names) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/from_fixed.py b/dbt-env/lib/python3.8/site-packages/agate/table/from_fixed.py new file mode 100644 index 0000000..7a05bd7 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/from_fixed.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python + +import io + +from agate import fixed +from agate import utils + + +@classmethod +def from_fixed(cls, path, schema_path, column_names=utils.default, column_types=None, row_names=None, encoding='utf-8', schema_encoding='utf-8'): + """ + Create a new table from a fixed-width file and a CSV schema. + + Schemas must be in the "ffs" format. There is a repository of such schemas + maintained at `wireservice/ffs `_. + + :param path: + File path or file-like object from which to read fixed-width data. + :param schema_path: + File path or file-like object from which to read schema (CSV) data. + :param column_names: + By default, these will be parsed from the schema. For alternatives, see + :meth:`.Table.__init__`. + :param column_types: + See :meth:`.Table.__init__`. + :param row_names: + See :meth:`.Table.__init__`. + :param encoding: + Character encoding of the fixed-width file. Note: if passing in a file + handle it is assumed you have already opened it with the correct + encoding specified. + :param schema_encoding: + Character encoding of the schema file. Note: if passing in a file + handle it is assumed you have already opened it with the correct + encoding specified. 
+ """ + from agate.table import Table + + close_f = False + + if not hasattr(path, 'read'): + f = io.open(path, encoding=encoding) + close_f = True + else: + f = path + + close_schema_f = False + + if not hasattr(schema_path, 'read'): + schema_f = io.open(schema_path, encoding=schema_encoding) + close_schema_f = True + else: + schema_f = path + + reader = fixed.reader(f, schema_f) + rows = list(reader) + + if close_f: + f.close() + + if close_schema_f: + schema_f.close() + + if column_names == utils.default: + column_names = reader.fieldnames + + return Table(rows, column_names, column_types, row_names=row_names) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/from_json.py b/dbt-env/lib/python3.8/site-packages/agate/table/from_json.py new file mode 100644 index 0000000..4195672 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/from_json.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python + +from collections import OrderedDict +from decimal import Decimal +import json + + +@classmethod +def from_json(cls, path, row_names=None, key=None, newline=False, column_types=None, **kwargs): + """ + Create a new table from a JSON file. + + Once the JSON has been deseralized, the resulting Python object is + passed to :meth:`.Table.from_object`. + + If the file contains a top-level dictionary you may specify what + property contains the row list using the :code:`key` parameter. + + :code:`kwargs` will be passed through to :meth:`json.load`. + + :param path: + Filepath or file-like object from which to read JSON data. + :param row_names: + See the :meth:`.Table.__init__`. + :param key: + The key of the top-level dictionary that contains a list of row + arrays. + :param newline: + If `True` then the file will be parsed as "newline-delimited JSON". + :param column_types: + See :meth:`.Table.__init__`. + """ + from agate.table import Table + + if key is not None and newline: + raise ValueError('key and newline may not be specified together.') + + if newline: + js = [] + + if hasattr(path, 'read'): + for line in path: + js.append(json.loads(line, object_pairs_hook=OrderedDict, parse_float=Decimal, **kwargs)) + else: + with open(path, 'r') as f: + for line in f: + js.append(json.loads(line, object_pairs_hook=OrderedDict, parse_float=Decimal, **kwargs)) + else: + if hasattr(path, 'read'): + js = json.load(path, object_pairs_hook=OrderedDict, parse_float=Decimal, **kwargs) + else: + with open(path, 'r') as f: + js = json.load(f, object_pairs_hook=OrderedDict, parse_float=Decimal, **kwargs) + + if isinstance(js, dict): + if not key: + raise TypeError('When converting a JSON document with a top-level dictionary element, a key must be specified.') + + js = js[key] + + return Table.from_object(js, row_names=row_names, column_types=column_types) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/from_object.py b/dbt-env/lib/python3.8/site-packages/agate/table/from_object.py new file mode 100644 index 0000000..5675d7a --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/from_object.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python + +from agate import utils + + +@classmethod +def from_object(cls, obj, row_names=None, column_types=None): + """ + Create a new table from a Python object. + + The object should be a list containing a dictionary for each "row". + Nested objects or lists will also be parsed. For example, this object: + + .. 
code-block:: python + + { + 'one': { + 'a': 1, + 'b': 2, + 'c': 3 + }, + 'two': [4, 5, 6], + 'three': 'd' + } + + Would generate these columns and values: + + .. code-block:: python + + { + 'one/a': 1, + 'one/b': 2, + 'one/c': 3, + 'two.0': 4, + 'two.1': 5, + 'two.2': 6, + 'three': 'd' + } + + Column names and types will be inferred from the data. + + Not all rows are required to have the same keys. Missing elements will + be filled in with null values. + + :param obj: + Filepath or file-like object from which to read JSON data. + :param row_names: + See :meth:`.Table.__init__`. + :param column_types: + See :meth:`.Table.__init__`. + """ + from agate.table import Table + + column_names = [] + row_objects = [] + + for sub in obj: + parsed = utils.parse_object(sub) + + for key in parsed.keys(): + if key not in column_names: + column_names.append(key) + + row_objects.append(parsed) + + rows = [] + + for sub in row_objects: + r = [] + + for name in column_names: + r.append(sub.get(name, None)) + + rows.append(r) + + return Table(rows, column_names, row_names=row_names, column_types=column_types) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/group_by.py b/dbt-env/lib/python3.8/site-packages/agate/table/group_by.py new file mode 100644 index 0000000..0a7d8e2 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/group_by.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from collections import OrderedDict + +from agate.data_types import Text +from agate.tableset import TableSet + + +def group_by(self, key, key_name=None, key_type=None): + """ + Create a :class:`.TableSet` with a table for each unique key. + + Note that group names will always be coerced to a string, regardless of the + format of the input column. + + :param key: + Either the name of a column from the this table to group by, or a + :class:`function` that takes a row and returns a value to group by. + :param key_name: + A name that describes the grouped properties. Defaults to the + column name that was grouped on or "group" if grouping with a key + function. See :class:`.TableSet` for more. + :param key_type: + An instance of any subclass of :class:`.DataType`. If not provided + it will default to a :class`.Text`. + :returns: + A :class:`.TableSet` mapping where the keys are unique values from + the :code:`key` and the values are new :class:`.Table` instances + containing the grouped rows. 
+ """ + key_is_row_function = hasattr(key, '__call__') + + if key_is_row_function: + key_name = key_name or 'group' + key_type = key_type or Text() + else: + column = self._columns[key] + + key_name = key_name or column.name + key_type = key_type or column.data_type + + groups = OrderedDict() + + for row in self._rows: + if key_is_row_function: + group_name = key(row) + else: + group_name = row[column.name] + + group_name = key_type.cast(group_name) + + if group_name not in groups: + groups[group_name] = [] + + groups[group_name].append(row) + + output = OrderedDict() + + for group, rows in groups.items(): + output[group] = self._fork(rows) + + return TableSet(output.values(), output.keys(), key_name=key_name, key_type=key_type) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/homogenize.py b/dbt-env/lib/python3.8/site-packages/agate/table/homogenize.py new file mode 100644 index 0000000..a6c2c45 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/homogenize.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from agate.rows import Row +from agate import utils + + +def homogenize(self, key, compare_values, default_row=None): + """ + Fill in missing rows in a series. + + This can be used, for instance, to add rows for missing years in a time + series. + + Missing rows are found by comparing the values in the :code:`key` columns + with those provided as :code:`compare_values`. + + Values not found in the table will be used to generate new rows with + the given :code:`default_row`. + + :code:`default_row` should be an array of values or an array-generating + function. If not specified, the new rows will have :code:`None` in columns + all columns not specified in :code:`key`. + + If :code:`default_row` is an array of values, its length should be row + length minus the number of column names provided in the :code:`key`. + + If it is an array-generating function, the function should take an array + of missing values for each new row and output a full row including those + values. + + :param key: + Either a column name or a sequence of such names. + :param compare_values: + Either an array of column values if key is a single column name or a + sequence of arrays of values if key is a sequence of names. It can + also be a generator that yields either of the two. A row is created for + each value or list of values not found in the rows of the table. + :param default_row: + An array of values or a function to generate new rows. The length of + the input array should be equal to row length minus column_names + count. The length of array generated by the function should be the + row length. + :returns: + A new :class:`.Table`. 
+ """ + rows = list(self._rows) + + if not utils.issequence(key): + key = [key] + + if len(key) == 1: + if any(not utils.issequence(compare_value) for compare_value in compare_values): + compare_values = [[compare_value] for compare_value in compare_values] + + column_values = [self._columns.get(name) for name in key] + column_indexes = [self._column_names.index(name) for name in key] + + column_values = zip(*column_values) + differences = list(set(map(tuple, compare_values)) - set(column_values)) + + for difference in differences: + if callable(default_row): + rows.append(Row(default_row(difference), self._column_names)) + else: + if default_row is not None: + new_row = default_row + else: + new_row = [None] * (len(self._column_names) - len(key)) + + for i, d in zip(column_indexes, difference): + new_row.insert(i, d) + + rows.append(Row(new_row, self._column_names)) + + return self._fork(rows) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/join.py b/dbt-env/lib/python3.8/site-packages/agate/table/join.py new file mode 100644 index 0000000..48ee5ca --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/join.py @@ -0,0 +1,213 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from agate.rows import Row +from agate import utils + + +def join(self, right_table, left_key=None, right_key=None, inner=False, full_outer=False, require_match=False, columns=None): + """ + Create a new table by joining two table's on common values. This method + implements most varieties of SQL join, in addition to some unique features. + + If :code:`left_key` and :code:`right_key` are both :code:`None` then this + method will peform a "sequential join", which is to say it will join on row + number. The :code:`inner` and :code:`full_outer` arguments will determine + whether dangling left-hand and right-hand rows are included, respectively. + + If :code:`left_key` is specified, then a "left outer join" will be + performed. This will combine columns from the :code:`right_table` anywhere + that :code:`left_key` and :code:`right_key` are equal. Unmatched rows from + the left table will be included with the right-hand columns set to + :code:`None`. + + If :code:`inner` is :code:`True` then an "inner join" will be performed. + Unmatched rows from either table will be left out. + + If :code:`full_outer` is :code:`True` then a "full outer join" will be + performed. Unmatched rows from both tables will be included, with the + columns in the other table set to :code:`None`. + + In all cases, if :code:`right_key` is :code:`None` then it :code:`left_key` + will be used for both tables. + + If :code:`left_key` and :code:`right_key` are column names, the right-hand + identifier column will not be included in the output table. + + If :code:`require_match` is :code:`True` unmatched rows will raise an + exception. This is like an "inner join" except any row that doesn't have a + match will raise an exception instead of being dropped. This is useful for + enforcing expectations about datasets that should match. + + Column names from the right table which also exist in this table will + be suffixed "2" in the new table. + + A subset of columns from the right-hand table can be included in the joined + table using the :code:`columns` argument. + + :param right_table: + The "right" table to join to. 
+ :param left_key: + Either the name of a column from the this table to join on, the index + of a column, a sequence of such column identifiers, a + :class:`function` that takes a row and returns a value to join on, or + :code:`None` in which case the tables will be joined on row number. + :param right_key: + Either the name of a column from :code:table` to join on, the index of + a column, a sequence of such column identifiers, or a :class:`function` + that takes a ow and returns a value to join on. If :code:`None` then + :code:`left_key` will be used for both. If :code:`left_key` is + :code:`None` then this value is ignored. + :param inner: + Perform a SQL-style "inner join" instead of a left outer join. Rows + which have no match for :code:`left_key` will not be included in + the output table. + :param full_outer: + Perform a SQL-style "full outer" join rather than a left or a right. + May not be used in combination with :code:`inner`. + :param require_match: + If true, an exception will be raised if there is a left_key with no + matching right_key. + :param columns: + A sequence of column names from :code:`right_table` to include in + the final output table. Defaults to all columns not in + :code:`right_key`. Ignored when :code:`full_outer` is :code:`True`. + :returns: + A new :class:`.Table`. + """ + if inner and full_outer: + raise ValueError('A join can not be both "inner" and "full_outer".') + + if right_key is None: + right_key = left_key + + # Get join columns + right_key_indices = [] + + left_key_is_func = hasattr(left_key, '__call__') + left_key_is_sequence = utils.issequence(left_key) + + # Left key is None + if left_key is None: + left_data = tuple(range(len(self._rows))) + # Left key is a function + elif left_key_is_func: + left_data = [left_key(row) for row in self._rows] + # Left key is a sequence + elif left_key_is_sequence: + left_columns = [self._columns[key] for key in left_key] + left_data = zip(*[column.values() for column in left_columns]) + # Left key is a column name/index + else: + left_data = self._columns[left_key].values() + + right_key_is_func = hasattr(right_key, '__call__') + right_key_is_sequence = utils.issequence(right_key) + + # Sequential join + if left_key is None: + right_data = tuple(range(len(right_table._rows))) + # Right key is a function + elif right_key_is_func: + right_data = [right_key(row) for row in right_table._rows] + # Right key is a sequence + elif right_key_is_sequence: + right_columns = [right_table._columns[key] for key in right_key] + right_data = zip(*[column.values() for column in right_columns]) + right_key_indices = [right_table._columns._keys.index(key) for key in right_key] + # Right key is a column name/index + else: + right_column = right_table._columns[right_key] + right_data = right_column.values() + right_key_indices = [right_table._columns.index(right_column)] + + # Build names and type lists + column_names = list(self._column_names) + column_types = list(self._column_types) + + for i, column in enumerate(right_table._columns): + name = column.name + + if not full_outer: + if columns is None and i in right_key_indices: + continue + + if columns is not None and name not in columns: + continue + + if name in self.column_names: + column_names.append('%s2' % name) + else: + column_names.append(name) + + column_types.append(column.data_type) + + if columns is not None and not full_outer: + right_table = right_table.select([n for n in right_table._column_names if n in columns]) + + right_hash = {} + + for i, value in 
enumerate(right_data): + if value not in right_hash: + right_hash[value] = [] + + right_hash[value].append(right_table._rows[i]) + + # Collect new rows + rows = [] + + if self._row_names is not None and not full_outer: + row_names = [] + else: + row_names = None + + # Iterate over left column + for left_index, left_value in enumerate(left_data): + matching_rows = right_hash.get(left_value, None) + + if require_match and matching_rows is None: + raise ValueError('Left key "%s" does not have a matching right key.' % left_value) + + # Rows with matches + if matching_rows: + for right_row in matching_rows: + new_row = list(self._rows[left_index]) + + for k, v in enumerate(right_row): + if columns is None and k in right_key_indices and not full_outer: + continue + + new_row.append(v) + + rows.append(Row(new_row, column_names)) + + if self._row_names is not None and not full_outer: + row_names.append(self._row_names[left_index]) + # Rows without matches + elif not inner: + new_row = list(self._rows[left_index]) + + for k, v in enumerate(right_table._column_names): + if columns is None and k in right_key_indices and not full_outer: + continue + + new_row.append(None) + + rows.append(Row(new_row, column_names)) + + if self._row_names is not None and not full_outer: + row_names.append(self._row_names[left_index]) + + # Full outer join + if full_outer: + left_set = set(left_data) + + for right_index, right_value in enumerate(right_data): + if right_value in left_set: + continue + + new_row = ([None] * len(self._columns)) + list(right_table.rows[right_index]) + + rows.append(Row(new_row, column_names)) + + return self._fork(rows, column_names, column_types, row_names=row_names) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/limit.py b/dbt-env/lib/python3.8/site-packages/agate/table/limit.py new file mode 100644 index 0000000..adc2988 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/limit.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + + +def limit(self, start_or_stop=None, stop=None, step=None): + """ + Create a new table with fewer rows. + + See also: Python's builtin :func:`slice`. + + :param start_or_stop: + If the only argument, then how many rows to include, otherwise, + the index of the first row to include. + :param stop: + The index of the last row to include. + :param step: + The size of the jump between rows to include. (`step=2` will return + every other row.) + :returns: + A new :class:`.Table`. + """ + if stop or step: + s = slice(start_or_stop, stop, step) + else: + s = slice(start_or_stop) + + rows = self._rows[s] + + if self._row_names is not None: + row_names = self._row_names[s] + else: + row_names = None + + return self._fork(rows, row_names=row_names) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/line_chart.py b/dbt-env/lib/python3.8/site-packages/agate/table/line_chart.py new file mode 100644 index 0000000..6e4c680 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/line_chart.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import leather + +from agate import utils + + +def line_chart(self, x=0, y=1, path=None, width=None, height=None): + """ + Render a line chart using :class:`leather.Chart`. + + :param x: + The name or index of a column to plot as the x-axis. Defaults to the + first column in the table. + :param y: + The name or index of a column to plot as the y-axis. Defaults to the + second column in the table. 
+ :param path: + If specified, the resulting SVG will be saved to this location. If + :code:`None` and running in IPython, then the SVG will be rendered + inline. Otherwise, the SVG data will be returned as a string. + :param width: + The width of the output SVG. + :param height: + The height of the output SVG. + """ + if type(x) is int: + x_name = self.column_names[x] + else: + x_name = x + + if type(y) is int: + y_name = self.column_names[y] + else: + y_name = y + + chart = leather.Chart() + chart.add_x_axis(name=x_name) + chart.add_y_axis(name=y_name) + chart.add_line(self, x=x, y=y) + + return chart.to_svg(path=path, width=width, height=height) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/merge.py b/dbt-env/lib/python3.8/site-packages/agate/table/merge.py new file mode 100644 index 0000000..d47fbc7 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/merge.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from collections import OrderedDict + +from agate.exceptions import DataTypeError +from agate.rows import Row + + +@classmethod +def merge(cls, tables, row_names=None, column_names=None): + """ + Create a new table from a sequence of similar tables. + + This method will not carry over row names from the merged tables, but new + row names can be specified with the :code:`row_names` argument. + + It is possible to limit the columns included in the new :class:`.Table` + with :code:`column_names` argument. For example, to only include columns + from a specific table, set :code:`column_names` equal to + :code:`table.column_names`. + + :param tables: + An sequence of :class:`.Table` instances. + :param row_names: + See :class:`.Table` for the usage of this parameter. + :param column_names: + A sequence of column names to include in the new :class:`.Table`. If + not specified, all distinct column names from `tables` are included. + :returns: + A new :class:`.Table`. 
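A minimal sketch of the Table.merge classmethod documented above, assuming two small hypothetical tables that share the same column names and types:

    import agate

    column_names = ['letter', 'number']
    column_types = [agate.Text(), agate.Number()]

    first = agate.Table([('a', 1), ('b', 2)], column_names, column_types)
    second = agate.Table([('c', 3)], column_names, column_types)

    # Rows from both tables are stacked into a single new table.
    combined = agate.Table.merge([first, second])
    print(len(combined.rows))  # 3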
+ """ + from agate.table import Table + + new_columns = OrderedDict() + + for table in tables: + for i in range(0, len(table.columns)): + if column_names is None or table.column_names[i] in column_names: + column_name = table.column_names[i] + column_type = table.column_types[i] + + if column_name in new_columns: + if not isinstance(column_type, type(new_columns[column_name])): + raise DataTypeError('Tables contain columns with the same names, but different types.') + else: + new_columns[column_name] = column_type + + column_keys = tuple(new_columns.keys()) + column_types = tuple(new_columns.values()) + + rows = [] + + for table in tables: + # Performance optimization for identical table structures + if table.column_names == column_keys and table.column_types == column_types: + rows.extend(table.rows) + else: + for row in table.rows: + data = [] + + for column_key in column_keys: + data.append(row.get(column_key, None)) + + rows.append(Row(data, column_keys)) + + return Table(rows, column_keys, column_types, row_names=row_names, _is_fork=True) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/normalize.py b/dbt-env/lib/python3.8/site-packages/agate/table/normalize.py new file mode 100644 index 0000000..3b941f5 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/normalize.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from agate.type_tester import TypeTester +from agate.rows import Row +from agate import utils + + +def normalize(self, key, properties, property_column='property', value_column='value', column_types=None): + """ + Create a new table with columns converted into rows values. + + For example: + + +---------+----------+--------+-------+ + | name | gender | race | age | + +=========+==========+========+=======+ + | Jane | female | black | 24 | + +---------+----------+--------+-------+ + | Jack | male | white | 35 | + +---------+----------+--------+-------+ + | Joe | male | black | 28 | + +---------+----------+--------+-------+ + + can be normalized on columns 'gender', 'race' and 'age': + + +---------+-----------+---------+ + | name | property | value | + +=========+===========+=========+ + | Jane | gender | female | + +---------+-----------+---------+ + | Jane | race | black | + +---------+-----------+---------+ + | Jane | age | 24 | + +---------+-----------+---------+ + | ... | ... | ... | + +---------+-----------+---------+ + + This is the opposite of :meth:`.Table.denormalize`. + + :param key: + A column name or a sequence of column names that should be + maintained as they are in the normalized self. Typically these + are the tables unique identifiers and any metadata about them. + :param properties: + A column name or a sequence of column names that should be + converted to properties in the new self. + :param property_column: + The name to use for the column containing the property names. + :param value_column: + The name to use for the column containing the property values. + :param column_types: + A sequence of two column types for the property and value column in + that order or an instance of :class:`.TypeTester`. Defaults to a + generic :class:`.TypeTester`. + :returns: + A new :class:`.Table`. 
+ """ + from agate.table import Table + + new_rows = [] + + if not utils.issequence(key): + key = [key] + + if not utils.issequence(properties): + properties = [properties] + + new_column_names = key + [property_column, value_column] + + row_names = [] + + for row in self._rows: + k = tuple(row[n] for n in key) + left_row = list(k) + + if len(k) == 1: + row_names.append(k[0]) + else: + row_names.append(k) + + for f in properties: + new_rows.append(Row((left_row + [f, row[f]]), new_column_names)) + + key_column_types = [self._column_types[self._column_names.index(name)] for name in key] + + if column_types is None or isinstance(column_types, TypeTester): + tester = TypeTester() if column_types is None else column_types + force_update = dict(zip(key, key_column_types)) + force_update.update(tester._force) + tester._force = force_update + + new_column_types = tester.run(new_rows, new_column_names) + else: + new_column_types = key_column_types + list(column_types) + + return Table(new_rows, new_column_names, new_column_types, row_names=row_names) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/order_by.py b/dbt-env/lib/python3.8/site-packages/agate/table/order_by.py new file mode 100644 index 0000000..80f93ce --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/order_by.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from agate import utils + + +def order_by(self, key, reverse=False): + """ + Create a new table that is sorted. + + :param key: + Either the name of a single column to sort by, a sequence of such + names, or a :class:`function` that takes a row and returns a value + to sort by. + :param reverse: + If `True` then sort in reverse (typically, descending) order. + :returns: + A new :class:`.Table`. + """ + if len(self._rows) == 0: + return self._fork(self._rows) + else: + key_is_row_function = hasattr(key, '__call__') + key_is_sequence = utils.issequence(key) + + def sort_key(data): + row = data[1] + + if key_is_row_function: + k = key(row) + elif key_is_sequence: + k = tuple(utils.NullOrder() if row[n] is None else row[n] for n in key) + else: + k = row[key] + + if k is None: + return utils.NullOrder() + + return k + + results = sorted(enumerate(self._rows), key=sort_key, reverse=reverse) + + indices, rows = zip(*results) + + if self._row_names is not None: + row_names = [self._row_names[i] for i in indices] + else: + row_names = None + + return self._fork(rows, row_names=row_names) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/pivot.py b/dbt-env/lib/python3.8/site-packages/agate/table/pivot.py new file mode 100644 index 0000000..32d2548 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/pivot.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import six + +from agate.aggregations import Count +from agate import utils + + +def pivot(self, key=None, pivot=None, aggregation=None, computation=None, default_value=utils.default, key_name=None): + """ + Create a new table by grouping the data, aggregating those groups, + applying a computation, and then organizing the groups into new rows and + columns. + + This is sometimes called a "crosstab". 
+ + +---------+---------+--------+ + | name | race | gender | + +=========+=========+========+ + | Joe | white | male | + +---------+---------+--------+ + | Jane | black | female | + +---------+---------+--------+ + | Josh | black | male | + +---------+---------+--------+ + | Jim | asian | female | + +---------+---------+--------+ + + This table can be pivoted with :code:`key` equal to "race" and + :code:`columns` equal to "gender". The default aggregation is + :class:`.Count`. This would result in the following table. + + +---------+---------+--------+ + | race | male | female | + +=========+=========+========+ + | white | 1 | 0 | + +---------+---------+--------+ + | black | 1 | 1 | + +---------+---------+--------+ + | asian | 0 | 1 | + +---------+---------+--------+ + + If one or more keys are specified then the resulting table will + automatically have :code:`row_names` set to those keys. + + See also the related method :meth:`.Table.denormalize`. + + :param key: + Either the name of a column from the this table to group by, a + sequence of such column names, a :class:`function` that takes a + row and returns a value to group by, or :code:`None`, in which case + there will be only a single row in the output table. + :param pivot: + A column name whose unique values will become columns in the new + table, or :code:`None` in which case there will be a single value + column in the output table. + :param aggregation: + An instance of an :class:`.Aggregation` to perform on each group of + data in the pivot table. (Each cell is the result of an aggregation + of the grouped data.) + + If not specified this defaults to :class:`.Count` with no arguments. + :param computation: + An optional :class:`.Computation` instance to be applied to the + aggregated sequence of values before they are transposed into the + pivot table. + + Use the class name of the aggregation as your column name argument + when constructing your computation. (This is "Count" if using the + default value for :code:`aggregation`.) + :param default_value: + Value to be used for missing values in the pivot table. Defaults to + :code:`Decimal(0)`. If performing non-mathematical aggregations you + may wish to set this to :code:`None`. + :param key_name: + A name for the key column in the output table. This is most + useful when the provided key is a function. This argument is not + valid when :code:`key` is a sequence. + :returns: + A new :class:`.Table`. 
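A sketch of Table.pivot mirroring the race/gender example above, using the default Count aggregation; the data is hypothetical:

    import agate

    people = agate.Table(
        [('Joe', 'white', 'male'), ('Jane', 'black', 'female'),
         ('Josh', 'black', 'male'), ('Jim', 'asian', 'female')],
        ['name', 'race', 'gender'],
        [agate.Text(), agate.Text(), agate.Text()]
    )

    # One row per race, one column per gender value, cells hold counts.
    counts = people.pivot('race', 'gender')
    counts.print_table()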
+ """ + if key is None: + key = [] + elif not utils.issequence(key): + key = [key] + elif key_name: + raise ValueError('key_name is not a valid argument when key is a sequence.') + + if aggregation is None: + aggregation = Count() + + groups = self + + for k in key: + groups = groups.group_by(k, key_name=key_name) + + aggregation_name = six.text_type(aggregation) + computation_name = six.text_type(computation) if computation else None + + def apply_computation(table): + computed = table.compute([ + (computation_name, computation) + ]) + + excluded = computed.exclude([aggregation_name]) + + return excluded + + if pivot is not None: + groups = groups.group_by(pivot) + + column_type = aggregation.get_aggregate_data_type(groups) + + table = groups.aggregate([ + (aggregation_name, aggregation) + ]) + + pivot_count = len(set(table.columns[pivot].values())) + + if computation is not None: + column_types = computation.get_computed_data_type(table) + table = apply_computation(table) + + column_types = [column_type] * pivot_count + + table = table.denormalize(key, pivot, computation_name or aggregation_name, default_value=default_value, column_types=column_types) + else: + table = groups.aggregate([ + (aggregation_name, aggregation) + ]) + + if computation: + table = apply_computation(table) + + return table diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/print_bars.py b/dbt-env/lib/python3.8/site-packages/agate/table/print_bars.py new file mode 100644 index 0000000..838d42a --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/print_bars.py @@ -0,0 +1,249 @@ +#!/usr/bin/env python +# -*- coding: utf8 -*- +# pylint: disable=W0212 + +from collections import OrderedDict + +try: + from cdecimal import Decimal +except ImportError: # pragma: no cover + from decimal import Decimal + +import sys + + +from babel.numbers import format_decimal +import six + +from agate.aggregations import Min, Max +from agate import config +from agate.data_types import Number +from agate.exceptions import DataTypeError +from agate import utils + + +def print_bars(self, label_column_name='group', value_column_name='Count', domain=None, width=120, output=sys.stdout, printable=False): + """ + Print a text-based bar chart based on this table. + + :param label_column_name: + The column containing the label values. Defaults to :code:`group`, which + is the default output of :meth:`.Table.pivot` or :meth:`.Table.bins`. + :param value_column_name: + The column containing the bar values. Defaults to :code:`Count`, which + is the default output of :meth:`.Table.pivot` or :meth:`.Table.bins`. + :param domain: + A 2-tuple containing the minimum and maximum values for the chart's + x-axis. The domain must be large enough to contain all values in + the column. + :param width: + The width, in characters, to use for the bar chart. Defaults to + :code:`120`. + :param output: + A file-like object to print to. Defaults to :code:`sys.stdout`. + :param printable: + If true, only printable characters will be outputed. 
+ """ + tick_mark = config.get_option('tick_char') + horizontal_line = config.get_option('horizontal_line_char') + locale = config.get_option('default_locale') + + if printable: + bar_mark = config.get_option('printable_bar_char') + zero_mark = config.get_option('printable_zero_line_char') + else: + bar_mark = config.get_option('bar_char') + zero_mark = config.get_option('zero_line_char') + + y_label = label_column_name + label_column = self._columns[label_column_name] + + # if not isinstance(label_column.data_type, Text): + # raise ValueError('Only Text data is supported for bar chart labels.') + + x_label = value_column_name + value_column = self._columns[value_column_name] + + if not isinstance(value_column.data_type, Number): + raise DataTypeError('Only Number data is supported for bar chart values.') + + output = output + width = width + + # Format numbers + decimal_places = utils.max_precision(value_column) + value_formatter = utils.make_number_formatter(decimal_places) + + formatted_labels = [] + + for label in label_column: + formatted_labels.append(six.text_type(label)) + + formatted_values = [] + for value in value_column: + if value is None: + formatted_values.append('-') + else: + formatted_values.append(format_decimal( + value, + format=value_formatter, + locale=locale + )) + + max_label_width = max(max([len(l) for l in formatted_labels]), len(y_label)) + max_value_width = max(max([len(v) for v in formatted_values]), len(x_label)) + + plot_width = width - (max_label_width + max_value_width + 2) + + min_value = Min(value_column_name).run(self) + max_value = Max(value_column_name).run(self) + + # Calculate dimensions + if domain: + x_min = Decimal(domain[0]) + x_max = Decimal(domain[1]) + + if min_value < x_min or max_value > x_max: + raise ValueError('Column contains values outside specified domain') + else: + x_min, x_max = utils.round_limits(min_value, max_value) + + # All positive + if x_min >= 0: + x_min = Decimal('0') + plot_negative_width = 0 + zero_line = 0 + plot_positive_width = plot_width - 1 + # All negative + elif x_max <= 0: + x_max = Decimal('0') + plot_negative_width = plot_width - 1 + zero_line = plot_width - 1 + plot_positive_width = 0 + # Mixed signs + else: + spread = x_max - x_min + negative_portion = (x_min.copy_abs() / spread) + + # Subtract one for zero line + plot_negative_width = int(((plot_width - 1) * negative_portion).to_integral_value()) + zero_line = plot_negative_width + plot_positive_width = plot_width - (plot_negative_width + 1) + + def project(value): + if value >= 0: + return plot_negative_width + int((plot_positive_width * (value / x_max)).to_integral_value()) + else: + return plot_negative_width - int((plot_negative_width * (value / x_min)).to_integral_value()) + + # Calculate ticks + ticks = OrderedDict() + + # First tick + ticks[0] = x_min + ticks[plot_width - 1] = x_max + + tick_fractions = [Decimal('0.25'), Decimal('0.5'), Decimal('0.75')] + + # All positive + if x_min >= 0: + for fraction in tick_fractions: + value = x_max * fraction + ticks[project(value)] = value + # All negative + elif x_max <= 0: + for fraction in tick_fractions: + value = x_min * fraction + ticks[project(value)] = value + # Mixed signs + else: + # Zero tick + ticks[zero_line] = Decimal('0') + + # Halfway between min and 0 + value = x_min * Decimal('0.5') + ticks[project(value)] = value + + # Halfway between 0 and max + value = x_max * Decimal('0.5') + ticks[project(value)] = value + + decimal_places = utils.max_precision(ticks.values()) + tick_formatter = 
utils.make_number_formatter(decimal_places) + + ticks_formatted = OrderedDict() + + for k, v in ticks.items(): + ticks_formatted[k] = format_decimal( + v, + format=tick_formatter, + locale=locale + ) + + def write(line): + output.write(line + '\n') + + # Chart top + top_line = u'%s %s' % (y_label.ljust(max_label_width), x_label.rjust(max_value_width)) + write(top_line) + + # Bars + for i, label in enumerate(formatted_labels): + value = value_column[i] + if value == 0 or value is None: + bar_width = 0 + elif value > 0: + bar_width = project(value) - plot_negative_width + elif value < 0: + bar_width = plot_negative_width - project(value) + + label_text = label.ljust(max_label_width) + value_text = formatted_values[i].rjust(max_value_width) + + bar = bar_mark * bar_width + + if value is not None and value >= 0: + gap = (u' ' * plot_negative_width) + + # All positive + if x_min <= 0: + bar = gap + zero_mark + bar + else: + bar = bar + gap + zero_mark + else: + bar = u' ' * (plot_negative_width - bar_width) + bar + + # All negative or mixed signs + if value is None or x_max > value: + bar = bar + zero_mark + + bar = bar.ljust(plot_width) + + write('%s %s %s' % (label_text, value_text, bar)) + + # Axis & ticks + axis = horizontal_line * plot_width + tick_text = u' ' * width + + for i, (tick, label) in enumerate(ticks_formatted.items()): + # First tick + if tick == 0: + offset = 0 + # Last tick + elif tick == plot_width - 1: + offset = -(len(label) - 1) + else: + offset = int(-(len(label) / 2)) + + pos = (width - plot_width) + tick + offset + + # Don't print intermediate ticks that would overlap + if tick != 0 and tick != plot_width - 1: + if tick_text[pos - 1:pos + len(label) + 1] != ' ' * (len(label) + 2): + continue + + tick_text = tick_text[:pos] + label + tick_text[pos + len(label):] + axis = axis[:tick] + tick_mark + axis[tick + 1:] + + write(axis.rjust(width)) + write(tick_text) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/print_html.py b/dbt-env/lib/python3.8/site-packages/agate/table/print_html.py new file mode 100644 index 0000000..411a6bd --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/print_html.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import sys + +from babel.numbers import format_decimal +import six + +from agate import config +from agate.data_types import Number, Text +from agate import utils + + +def print_html(self, max_rows=20, max_columns=6, output=sys.stdout, max_column_width=20, locale=None): + """ + Print an HTML version of this table. + + :param max_rows: + The maximum number of rows to display before truncating the data. This + defaults to :code:`20` to prevent accidental printing of the entire + table. Pass :code:`None` to disable the limit. + :param max_columns: + The maximum number of columns to display before truncating the data. + This defaults to :code:`6` to prevent wrapping in most cases. Pass + :code:`None` to disable the limit. + :param output: + A file-like object to print to. Defaults to :code:`sys.stdout`, unless + running in Jupyter. (See above.) + :param max_column_width: + Truncate all columns to at most this width. The remainder will be + replaced with ellipsis. + :param locale: + Provide a locale you would like to be used to format the output. + By default it will use the system's setting. 
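A sketch of print_html that captures the markup in a buffer instead of writing to stdout; the table contents are hypothetical:

    import io
    import agate

    table = agate.Table(
        [('a', 1), ('b', 2)],
        ['letter', 'number'],
        [agate.Text(), agate.Number()]
    )

    buf = io.StringIO()
    table.print_html(max_rows=10, output=buf)
    html = buf.getvalue()  # the rendered HTML table as a string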
+ """ + if max_rows is None: + max_rows = len(self._rows) + + if max_columns is None: + max_columns = len(self._columns) + + ellipsis = config.get_option('ellipsis_chars') + locale = locale or config.get_option('default_locale') + + rows_truncated = max_rows < len(self._rows) + columns_truncated = max_columns < len(self._column_names) + + column_names = list(self._column_names[:max_columns]) + + if columns_truncated: + column_names.append(ellipsis) + + number_formatters = [] + formatted_data = [] + + # Determine correct number of decimal places for each Number column + for i, c in enumerate(self._columns): + if i >= max_columns: + break + + if isinstance(c.data_type, Number): + max_places = utils.max_precision(c[:max_rows]) + number_formatters.append(utils.make_number_formatter(max_places)) + else: + number_formatters.append(None) + + # Format data + for i, row in enumerate(self._rows): + if i >= max_rows: + break + + formatted_row = [] + + for j, v in enumerate(row): + if j >= max_columns: + v = ellipsis + elif v is None: + v = '' + elif number_formatters[j] is not None: + v = format_decimal( + v, + format=number_formatters[j], + locale=locale + ) + else: + v = six.text_type(v) + + if max_column_width is not None and len(v) > max_column_width: + v = '%s...' % v[:max_column_width - 3] + + formatted_row.append(v) + + if j >= max_columns: + break + + formatted_data.append(formatted_row) + + def write(line): + output.write(line + '\n') + + def write_row(formatted_row): + """ + Helper function that formats individual rows. + """ + write('') + + for j, d in enumerate(formatted_row): + # Text is left-justified, all other values are right-justified + if isinstance(self._column_types[j], Text): + write('%s' % d) + else: + write('%s' % d) + + write('') + + # Header + write('') + write('') + write('') + + for i, col in enumerate(column_names): + write('' % col) + + write('') + write('') + write('') + + # Rows + for formatted_row in formatted_data: + write_row(formatted_row) + + # Row indicating data was truncated + if rows_truncated: + write_row([ellipsis for n in column_names]) + + # Footer + write('') + write('
%s
') diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/print_structure.py b/dbt-env/lib/python3.8/site-packages/agate/table/print_structure.py new file mode 100644 index 0000000..d099aea --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/print_structure.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import sys + +from agate.data_types import Text + + +def print_structure(self, output=sys.stdout, max_rows=None): + """ + Print this table's column names and types as a plain-text table. + + :param output: + The output to print to. + """ + from agate.table import Table + + name_column = [n for n in self._column_names] + type_column = [t.__class__.__name__ for t in self._column_types] + rows = zip(name_column, type_column) + column_names = ['column', 'data_type'] + text = Text() + column_types = [text, text] + + table = Table(rows, column_names, column_types) + + return table.print_table(output=output, max_column_width=None, max_rows=max_rows) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/print_table.py b/dbt-env/lib/python3.8/site-packages/agate/table/print_table.py new file mode 100644 index 0000000..4b29074 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/print_table.py @@ -0,0 +1,160 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import sys + +from babel.numbers import format_decimal +import six + +from agate import config +from agate.data_types import Number, Text +from agate import utils + + +def print_table(self, max_rows=20, max_columns=6, output=sys.stdout, max_column_width=20, locale=None, max_precision=3): + """ + Print a text-based view of the data in this table. + + The output of this method is Github Friendly Markdown (GFM) compatible. + + :param max_rows: + The maximum number of rows to display before truncating the data. This + defaults to :code:`20` to prevent accidental printing of the entire + table. Pass :code:`None` to disable the limit. + :param max_columns: + The maximum number of columns to display before truncating the data. + This defaults to :code:`6` to prevent wrapping in most cases. Pass + :code:`None` to disable the limit. + :param output: + A file-like object to print to. + :param max_column_width: + Truncate all columns to at most this width. The remainder will be + replaced with ellipsis. + :param locale: + Provide a locale you would like to be used to format the output. + By default it will use the system's setting. + :max_precision: + Puts a limit on the maximum precision displayed for number types. + Numbers with lesser precision won't be affected. + This defaults to :code:`3`. Pass :code:`None` to disable limit. + """ + if max_rows is None: + max_rows = len(self._rows) + + if max_columns is None: + max_columns = len(self._columns) + + if max_precision is None: + max_precision = float('inf') + + ellipsis = config.get_option('ellipsis_chars') + h_line = config.get_option('horizontal_line_char') + v_line = config.get_option('vertical_line_char') + locale = locale or config.get_option('default_locale') + + rows_truncated = max_rows < len(self._rows) + columns_truncated = max_columns < len(self._column_names) + column_names = [] + for column_name in self.column_names[:max_columns]: + if max_column_width is not None and len(column_name) > max_column_width: + column_names.append('%s...' 
% column_name[:max_column_width - 3]) + else: + column_names.append(column_name) + + if columns_truncated: + column_names.append(ellipsis) + + widths = [len(n) for n in column_names] + number_formatters = [] + formatted_data = [] + + # Determine correct number of decimal places for each Number column + for i, c in enumerate(self._columns): + if i >= max_columns: + break + + if isinstance(c.data_type, Number): + max_places = utils.max_precision(c[:max_rows]) + add_ellipsis = False + if max_places > max_precision: + add_ellipsis = True + max_places = max_precision + number_formatters.append(utils.make_number_formatter(max_places, add_ellipsis)) + else: + number_formatters.append(None) + + # Format data and display column widths + for i, row in enumerate(self._rows): + if i >= max_rows: + break + + formatted_row = [] + + for j, v in enumerate(row): + if j >= max_columns: + v = ellipsis + elif v is None: + v = '' + elif number_formatters[j] is not None: + v = format_decimal( + v, + format=number_formatters[j], + locale=locale + ) + else: + v = six.text_type(v) + + if max_column_width is not None and len(v) > max_column_width: + v = '%s...' % v[:max_column_width - 3] + + if len(v) > widths[j]: + widths[j] = len(v) + + formatted_row.append(v) + + if j >= max_columns: + break + + formatted_data.append(formatted_row) + + def write(line): + output.write(line + '\n') + + def write_row(formatted_row): + """ + Helper function that formats individual rows. + """ + row_output = [] + + for j, d in enumerate(formatted_row): + # Text is left-justified, all other values are right-justified + if isinstance(self._column_types[j], Text): + output = ' %s ' % d.ljust(widths[j]) + else: + output = ' %s ' % d.rjust(widths[j]) + + row_output.append(output) + + text = v_line.join(row_output) + + write('%s%s%s' % (v_line, text, v_line)) + + # Dashes span each width with '+' character at intersection of + # horizontal and vertical dividers. + divider = '%(v_line)s %(columns)s %(v_line)s' % { + 'h_line': h_line, + 'v_line': v_line, + 'columns': ' | '.join(h_line * w for w in widths) + } + + # Headers + write_row(column_names) + write(divider) + + # Rows + for formatted_row in formatted_data: + write_row(formatted_row) + + # Row indicating data was truncated + if rows_truncated: + write_row([ellipsis for n in column_names]) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/rename.py b/dbt-env/lib/python3.8/site-packages/agate/table/rename.py new file mode 100644 index 0000000..e023e35 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/rename.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from agate import utils + + +def rename(self, column_names=None, row_names=None, slug_columns=False, slug_rows=False, **kwargs): + """ + Create a copy of this table with different column names or row names. + + By enabling :code:`slug_columns` or :code:`slug_rows` and not specifying + new names you may slugify the table's existing names. + + :code:`kwargs` will be passed to the slugify method in python-slugify. See: + https://github.com/un33k/python-slugify + + :param column_names: + New column names for the renamed table. May be either an array or + a dictionary mapping existing column names to new names. If not + specified, will use this table's existing column names. + :param row_names: + New row names for the renamed table. May be either an array or + a dictionary mapping existing row names to new names. If not + specified, will use this table's existing row names. 
+ :param slug_columns: + If True, column names will be converted to slugs and duplicate names + will have unique identifiers appended. + :param slug_rows: + If True, row names will be converted to slugs and dupicate names will + have unique identifiers appended. + """ + from agate.table import Table + + if isinstance(column_names, dict): + column_names = [column_names[name] if name in column_names else name for name in self._column_names] + + if isinstance(row_names, dict): + row_names = [row_names[name] if name in row_names else name for name in self._row_names] + + if slug_columns: + column_names = column_names or self._column_names + + if column_names is not None: + if column_names == self._column_names: + column_names = utils.slugify(column_names, ensure_unique=False, **kwargs) + else: + column_names = utils.slugify(column_names, ensure_unique=True, **kwargs) + + if slug_rows: + row_names = row_names or self.row_names + + if row_names is not None: + row_names = utils.slugify(row_names, ensure_unique=True, **kwargs) + + if column_names is not None and column_names != self._column_names: + if row_names is None: + row_names = self._row_names + + return Table(self._rows, column_names, self._column_types, row_names=row_names, _is_fork=False) + else: + return self._fork(self._rows, column_names, self._column_types, row_names=row_names) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/scatterplot.py b/dbt-env/lib/python3.8/site-packages/agate/table/scatterplot.py new file mode 100644 index 0000000..0de2966 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/scatterplot.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import leather + +from agate import utils + + +def scatterplot(self, x=0, y=1, path=None, width=None, height=None): + """ + Render a scatterplot using :class:`leather.Chart`. + + :param x: + The name or index of a column to plot as the x-axis. Defaults to the + first column in the table. + :param y: + The name or index of a column to plot as the y-axis. Defaults to the + second column in the table. + :param path: + If specified, the resulting SVG will be saved to this location. If + :code:`None` and running in IPython, then the SVG will be rendered + inline. Otherwise, the SVG data will be returned as a string. + :param width: + The width of the output SVG. + :param height: + The height of the output SVG. + """ + if type(x) is int: + x_name = self.column_names[x] + else: + x_name = x + + if type(y) is int: + y_name = self.column_names[y] + else: + y_name = y + + chart = leather.Chart() + chart.add_x_axis(name=x_name) + chart.add_y_axis(name=y_name) + chart.add_dots(self, x=x, y=y) + + return chart.to_svg(path=path, width=width, height=height) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/select.py b/dbt-env/lib/python3.8/site-packages/agate/table/select.py new file mode 100644 index 0000000..3321738 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/select.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from agate.rows import Row +from agate import utils + + +def select(self, key): + """ + Create a new table with only the specified columns. + + :param key: + Either the name of a single column to include or a sequence of such + names. + :returns: + A new :class:`.Table`. 
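A sketch of select, combined with the rename method defined earlier in this patch; the three-column table is hypothetical:

    import agate

    table = agate.Table(
        [('a', 1, True), ('b', 2, False)],
        ['letter', 'number', 'flag'],
        [agate.Text(), agate.Number(), agate.Boolean()]
    )

    narrow = table.select(['letter', 'number'])                 # keep only two columns
    renamed = narrow.rename(column_names={'number': 'value'})   # dict maps old name to new
    print(renamed.column_names)  # ('letter', 'value')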
+ """ + if not utils.issequence(key): + key = [key] + + indexes = tuple(self._column_names.index(k) for k in key) + column_types = tuple(self._column_types[i] for i in indexes) + new_rows = [] + + for row in self._rows: + new_rows.append(Row((row[i] for i in indexes), key)) + + return self._fork(new_rows, key, column_types) diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/to_csv.py b/dbt-env/lib/python3.8/site-packages/agate/table/to_csv.py new file mode 100644 index 0000000..9890fca --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/to_csv.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import os + + +def to_csv(self, path, **kwargs): + """ + Write this table to a CSV. This method uses agate's builtin CSV writer, + which supports unicode on both Python 2 and Python 3. + + `kwargs` will be passed through to the CSV writer. + + :param path: + Filepath or file-like object to write to. + """ + from agate import csv + + if 'lineterminator' not in kwargs: + kwargs['lineterminator'] = '\n' + + close = True + f = None + + try: + if hasattr(path, 'write'): + f = path + close = False + else: + dirpath = os.path.dirname(path) + + if dirpath and not os.path.exists(dirpath): + os.makedirs(dirpath) + + f = open(path, 'w') + + writer = csv.writer(f, **kwargs) + writer.writerow(self._column_names) + + csv_funcs = [c.csvify for c in self._column_types] + + for row in self._rows: + writer.writerow(tuple(csv_funcs[i](d) for i, d in enumerate(row))) + finally: + if close and f is not None: + f.close() diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/to_json.py b/dbt-env/lib/python3.8/site-packages/agate/table/to_json.py new file mode 100644 index 0000000..55346e1 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/to_json.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import codecs +from collections import OrderedDict +import json +import os + +import six + + +def to_json(self, path, key=None, newline=False, indent=None, **kwargs): + """ + Write this table to a JSON file or file-like object. + + :code:`kwargs` will be passed through to the JSON encoder. + + :param path: + File path or file-like object to write to. + :param key: + If specified, JSON will be output as an hash instead of a list. May + be either the name of a column from the this table containing + unique values or a :class:`function` that takes a row and returns + a unique value. + :param newline: + If `True`, output will be in the form of "newline-delimited JSON". + :param indent: + If specified, the number of spaces to indent the JSON for + formatting. 
+ """ + if key is not None and newline: + raise ValueError('key and newline may not be specified together.') + + if newline and indent is not None: + raise ValueError('newline and indent may not be specified together.') + + key_is_row_function = hasattr(key, '__call__') + + json_kwargs = { + 'ensure_ascii': False, + 'indent': indent + } + + if six.PY2: + json_kwargs['encoding'] = 'utf-8' + + # Pass remaining kwargs through to JSON encoder + json_kwargs.update(kwargs) + + json_funcs = [c.jsonify for c in self._column_types] + + close = True + f = None + + try: + if hasattr(path, 'write'): + f = path + close = False + else: + if os.path.dirname(path) and not os.path.exists(os.path.dirname(path)): + os.makedirs(os.path.dirname(path)) + f = open(path, 'w') + + if six.PY2: + f = codecs.getwriter('utf-8')(f) + + def dump_json(data): + json.dump(data, f, **json_kwargs) + + if newline: + f.write('\n') + + # Keyed + if key is not None: + output = OrderedDict() + + for row in self._rows: + if key_is_row_function: + k = key(row) + else: + k = str(row[key]) if six.PY3 else unicode(row[key]) + + if k in output: + raise ValueError('Value %s is not unique in the key column.' % six.text_type(k)) + + values = tuple(json_funcs[i](d) for i, d in enumerate(row)) + output[k] = OrderedDict(zip(row.keys(), values)) + dump_json(output) + # Newline-delimited + elif newline: + for row in self._rows: + values = tuple(json_funcs[i](d) for i, d in enumerate(row)) + dump_json(OrderedDict(zip(row.keys(), values))) + # Normal + else: + output = [] + + for row in self._rows: + values = tuple(json_funcs[i](d) for i, d in enumerate(row)) + output.append(OrderedDict(zip(row.keys(), values))) + + dump_json(output) + finally: + if close and f is not None: + f.close() diff --git a/dbt-env/lib/python3.8/site-packages/agate/table/where.py b/dbt-env/lib/python3.8/site-packages/agate/table/where.py new file mode 100644 index 0000000..cea3e36 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/table/where.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + + +def where(self, test): + """ + Create a new :class:`.Table` with only those rows that pass a test. + + :param test: + A function that takes a :class:`.Row` and returns :code:`True` if + it should be included in the new :class:`.Table`. + :type test: + :class:`function` + :returns: + A new :class:`.Table`. + """ + rows = [] + + if self._row_names is not None: + row_names = [] + else: + row_names = None + + for i, row in enumerate(self._rows): + if test(row): + rows.append(row) + + if row_names is not None: + row_names.append(self._row_names[i]) + + return self._fork(rows, row_names=row_names) diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__init__.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/__init__.py new file mode 100644 index 0000000..cc5b0a5 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/__init__.py @@ -0,0 +1,204 @@ +#!/usr/bin/env python + +""" +The :class:`.TableSet` class collects a set of related tables in a single data +structure. The most common way of creating a :class:`.TableSet` is using the +:meth:`.Table.group_by` method, which is similar to SQL's ``GROUP BY`` keyword. +The resulting set of tables will all have identical columns structure. + +:class:`.TableSet` functions as a dictionary. Individual tables in the set can +be accessed by using their name as a key. 
If the table set was created using +:meth:`.Table.group_by` then the names of the tables will be the grouping +factors found in the original data. + +:class:`.TableSet` replicates the majority of the features of :class:`.Table`. +When methods such as :meth:`.TableSet.select`, :meth:`.TableSet.where` or +:meth:`.TableSet.order_by` are used, the operation is applied to *each* table +in the set and the result is a new :class:`TableSet` instance made up of +entirely new :class:`.Table` instances. + +:class:`.TableSet` instances can also contain other TableSet's. This means you +can chain calls to :meth:`.Table.group_by` and :meth:`.TableSet.group_by` +and end up with data grouped across multiple dimensions. +:meth:`.TableSet.aggregate` on nested TableSets will then group across multiple +dimensions. +""" + +import six +from six.moves import zip_longest + +from agate.data_types import Text +from agate.mapped_sequence import MappedSequence +from agate.table import Table + + +class TableSet(MappedSequence): + """ + An group of named tables with identical column definitions. Supports + (almost) all the same operations as :class:`.Table`. When executed on a + :class:`TableSet`, any operation that would have returned a new + :class:`.Table` instead returns a new :class:`TableSet`. Any operation + that would have returned a single value instead returns a dictionary of + values. + + TableSet is implemented as a subclass of :class:`.MappedSequence` + + :param tables: + A sequence :class:`Table` instances. + :param keys: + A sequence of keys corresponding to the tables. These may be any type + except :class:`int`. + :param key_name: + A name that describes the grouping properties. Used as the column + header when the groups are aggregated. Defaults to the column name that + was grouped on. + :param key_type: + An instance some subclass of :class:`.DataType`. If not provided it + will default to a :class`.Text`. + :param _is_fork: + Used internally to skip certain validation steps when data + is propagated from an existing tablset. + """ + def __init__(self, tables, keys, key_name='group', key_type=None, _is_fork=False): + tables = tuple(tables) + keys = tuple(keys) + + self._key_name = key_name + self._key_type = key_type or Text() + self._sample_table = tables[0] + + while isinstance(self._sample_table, TableSet): + self._sample_table = self._sample_table[0] + + self._column_types = self._sample_table.column_types + self._column_names = self._sample_table.column_names + + if not _is_fork: + for table in tables: + if any(not isinstance(a, type(b)) for a, b in zip_longest(table.column_types, self._column_types)): + raise ValueError('Not all tables have the same column types!') + + if table.column_names != self._column_names: + raise ValueError('Not all tables have the same column names!') + + MappedSequence.__init__(self, tables, keys) + + def __str__(self): + """ + Print the tableset's structure via :meth:`TableSet.print_structure`. + """ + structure = six.StringIO() + + self.print_structure(output=structure) + + return structure.getvalue() + + @property + def key_name(self): + """ + Get the name of the key this TableSet is grouped by. (If created using + :meth:`.Table.group_by` then this is the original column name.) + """ + return self._key_name + + @property + def key_type(self): + """ + Get the :class:`.DataType` this TableSet is grouped by. (If created + using :meth:`.Table.group_by` then this is the original column type.) 
+ """ + return self._key_type + + @property + def column_types(self): + """ + Get an ordered list of this :class:`.TableSet`'s column types. + + :returns: + A :class:`tuple` of :class:`.DataType` instances. + """ + return self._column_types + + @property + def column_names(self): + """ + Get an ordered list of this :class:`TableSet`'s column names. + + :returns: + A :class:`tuple` of strings. + """ + return self._column_names + + def _fork(self, tables, keys, key_name=None, key_type=None): + """ + Create a new :class:`.TableSet` using the metadata from this one. + + This method is used internally by functions like + :meth:`.TableSet.having`. + """ + if key_name is None: + key_name = self._key_name + + if key_type is None: + key_type = self._key_type + + return TableSet(tables, keys, key_name, key_type, _is_fork=True) + + def _proxy(self, method_name, *args, **kwargs): + """ + Calls a method on each table in this :class:`.TableSet`. + """ + tables = [] + + for key, table in self.items(): + tables.append(getattr(table, method_name)(*args, **kwargs)) + + return self._fork( + tables, + self.keys() + ) + + +from agate.tableset.aggregate import aggregate +from agate.tableset.bar_chart import bar_chart +from agate.tableset.column_chart import column_chart +from agate.tableset.from_csv import from_csv +from agate.tableset.from_json import from_json +from agate.tableset.having import having +from agate.tableset.line_chart import line_chart +from agate.tableset.merge import merge +from agate.tableset.print_structure import print_structure +from agate.tableset.proxy_methods import bins, compute, denormalize, distinct, \ + exclude, find, group_by, homogenize, join, limit, normalize, order_by, \ + pivot, select, where +from agate.tableset.scatterplot import scatterplot +from agate.tableset.to_csv import to_csv +from agate.tableset.to_json import to_json + +TableSet.aggregate = aggregate +TableSet.bar_chart = bar_chart +TableSet.bins = bins +TableSet.column_chart = column_chart +TableSet.compute = compute +TableSet.denormalize = denormalize +TableSet.distinct = distinct +TableSet.exclude = exclude +TableSet.find = find +TableSet.from_csv = from_csv +TableSet.from_json = from_json +TableSet.group_by = group_by +TableSet.having = having +TableSet.homogenize = homogenize +TableSet.join = join +TableSet.limit = limit +TableSet.line_chart = line_chart +TableSet.merge = merge +TableSet.normalize = normalize +TableSet.order_by = order_by +TableSet.pivot = pivot +TableSet.print_structure = print_structure +TableSet.scatterplot = scatterplot +TableSet.select = select +TableSet.to_csv = to_csv +TableSet.to_json = to_json +TableSet.where = where diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..89f8ffa Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/aggregate.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/aggregate.cpython-38.pyc new file mode 100644 index 0000000..a313da2 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/aggregate.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/bar_chart.cpython-38.pyc 
b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/bar_chart.cpython-38.pyc new file mode 100644 index 0000000..df67016 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/bar_chart.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/column_chart.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/column_chart.cpython-38.pyc new file mode 100644 index 0000000..9d25d7e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/column_chart.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/from_csv.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/from_csv.cpython-38.pyc new file mode 100644 index 0000000..e8dae15 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/from_csv.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/from_json.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/from_json.cpython-38.pyc new file mode 100644 index 0000000..f312709 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/from_json.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/having.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/having.cpython-38.pyc new file mode 100644 index 0000000..1c75cab Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/having.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/line_chart.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/line_chart.cpython-38.pyc new file mode 100644 index 0000000..3c41cc9 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/line_chart.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/merge.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/merge.cpython-38.pyc new file mode 100644 index 0000000..66ba59a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/merge.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/print_structure.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/print_structure.cpython-38.pyc new file mode 100644 index 0000000..c78a6d4 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/print_structure.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/proxy_methods.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/proxy_methods.cpython-38.pyc new file mode 100644 index 0000000..838efa2 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/proxy_methods.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/scatterplot.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/scatterplot.cpython-38.pyc new file mode 100644 index 0000000..899469a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/scatterplot.cpython-38.pyc differ diff --git 
a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/to_csv.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/to_csv.cpython-38.pyc new file mode 100644 index 0000000..25bad88 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/to_csv.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/to_json.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/to_json.cpython-38.pyc new file mode 100644 index 0000000..44f52aa Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/agate/tableset/__pycache__/to_json.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/aggregate.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/aggregate.py new file mode 100644 index 0000000..e6aff37 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/aggregate.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from agate.table import Table + + +def _aggregate(self, aggregations=[]): + """ + Recursive aggregation allowing for TableSet's to be nested inside + one another. + """ + from agate.tableset import TableSet + + output = [] + + # Process nested TableSet's + if isinstance(self._values[0], TableSet): + for key, nested_tableset in self.items(): + column_names, column_types, nested_output, row_name_columns = _aggregate(nested_tableset, aggregations) + + for row in nested_output: + row.insert(0, key) + + output.append(row) + + column_names.insert(0, self._key_name) + column_types.insert(0, self._key_type) + row_name_columns.insert(0, self._key_name) + # Regular Tables + else: + column_names = [self._key_name] + column_types = [self._key_type] + row_name_columns = [self._key_name] + + for new_column_name, aggregation in aggregations: + column_names.append(new_column_name) + column_types.append(aggregation.get_aggregate_data_type(self._sample_table)) + + for name, table in self.items(): + for new_column_name, aggregation in aggregations: + aggregation.validate(table) + + for name, table in self.items(): + new_row = [name] + + for new_column_name, aggregation in aggregations: + new_row.append(aggregation.run(table)) + + output.append(new_row) + + return column_names, column_types, output, row_name_columns + + +def aggregate(self, aggregations): + """ + Aggregate data from the tables in this set by performing some + set of column operations on the groups and coalescing the results into + a new :class:`.Table`. + + :code:`aggregations` must be a sequence of tuples, where each has two + parts: a :code:`new_column_name` and a :class:`.Aggregation` instance. + + The resulting table will have the keys from this :class:`TableSet` (and + any nested TableSets) set as its :code:`row_names`. See + :meth:`.Table.__init__` for more details. + + :param aggregations: + A list of tuples in the format :code:`(new_column_name, aggregation)`, + where each :code:`aggregation` is an instance of :class:`.Aggregation`. + :returns: + A new :class:`.Table`. 
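A sketch of the group_by/aggregate workflow that produces and then consumes a TableSet; the color data is hypothetical:

    import agate

    table = agate.Table(
        [('red', 1), ('red', 2), ('blue', 3)],
        ['color', 'value'],
        [agate.Text(), agate.Number()]
    )

    by_color = table.group_by('color')  # a TableSet keyed by each distinct color

    totals = by_color.aggregate([
        ('count', agate.Count()),
        ('total', agate.Sum('value'))
    ])
    totals.print_table()  # one row per color, with count and total columns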
+ """ + column_names, column_types, output, row_name_columns = _aggregate(self, aggregations) + + if len(row_name_columns) == 1: + row_names = row_name_columns[0] + else: + def row_names(r): + return tuple(r[n] for n in row_name_columns) + + return Table(output, column_names, column_types, row_names=row_names) diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/bar_chart.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/bar_chart.py new file mode 100644 index 0000000..4fd26b9 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/bar_chart.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import leather + +from agate import utils + + +def bar_chart(self, label=0, value=1, path=None, width=None, height=None): + """ + Render a lattice/grid of bar charts using :class:`leather.Lattice`. + + :param label: + The name or index of a column to plot as the labels of the chart. + Defaults to the first column in the table. + :param value: + The name or index of a column to plot as the values of the chart. + Defaults to the second column in the table. + :param path: + If specified, the resulting SVG will be saved to this location. If + :code:`None` and running in IPython, then the SVG will be rendered + inline. Otherwise, the SVG data will be returned as a string. + :param width: + The width of the output SVG. + :param height: + The height of the output SVG. + """ + if type(label) is int: + label_name = self.column_names[label] + else: + label_name = label + + if type(value) is int: + value_name = self.column_names[value] + else: + value_name = value + + chart = leather.Lattice(shape=leather.Bars()) + chart.add_x_axis(name=value_name) + chart.add_y_axis(name=label_name) + chart.add_many(self.values(), x=value, y=label, titles=self.keys()) + + return chart.to_svg(path=path, width=width, height=height) diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/column_chart.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/column_chart.py new file mode 100644 index 0000000..4e5caf4 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/column_chart.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import leather + +from agate import utils + + +def column_chart(self, label=0, value=1, path=None, width=None, height=None): + """ + Render a lattice/grid of column charts using :class:`leather.Lattice`. + + :param label: + The name or index of a column to plot as the labels of the chart. + Defaults to the first column in the table. + :param value: + The name or index of a column to plot as the values of the chart. + Defaults to the second column in the table. + :param path: + If specified, the resulting SVG will be saved to this location. If + :code:`None` and running in IPython, then the SVG will be rendered + inline. Otherwise, the SVG data will be returned as a string. + :param width: + The width of the output SVG. + :param height: + The height of the output SVG. 
+ """ + if type(label) is int: + label_name = self.column_names[label] + else: + label_name = label + + if type(value) is int: + value_name = self.column_names[value] + else: + value_name = value + + chart = leather.Lattice(shape=leather.Columns()) + chart.add_x_axis(name=label_name) + chart.add_y_axis(name=value_name) + chart.add_many(self.values(), x=label, y=value, titles=self.keys()) + + return chart.to_svg(path=path, width=width, height=height) diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/from_csv.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/from_csv.py new file mode 100644 index 0000000..81af9b4 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/from_csv.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +from collections import OrderedDict +from glob import glob +import os + +from agate.table import Table + + +@classmethod +def from_csv(cls, dir_path, column_names=None, column_types=None, row_names=None, header=True, **kwargs): + """ + Create a new :class:`TableSet` from a directory of CSVs. + + See :meth:`.Table.from_csv` for additional details. + + :param dir_path: + Path to a directory full of CSV files. All CSV files in this + directory will be loaded. + :param column_names: + See :meth:`Table.__init__`. + :param column_types: + See :meth:`Table.__init__`. + :param row_names: + See :meth:`Table.__init__`. + :param header: + See :meth:`Table.from_csv`. + """ + from agate.tableset import TableSet + + if not os.path.isdir(dir_path): + raise IOError('Specified path doesn\'t exist or isn\'t a directory.') + + tables = OrderedDict() + + for path in glob(os.path.join(dir_path, '*.csv')): + name = os.path.split(path)[1].strip('.csv') + + tables[name] = Table.from_csv(path, column_names, column_types, row_names=row_names, header=header, **kwargs) + + return TableSet(tables.values(), tables.keys()) diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/from_json.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/from_json.py new file mode 100644 index 0000000..b2befe4 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/from_json.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +from collections import OrderedDict +from decimal import Decimal +from glob import glob +import json +import os + +import six + +from agate.table import Table + + +@classmethod +def from_json(cls, path, column_names=None, column_types=None, keys=None, **kwargs): + """ + Create a new :class:`TableSet` from a directory of JSON files or a + single JSON object with key value (Table key and list of row objects) + pairs for each :class:`Table`. + + See :meth:`.Table.from_json` for additional details. + + :param path: + Path to a directory containing JSON files or filepath/file-like + object of nested JSON file. + :param keys: + A list of keys of the top-level dictionaries for each file. If + specified, length must be equal to number of JSON files in path. + :param column_types: + See :meth:`Table.__init__`. 
+ """ + from agate.tableset import TableSet + + if isinstance(path, six.string_types) and not os.path.isdir(path) and not os.path.isfile(path): + raise IOError('Specified path doesn\'t exist.') + + tables = OrderedDict() + + if isinstance(path, six.string_types) and os.path.isdir(path): + filepaths = glob(os.path.join(path, '*.json')) + + if keys is not None and len(keys) != len(filepaths): + raise ValueError('If specified, keys must have length equal to number of JSON files') + + for i, filepath in enumerate(filepaths): + name = os.path.split(filepath)[1].strip('.json') + + if keys is not None: + tables[name] = Table.from_json(filepath, keys[i], column_types=column_types, **kwargs) + else: + tables[name] = Table.from_json(filepath, column_types=column_types, **kwargs) + + else: + if hasattr(path, 'read'): + js = json.load(path, object_pairs_hook=OrderedDict, parse_float=Decimal, **kwargs) + else: + with open(path, 'r') as f: + js = json.load(f, object_pairs_hook=OrderedDict, parse_float=Decimal, **kwargs) + + for key, value in js.items(): + tables[key] = Table.from_object(value, column_types=column_types, **kwargs) + + return TableSet(tables.values(), tables.keys()) diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/having.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/having.py new file mode 100644 index 0000000..a366735 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/having.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + + +def having(self, aggregations, test): + """ + Create a new :class:`.TableSet` with only those tables that pass a test. + + This works by applying a sequence of :class:`Aggregation` instances to + each table. The resulting dictionary of properties is then passed to + the :code:`test` function. + + This method does not modify the underlying tables in any way. + + :param aggregations: + A list of tuples in the format :code:`(name, aggregation)`, where + each :code:`aggregation` is an instance of :class:`.Aggregation`. + :param test: + A function that takes a dictionary of aggregated properties and returns + :code:`True` if it should be included in the new :class:`.TableSet`. + :type test: + :class:`function` + :returns: + A new :class:`.TableSet`. + """ + new_tables = [] + new_keys = [] + + for key, table in self.items(): + props = table.aggregate(aggregations) + + if test(props): + new_tables.append(table) + new_keys.append(key) + + return self._fork(new_tables, new_keys) diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/line_chart.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/line_chart.py new file mode 100644 index 0000000..7740088 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/line_chart.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import leather + +from agate import utils + + +def line_chart(self, x=0, y=1, path=None, width=None, height=None): + """ + Render a lattice/grid of line charts using :class:`leather.Lattice`. + + :param x: + The name or index of a column to plot as the x axis of the chart. + Defaults to the first column in the table. + :param y: + The name or index of a column to plot as the y axis of the chart. + Defaults to the second column in the table. + :param path: + If specified, the resulting SVG will be saved to this location. If + :code:`None` and running in IPython, then the SVG will be rendered + inline. Otherwise, the SVG data will be returned as a string. + :param width: + The width of the output SVG. 
+ :param height: + The height of the output SVG. + """ + if type(x) is int: + x_name = self.column_names[x] + else: + x_name = x + + if type(y) is int: + y_name = self.column_names[y] + else: + y_name = y + + chart = leather.Lattice(shape=leather.Line()) + chart.add_x_axis(name=x_name) + chart.add_y_axis(name=y_name) + chart.add_many(self.values(), x=x, y=y, titles=self.keys()) + + return chart.to_svg(path=path, width=width, height=height) diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/merge.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/merge.py new file mode 100644 index 0000000..aa1df5f --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/merge.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +from agate.rows import Row +from agate.tableset import Table + + +def merge(self, groups=None, group_name=None, group_type=None): + """ + Convert this TableSet into a single table. This is the inverse of + :meth:`.Table.group_by`. + + Any `row_names` set on the merged tables will be lost in this + process. + + :param groups: + A list of grouping factors to add to merged rows in a new column. + If specified, it should have exactly one element per :class:`Table` + in the :class:`TableSet`. If not specified or None, the grouping + factor will be the name of the :class:`Row`'s original Table. + :param group_name: + This will be the column name of the grouping factors. If None, + defaults to the :attr:`TableSet.key_name`. + :param group_type: + This will be the column type of the grouping factors. If None, + defaults to the :attr:`TableSet.key_type`. + :returns: + A new :class:`Table`. + """ + if type(groups) is not list and groups is not None: + raise ValueError('Groups must be None or a list.') + + if type(groups) is list and len(groups) != len(self): + raise ValueError('Groups length must be equal to TableSet length.') + + column_names = list(self._column_names) + column_types = list(self._column_types) + + column_names.insert(0, group_name if group_name else self._key_name) + column_types.insert(0, group_type if group_type else self._key_type) + + rows = [] + + for index, (key, table) in enumerate(self.items()): + for row in table._rows: + if groups is None: + rows.append(Row((key,) + tuple(row), column_names)) + else: + rows.append(Row((groups[index],) + tuple(row), column_names)) + + return Table(rows, column_names, column_types) diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/print_structure.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/print_structure.py new file mode 100644 index 0000000..f52b2e5 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/print_structure.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python + +import sys + +from agate.data_types import Text +from agate.table import Table + + +def print_structure(self, max_rows=20, output=sys.stdout): + """ + Print the keys and row counts of each table in the tableset. + + :param max_rows: + The maximum number of rows to display before truncating the data. + Defaults to 20. + :param output: + The output used to print the structure of the :class:`Table`. 
+ :returns: + None + """ + max_length = min(len(self.items()), max_rows) + + name_column = self.keys()[0:max_length] + type_column = [str(len(table.rows)) for key, table in self.items()[0:max_length]] + rows = zip(name_column, type_column) + column_names = ['table', 'rows'] + text = Text() + column_types = [text, text] + + table = Table(rows, column_names, column_types) + + return table.print_table(output=output, max_column_width=None) diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/proxy_methods.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/proxy_methods.py new file mode 100644 index 0000000..e8657b6 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/proxy_methods.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python + + +def bins(self, *args, **kwargs): + """ + Calls :meth:`.Table.bins` on each table in the TableSet. + """ + return self._proxy('bins', *args, **kwargs) + +def compute(self, *args, **kwargs): + """ + Calls :meth:`.Table.compute` on each table in the TableSet. + """ + return self._proxy('compute', *args, **kwargs) + +def denormalize(self, *args, **kwargs): + """ + Calls :meth:`.Table.denormalize` on each table in the TableSet. + """ + return self._proxy('denormalize', *args, **kwargs) + +def distinct(self, *args, **kwargs): + """ + Calls :meth:`.Table.distinct` on each table in the TableSet. + """ + return self._proxy('distinct', *args, **kwargs) + +def exclude(self, *args, **kwargs): + """ + Calls :meth:`.Table.exclude` on each table in the TableSet. + """ + return self._proxy('exclude', *args, **kwargs) + +def find(self, *args, **kwargs): + """ + Calls :meth:`.Table.find` on each table in the TableSet. + """ + return self._proxy('find', *args, **kwargs) + +def group_by(self, *args, **kwargs): + """ + Calls :meth:`.Table.group_by` on each table in the TableSet. + """ + return self._proxy('group_by', *args, **kwargs) + +def homogenize(self, *args, **kwargs): + """ + Calls :meth:`.Table.homogenize` on each table in the TableSet. + """ + return self._proxy('homogenize', *args, **kwargs) + +def join(self, *args, **kwargs): + """ + Calls :meth:`.Table.join` on each table in the TableSet. + """ + return self._proxy('join', *args, **kwargs) + +def limit(self, *args, **kwargs): + """ + Calls :meth:`.Table.limit` on each table in the TableSet. + """ + return self._proxy('limit', *args, **kwargs) + +def normalize(self, *args, **kwargs): + """ + Calls :meth:`.Table.normalize` on each table in the TableSet. + """ + return self._proxy('normalize', *args, **kwargs) + +def order_by(self, *args, **kwargs): + """ + Calls :meth:`.Table.order_by` on each table in the TableSet. + """ + return self._proxy('order_by', *args, **kwargs) + +def pivot(self, *args, **kwargs): + """ + Calls :meth:`.Table.pivot` on each table in the TableSet. + """ + return self._proxy('pivot', *args, **kwargs) + +def select(self, *args, **kwargs): + """ + Calls :meth:`.Table.select` on each table in the TableSet. + """ + return self._proxy('select', *args, **kwargs) + +def where(self, *args, **kwargs): + """ + Calls :meth:`.Table.where` on each table in the TableSet. 
+ """ + return self._proxy('where', *args, **kwargs) diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/scatterplot.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/scatterplot.py new file mode 100644 index 0000000..0d9f554 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/scatterplot.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# pylint: disable=W0212 + +import leather + +from agate import utils + + +def scatterplot(self, x=0, y=1, path=None, width=None, height=None): + """ + Render a lattice/grid of scatterplots using :class:`leather.Lattice`. + + :param x: + The name or index of a column to plot as the x axis of the chart. + Defaults to the first column in the table. + :param y: + The name or index of a column to plot as the y axis of the chart. + Defaults to the second column in the table. + :param path: + If specified, the resulting SVG will be saved to this location. If + :code:`None` and running in IPython, then the SVG will be rendered + inline. Otherwise, the SVG data will be returned as a string. + :param width: + The width of the output SVG. + :param height: + The height of the output SVG. + """ + if type(x) is int: + x_name = self.column_names[x] + else: + x_name = x + + if type(y) is int: + y_name = self.column_names[y] + else: + y_name = y + + chart = leather.Lattice(shape=leather.Dots()) + chart.add_x_axis(name=x_name) + chart.add_y_axis(name=y_name) + chart.add_many(self.values(), x=x, y=y, titles=self.keys()) + + return chart.to_svg(path=path, width=width, height=height) diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/to_csv.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/to_csv.py new file mode 100644 index 0000000..7c268b5 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/to_csv.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python + +import os + + +def to_csv(self, dir_path, **kwargs): + """ + Write each table in this set to a separate CSV in a given + directory. + + See :meth:`.Table.to_csv` for additional details. + + :param dir_path: + Path to the directory to write the CSV files to. + """ + if not os.path.exists(dir_path): + os.makedirs(dir_path) + + for name, table in self.items(): + path = os.path.join(dir_path, '%s.csv' % name) + + table.to_csv(path, **kwargs) diff --git a/dbt-env/lib/python3.8/site-packages/agate/tableset/to_json.py b/dbt-env/lib/python3.8/site-packages/agate/tableset/to_json.py new file mode 100644 index 0000000..2a0ca26 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/tableset/to_json.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +from collections import OrderedDict +import json +import os + +import six + + +def to_json(self, path, nested=False, indent=None, **kwargs): + """ + Write :class:`TableSet` to either a set of JSON files for each table or + a single nested JSON file. + + See :meth:`.Table.to_json` for additional details. + + :param path: + Path to the directory to write the JSON file(s) to. If nested is + `True`, this should be a file path or file-like object to write to. + :param nested: + If `True`, the output will be a single nested JSON file with each + Table's key paired with a list of row objects. Otherwise, the output + will be a set of files for each table. Defaults to `False`. + :param indent: + See :meth:`Table.to_json`. 
+ """ + if not nested: + if not os.path.exists(path): + os.makedirs(path) + + for name, table in self.items(): + filepath = os.path.join(path, '%s.json' % name) + + table.to_json(filepath, indent=indent, **kwargs) + else: + close = True + tableset_dict = OrderedDict() + + for name, table in self.items(): + output = six.StringIO() + table.to_json(output, **kwargs) + tableset_dict[name] = json.loads(output.getvalue(), object_pairs_hook=OrderedDict) + + if hasattr(path, 'write'): + f = path + close = False + else: + dirpath = os.path.dirname(path) + + if dirpath and not os.path.exists(dirpath): + os.makedirs(dirpath) + + f = open(path, 'w') + + json_kwargs = {'ensure_ascii': False, 'indent': indent} + + if six.PY2: + json_kwargs['encoding'] = 'utf-8' + + json_kwargs.update(kwargs) + json.dump(tableset_dict, f, **json_kwargs) + + if close and f is not None: + f.close() diff --git a/dbt-env/lib/python3.8/site-packages/agate/testcase.py b/dbt-env/lib/python3.8/site-packages/agate/testcase.py new file mode 100644 index 0000000..8943820 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/testcase.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python + +try: + import unittest2 as unittest +except ImportError: + import unittest + +import agate + + +class AgateTestCase(unittest.TestCase): + """ + Unittest case for quickly asserting logic about tables. + """ + def assertColumnNames(self, table, names): + """ + Verify the column names in the given table match what is expected. + """ + self.assertIsInstance(table, agate.Table) + + self.assertSequenceEqual(table.column_names, names) + self.assertSequenceEqual( + [c.name for c in table.columns], + names + ) + + for row in table.rows: + self.assertSequenceEqual( + row.keys(), + names + ) + + def assertColumnTypes(self, table, types): + """ + Verify the column types in the given table are of the expected types. + """ + self.assertIsInstance(table, agate.Table) + + table_types = table.column_types + column_types = [c.data_type for c in table.columns] + + for i, test_type in enumerate(types): + self.assertIsInstance(table_types[i], test_type) + self.assertIsInstance(column_types[i], test_type) + + def assertRows(self, table, rows): + """ + Verify the row data in the given table match what is expected. + """ + self.assertIsInstance(table, agate.Table) + + for i, row in enumerate(rows): + self.assertSequenceEqual(table.rows[i], row) + + def assertRowNames(self, table, names): + """ + Verify the row names in the given table match what is expected. + """ + self.assertIsInstance(table, agate.Table) + + self.assertSequenceEqual(table.row_names, names) + self.assertSequenceEqual( + table.rows.keys(), + names + ) + + for column in table.columns: + self.assertSequenceEqual( + column.keys(), + names + ) diff --git a/dbt-env/lib/python3.8/site-packages/agate/type_tester.py b/dbt-env/lib/python3.8/site-packages/agate/type_tester.py new file mode 100644 index 0000000..e153fdb --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/type_tester.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python + +from copy import copy + +from agate.data_types.boolean import Boolean +from agate.data_types.date import Date +from agate.data_types.date_time import DateTime +from agate.data_types.number import Number +from agate.data_types.text import Text +from agate.data_types.time_delta import TimeDelta + + +class TypeTester(object): + """ + Control how data types are inferred for columns in a given set of data. 
+ + This class is used by passing it to the :code:`column_types` argument of + the :class:`.Table` constructor, or the same argument for any other method + that create a :class:`.Table` + + Type inference can be a slow process. To limit the number of rows of data to + be tested, pass the :code:`limit` argument. Note that may cause errors if + your data contains different types of values after the specified number of + rows. + + By default, data types will be tested against each column in this order: + + 1. :class:`.Boolean` + 2. :class:`.Number` + 3. :class:`.TimeDelta` + #. :class:`.Date` + #. :class:`.DateTime` + #. :class:`.Text` + + Individual types may be specified using the :code:`force` argument. The type + order by be changed, or entire types disabled, by using the :code:`types` + argument. Beware that changing the order of the types may cause unexpected + behavior. + + :param force: + A dictionary where each key is a column name and each value is a + :class:`.DataType` instance that overrides inference. + :param limit: + An optional limit on how many rows to evaluate before selecting the + most likely type. Note that applying a limit may mean errors arise when + the data is cast--if the guess is proved incorrect in further rows of + data. + :param types: + A sequence of possible types to test against. This be used to specify + what data formats you want to test against. For instance, you may want + to exclude :class:`TimeDelta` from testing. It can also be used to pass + options such as ``locale`` to :class:`.Number` or ``cast_nulls`` to + :class:`.Text`. Take care in specifying the order of the list. It is + the order they are tested in. :class:`.Text` should always be last. + """ + def __init__(self, force={}, limit=None, types=None): + self._force = force + self._limit = limit + + if types: + self._possible_types = types + else: + # In order of preference + self._possible_types = [ + Boolean(), + Number(), + TimeDelta(), + Date(), + DateTime(), + Text() + ] + + def run(self, rows, column_names): + """ + Apply type inference to the provided data and return an array of + column types. + + :param rows: + The data as a sequence of any sequences: tuples, lists, etc. + """ + num_columns = len(column_names) + hypotheses = [set(self._possible_types) for i in range(num_columns)] + force_indices = [] + + for name in self._force.keys(): + try: + force_indices.append(column_names.index(name)) + except ValueError: + raise ValueError('"%s" does not match the name of any column in this table.' 
% name) + + if self._limit: + sample_rows = rows[:self._limit] + elif self._limit == 0: + text = Text() + return tuple([text] * num_columns) + else: + sample_rows = rows + + for row in sample_rows: + for i in range(num_columns): + if i in force_indices: + continue + + h = hypotheses[i] + + if len(h) == 1: + continue + + for column_type in copy(h): + if len(row) > i and not column_type.test(row[i]): + h.remove(column_type) + + column_types = [] + + for i in range(num_columns): + if i in force_indices: + column_types.append(self._force[column_names[i]]) + continue + + h = hypotheses[i] + + # Select in prefer order + for t in self._possible_types: + if t in h: + column_types.append(t) + break + + return tuple(column_types) diff --git a/dbt-env/lib/python3.8/site-packages/agate/utils.py b/dbt-env/lib/python3.8/site-packages/agate/utils.py new file mode 100644 index 0000000..cb22ace --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/utils.py @@ -0,0 +1,318 @@ +#!/usr/bin/env python +# -*- coding: utf8 -*- + +""" +This module contains a collection of utility classes and functions used in +agate. +""" + +from collections import OrderedDict, Sequence +from functools import wraps +import math +import string +import warnings +from slugify import slugify as pslugify +from agate.warns import warn_duplicate_column, warn_unnamed_column + +try: + from cdecimal import Decimal, ROUND_FLOOR, ROUND_CEILING, getcontext +except ImportError: # pragma: no cover + from decimal import Decimal, ROUND_FLOOR, ROUND_CEILING, getcontext + +import six + + +#: Sentinal for use when `None` is an valid argument value +default = object() + + +def memoize(func): + """ + Dead-simple memoize decorator for instance methods that take no arguments. + + This is especially useful since so many of our classes are immutable. + """ + memo = None + + @wraps(func) + def wrapper(self): + if memo is not None: + return memo + + return func(self) + + return wrapper + + +class NullOrder(object): + """ + Dummy object used for sorting in place of None. + + Sorts as "greater than everything but other nulls." + """ + def __lt__(self, other): + return False + + def __gt__(self, other): + if other is None: + return False + + return True + + +class Quantiles(Sequence): + """ + A class representing quantiles (percentiles, quartiles, etc.) for a given + column of Number data. + """ + def __init__(self, quantiles): + self._quantiles = quantiles + + def __getitem__(self, i): + return self._quantiles.__getitem__(i) + + def __iter__(self): + return self._quantiles.__iter__() + + def __len__(self): + return self._quantiles.__len__() + + def __repr__(self): + return repr(self._quantiles) + + def locate(self, value): + """ + Identify which quantile a given value is part of. + """ + i = 0 + + if value < self._quantiles[0]: + raise ValueError('Value is less than minimum quantile value.') + + if value > self._quantiles[-1]: + raise ValueError('Value is greater than maximum quantile value.') + + if value == self._quantiles[-1]: + return Decimal(len(self._quantiles) - 1) + + while value >= self._quantiles[i + 1]: + i += 1 + + return Decimal(i) + + +def median(data_sorted): + """ + Finds the median value of a given series of values. + + :param data_sorted: + The values to find the median of. Must be sorted. 
+ """ + length = len(data_sorted) + + if length % 2 == 1: + return data_sorted[((length + 1) // 2) - 1] + + half = length // 2 + a = data_sorted[half - 1] + b = data_sorted[half] + + return (a + b) / 2 + + +def max_precision(values): + """ + Given a series of values (such as a :class:`.Column`) returns the most + significant decimal places present in any value. + + :param values: + The values to analyze. + """ + max_whole_places = 1 + max_decimal_places = 0 + precision = getcontext().prec + + for value in values: + if value is None or math.isnan(value): + continue + + sign, digits, exponent = value.normalize().as_tuple() + + exponent_places = exponent * -1 + whole_places = len(digits) - exponent_places + + if whole_places > max_whole_places: + max_whole_places = whole_places + + if exponent_places > max_decimal_places: + max_decimal_places = exponent_places + + # In Python 2 it was possible for the total digits to exceed the + # available context precision. This ensures that can't happen. See #412 + if max_whole_places + max_decimal_places > precision: # pragma: no cover + max_decimal_places = precision - max_whole_places + + return max_decimal_places + + +def make_number_formatter(decimal_places, add_ellipsis=False): + """ + Given a number of decimal places creates a formatting string that will + display numbers with that precision. + + :param decimal_places: + The number of decimal places + :param add_ellipsis: + Optionally add an ellipsis symbol at the end of a number + """ + fraction = u'0' * decimal_places + ellipsis = u'…' if add_ellipsis else u'' + return u''.join([u'#,##0.', fraction, ellipsis, u';-#,##0.', fraction, ellipsis]) + + +def round_limits(minimum, maximum): + """ + Rounds a pair of minimum and maximum values to form reasonable "round" + values suitable for use as axis minimum and maximum values. + + Values are rounded "out": up for maximum and down for minimum, and "off": + to one higher than the first significant digit shared by both. + + See unit tests for examples. + """ + min_bits = minimum.normalize().as_tuple() + max_bits = maximum.normalize().as_tuple() + + max_digits = max( + len(min_bits.digits) + min_bits.exponent, + len(max_bits.digits) + max_bits.exponent + ) + + # Whole number rounding + if max_digits > 0: + multiplier = Decimal('10') ** (max_digits - 1) + + min_fraction = (minimum / multiplier).to_integral_value(rounding=ROUND_FLOOR) + max_fraction = (maximum / multiplier).to_integral_value(rounding=ROUND_CEILING) + + return ( + min_fraction * multiplier, + max_fraction * multiplier + ) + + max_exponent = max(min_bits.exponent, max_bits.exponent) + + # Fractional rounding + q = Decimal('10') ** (max_exponent + 1) + + return ( + minimum.quantize(q, rounding=ROUND_FLOOR).normalize(), + maximum.quantize(q, rounding=ROUND_CEILING).normalize() + ) + + +def letter_name(index): + """ + Given a column index, assign a "letter" column name equivalent to + Excel. For example, index ``4`` would return ``E``. + Index ``30`` would return ``EE``. + """ + letters = string.ascii_lowercase + count = len(letters) + + return letters[index % count] * ((index // count) + 1) + + +def parse_object(obj, path=''): + """ + Recursively parse JSON-like Python objects as a dictionary of paths/keys + and values. + + Inspired by JSONPipe (https://github.com/dvxhouse/jsonpipe). 
+ """ + if isinstance(obj, dict): + iterator = obj.items() + elif isinstance(obj, (list, tuple)): + iterator = enumerate(obj) + else: + return {path.strip('/'): obj} + + d = OrderedDict() + + for key, value in iterator: + key = six.text_type(key) + d.update(parse_object(value, path + key + '/')) + + return d + + +def issequence(obj): + """ + Returns :code:`True` if the given object is an instance of + :class:`.Sequence` that is not also a string. + """ + return isinstance(obj, Sequence) and not isinstance(obj, six.string_types) + + +def deduplicate(values, column_names=False, separator='_'): + """ + Append a unique identifer to duplicate strings in a given sequence of + strings. Identifers are an underscore followed by the occurance number of + the specific string. + + ['abc', 'abc', 'cde', 'abc'] -> ['abc', 'abc_2', 'cde', 'abc_3'] + + :param column_names: + If True, values are treated as column names. Warnings will be thrown + if column names are None or duplicates. None values will be replaced with + letter indices. + """ + final_values = [] + + for i, value in enumerate(values): + if column_names: + if not value: + new_value = letter_name(i) + warn_unnamed_column(i, new_value) + elif isinstance(value, six.string_types): + new_value = value + else: + raise ValueError('Column names must be strings or None.') + else: + new_value = value + + final_value = new_value + duplicates = 0 + + while final_value in final_values: + final_value = new_value + separator + str(duplicates + 2) + duplicates += 1 + + if column_names and duplicates > 0: + warn_duplicate_column(new_value, final_value) + + final_values.append(final_value) + + return tuple(final_values) + + +def slugify(values, ensure_unique=False, **kwargs): + """ + Given a sequence of strings, returns a standardized version of the sequence. + If ``ensure_unique`` is True, any duplicate strings will be appended with + a unique identifier. + + agate uses an underscore as a default separator but this can be changed with + kwargs. + + Any kwargs will be passed to the slugify method in python-slugify. See: + https://github.com/un33k/python-slugify + """ + slug_args = {'separator': '_'} + slug_args.update(kwargs) + + if ensure_unique: + new_values = tuple(pslugify(value, **slug_args) for value in values) + return deduplicate(new_values, separator=slug_args['separator']) + else: + return tuple(pslugify(value, **slug_args) for value in values) diff --git a/dbt-env/lib/python3.8/site-packages/agate/warns.py b/dbt-env/lib/python3.8/site-packages/agate/warns.py new file mode 100644 index 0000000..1106f7f --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/agate/warns.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +import warnings + + +class NullCalculationWarning(RuntimeWarning): # pragma: no cover + """ + Warning raised if a calculation which can not logically + account for null values is performed on a :class:`.Column` containing + nulls. + """ + pass + + +def warn_null_calculation(operation, column): + warnings.warn('Column "%s" contains nulls. These will be excluded from %s calculation.' % ( + column.name, + operation.__class__.__name__ + ), NullCalculationWarning, stacklevel=2) + + +class DuplicateColumnWarning(RuntimeWarning): # pragma: no cover + """ + Warning raised if multiple columns with the same name are added to a new + :class:`.Table`. + """ + pass + + +def warn_duplicate_column(column_name, column_rename): + warnings.warn('Column name "%s" already exists in Table. Column will be renamed to "%s".' 
% ( + column_name, + column_rename + ), DuplicateColumnWarning, stacklevel=2) + + +class UnnamedColumnWarning(RuntimeWarning): # pragma: no cover + """ + Warning raised when a column has no name and an a programmatically generated + name is used. + """ + pass + + +def warn_unnamed_column(column_id, new_column_name): + warnings.warn('Column %i has no name. Using "%s".' % ( + column_id, + new_column_name + ), UnnamedColumnWarning, stacklevel=2) diff --git a/dbt-env/lib/python3.8/site-packages/attr/__init__.py b/dbt-env/lib/python3.8/site-packages/attr/__init__.py new file mode 100644 index 0000000..f95c96d --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/__init__.py @@ -0,0 +1,80 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import sys + +from functools import partial + +from . import converters, exceptions, filters, setters, validators +from ._cmp import cmp_using +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types +from ._make import ( + NOTHING, + Attribute, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, +) +from ._version_info import VersionInfo + + +__version__ = "21.4.0" +__version_info__ = VersionInfo._from_version_string(__version__) + +__title__ = "attrs" +__description__ = "Classes Without Boilerplate" +__url__ = "https://www.attrs.org/" +__uri__ = __url__ +__doc__ = __description__ + " <" + __uri__ + ">" + +__author__ = "Hynek Schlawack" +__email__ = "hs@ox.cx" + +__license__ = "MIT" +__copyright__ = "Copyright (c) 2015 Hynek Schlawack" + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + +__all__ = [ + "Attribute", + "Factory", + "NOTHING", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "cmp_using", + "converters", + "evolve", + "exceptions", + "fields", + "fields_dict", + "filters", + "get_run_validators", + "has", + "ib", + "make_class", + "resolve_types", + "s", + "set_run_validators", + "setters", + "validate", + "validators", +] + +if sys.version_info[:2] >= (3, 6): + from ._next_gen import define, field, frozen, mutable # noqa: F401 + + __all__.extend(("define", "field", "frozen", "mutable")) diff --git a/dbt-env/lib/python3.8/site-packages/attr/__init__.pyi b/dbt-env/lib/python3.8/site-packages/attr/__init__.pyi new file mode 100644 index 0000000..c0a2126 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/__init__.pyi @@ -0,0 +1,484 @@ +import sys + +from typing import ( + Any, + Callable, + Dict, + Generic, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +# `import X as X` is required to make these public +from . import converters as converters +from . import exceptions as exceptions +from . import filters as filters +from . import setters as setters +from . 
import validators as validators +from ._version_info import VersionInfo + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_EqOrderType = Union[bool, Callable[[Any], Any]] +_ValidatorType = Callable[[Any, Attribute[_T], _T], Any] +_ConverterType = Callable[[Any], Any] +_FilterType = Callable[[Attribute[_T], _T], bool] +_ReprType = Callable[[Any], str] +_ReprArgType = Union[bool, _ReprType] +_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any] +_OnSetAttrArgType = Union[ + _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType +] +_FieldTransformer = Callable[ + [type, List[Attribute[Any]]], List[Attribute[Any]] +] +_CompareWithType = Callable[[Any, Any], bool] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] + +# _make -- + +NOTHING: object + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. +if sys.version_info >= (3, 8): + from typing import Literal + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Callable[[Any], _T], + takes_self: Literal[True], + ) -> _T: ... + @overload + def Factory( + factory: Callable[[], _T], + takes_self: Literal[False], + ) -> _T: ... + +else: + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., + ) -> _T: ... + +# Static type inference support via __dataclass_transform__ implemented as per: +# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md +# This annotation must be applied to all overloads of "define" and "attrs" +# +# NOTE: This is a typing construct and does not exist at runtime. Extensions +# wrapping attrs decorators should declare a separate __dataclass_transform__ +# signature in the extension module using the specification linked above to +# provide pyright support. +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), +) -> Callable[[_T], _T]: ... + +class Attribute(Generic[_T]): + name: str + default: Optional[_T] + validator: Optional[_ValidatorType[_T]] + repr: _ReprArgType + cmp: _EqOrderType + eq: _EqOrderType + order: _EqOrderType + hash: Optional[bool] + init: bool + converter: Optional[_ConverterType] + metadata: Dict[Any, Any] + type: Optional[Type[_T]] + kw_only: bool + on_setattr: _OnSetAttrType + def evolve(self, **changes: Any) -> "Attribute[Any]": ... + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. 
attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments +# returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def attrib( + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def attrib( + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: object = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. 
+@overload +def field( + *, + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def field( + *, + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def field( + *, + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: _C, + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: None = ..., + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... 
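# ---------------------------------------------------------------------------
# Editor's illustrative aside (not part of the committed __init__.pyi): a
# minimal sketch of the classic attrs API that the attrs()/attrib() overloads
# above describe. The Point class is hypothetical; behavior assumes the
# attrs 21.4.0 release pinned in this environment.
import attr

@attr.s(frozen=True, slots=True)            # class decorator typed by attrs() above
class Point:
    x = attr.ib(type=int)                   # attrib() with an explicit type
    y = attr.ib(default=0, validator=attr.validators.instance_of(int))

p = Point(1, 2)                             # Point(x=1, y=2)
# p.x = 5 would raise attr.exceptions.FrozenInstanceError because frozen=True.
# ---------------------------------------------------------------------------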
+@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: _C, + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: None = ..., + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +mutable = define +frozen = define # they differ only in their defaults + +# TODO: add support for returning NamedTuple from the mypy plugin +class _Fields(Tuple[Attribute[Any], ...]): + def __getattr__(self, name: str) -> Attribute[Any]: ... + +def fields(cls: type) -> _Fields: ... +def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ... +def validate(inst: Any) -> None: ... +def resolve_types( + cls: _C, + globalns: Optional[Dict[str, Any]] = ..., + localns: Optional[Dict[str, Any]] = ..., + attribs: Optional[List[Attribute[Any]]] = ..., +) -> _C: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], + bases: Tuple[type, ...] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + collect_by_mro: bool = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +# XXX: remember to fix attrs.asdict/astuple too! +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: Optional[bool] = ..., +) -> Dict[str, Any]: ... 
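# ---------------------------------------------------------------------------
# Editor's illustrative aside (not part of the committed __init__.pyi): runtime
# behavior of the serialization helpers stubbed here, reusing a hypothetical
# Point class; assumes attrs 21.4.0.
import attr

@attr.s
class Point:
    x = attr.ib()
    y = attr.ib()

p = Point(1, 2)
attr.asdict(p)         # {'x': 1, 'y': 2}
attr.astuple(p)        # (1, 2)
attr.evolve(p, y=5)    # Point(x=1, y=5) -- returns a new instance, p is unchanged
attr.has(Point)        # True
# ---------------------------------------------------------------------------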
+ +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... +def has(cls: type) -> bool: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... + +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/dbt-env/lib/python3.8/site-packages/attr/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..3f820f8 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_cmp.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_cmp.cpython-38.pyc new file mode 100644 index 0000000..2af8bb3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_cmp.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_compat.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_compat.cpython-38.pyc new file mode 100644 index 0000000..c63a327 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_compat.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_config.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_config.cpython-38.pyc new file mode 100644 index 0000000..d65e323 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_config.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_funcs.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_funcs.cpython-38.pyc new file mode 100644 index 0000000..e006600 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_funcs.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_make.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_make.cpython-38.pyc new file mode 100644 index 0000000..7fd5e28 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_make.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_next_gen.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_next_gen.cpython-38.pyc new file mode 100644 index 0000000..4c6983a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_next_gen.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_version_info.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_version_info.cpython-38.pyc new file mode 100644 index 0000000..03ee651 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/_version_info.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attr/__pycache__/converters.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/converters.cpython-38.pyc new file mode 100644 index 0000000..a550b21 Binary files /dev/null and 
b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/converters.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attr/__pycache__/exceptions.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000..92189b6 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/exceptions.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attr/__pycache__/filters.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/filters.cpython-38.pyc new file mode 100644 index 0000000..c4316b8 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/filters.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attr/__pycache__/setters.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/setters.cpython-38.pyc new file mode 100644 index 0000000..76b8779 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/setters.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attr/__pycache__/validators.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/validators.cpython-38.pyc new file mode 100644 index 0000000..d10c0b8 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attr/__pycache__/validators.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attr/_cmp.py b/dbt-env/lib/python3.8/site-packages/attr/_cmp.py new file mode 100644 index 0000000..6cffa4d --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/_cmp.py @@ -0,0 +1,154 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import functools + +from ._compat import new_class +from ._make import _make_ne + + +_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} + + +def cmp_using( + eq=None, + lt=None, + le=None, + gt=None, + ge=None, + require_same_type=True, + class_name="Comparable", +): + """ + Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and + ``cmp`` arguments to customize field comparison. + + The resulting class will have a full set of ordering methods if + at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided. + + :param Optional[callable] eq: `callable` used to evaluate equality + of two objects. + :param Optional[callable] lt: `callable` used to evaluate whether + one object is less than another object. + :param Optional[callable] le: `callable` used to evaluate whether + one object is less than or equal to another object. + :param Optional[callable] gt: `callable` used to evaluate whether + one object is greater than another object. + :param Optional[callable] ge: `callable` used to evaluate whether + one object is greater than or equal to another object. + + :param bool require_same_type: When `True`, equality and ordering methods + will return `NotImplemented` if objects are not of the same type. + + :param Optional[str] class_name: Name of class. Defaults to 'Comparable'. + + See `comparison` for more details. + + .. versionadded:: 21.1.0 + """ + + body = { + "__slots__": ["value"], + "__init__": _make_init(), + "_requirements": [], + "_is_comparable_to": _is_comparable_to, + } + + # Add operations. 
+ num_order_functions = 0 + has_eq_function = False + + if eq is not None: + has_eq_function = True + body["__eq__"] = _make_operator("eq", eq) + body["__ne__"] = _make_ne() + + if lt is not None: + num_order_functions += 1 + body["__lt__"] = _make_operator("lt", lt) + + if le is not None: + num_order_functions += 1 + body["__le__"] = _make_operator("le", le) + + if gt is not None: + num_order_functions += 1 + body["__gt__"] = _make_operator("gt", gt) + + if ge is not None: + num_order_functions += 1 + body["__ge__"] = _make_operator("ge", ge) + + type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body)) + + # Add same type requirement. + if require_same_type: + type_._requirements.append(_check_same_type) + + # Add total ordering if at least one operation was defined. + if 0 < num_order_functions < 4: + if not has_eq_function: + # functools.total_ordering requires __eq__ to be defined, + # so raise early error here to keep a nice stack. + raise ValueError( + "eq must be define is order to complete ordering from " + "lt, le, gt, ge." + ) + type_ = functools.total_ordering(type_) + + return type_ + + +def _make_init(): + """ + Create __init__ method. + """ + + def __init__(self, value): + """ + Initialize object with *value*. + """ + self.value = value + + return __init__ + + +def _make_operator(name, func): + """ + Create operator method. + """ + + def method(self, other): + if not self._is_comparable_to(other): + return NotImplemented + + result = func(self.value, other.value) + if result is NotImplemented: + return NotImplemented + + return result + + method.__name__ = "__%s__" % (name,) + method.__doc__ = "Return a %s b. Computed by attrs." % ( + _operation_names[name], + ) + + return method + + +def _is_comparable_to(self, other): + """ + Check whether `other` is comparable to `self`. + """ + for func in self._requirements: + if not func(self, other): + return False + return True + + +def _check_same_type(self, other): + """ + Return True if *self* and *other* are of the same type, False otherwise. + """ + return other.value.__class__ is self.value.__class__ diff --git a/dbt-env/lib/python3.8/site-packages/attr/_cmp.pyi b/dbt-env/lib/python3.8/site-packages/attr/_cmp.pyi new file mode 100644 index 0000000..e71aaff --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/_cmp.pyi @@ -0,0 +1,13 @@ +from typing import Type + +from . import _CompareWithType + +def cmp_using( + eq: Optional[_CompareWithType], + lt: Optional[_CompareWithType], + le: Optional[_CompareWithType], + gt: Optional[_CompareWithType], + ge: Optional[_CompareWithType], + require_same_type: bool, + class_name: str, +) -> Type: ... 
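The _cmp.py module added above implements attr.cmp_using, which builds a comparator wrapper class from plain callables. As a hedged illustration only (not part of this diff), the sketch below shows one way it is typically wired into a field's eq argument; the Tag class and ci_str name are hypothetical, and behavior assumes attrs 21.4.0.

import attr

# Case-insensitive equality for a single string field.
ci_str = attr.cmp_using(eq=lambda a, b: a.lower() == b.lower(), class_name="CIStr")

@attr.s
class Tag:
    name = attr.ib(eq=ci_str)   # field values are compared through ci_str

assert Tag("Foo") == Tag("foo")   # equal despite differing case

Because only eq was supplied, no ordering methods are generated; supplying eq plus any of lt/le/gt/ge would let functools.total_ordering fill in the rest, as the implementation above notes.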
diff --git a/dbt-env/lib/python3.8/site-packages/attr/_compat.py b/dbt-env/lib/python3.8/site-packages/attr/_compat.py new file mode 100644 index 0000000..dc0cb02 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/_compat.py @@ -0,0 +1,261 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import platform +import sys +import threading +import types +import warnings + + +PY2 = sys.version_info[0] == 2 +PYPY = platform.python_implementation() == "PyPy" +PY36 = sys.version_info[:2] >= (3, 6) +HAS_F_STRINGS = PY36 +PY310 = sys.version_info[:2] >= (3, 10) + + +if PYPY or PY36: + ordered_dict = dict +else: + from collections import OrderedDict + + ordered_dict = OrderedDict + + +if PY2: + from collections import Mapping, Sequence + + from UserDict import IterableUserDict + + # We 'bundle' isclass instead of using inspect as importing inspect is + # fairly expensive (order of 10-15 ms for a modern machine in 2016) + def isclass(klass): + return isinstance(klass, (type, types.ClassType)) + + def new_class(name, bases, kwds, exec_body): + """ + A minimal stub of types.new_class that we need for make_class. + """ + ns = {} + exec_body(ns) + + return type(name, bases, ns) + + # TYPE is used in exceptions, repr(int) is different on Python 2 and 3. + TYPE = "type" + + def iteritems(d): + return d.iteritems() + + # Python 2 is bereft of a read-only dict proxy, so we make one! + class ReadOnlyDict(IterableUserDict): + """ + Best-effort read-only dict wrapper. + """ + + def __setitem__(self, key, val): + # We gently pretend we're a Python 3 mappingproxy. + raise TypeError( + "'mappingproxy' object does not support item assignment" + ) + + def update(self, _): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'update'" + ) + + def __delitem__(self, _): + # We gently pretend we're a Python 3 mappingproxy. + raise TypeError( + "'mappingproxy' object does not support item deletion" + ) + + def clear(self): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'clear'" + ) + + def pop(self, key, default=None): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'pop'" + ) + + def popitem(self): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'popitem'" + ) + + def setdefault(self, key, default=None): + # We gently pretend we're a Python 3 mappingproxy. + raise AttributeError( + "'mappingproxy' object has no attribute 'setdefault'" + ) + + def __repr__(self): + # Override to be identical to the Python 3 version. + return "mappingproxy(" + repr(self.data) + ")" + + def metadata_proxy(d): + res = ReadOnlyDict() + res.data.update(d) # We blocked update, so we have to do it like this. + return res + + def just_warn(*args, **kw): # pragma: no cover + """ + We only warn on Python 3 because we are not aware of any concrete + consequences of not setting the cell on Python 2. + """ + +else: # Python 3 and later. + from collections.abc import Mapping, Sequence # noqa + + def just_warn(*args, **kw): + """ + We only warn on Python 3 because we are not aware of any concrete + consequences of not setting the cell on Python 2. + """ + warnings.warn( + "Running interpreter doesn't sufficiently support code object " + "introspection. 
Some features like bare super() or accessing " + "__class__ will not work with slotted classes.", + RuntimeWarning, + stacklevel=2, + ) + + def isclass(klass): + return isinstance(klass, type) + + TYPE = "class" + + def iteritems(d): + return d.items() + + new_class = types.new_class + + def metadata_proxy(d): + return types.MappingProxyType(dict(d)) + + +def make_set_closure_cell(): + """Return a function of two arguments (cell, value) which sets + the value stored in the closure cell `cell` to `value`. + """ + # pypy makes this easy. (It also supports the logic below, but + # why not do the easy/fast thing?) + if PYPY: + + def set_closure_cell(cell, value): + cell.__setstate__((value,)) + + return set_closure_cell + + # Otherwise gotta do it the hard way. + + # Create a function that will set its first cellvar to `value`. + def set_first_cellvar_to(value): + x = value + return + + # This function will be eliminated as dead code, but + # not before its reference to `x` forces `x` to be + # represented as a closure cell rather than a local. + def force_x_to_be_a_cell(): # pragma: no cover + return x + + try: + # Extract the code object and make sure our assumptions about + # the closure behavior are correct. + if PY2: + co = set_first_cellvar_to.func_code + else: + co = set_first_cellvar_to.__code__ + if co.co_cellvars != ("x",) or co.co_freevars != (): + raise AssertionError # pragma: no cover + + # Convert this code object to a code object that sets the + # function's first _freevar_ (not cellvar) to the argument. + if sys.version_info >= (3, 8): + # CPython 3.8+ has an incompatible CodeType signature + # (added a posonlyargcount argument) but also added + # CodeType.replace() to do this without counting parameters. + set_first_freevar_code = co.replace( + co_cellvars=co.co_freevars, co_freevars=co.co_cellvars + ) + else: + args = [co.co_argcount] + if not PY2: + args.append(co.co_kwonlyargcount) + args.extend( + [ + co.co_nlocals, + co.co_stacksize, + co.co_flags, + co.co_code, + co.co_consts, + co.co_names, + co.co_varnames, + co.co_filename, + co.co_name, + co.co_firstlineno, + co.co_lnotab, + # These two arguments are reversed: + co.co_cellvars, + co.co_freevars, + ] + ) + set_first_freevar_code = types.CodeType(*args) + + def set_closure_cell(cell, value): + # Create a function using the set_first_freevar_code, + # whose first closure cell is `cell`. Calling it will + # change the value of that cell. + setter = types.FunctionType( + set_first_freevar_code, {}, "setter", (), (cell,) + ) + # And call it to set the cell. + setter(value) + + # Make sure it works on this interpreter: + def make_func_with_cell(): + x = None + + def func(): + return x # pragma: no cover + + return func + + if PY2: + cell = make_func_with_cell().func_closure[0] + else: + cell = make_func_with_cell().__closure__[0] + set_closure_cell(cell, 100) + if cell.cell_contents != 100: + raise AssertionError # pragma: no cover + + except Exception: + return just_warn + else: + return set_closure_cell + + +set_closure_cell = make_set_closure_cell() + +# Thread-local global to track attrs instances which are already being repr'd. +# This is needed because there is no other (thread-safe) way to pass info +# about the instances that are already being repr'd through the call stack +# in order to ensure we don't perform infinite recursion. +# +# For instance, if an instance contains a dict which contains that instance, +# we need to know that we're already repr'ing the outside instance from within +# the dict's repr() call. 
+# +# This lives here rather than in _make.py so that the functions in _make.py +# don't have a direct reference to the thread-local in their globals dict. +# If they have such a reference, it breaks cloudpickle. +repr_context = threading.local() diff --git a/dbt-env/lib/python3.8/site-packages/attr/_config.py b/dbt-env/lib/python3.8/site-packages/attr/_config.py new file mode 100644 index 0000000..fc9be29 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/_config.py @@ -0,0 +1,33 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. + """ + if not isinstance(run, bool): + raise TypeError("'run' must be bool.") + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. + """ + return _run_validators diff --git a/dbt-env/lib/python3.8/site-packages/attr/_funcs.py b/dbt-env/lib/python3.8/site-packages/attr/_funcs.py new file mode 100644 index 0000000..4c90085 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/_funcs.py @@ -0,0 +1,422 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import copy + +from ._compat import iteritems +from ._make import NOTHING, _obj_setattr, fields +from .exceptions import AttrsAttributeNotFoundError + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, + value_serializer=None, +): + """ + Return the ``attrs`` attribute values of *inst* as a dict. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable dict_factory: A callable to produce dictionaries from. For + example, to produce ordered dictionaries instead of normal Python + dictionaries, pass in ``collections.OrderedDict``. + :param bool retain_collection_types: Do not convert to ``list`` when + encountering an attribute whose type is ``tuple`` or ``set``. Only + meaningful if ``recurse`` is ``True``. + :param Optional[callable] value_serializer: A hook that is called for every + attribute or dict key/value. It receives the current instance, field + and value and must return the (updated) value. The hook is run *after* + the optional *filter* has been applied. + + :rtype: return type of *dict_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.0.0 *dict_factory* + .. versionadded:: 16.1.0 *retain_collection_types* + .. versionadded:: 20.3.0 *value_serializer* + .. versionadded:: 21.3.0 If a dict has a collection for a key, it is + serialized as a tuple. 
+ """ + attrs = fields(inst.__class__) + rv = dict_factory() + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + + if value_serializer is not None: + v = value_serializer(inst, a, v) + + if recurse is True: + if has(v.__class__): + rv[a.name] = asdict( + v, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain_collection_types is True else list + rv[a.name] = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in v + ] + ) + elif isinstance(v, dict): + df = dict_factory + rv[a.name] = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in iteritems(v) + ) + else: + rv[a.name] = v + else: + rv[a.name] = v + return rv + + +def _asdict_anything( + val, + is_key, + filter, + dict_factory, + retain_collection_types, + value_serializer, +): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + if getattr(val.__class__, "__attrs_attrs__", None) is not None: + # Attrs class. + rv = asdict( + val, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(val, (tuple, list, set, frozenset)): + if retain_collection_types is True: + cf = val.__class__ + elif is_key: + cf = tuple + else: + cf = list + + rv = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in iteritems(val) + ) + else: + rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the ``attrs`` attribute values of *inst* as a tuple. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable tuple_factory: A callable to produce tuples from. For + example, to produce lists instead of tuples. 
+ :param bool retain_collection_types: Do not convert to ``list`` + or ``dict`` when encountering an attribute which type is + ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is + ``True``. + + :rtype: return type of *tuple_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.2.0 + """ + attrs = fields(inst.__class__) + rv = [] + retain = retain_collection_types # Very long. :/ + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + if recurse is True: + if has(v.__class__): + rv.append( + astuple( + v, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain is True else list + rv.append( + cf( + [ + astuple( + j, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(j.__class__) + else j + for j in v + ] + ) + ) + elif isinstance(v, dict): + df = v.__class__ if retain is True else dict + rv.append( + df( + ( + astuple( + kk, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(kk.__class__) + else kk, + astuple( + vv, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(vv.__class__) + else vv, + ) + for kk, vv in iteritems(v) + ) + ) + else: + rv.append(v) + else: + rv.append(v) + + return rv if tuple_factory is list else tuple_factory(rv) + + +def has(cls): + """ + Check whether *cls* is a class with ``attrs`` attributes. + + :param type cls: Class to introspect. + :raise TypeError: If *cls* is not a class. + + :rtype: bool + """ + return getattr(cls, "__attrs_attrs__", None) is not None + + +def assoc(inst, **changes): + """ + Copy *inst* and apply *changes*. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't + be found on *cls*. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. deprecated:: 17.1.0 + Use `attrs.evolve` instead if you can. + This function will not be removed du to the slightly different approach + compared to `attrs.evolve`. + """ + import warnings + + warnings.warn( + "assoc is deprecated and will be removed after 2018/01.", + DeprecationWarning, + stacklevel=2, + ) + new = copy.copy(inst) + attrs = fields(inst.__class__) + for k, v in iteritems(changes): + a = getattr(attrs, k, NOTHING) + if a is NOTHING: + raise AttrsAttributeNotFoundError( + "{k} is not an attrs attribute on {cl}.".format( + k=k, cl=new.__class__ + ) + ) + _obj_setattr(new, k, v) + return new + + +def evolve(inst, **changes): + """ + Create a new instance, based on *inst* with *changes* applied. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise TypeError: If *attr_name* couldn't be found in the class + ``__init__``. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 17.1.0 + """ + cls = inst.__class__ + attrs = fields(cls) + for a in attrs: + if not a.init: + continue + attr_name = a.name # To deal with private attributes. 
+ init_name = attr_name if attr_name[0] != "_" else attr_name[1:] + if init_name not in changes: + changes[init_name] = getattr(inst, attr_name) + + return cls(**changes) + + +def resolve_types(cls, globalns=None, localns=None, attribs=None): + """ + Resolve any strings and forward annotations in type annotations. + + This is only required if you need concrete types in `Attribute`'s *type* + field. In other words, you don't need to resolve your types if you only + use them for static type checking. + + With no arguments, names will be looked up in the module in which the class + was created. If this is not what you want, e.g. if the name only exists + inside a method, you may pass *globalns* or *localns* to specify other + dictionaries in which to look up these names. See the docs of + `typing.get_type_hints` for more details. + + :param type cls: Class to resolve. + :param Optional[dict] globalns: Dictionary containing global variables. + :param Optional[dict] localns: Dictionary containing local variables. + :param Optional[list] attribs: List of attribs for the given class. + This is necessary when calling from inside a ``field_transformer`` + since *cls* is not an ``attrs`` class yet. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class and you didn't pass any attribs. + :raise NameError: If types cannot be resolved because of missing variables. + + :returns: *cls* so you can use this function also as a class decorator. + Please note that you have to apply it **after** `attrs.define`. That + means the decorator has to come in the line **before** `attrs.define`. + + .. versionadded:: 20.1.0 + .. versionadded:: 21.1.0 *attribs* + + """ + # Since calling get_type_hints is expensive we cache whether we've + # done it already. + if getattr(cls, "__attrs_types_resolved__", None) != cls: + import typing + + hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) + for field in fields(cls) if attribs is None else attribs: + if field.name in hints: + # Since fields have been frozen we must work around it. + _obj_setattr(field, "type", hints[field.name]) + # We store the class we resolved so that subclasses know they haven't + # been resolved. + cls.__attrs_types_resolved__ = cls + + # Return the class so you can use it as a decorator too. + return cls diff --git a/dbt-env/lib/python3.8/site-packages/attr/_make.py b/dbt-env/lib/python3.8/site-packages/attr/_make.py new file mode 100644 index 0000000..d46f8a3 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/_make.py @@ -0,0 +1,3173 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +import copy +import inspect +import linecache +import sys +import warnings + +from operator import itemgetter + +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters +from ._compat import ( + HAS_F_STRINGS, + PY2, + PY310, + PYPY, + isclass, + iteritems, + metadata_proxy, + new_class, + ordered_dict, + set_closure_cell, +) +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + PythonTooOldError, + UnannotatedAttributeError, +) + + +if not PY2: + import typing + + +# This is used at least twice, so cache it here. 
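# Illustrative sketch, not part of the vendored attrs sources: evolve() as defined
# above rebuilds an instance through __init__, so converters and validators run again
# on the changed values. The Config class is made up for the example.
import attr

@attr.s(frozen=True)
class Config:
    host = attr.ib()
    port = attr.ib(converter=int)

base = Config("localhost", 5432)
override = attr.evolve(base, port="6543")
assert override == Config("localhost", 6543) and base.port == 5432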
+_obj_setattr = object.__setattr__ +_init_converter_pat = "__attr_converter_%s" +_init_factory_pat = "__attr_factory_{}" +_tuple_property_pat = ( + " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" +) +_classvar_prefixes = ( + "typing.ClassVar", + "t.ClassVar", + "ClassVar", + "typing_extensions.ClassVar", +) +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_hash_cache_field = "_attrs_cached_hash" + +_empty_metadata_singleton = metadata_proxy({}) + +# Unique object for unequivocal getattr() defaults. +_sentinel = object() + +_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate) + + +class _Nothing(object): + """ + Sentinel class to indicate the lack of a value when ``None`` is ambiguous. + + ``_Nothing`` is a singleton. There is only ever one of it. + + .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. + """ + + _singleton = None + + def __new__(cls): + if _Nothing._singleton is None: + _Nothing._singleton = super(_Nothing, cls).__new__(cls) + return _Nothing._singleton + + def __repr__(self): + return "NOTHING" + + def __bool__(self): + return False + + def __len__(self): + return 0 # __bool__ for Python 2 + + +NOTHING = _Nothing() +""" +Sentinel to indicate the lack of a value when ``None`` is ambiguous. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since ``None`` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + if PY2: + # For some reason `type(None)` isn't callable in Python 2, but we don't + # actually need a constructor for None objects, we just need any + # available function that returns None. + def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)): + return _none_constructor, _args + + else: + + def __reduce__(self, _none_constructor=type(None), _args=()): + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, +): + """ + Create a new attribute on a class. + + .. warning:: + + Does *not* do anything unless the class is also decorated with + `attr.s`! + + :param default: A value that is used if an ``attrs``-generated ``__init__`` + is used and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `attrs.Factory`, its callable will be + used to construct a new value (useful for mutable data types like lists + or dicts). + + If a default is not set (or set manually to `attrs.NOTHING`), a value + *must* be supplied when instantiating; otherwise a `TypeError` + will be raised. + + The default can also be set using decorator notation as shown below. + + :type default: Any value + + :param callable factory: Syntactic sugar for + ``default=attr.Factory(factory)``. + + :param validator: `callable` that is called by ``attrs``-generated + ``__init__`` methods after the instance has been initialized. They + receive the initialized instance, the :func:`~attrs.Attribute`, and the + passed value. 
+ + The return value is *not* inspected so the validator has to throw an + exception itself. + + If a `list` is passed, its items are treated as validators and must + all pass. + + Validators can be globally disabled and re-enabled using + `get_run_validators`. + + The validator can also be set using decorator notation as shown below. + + :type validator: `callable` or a `list` of `callable`\\ s. + + :param repr: Include this attribute in the generated ``__repr__`` + method. If ``True``, include the attribute; if ``False``, omit it. By + default, the built-in ``repr()`` function is used. To override how the + attribute value is formatted, pass a ``callable`` that takes a single + value and returns a string. Note that the resulting string is used + as-is, i.e. it will be used directly *instead* of calling ``repr()`` + (the default). + :type repr: a `bool` or a `callable` to use a custom function. + + :param eq: If ``True`` (default), include this attribute in the + generated ``__eq__`` and ``__ne__`` methods that check two instances + for equality. To override how the attribute value is compared, + pass a ``callable`` that takes a single value and returns the value + to be compared. + :type eq: a `bool` or a `callable`. + + :param order: If ``True`` (default), include this attributes in the + generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. + To override how the attribute value is ordered, + pass a ``callable`` that takes a single value and returns the value + to be ordered. + :type order: a `bool` or a `callable`. + + :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the + same value. Must not be mixed with *eq* or *order*. + :type cmp: a `bool` or a `callable`. + + :param Optional[bool] hash: Include this attribute in the generated + ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This + is the correct behavior according the Python spec. Setting this value + to anything else than ``None`` is *discouraged*. + :param bool init: Include this attribute in the generated ``__init__`` + method. It is possible to set this to ``False`` and set a default + value. In that case this attributed is unconditionally initialized + with the specified default value or factory. + :param callable converter: `callable` that is called by + ``attrs``-generated ``__init__`` methods to convert attribute's value + to the desired format. It is given the passed-in value, and the + returned value will be used as the new value of the attribute. The + value is converted before being passed to the validator, if any. + :param metadata: An arbitrary mapping, to be used by third-party + components. See `extending_metadata`. + :param type: The type of the attribute. In Python 3.6 or greater, the + preferred method to specify the type is using a variable annotation + (see `PEP 526 `_). + This argument is provided for backward compatibility. + Regardless of the approach used, the type will be stored on + ``Attribute.type``. + + Please note that ``attrs`` doesn't do anything with this metadata by + itself. You can use it as part of your own code or for + `static type checking `. + :param kw_only: Make this attribute keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param on_setattr: Allows to overwrite the *on_setattr* setting from + `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. 
+ Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this + attribute -- regardless of the setting in `attr.s`. + :type on_setattr: `callable`, or a list of callables, or `None`, or + `attrs.setters.NO_OP` + + .. versionadded:: 15.2.0 *convert* + .. versionadded:: 16.3.0 *metadata* + .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. + .. versionchanged:: 17.1.0 + *hash* is ``None`` and therefore mirrors *eq* by default. + .. versionadded:: 17.3.0 *type* + .. deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated + *convert* to achieve consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed. + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 + .. versionchanged:: 21.1.0 + *eq*, *order*, and *cmp* also accept a custom callable + .. versionchanged:: 21.1.0 *cmp* undeprecated + """ + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq, order, True + ) + + if hash is not None and hash is not True and hash is not False: + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + + if factory is not None: + if default is not NOTHING: + raise ValueError( + "The `default` and `factory` arguments are mutually " + "exclusive." + ) + if not callable(factory): + raise ValueError("The `factory` argument must be a callable.") + default = Factory(factory) + + if metadata is None: + metadata = {} + + # Apply syntactic sugar by auto-wrapping. + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + eq_key=eq_key, + order=order, + order_key=order_key, + on_setattr=on_setattr, + ) + + +def _compile_and_eval(script, globs, locs=None, filename=""): + """ + "Exec" the script with the given global (globs) and local (locs) variables. + """ + bytecode = compile(script, filename, "exec") + eval(bytecode, globs, locs) + + +def _make_method(name, script, filename, globs=None): + """ + Create the method with the script given and return the method object. + """ + locs = {} + if globs is None: + globs = {} + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + else: + filename = "{}-{}>".format(base_filename[:-1], count) + count += 1 + + _compile_and_eval(script, globs, locs, filename) + + return locs[name] + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. 
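# Illustrative sketch, not part of the vendored attrs sources: typical use of the
# attrib()/attr.ib() factory documented above, combining a converter, a validator and
# a factory default. The Job class is made up for the example.
import attr

@attr.s
class Job:
    name = attr.ib(converter=str.strip)
    retries = attr.ib(default=3, validator=attr.validators.instance_of(int))
    tags = attr.ib(factory=list)

job = Job("  nightly  ")
assert (job.name, job.retries, job.tags) == ("nightly", 3, [])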
+ + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = "{}Attributes".format(cls_name) + attr_class_template = [ + "class {}(tuple):".format(attr_class_name), + " __slots__ = ()", + ] + if attr_names: + for i, attr_name in enumerate(attr_names): + attr_class_template.append( + _tuple_property_pat.format(index=i, attr_name=attr_name) + ) + else: + attr_class_template.append(" pass") + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} + _compile_and_eval("\n".join(attr_class_template), globs) + return globs[attr_class_name] + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + annot = str(annot) + + # Annotation can be quoted. + if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): + annot = annot[1:-1] + + return annot.startswith(_classvar_prefixes) + + +def _has_own_attribute(cls, attrib_name): + """ + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + + Requires Python 3. + """ + attr = getattr(cls, attrib_name, _sentinel) + if attr is _sentinel: + return False + + for base_cls in cls.__mro__[1:]: + a = getattr(base_cls, attrib_name, None) + if attr is a: + return False + + return True + + +def _get_annotations(cls): + """ + Get annotations for *cls*. + """ + if _has_own_attribute(cls, "__annotations__"): + return cls.__annotations__ + + return {} + + +def _counter_getter(e): + """ + Key function for sorting to avoid re-creating a lambda for every class. + """ + return e[1].counter + + +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. 
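# Illustrative sketch, not part of the vendored attrs sources: with auto_attribs=True
# the _transform_attrs() collection logic above turns annotated class-body names into
# attributes and treats assigned values as defaults. The Order class is made up.
import attr

@attr.s(auto_attribs=True)
class Order:
    sku: str
    quantity: int = 1

assert Order("abc-123").quantity == 1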
+ for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. + + *collect_by_mro* is True, collect them in the correct MRO order, otherwise + use the old -- incorrect -- order. See #428. + + Return an `_Attributes`. + """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = [(name, ca) for name, ca in iteritems(these)] + + if not isinstance(these, ordered_dict): + ca_list.sort(key=_counter_getter) + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + + if not isinstance(a, _CountingAttr): + if a is NOTHING: + a = attrib() + else: + a = attrib(default=a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if len(unannotated) > 0: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." + ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) + + own_attrs = [ + Attribute.from_counting_attr( + name=attr_name, ca=ca, type=anns.get(attr_name) + ) + for attr_name, ca in ca_list + ] + + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) + + if kw_only: + own_attrs = [a.evolve(kw_only=True) for a in own_attrs] + base_attrs = [a.evolve(kw_only=True) for a in base_attrs] + + attrs = base_attrs + own_attrs + + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + raise ValueError( + "No mandatory attributes allowed after an attribute with a " + "default value or factory. Attribute in question: %r" % (a,) + ) + + if had_default is False and a.default is not NOTHING: + had_default = True + + if field_transformer is not None: + attrs = field_transformer(cls, attrs) + + # Create AttrsClass *after* applying the field_transformer since it may + # add or remove attributes! + attr_names = [a.name for a in attrs] + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) + + +if PYPY: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. 
+ """ + if isinstance(self, BaseException) and name in ( + "__cause__", + "__context__", + ): + BaseException.__setattr__(self, name, value) + return + + raise FrozenInstanceError() + +else: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + raise FrozenInstanceError() + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + raise FrozenInstanceError() + + +class _ClassBuilder(object): + """ + Iteratively build *one* class. + """ + + __slots__ = ( + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_pre_init", + "_has_post_init", + "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_wrote_own_setattr", + "_has_custom_setattr", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + getstate_setstate, + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + field_transformer, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = set(a.name for a in base_attrs) + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._wrote_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + self._wrote_own_setattr = True + elif on_setattr in ( + _ng_default_on_setattr, + setters.validate, + setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + on_setattr == _ng_default_on_setattr + and not (has_validator or has_converter) + ) + or (on_setattr == setters.validate and not has_validator) + or (on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + def __repr__(self): + return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__) + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + return self._create_slots_class() + else: + return self._patch_original_class() + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). 
+ if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _sentinel) is not _sentinel + ): + try: + delattr(cls, name) + except AttributeError: + # This can happen if a base class defines a class + # variable and we want to set an attribute with the + # same name by using only a type annotation. + pass + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. + if not self._wrote_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = object.__setattr__ + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + cd = { + k: v + for k, v in iteritems(self._cls_dict) + if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") + } + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. + if not self._wrote_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = object.__setattr__ + break + + # Traverse the MRO to collect existing slots + # and check for an existing __weakref__. + existing_slots = dict() + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + existing_slots.update( + { + name: getattr(base_cls, name) + for name in getattr(base_cls, "__slots__", []) + } + ) + + base_names = set(self._base_names) + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. + slot_names = [name for name in names if name not in base_names] + # There are slots for attributes from current class + # that are defined in parent classes. + # As their descriptors may be overriden by a child class, + # we collect them here and update the class dict + reused_slots = { + slot: slot_descriptor + for slot, slot_descriptor in iteritems(existing_slots) + if slot in slot_names + } + slot_names = [name for name in slot_names if name not in reused_slots] + cd.update(reused_slots) + if self._cache_hash: + slot_names.append(_hash_cache_field) + cd["__slots__"] = tuple(slot_names) + + qualname = getattr(self._cls, "__qualname__", None) + if qualname is not None: + cd["__qualname__"] = qualname + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # . 
On Python 3, + # if a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in cls.__dict__.values(): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. + # These might need to be rewritten as well. + closure_cells = getattr(item.__func__, "__closure__", None) + elif isinstance(item, property): + # Workaround for property `super()` shortcut (PY3-only). + # There is no universal way for other descriptors. + closure_cells = getattr(item.fget, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. + continue + for cell in closure_cells: + try: + match = cell.cell_contents is self._cls + except ValueError: # ValueError: Cell is empty + pass + else: + if match: + set_closure_cell(cell, cls) + + return cls + + def add_repr(self, ns): + self._cls_dict["__repr__"] = self._add_method_dunders( + _make_repr(self._attrs, ns, self._cls) + ) + return self + + def add_str(self): + repr = self._cls_dict.get("__repr__") + if repr is None: + raise ValueError( + "__str__ can only be generated if a __repr__ exists." + ) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return tuple(getattr(self, name) for name in state_attr_names) + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in zip(state_attr_names, state): + __bound_setattr(name, value) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. 
+ if hash_caching_enabled: + __bound_setattr(_hash_cache_field, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=False, + ) + ) + + return self + + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + + def add_attrs_init(self): + self._cls_dict["__attrs_init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=True, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) + ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! + raise ValueError( + "Can't combine custom __setattr__ with on_setattr hooks." + ) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _obj_setattr(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._wrote_own_setattr = True + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + try: + method.__module__ = self._cls.__module__ + except AttributeError: + pass + + try: + method.__qualname__ = ".".join( + (self._cls.__qualname__, method.__name__) + ) + except AttributeError: + pass + + try: + method.__doc__ = "Method generated by attrs for class %s." % ( + self._cls.__qualname__, + ) + except AttributeError: + pass + + return method + + +_CMP_DEPRECATION = ( + "The usage of `cmp` is deprecated and will be removed on or after " + "2021-06-01. Please use `eq` and `order` instead." +) + + +def _determine_attrs_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + # cmp takes precedence due to bw-compatibility. 
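# Illustrative sketch, not part of the vendored attrs sources: add_setattr() above
# wires per-field on_setattr hooks into a generated __setattr__, e.g. re-running
# validators on plain attribute assignment. The Port class is made up for the example.
import attr

@attr.s(on_setattr=attr.setters.validate)
class Port:
    number = attr.ib(validator=attr.validators.instance_of(int))

p = Port(8080)
try:
    p.number = "not-a-port"
except TypeError:
    pass
else:
    raise AssertionError("the validator should run on assignment")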
+ if cmp is not None: + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, order + + +def _determine_attrib_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + def decide_callable_or_boolean(value): + """ + Decide whether a key function is used. + """ + if callable(value): + value, key = True, value + else: + key = None + return value, key + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + cmp, cmp_key = decide_callable_or_boolean(cmp) + return cmp, cmp_key, cmp, cmp_key + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq, eq_key = default_eq, None + else: + eq, eq_key = decide_callable_or_boolean(eq) + + if order is None: + order, order_key = eq, eq_key + else: + order, order_key = decide_callable_or_boolean(order) + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, eq_key, order, order_key + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + + auto_detect must be False on Python 2. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. + for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + A class decorator that adds `dunder + `_\ -methods according to the + specified attributes using `attr.ib` or the *these* argument. + + :param these: A dictionary of name to `attr.ib` mappings. This is + useful to avoid the definition of your attributes within the class body + because you can't (e.g. if you want to add ``__repr__`` methods to + Django models) or don't want to. + + If *these* is not ``None``, ``attrs`` will *not* search the class body + for attributes and will *not* remove any attributes from it. + + If *these* is an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the attributes inside *these*. Otherwise the order + of the definition of the attributes is used. 
+ + :type these: `dict` of `str` to `attr.ib` + + :param str repr_ns: When using nested classes, there's no way in Python 2 + to automatically detect that. Therefore it's possible to set the + namespace explicitly for a more meaningful ``repr`` output. + :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, + *order*, and *hash* arguments explicitly, assume they are set to + ``True`` **unless any** of the involved methods for one of the + arguments is implemented in the *current* class (i.e. it is *not* + inherited from some base class). + + So for example by implementing ``__eq__`` on a class yourself, + ``attrs`` will deduce ``eq=False`` and will create *neither* + ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible + ``__ne__`` by default, so it *should* be enough to only implement + ``__eq__`` in most cases). + + .. warning:: + + If you prevent ``attrs`` from creating the ordering methods for you + (``order=False``, e.g. by implementing ``__le__``), it becomes + *your* responsibility to make sure its ordering is sound. The best + way is to use the `functools.total_ordering` decorator. + + + Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, + *cmp*, or *hash* overrides whatever *auto_detect* would determine. + + *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises + an `attrs.exceptions.PythonTooOldError`. + + :param bool repr: Create a ``__repr__`` method with a human readable + representation of ``attrs`` attributes.. + :param bool str: Create a ``__str__`` method that is identical to + ``__repr__``. This is usually not necessary except for + `Exception`\ s. + :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__`` + and ``__ne__`` methods that check two instances for equality. + + They compare the instances as if they were tuples of their ``attrs`` + attributes if and only if the types of both classes are *identical*! + :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``, + ``__gt__``, and ``__ge__`` methods that behave like *eq* above and + allow instances to be ordered. If ``None`` (default) mirror value of + *eq*. + :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq* + and *order* to the same value. Must not be mixed with *eq* or *order*. + :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method + is generated according how *eq* and *frozen* are set. + + 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. + 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to + None, marking it unhashable (which it is). + 3. If *eq* is False, ``__hash__`` will be left untouched meaning the + ``__hash__`` method of the base class will be used (if base class is + ``object``, this means it will fall back to id-based hashing.). + + Although not recommended, you can decide for yourself and force + ``attrs`` to create one (e.g. if the class is immutable even though you + didn't freeze it programmatically) by passing ``True`` or not. Both of + these cases are rather special and should be used carefully. + + See our documentation on `hashing`, Python's documentation on + `object.__hash__`, and the `GitHub issue that led to the default \ + behavior `_ for more + details. + :param bool init: Create a ``__init__`` method that initializes the + ``attrs`` attributes. Leading underscores are stripped for the argument + name. If a ``__attrs_pre_init__`` method exists on the class, it will + be called before the class is initialized. 
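# Illustrative sketch, not part of the vendored attrs sources: __attrs_post_init__,
# as described above, runs after the generated __init__ has assigned every field,
# which is handy for derived values. The Rectangle class is made up for the example.
import attr

@attr.s
class Rectangle:
    width = attr.ib()
    height = attr.ib()
    area = attr.ib(init=False, default=None)

    def __attrs_post_init__(self):
        self.area = self.width * self.height

assert Rectangle(3, 4).area == 12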
If a ``__attrs_post_init__`` + method exists on the class, it will be called after the class is fully + initialized. + + If ``init`` is ``False``, an ``__attrs_init__`` method will be + injected instead. This allows you to define a custom ``__init__`` + method that can do pre-init work such as ``super().__init__()``, + and then call ``__attrs_init__()`` and ``__attrs_post_init__()``. + :param bool slots: Create a `slotted class ` that's more + memory-efficient. Slotted classes are generally superior to the default + dict classes, but have some gotchas you should know about, so we + encourage you to read the `glossary entry `. + :param bool frozen: Make instances immutable after initialization. If + someone attempts to modify a frozen instance, + `attr.exceptions.FrozenInstanceError` is raised. + + .. note:: + + 1. This is achieved by installing a custom ``__setattr__`` method + on your class, so you can't implement your own. + + 2. True immutability is impossible in Python. + + 3. This *does* have a minor a runtime performance `impact + ` when initializing new instances. In other words: + ``__init__`` is slightly slower with ``frozen=True``. + + 4. If a class is frozen, you cannot modify ``self`` in + ``__attrs_post_init__`` or a self-written ``__init__``. You can + circumvent that limitation by using + ``object.__setattr__(self, "attribute_name", value)``. + + 5. Subclasses of a frozen class are frozen too. + + :param bool weakref_slot: Make instances weak-referenceable. This has no + effect unless ``slots`` is also enabled. + :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated + attributes (Python 3.6 and later only) from the class body. + + In this case, you **must** annotate every field. If ``attrs`` + encounters a field that is set to an `attr.ib` but lacks a type + annotation, an `attr.exceptions.UnannotatedAttributeError` is + raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't + want to set a type. + + If you assign a value to those attributes (e.g. ``x: int = 42``), that + value becomes the default value like if it were passed using + ``attr.ib(default=42)``. Passing an instance of `attrs.Factory` also + works as expected in most cases (see warning below). + + Attributes annotated as `typing.ClassVar`, and attributes that are + neither annotated nor set to an `attr.ib` are **ignored**. + + .. warning:: + For features that use the attribute name to create decorators (e.g. + `validators `), you still *must* assign `attr.ib` to + them. Otherwise Python will either not find the name or try to use + the default value to call e.g. ``validator`` on it. + + These errors can be quite confusing and probably the most common bug + report on our bug tracker. + + .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/ + :param bool kw_only: Make all attributes keyword-only (Python 3+) + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param bool cache_hash: Ensure that the object's hash code is computed + only once and stored on the object. If this is set to ``True``, + hashing must be either explicitly or implicitly enabled for this + class. If the hash code is cached, avoid any reassignments of + fields involved in hash code computation or mutations of the objects + those fields point to after object creation. If such changes occur, + the behavior of the object's hash code is undefined. 
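A short usage sketch of the *slots*, *frozen*, and *auto_attribs* options documented above (class and field names are illustrative):

import attr

@attr.s(slots=True, frozen=True, auto_attribs=True)
class Point:
    x: int
    y: int = 0

p = Point(1)      # Point(x=1, y=0)
# p.x = 5         # would raise attr.exceptions.FrozenInstanceError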
+ :param bool auto_exc: If the class subclasses `BaseException` + (which implicitly includes any subclass of any exception), the + following happens to behave like a well-behaved Python exceptions + class: + + - the values for *eq*, *order*, and *hash* are ignored and the + instances compare and hash by the instance's ids (N.B. ``attrs`` will + *not* remove existing implementations of ``__hash__`` or the equality + methods. It just won't add own ones.), + - all attributes that are either passed into ``__init__`` or have a + default value are additionally available as a tuple in the ``args`` + attribute, + - the value of *str* is ignored leaving ``__str__`` to base classes. + :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs`` + collects attributes from base classes. The default behavior is + incorrect in certain cases of multiple inheritance. It should be on by + default but is kept off for backward-compatibility. + + See issue `#428 `_ for + more details. + + :param Optional[bool] getstate_setstate: + .. note:: + This is usually only interesting for slotted classes and you should + probably just set *auto_detect* to `True`. + + If `True`, ``__getstate__`` and + ``__setstate__`` are generated and attached to the class. This is + necessary for slotted classes to be pickleable. If left `None`, it's + `True` by default for slotted classes and ``False`` for dict classes. + + If *auto_detect* is `True`, and *getstate_setstate* is left `None`, + and **either** ``__getstate__`` or ``__setstate__`` is detected directly + on the class (i.e. not inherited), it is set to `False` (this is usually + what you want). + + :param on_setattr: A callable that is run whenever the user attempts to set + an attribute (either by assignment like ``i.x = 42`` or by using + `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments + as validators: the instance, the attribute that is being modified, and + the new value. + + If no exception is raised, the attribute is set to the return value of + the callable. + + If a list of callables is passed, they're automatically wrapped in an + `attrs.setters.pipe`. + + :param Optional[callable] field_transformer: + A function that is called with the original class object and all + fields right before ``attrs`` finalizes the class. You can use + this, e.g., to automatically add converters or validators to + fields based on their types. See `transform-fields` for more details. + + :param bool match_args: + If `True` (default), set ``__match_args__`` on the class to support + `PEP 634 `_ (Structural + Pattern Matching). It is a tuple of all positional-only ``__init__`` + parameter names on Python 3.10 and later. Ignored on older Python + versions. + + .. versionadded:: 16.0.0 *slots* + .. versionadded:: 16.1.0 *frozen* + .. versionadded:: 16.3.0 *str* + .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. + .. versionchanged:: 17.1.0 + *hash* supports ``None`` as value which is also the default now. + .. versionadded:: 17.3.0 *auto_attribs* + .. versionchanged:: 18.1.0 + If *these* is passed, no attributes are deleted from the class body. + .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + `DeprecationWarning` if the classes compared are subclasses of + each other. ``__eq`` and ``__ne__`` never tried to compared subclasses + to each other. + .. 
versionchanged:: 19.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider + subclasses comparable anymore. + .. versionadded:: 18.2.0 *kw_only* + .. versionadded:: 18.2.0 *cache_hash* + .. versionadded:: 19.1.0 *auto_exc* + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + .. versionadded:: 20.3.0 *field_transformer* + .. versionchanged:: 21.1.0 + ``init=False`` injects ``__attrs_init__`` + .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 21.3.0 *match_args* + """ + if auto_detect and PY2: + raise PythonTooOldError( + "auto_detect only works on Python 3 and later." + ) + + eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) + hash_ = hash # work around the lack of nonlocal + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + + if getattr(cls, "__class__", None) is None: + raise TypeError("attrs only works with new-style classes.") + + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + raise ValueError("Can't freeze a class with a custom __setattr__.") + + builder = _ClassBuilder( + cls, + these, + slots, + is_frozen, + weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + field_transformer, + ) + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: + builder.add_eq() + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") + ): + builder.add_order() + + builder.add_setattr() + + if ( + hash_ is None + and auto_detect is True + and _has_own_attribute(cls, "__hash__") + ): + hash = False + else: + hash = hash_ + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + elif hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." 
+ ) + builder.make_unhashable() + + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): + builder.add_init() + else: + builder.add_attrs_init() + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " init must be True." + ) + + if ( + PY310 + and match_args + and not _has_own_attribute(cls, "__match_args__") + ): + builder.add_match_args() + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +_attrs = attrs +""" +Internal alias so we can use it in functions that take an argument called +*attrs*. +""" + + +if PY2: + + def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return ( + getattr(cls.__setattr__, "__module__", None) + == _frozen_setattrs.__module__ + and cls.__setattr__.__name__ == _frozen_setattrs.__name__ + ) + +else: + + def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return cls.__setattr__ == _frozen_setattrs + + +def _generate_unique_filename(cls, func_name): + """ + Create a "filename" suitable for a function being generated. + """ + unique_filename = "".format( + func_name, + cls.__module__, + getattr(cls, "__qualname__", cls.__name__), + ) + return unique_filename + + +def _make_hash(cls, attrs, frozen, cache_hash): + attrs = tuple( + a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) + ) + + tab = " " + + unique_filename = _generate_unique_filename(cls, "hash") + type_hash = hash(unique_filename) + + hash_def = "def __hash__(self" + hash_func = "hash((" + closing_braces = "))" + if not cache_hash: + hash_def += "):" + else: + if not PY2: + hash_def += ", *" + + hash_def += ( + ", _cache_wrapper=" + + "__import__('attr._make')._make._CacheHashWrapper):" + ) + hash_func = "_cache_wrapper(" + hash_func + closing_braces += ")" + + method_lines = [hash_def] + + def append_hash_computation_lines(prefix, indent): + """ + Generate the code for actually computing the hash code. + Below this will either be returned directly or used to compute + a value which is then cached, depending on the value of cache_hash + """ + + method_lines.extend( + [ + indent + prefix + hash_func, + indent + " %d," % (type_hash,), + ] + ) + + for a in attrs: + method_lines.append(indent + " self.%s," % a.name) + + method_lines.append(indent + " " + closing_braces) + + if cache_hash: + method_lines.append(tab + "if self.%s is None:" % _hash_cache_field) + if frozen: + append_hash_computation_lines( + "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab * 2 + ")") # close __setattr__ + else: + append_hash_computation_lines( + "self.%s = " % _hash_cache_field, tab * 2 + ) + method_lines.append(tab + "return self.%s" % _hash_cache_field) + else: + append_hash_computation_lines("return ", tab) + + script = "\n".join(method_lines) + return _make_method("__hash__", script, unique_filename) + + +def _add_hash(cls, attrs): + """ + Add a hash method to *cls*. + """ + cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) + return cls + + +def _make_ne(): + """ + Create __ne__ method. + """ + + def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or + return the result negated. 
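A sketch of how the *eq*/*frozen* combination above decides ``__hash__`` (names are illustrative):

import attr

@attr.s(frozen=True)     # eq defaults to True; eq + frozen -> a __hash__ is generated
class FrozenPoint:
    x = attr.ib()

@attr.s()                # eq=True but mutable -> __hash__ is set to None
class MutablePoint:
    x = attr.ib()

hash(FrozenPoint(1))     # works
# hash(MutablePoint(1))  # TypeError: unhashable type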
+ """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + return __ne__ + + +def _make_eq(cls, attrs): + """ + Create __eq__ method for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.eq] + + unique_filename = _generate_unique_filename(cls, "eq") + lines = [ + "def __eq__(self, other):", + " if other.__class__ is not self.__class__:", + " return NotImplemented", + ] + + # We can't just do a big self.x = other.x and... clause due to + # irregularities like nan == nan is false but (nan,) == (nan,) is true. + globs = {} + if attrs: + lines.append(" return (") + others = [" ) == ("] + for a in attrs: + if a.eq_key: + cmp_name = "_%s_key" % (a.name,) + # Add the key function to the global namespace + # of the evaluated function. + globs[cmp_name] = a.eq_key + lines.append( + " %s(self.%s)," + % ( + cmp_name, + a.name, + ) + ) + others.append( + " %s(other.%s)," + % ( + cmp_name, + a.name, + ) + ) + else: + lines.append(" self.%s," % (a.name,)) + others.append(" other.%s," % (a.name,)) + + lines += others + [" )"] + else: + lines.append(" return True") + + script = "\n".join(lines) + + return _make_method("__eq__", script, unique_filename, globs) + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return tuple( + key(value) if key else value + for value, key in ( + (getattr(obj, a.name), a.order_key) for a in attrs + ) + ) + + def __lt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() + + return cls + + +if HAS_F_STRINGS: + + def _make_repr(attrs, ns, cls): + unique_filename = _generate_unique_filename(cls, "repr") + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r + for name, r, _ in attr_names_with_reprs + if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." 
+ name + if i + else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) + + if ns is None: + cls_name_fragment = ( + '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + ) + else: + cls_name_fragment = ns + ".{self.__class__.__name__}" + + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + " return f'%s(%s)'" % (cls_name_fragment, repr_fragment), + " finally:", + " already_repring.remove(id(self))", + ] + + return _make_method( + "__repr__", "\n".join(lines), unique_filename, globs=globs + ) + +else: + + def _make_repr(attrs, ns, _): + """ + Make a repr method that includes relevant *attrs*, adding *ns* to the + full name. + """ + + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, repr if a.repr is True else a.repr) + for a in attrs + if a.repr is not False + ) + + def __repr__(self): + """ + Automatically created by attrs. + """ + try: + already_repring = _compat.repr_context.already_repring + except AttributeError: + already_repring = set() + _compat.repr_context.already_repring = already_repring + + if id(self) in already_repring: + return "..." + real_cls = self.__class__ + if ns is None: + qualname = getattr(real_cls, "__qualname__", None) + if qualname is not None: # pragma: no cover + # This case only happens on Python 3.5 and 3.6. We exclude + # it from coverage, because we don't want to slow down our + # test suite by running them under coverage too for this + # one line. + class_name = qualname.rsplit(">.", 1)[-1] + else: + class_name = real_cls.__name__ + else: + class_name = ns + "." + real_cls.__name__ + + # Since 'self' remains on the stack (i.e.: strongly referenced) + # for the duration of this call, it's safe to depend on id(...) + # stability, and not need to track the instance and therefore + # worry about properties like weakref- or hash-ability. + already_repring.add(id(self)) + try: + result = [class_name, "("] + first = True + for name, attr_repr in attr_names_with_reprs: + if first: + first = False + else: + result.append(", ") + result.extend( + (name, "=", attr_repr(getattr(self, name, NOTHING))) + ) + return "".join(result) + ")" + finally: + already_repring.remove(id(self)) + + return __repr__ + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__repr__ = _make_repr(attrs, ns, cls) + return cls + + +def fields(cls): + """ + Return the tuple of ``attrs`` attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: tuple (with name accessors) of `attrs.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. 
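A small sketch of the ``fields()`` introspection documented above, including the name-based access added in 16.2.0:

import attr

@attr.s
class C:
    x = attr.ib(default=1)
    y = attr.ib(default=2)

attr.fields(C)          # (Attribute(name='x', ...), Attribute(name='y', ...))
attr.fields(C).y.name   # 'y' -- the tuple also allows access by field name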
+ """ + if not isclass(cls): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of ``attrs`` attributes for a class, whose + keys are the attribute names. + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: an ordered dict where keys are attribute names and values are + `attrs.Attribute`\\ s. This will be a `dict` if it's + naturally ordered like on Python 3.6+ or an + :class:`~collections.OrderedDict` otherwise. + + .. versionadded:: 18.1.0 + """ + if not isclass(cls): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError( + "{cls!r} is not an attrs-decorated class.".format(cls=cls) + ) + return ordered_dict(((a.name, a) for a in attrs)) + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + :param inst: Instance of a class with ``attrs`` attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_cls(cls): + return "__slots__" in cls.__dict__ + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. + """ + return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) + + +def _make_init( + cls, + attrs, + pre_init, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + cls_on_setattr, + attrs_init, +): + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = True + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, + ) + if cls.__module__ in sys.modules: + # This makes typing.get_type_hints(CLS.__init__) resolve string types. + globs.update(sys.modules[cls.__module__].__dict__) + + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. + globs["_cached_setattr"] = _obj_setattr + + init = _make_method( + "__attrs_init__" if attrs_init else "__init__", + script, + unique_filename, + globs, + ) + init.__annotations__ = annotations + + return init + + +def _setattr(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*. 
+ """ + return "_setattr('%s', %s)" % (attr_name, value_var) + + +def _setattr_with_converter(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return "_setattr('%s', %s(%s))" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _assign(attr_name, value, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return "self.%s = %s" % (attr_name, value) + + +def _assign_with_converter(attr_name, value_var, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True) + + return "self.%s = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +if PY2: + + def _unpack_kw_only_py2(attr_name, default=None): + """ + Unpack *attr_name* from _kw_only dict. + """ + if default is not None: + arg_default = ", %s" % default + else: + arg_default = "" + return "%s = _kw_only.pop('%s'%s)" % ( + attr_name, + attr_name, + arg_default, + ) + + def _unpack_kw_only_lines_py2(kw_only_args): + """ + Unpack all *kw_only_args* from _kw_only dict and handle errors. + + Given a list of strings "{attr_name}" and "{attr_name}={default}" + generates list of lines of code that pop attrs from _kw_only dict and + raise TypeError similar to builtin if required attr is missing or + extra key is passed. + + >>> print("\n".join(_unpack_kw_only_lines_py2(["a", "b=42"]))) + try: + a = _kw_only.pop('a') + b = _kw_only.pop('b', 42) + except KeyError as _key_error: + raise TypeError( + ... + if _kw_only: + raise TypeError( + ... + """ + lines = ["try:"] + lines.extend( + " " + _unpack_kw_only_py2(*arg.split("=")) + for arg in kw_only_args + ) + lines += """\ +except KeyError as _key_error: + raise TypeError( + '__init__() missing required keyword-only argument: %s' % _key_error + ) +if _kw_only: + raise TypeError( + '__init__() got an unexpected keyword argument %r' + % next(iter(_kw_only)) + ) +""".split( + "\n" + ) + return lines + + +def _attrs_to_init_script( + attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, +): + """ + Return a script of an initializer for *attrs* and a dict of globals. + + The globals are expected by the generated script. + + If *frozen* is True, we cannot set the attributes directly so we use + a cached ``object.__setattr__``. + """ + lines = [] + if pre_init: + lines.append("self.__attrs_pre_init__()") + + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. + # Note _setattr will be used again below if cache_hash is True + "_setattr = _cached_setattr.__get__(self, self.__class__)" + ) + + if frozen is True: + if slots is True: + fmt_setter = _setattr + fmt_setter_with_converter = _setattr_with_converter + else: + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. 
+ # Note _inst_dict will be used again below if cache_hash is True + lines.append("_inst_dict = self.__dict__") + + def fmt_setter(attr_name, value_var, has_on_setattr): + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return "_inst_dict['%s'] = %s" % (attr_name, value_var) + + def fmt_setter_with_converter( + attr_name, value_var, has_on_setattr + ): + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr + ) + + return "_inst_dict['%s'] = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + else: + # Not frozen. + fmt_setter = _assign + fmt_setter_with_converter = _assign_with_converter + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. + # Injecting this into __init__ globals lets us avoid lookups. + names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_cls_on_setattr + ) + arg_name = a.name.lstrip("_") + + has_factory = isinstance(a.default, Factory) + if has_factory and a.default.takes_self: + maybe_self = "self" + else: + maybe_self = "" + + if a.init is False: + if has_factory: + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + "(%s)" % (maybe_self,), + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + "attr_dict['%s'].default" % (attr_name,), + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = "%s=NOTHING" % (arg_name,) + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append("if %s is not NOTHING:" % (arg_name,)) + + init_factory_name = _init_factory_pat.format(a.name) + if a.converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self 
+ ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + if a.init is True: + if a.type is not None and a.converter is None: + annotations[arg_name] = a.type + elif a.converter is not None and not PY2: + # Try to get the type from the converter. + sig = None + try: + sig = inspect.signature(a.converter) + except (ValueError, TypeError): # inspect failed + pass + if sig: + sig_params = list(sig.parameters.values()) + if ( + sig_params + and sig_params[0].annotation + is not inspect.Parameter.empty + ): + annotations[arg_name] = sig_params[0].annotation + + if attrs_to_validate: # we can skip this if there are no validators. + names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name + lines.append( + " %s(self, %s, self.%s)" % (val_name, attr_name, a.name) + ) + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + + if post_init: + lines.append("self.__attrs_post_init__()") + + # because this is set only after __attrs_post_init is called, a crash + # will result if post-init tries to access the hash code. This seemed + # preferable to setting this beforehand, in which case alteration to + # field values during post-init combined with post-init accessing the + # hash code would result in silent bugs. + if cache_hash: + if frozen: + if slots: + # if frozen and slots, then _setattr defined above + init_hash_cache = "_setattr('%s', %s)" + else: + # if frozen and not slots, then _inst_dict defined above + init_hash_cache = "_inst_dict['%s'] = %s" + else: + init_hash_cache = "self.%s = %s" + lines.append(init_hash_cache % (_hash_cache_field, "None")) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join("self." + a.name for a in attrs if a.init) + + lines.append("BaseException.__init__(self, %s)" % (vals,)) + + args = ", ".join(args) + if kw_only_args: + if PY2: + lines = _unpack_kw_only_lines_py2(kw_only_args) + lines + + args += "%s**_kw_only" % (", " if args else "",) # leading comma + else: + args += "%s*, %s" % ( + ", " if args else "", # leading comma + ", ".join(kw_only_args), # kw_only args + ) + return ( + """\ +def {init_name}(self, {args}): + {lines} +""".format( + init_name=("__attrs_init__" if attrs_init else "__init__"), + args=args, + lines="\n ".join(lines) if lines else "pass", + ), + names_for_globals, + annotations, + ) + + +class Attribute(object): + """ + *Read-only* representation of an attribute. + + The class has *all* arguments of `attr.ib` (except for ``factory`` + which is only syntactic sugar for ``default=Factory(...)`` plus the + following: + + - ``name`` (`str`): The name of the attribute. + - ``inherited`` (`bool`): Whether or not that attribute has been inherited + from a base class. + - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables + that are used for comparing and ordering objects by this attribute, + respectively. These are set by passing a callable to `attr.ib`'s ``eq``, + ``order``, or ``cmp`` arguments. 
See also :ref:`comparison customization + `. + + Instances of this class are frequently used for introspection purposes + like: + + - `fields` returns a tuple of them. + - Validators get them passed as the first argument. + - The :ref:`field transformer ` hook receives a list of + them. + + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + .. versionadded:: 21.1.0 *eq_key* and *order_key* + + For the full version history of the fields, see `attr.ib`. + """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + eq_key=None, + order=None, + order_key=None, + on_setattr=None, + ): + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq_key or eq, order_key or order, True + ) + + # Cache this descriptor here to speed things up later. + bound_setattr = _obj_setattr.__get__(self, Attribute) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("eq_key", eq_key) + bound_setattr("order", order) + bound_setattr("order_key", order_key) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + metadata_proxy(metadata) + if metadata + else _empty_metadata_singleton + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + raise ValueError( + "Type annotation and type argument cannot both be present" + ) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "inherited", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + inherited=False, + **inst_dict + ) + + @property + def cmp(self): + """ + Simulate the presence of a cmp attribute and warn. + """ + warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2) + + return self.eq and self.order + + # Don't use attr.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): + """ + Copy *self* and apply *changes*. + + This works similarly to `attr.evolve` but that function does not work + with ``Attribute``. + + It is mainly meant to be used for `transform-fields`. + + .. versionadded:: 20.3.0 + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. 
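A sketch of the ``evolve`` method defined above, which copies an ``Attribute`` with changes (mainly useful from a field transformer):

import attr

@attr.s
class C:
    x = attr.ib()

a = attr.fields(C).x
quiet = a.evolve(repr=False)   # a copy of the Attribute with repr switched off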
+ """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _obj_setattr.__get__(self, Attribute) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + metadata_proxy(value) + if value + else _empty_metadata_singleton, + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], +) + + +class _CountingAttr(object): + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. + """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + "on_setattr", + ) + __attrs_attrs__ = tuple( + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + ) + ) + ( + Attribute( + name="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + eq_key, + order, + order_key, + on_setattr, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.eq_key = eq_key + self.order = order + self.order_key = order_key + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. + + :raises DefaultAlreadySetError: If default has been set before. + + .. versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError() + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +class Factory(object): + """ + Stores a factory callable. 
+ + If passed as the default value to `attrs.field`, the factory is used to + generate a new value. + + :param callable factory: A callable that takes either none or exactly one + mandatory positional argument depending on *takes_self*. + :param bool takes_self: Pass the partially initialized instance that is + being initialized as a positional argument. + + .. versionadded:: 17.1.0 *takes_self* + """ + + __slots__ = ("factory", "takes_self") + + def __init__(self, factory, takes_self=False): + """ + `Factory` is part of the default machinery so if we want a default + value here, we have to implement it ourselves. + """ + self.factory = factory + self.takes_self = takes_self + + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple(getattr(self, name) for name in self.__slots__) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + for name, value in zip(self.__slots__, state): + setattr(self, name, value) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in Factory.__slots__ +] + +Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) + + +def make_class(name, attrs, bases=(object,), **attributes_arguments): + """ + A quick way to create a new class called *name* with *attrs*. + + :param str name: The name for the new class. + + :param attrs: A list of names or a dictionary of mappings of names to + attributes. + + If *attrs* is a list or an ordered dict (`dict` on Python 3.6+, + `collections.OrderedDict` otherwise), the order is deduced from + the order of the names or attributes inside *attrs*. Otherwise the + order of the definition of the attributes is used. + :type attrs: `list` or `dict` + + :param tuple bases: Classes that the new class will subclass. + + :param attributes_arguments: Passed unmodified to `attr.s`. + + :return: A new class with *attrs*. + :rtype: type + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = dict((a, attrib()) for a in attrs) + else: + raise TypeError("attrs argument must be a dict or a list.") + + pre_init = cls_dict.pop("__attrs_pre_init__", None) + post_init = cls_dict.pop("__attrs_post_init__", None) + user_init = cls_dict.pop("__init__", None) + + body = {} + if pre_init is not None: + body["__attrs_pre_init__"] = pre_init + if post_init is not None: + body["__attrs_post_init__"] = post_init + if user_init is not None: + body["__init__"] = user_init + + type_ = new_class(name, bases, {}, lambda ns: ns.update(body)) + + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + except (AttributeError, ValueError): + pass + + # We do it here for proper warnings with meaningful stacklevel. 
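A sketch of the ``make_class`` helper documented above, showing both the list and dict forms:

import attr

# List form: each name becomes a plain attr.ib().
C = attr.make_class("C", ["x", "y"])
C(1, 2)                                   # C(x=1, y=2)

# Dict form; extra keyword arguments are passed through to attr.s().
D = attr.make_class("D", {"x": attr.ib(default=0)}, frozen=True)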
+ cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_attrs_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + return _attrs(these=cls_dict, **attributes_arguments)(type_) + + +# These are required by within this module so we define them here and merely +# import into .validators / .converters. + + +@attrs(slots=True, hash=True) +class _AndValidator(object): + """ + Compose many validators to a single one. + """ + + _validators = attrib() + + def __call__(self, inst, attr, value): + for v in self._validators: + v(inst, attr, value) + + +def and_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators. + + :param callables validators: Arbitrary number of validators. + + .. versionadded:: 17.1.0 + """ + vals = [] + for validator in validators: + vals.extend( + validator._validators + if isinstance(validator, _AndValidator) + else [validator] + ) + + return _AndValidator(tuple(vals)) + + +def pipe(*converters): + """ + A converter that composes multiple converters into one. + + When called on a value, it runs all wrapped converters, returning the + *last* value. + + Type annotations will be inferred from the wrapped converters', if + they have any. + + :param callables converters: Arbitrary number of converters. + + .. versionadded:: 20.1.0 + """ + + def pipe_converter(val): + for converter in converters: + val = converter(val) + + return val + + if not PY2: + if not converters: + # If the converter list is empty, pipe_converter is the identity. + A = typing.TypeVar("A") + pipe_converter.__annotations__ = {"val": A, "return": A} + else: + # Get parameter type. + sig = None + try: + sig = inspect.signature(converters[0]) + except (ValueError, TypeError): # inspect failed + pass + if sig: + params = list(sig.parameters.values()) + if ( + params + and params[0].annotation is not inspect.Parameter.empty + ): + pipe_converter.__annotations__["val"] = params[ + 0 + ].annotation + # Get return type. + sig = None + try: + sig = inspect.signature(converters[-1]) + except (ValueError, TypeError): # inspect failed + pass + if sig and sig.return_annotation is not inspect.Signature().empty: + pipe_converter.__annotations__[ + "return" + ] = sig.return_annotation + + return pipe_converter diff --git a/dbt-env/lib/python3.8/site-packages/attr/_next_gen.py b/dbt-env/lib/python3.8/site-packages/attr/_next_gen.py new file mode 100644 index 0000000..0682536 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/_next_gen.py @@ -0,0 +1,216 @@ +# SPDX-License-Identifier: MIT + +""" +These are Python 3.6+-only and keyword-only APIs that call `attr.s` and +`attr.ib` with different default values. +""" + + +from functools import partial + +from . import setters +from ._funcs import asdict as _asdict +from ._funcs import astuple as _astuple +from ._make import ( + NOTHING, + _frozen_setattrs, + _ng_default_on_setattr, + attrib, + attrs, +) +from .exceptions import UnannotatedAttributeError + + +def define( + maybe_cls=None, + *, + these=None, + repr=None, + hash=None, + init=None, + slots=True, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=None, + kw_only=False, + cache_hash=False, + auto_exc=True, + eq=None, + order=False, + auto_detect=True, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + Define an ``attrs`` class. 
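A sketch of the ``pipe`` converter composition defined above (``and_`` composes validators analogously); the converters module re-exports ``pipe``:

import attr

@attr.s
class Tag:
    # Converters run left to right; the value returned by the last one is stored.
    name = attr.ib(converter=attr.converters.pipe(str.strip, str.lower))

Tag("  Hello ").name   # 'hello'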
+ + Differences to the classic `attr.s` that it uses underneath: + + - Automatically detect whether or not *auto_attribs* should be `True` + (c.f. *auto_attribs* parameter). + - If *frozen* is `False`, run converters and validators when setting an + attribute by default. + - *slots=True* (see :term:`slotted classes` for potentially surprising + behaviors) + - *auto_exc=True* + - *auto_detect=True* + - *order=False* + - *match_args=True* + - Some options that were only relevant on Python 2 or were kept around for + backwards-compatibility have been removed. + + Please note that these are all defaults and you can change them as you + wish. + + :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves + exactly like `attr.s`. If left `None`, `attr.s` will try to guess: + + 1. If any attributes are annotated and no unannotated `attrs.fields`\ s + are found, it assumes *auto_attribs=True*. + 2. Otherwise it assumes *auto_attribs=False* and tries to collect + `attrs.fields`\ s. + + For now, please refer to `attr.s` for the rest of the parameters. + + .. versionadded:: 20.1.0 + .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``. + """ + + def do_it(cls, auto_attribs): + return attrs( + maybe_cls=cls, + these=these, + repr=repr, + hash=hash, + init=init, + slots=slots, + frozen=frozen, + weakref_slot=weakref_slot, + str=str, + auto_attribs=auto_attribs, + kw_only=kw_only, + cache_hash=cache_hash, + auto_exc=auto_exc, + eq=eq, + order=order, + auto_detect=auto_detect, + collect_by_mro=True, + getstate_setstate=getstate_setstate, + on_setattr=on_setattr, + field_transformer=field_transformer, + match_args=match_args, + ) + + def wrap(cls): + """ + Making this a wrapper ensures this code runs during class creation. + + We also ensure that frozen-ness of classes is inherited. + """ + nonlocal frozen, on_setattr + + had_on_setattr = on_setattr not in (None, setters.NO_OP) + + # By default, mutable classes convert & validate on setattr. + if frozen is False and on_setattr is None: + on_setattr = _ng_default_on_setattr + + # However, if we subclass a frozen class, we inherit the immutability + # and disable on_setattr. + for base_cls in cls.__bases__: + if base_cls.__setattr__ is _frozen_setattrs: + if had_on_setattr: + raise ValueError( + "Frozen classes can't use on_setattr " + "(frozen-ness was inherited)." + ) + + on_setattr = setters.NO_OP + break + + if auto_attribs is not None: + return do_it(cls, auto_attribs) + + try: + return do_it(cls, True) + except UnannotatedAttributeError: + return do_it(cls, False) + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +mutable = define +frozen = partial(define, frozen=True, on_setattr=None) + + +def field( + *, + default=NOTHING, + validator=None, + repr=True, + hash=None, + init=True, + metadata=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, +): + """ + Identical to `attr.ib`, except keyword-only and with some arguments + removed. + + .. 
versionadded:: 20.1.0 + """ + return attrib( + default=default, + validator=validator, + repr=repr, + hash=hash, + init=init, + metadata=metadata, + converter=converter, + factory=factory, + kw_only=kw_only, + eq=eq, + order=order, + on_setattr=on_setattr, + ) + + +def asdict(inst, *, recurse=True, filter=None, value_serializer=None): + """ + Same as `attr.asdict`, except that collections types are always retained + and dict is always used as *dict_factory*. + + .. versionadded:: 21.3.0 + """ + return _asdict( + inst=inst, + recurse=recurse, + filter=filter, + value_serializer=value_serializer, + retain_collection_types=True, + ) + + +def astuple(inst, *, recurse=True, filter=None): + """ + Same as `attr.astuple`, except that collections types are always retained + and `tuple` is always used as the *tuple_factory*. + + .. versionadded:: 21.3.0 + """ + return _astuple( + inst=inst, recurse=recurse, filter=filter, retain_collection_types=True + ) diff --git a/dbt-env/lib/python3.8/site-packages/attr/_version_info.py b/dbt-env/lib/python3.8/site-packages/attr/_version_info.py new file mode 100644 index 0000000..cdaeec3 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/_version_info.py @@ -0,0 +1,87 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + +from functools import total_ordering + +from ._funcs import astuple +from ._make import attrib, attrs + + +@total_ordering +@attrs(eq=False, order=False, slots=True, frozen=True) +class VersionInfo(object): + """ + A version object that can be compared to tuple of length 1--4: + + >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) + True + >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) + True + >>> vi = attr.VersionInfo(19, 2, 0, "final") + >>> vi < (19, 1, 1) + False + >>> vi < (19,) + False + >>> vi == (19, 2,) + True + >>> vi == (19, 2, 1) + False + + .. versionadded:: 19.2 + """ + + year = attrib(type=int) + minor = attrib(type=int) + micro = attrib(type=int) + releaselevel = attrib(type=str) + + @classmethod + def _from_version_string(cls, s): + """ + Parse *s* and return a _VersionInfo. + """ + v = s.split(".") + if len(v) == 3: + v.append("final") + + return cls( + year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] + ) + + def _ensure_tuple(self, other): + """ + Ensure *other* is a tuple of a valid length. + + Returns a possibly transformed *other* and ourselves as a tuple of + the same length as *other*. + """ + + if self.__class__ is other.__class__: + other = astuple(other) + + if not isinstance(other, tuple): + raise NotImplementedError + + if not (1 <= len(other) <= 4): + raise NotImplementedError + + return astuple(self)[: len(other)], other + + def __eq__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + return us == them + + def __lt__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't + # have to do anything special with releaselevel for now. + return us < them diff --git a/dbt-env/lib/python3.8/site-packages/attr/_version_info.pyi b/dbt-env/lib/python3.8/site-packages/attr/_version_info.pyi new file mode 100644 index 0000000..45ced08 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/_version_info.pyi @@ -0,0 +1,9 @@ +class VersionInfo: + @property + def year(self) -> int: ... + @property + def minor(self) -> int: ... 
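A sketch of the next-generation ``define``/``field`` API wrapped above, assuming the usual top-level exports ``attr.define`` and ``attr.field``:

import attr

@attr.define                      # slots=True, auto_attribs detected automatically
class User:
    name: str
    tags: list = attr.field(factory=list)

attr.asdict(User("ada"))          # {'name': 'ada', 'tags': []}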
+ @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... diff --git a/dbt-env/lib/python3.8/site-packages/attr/converters.py b/dbt-env/lib/python3.8/site-packages/attr/converters.py new file mode 100644 index 0000000..1fb6c05 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/converters.py @@ -0,0 +1,155 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful converters. +""" + +from __future__ import absolute_import, division, print_function + +from ._compat import PY2 +from ._make import NOTHING, Factory, pipe + + +if not PY2: + import inspect + import typing + + +__all__ = [ + "default_if_none", + "optional", + "pipe", + "to_bool", +] + + +def optional(converter): + """ + A converter that allows an attribute to be optional. An optional attribute + is one which can be set to ``None``. + + Type annotations will be inferred from the wrapped converter's, if it + has any. + + :param callable converter: the converter that is used for non-``None`` + values. + + .. versionadded:: 17.1.0 + """ + + def optional_converter(val): + if val is None: + return None + return converter(val) + + if not PY2: + sig = None + try: + sig = inspect.signature(converter) + except (ValueError, TypeError): # inspect failed + pass + if sig: + params = list(sig.parameters.values()) + if params and params[0].annotation is not inspect.Parameter.empty: + optional_converter.__annotations__["val"] = typing.Optional[ + params[0].annotation + ] + if sig.return_annotation is not inspect.Signature.empty: + optional_converter.__annotations__["return"] = typing.Optional[ + sig.return_annotation + ] + + return optional_converter + + +def default_if_none(default=NOTHING, factory=None): + """ + A converter that allows to replace ``None`` values by *default* or the + result of *factory*. + + :param default: Value to be used if ``None`` is passed. Passing an instance + of `attrs.Factory` is supported, however the ``takes_self`` option + is *not*. + :param callable factory: A callable that takes no parameters whose result + is used if ``None`` is passed. + + :raises TypeError: If **neither** *default* or *factory* is passed. + :raises TypeError: If **both** *default* and *factory* are passed. + :raises ValueError: If an instance of `attrs.Factory` is passed with + ``takes_self=True``. + + .. versionadded:: 18.2.0 + """ + if default is NOTHING and factory is None: + raise TypeError("Must pass either `default` or `factory`.") + + if default is not NOTHING and factory is not None: + raise TypeError( + "Must pass either `default` or `factory` but not both." + ) + + if factory is not None: + default = Factory(factory) + + if isinstance(default, Factory): + if default.takes_self: + raise ValueError( + "`takes_self` is not supported by default_if_none." + ) + + def default_if_none_converter(val): + if val is not None: + return val + + return default.factory() + + else: + + def default_if_none_converter(val): + if val is not None: + return val + + return default + + return default_if_none_converter + + +def to_bool(val): + """ + Convert "boolean" strings (e.g., from env. vars.) to real booleans. + + Values mapping to :code:`True`: + + - :code:`True` + - :code:`"true"` / :code:`"t"` + - :code:`"yes"` / :code:`"y"` + - :code:`"on"` + - :code:`"1"` + - :code:`1` + + Values mapping to :code:`False`: + + - :code:`False` + - :code:`"false"` / :code:`"f"` + - :code:`"no"` / :code:`"n"` + - :code:`"off"` + - :code:`"0"` + - :code:`0` + + :raises ValueError: for any other value. + + .. 
versionadded:: 21.3.0 + """ + if isinstance(val, str): + val = val.lower() + truthy = {True, "true", "t", "yes", "y", "on", "1", 1} + falsy = {False, "false", "f", "no", "n", "off", "0", 0} + try: + if val in truthy: + return True + if val in falsy: + return False + except TypeError: + # Raised when "val" is not hashable (e.g., lists) + pass + raise ValueError("Cannot convert value to bool: {}".format(val)) diff --git a/dbt-env/lib/python3.8/site-packages/attr/converters.pyi b/dbt-env/lib/python3.8/site-packages/attr/converters.pyi new file mode 100644 index 0000000..0f58088 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/converters.pyi @@ -0,0 +1,13 @@ +from typing import Callable, Optional, TypeVar, overload + +from . import _ConverterType + +_T = TypeVar("_T") + +def pipe(*validators: _ConverterType) -> _ConverterType: ... +def optional(converter: _ConverterType) -> _ConverterType: ... +@overload +def default_if_none(default: _T) -> _ConverterType: ... +@overload +def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ... +def to_bool(val: str) -> bool: ... diff --git a/dbt-env/lib/python3.8/site-packages/attr/exceptions.py b/dbt-env/lib/python3.8/site-packages/attr/exceptions.py new file mode 100644 index 0000000..b2f1edc --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/exceptions.py @@ -0,0 +1,94 @@ +# SPDX-License-Identifier: MIT + +from __future__ import absolute_import, division, print_function + + +class FrozenError(AttributeError): + """ + A frozen/immutable instance or attribute have been attempted to be + modified. + + It mirrors the behavior of ``namedtuples`` by using the same error message + and subclassing `AttributeError`. + + .. versionadded:: 20.1.0 + """ + + msg = "can't set attribute" + args = [msg] + + +class FrozenInstanceError(FrozenError): + """ + A frozen instance has been attempted to be modified. + + .. versionadded:: 16.1.0 + """ + + +class FrozenAttributeError(FrozenError): + """ + A frozen attribute has been attempted to be modified. + + .. versionadded:: 20.1.0 + """ + + +class AttrsAttributeNotFoundError(ValueError): + """ + An ``attrs`` function couldn't find an attribute that the user asked for. + + .. versionadded:: 16.2.0 + """ + + +class NotAnAttrsClassError(ValueError): + """ + A non-``attrs`` class has been passed into an ``attrs`` function. + + .. versionadded:: 16.2.0 + """ + + +class DefaultAlreadySetError(RuntimeError): + """ + A default has been set using ``attr.ib()`` and is attempted to be reset + using the decorator. + + .. versionadded:: 17.1.0 + """ + + +class UnannotatedAttributeError(RuntimeError): + """ + A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type + annotation. + + .. versionadded:: 17.3.0 + """ + + +class PythonTooOldError(RuntimeError): + """ + It was attempted to use an ``attrs`` feature that requires a newer Python + version. + + .. versionadded:: 18.2.0 + """ + + +class NotCallableError(TypeError): + """ + A ``attr.ib()`` requiring a callable has been set with a value + that is not callable. + + .. 
versionadded:: 19.2.0 + """ + + def __init__(self, msg, value): + super(TypeError, self).__init__(msg, value) + self.msg = msg + self.value = value + + def __str__(self): + return str(self.msg) diff --git a/dbt-env/lib/python3.8/site-packages/attr/exceptions.pyi b/dbt-env/lib/python3.8/site-packages/attr/exceptions.pyi new file mode 100644 index 0000000..f268011 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/exceptions.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class FrozenError(AttributeError): + msg: str = ... + +class FrozenInstanceError(FrozenError): ... +class FrozenAttributeError(FrozenError): ... +class AttrsAttributeNotFoundError(ValueError): ... +class NotAnAttrsClassError(ValueError): ... +class DefaultAlreadySetError(RuntimeError): ... +class UnannotatedAttributeError(RuntimeError): ... +class PythonTooOldError(RuntimeError): ... + +class NotCallableError(TypeError): + msg: str = ... + value: Any = ... + def __init__(self, msg: str, value: Any) -> None: ... diff --git a/dbt-env/lib/python3.8/site-packages/attr/filters.py b/dbt-env/lib/python3.8/site-packages/attr/filters.py new file mode 100644 index 0000000..a1978a8 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/filters.py @@ -0,0 +1,54 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful filters for `attr.asdict`. +""" + +from __future__ import absolute_import, division, print_function + +from ._compat import isclass +from ._make import Attribute + + +def _split_what(what): + """ + Returns a tuple of `frozenset`s of classes and attributes. + """ + return ( + frozenset(cls for cls in what if isclass(cls)), + frozenset(cls for cls in what if isinstance(cls, Attribute)), + ) + + +def include(*what): + """ + Include *what*. + + :param what: What to include. + :type what: `list` of `type` or `attrs.Attribute`\\ s + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def include_(attribute, value): + return value.__class__ in cls or attribute in attrs + + return include_ + + +def exclude(*what): + """ + Exclude *what*. + + :param what: What to exclude. + :type what: `list` of classes or `attrs.Attribute`\\ s. + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def exclude_(attribute, value): + return value.__class__ not in cls and attribute not in attrs + + return exclude_ diff --git a/dbt-env/lib/python3.8/site-packages/attr/filters.pyi b/dbt-env/lib/python3.8/site-packages/attr/filters.pyi new file mode 100644 index 0000000..9938668 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/filters.pyi @@ -0,0 +1,6 @@ +from typing import Any, Union + +from . import Attribute, _FilterType + +def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... +def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... diff --git a/dbt-env/lib/python3.8/site-packages/attr/py.typed b/dbt-env/lib/python3.8/site-packages/attr/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/dbt-env/lib/python3.8/site-packages/attr/setters.py b/dbt-env/lib/python3.8/site-packages/attr/setters.py new file mode 100644 index 0000000..b1cbb5d --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/setters.py @@ -0,0 +1,79 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly used hooks for on_setattr. +""" + +from __future__ import absolute_import, division, print_function + +from . import _config +from .exceptions import FrozenAttributeError + + +def pipe(*setters): + """ + Run all *setters* and return the return value of the last one. + + .. 
versionadded:: 20.1.0 + """ + + def wrapped_pipe(instance, attrib, new_value): + rv = new_value + + for setter in setters: + rv = setter(instance, attrib, rv) + + return rv + + return wrapped_pipe + + +def frozen(_, __, ___): + """ + Prevent an attribute to be modified. + + .. versionadded:: 20.1.0 + """ + raise FrozenAttributeError() + + +def validate(instance, attrib, new_value): + """ + Run *attrib*'s validator on *new_value* if it has one. + + .. versionadded:: 20.1.0 + """ + if _config._run_validators is False: + return new_value + + v = attrib.validator + if not v: + return new_value + + v(instance, attrib, new_value) + + return new_value + + +def convert(instance, attrib, new_value): + """ + Run *attrib*'s converter -- if it has one -- on *new_value* and return the + result. + + .. versionadded:: 20.1.0 + """ + c = attrib.converter + if c: + return c(new_value) + + return new_value + + +NO_OP = object() +""" +Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. + +Does not work in `pipe` or within lists. + +.. versionadded:: 20.1.0 +""" diff --git a/dbt-env/lib/python3.8/site-packages/attr/setters.pyi b/dbt-env/lib/python3.8/site-packages/attr/setters.pyi new file mode 100644 index 0000000..3f5603c --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/setters.pyi @@ -0,0 +1,19 @@ +from typing import Any, NewType, NoReturn, TypeVar, cast + +from . import Attribute, _OnSetAttrType + +_T = TypeVar("_T") + +def frozen( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> NoReturn: ... +def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... +def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... + +# convert is allowed to return Any, because they can be chained using pipe. +def convert( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> Any: ... + +_NoOpType = NewType("_NoOpType", object) +NO_OP: _NoOpType diff --git a/dbt-env/lib/python3.8/site-packages/attr/validators.py b/dbt-env/lib/python3.8/site-packages/attr/validators.py new file mode 100644 index 0000000..0b0c834 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/validators.py @@ -0,0 +1,561 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful validators. +""" + +from __future__ import absolute_import, division, print_function + +import operator +import re + +from contextlib import contextmanager + +from ._config import get_run_validators, set_run_validators +from ._make import _AndValidator, and_, attrib, attrs +from .exceptions import NotCallableError + + +try: + Pattern = re.Pattern +except AttributeError: # Python <3.7 lacks a Pattern type. + Pattern = type(re.compile("")) + + +__all__ = [ + "and_", + "deep_iterable", + "deep_mapping", + "disabled", + "ge", + "get_disabled", + "gt", + "in_", + "instance_of", + "is_callable", + "le", + "lt", + "matches_re", + "max_len", + "optional", + "provides", + "set_disabled", +] + + +def set_disabled(disabled): + """ + Globally disable or enable running validators. + + By default, they are run. + + :param disabled: If ``True``, disable running all validators. + :type disabled: bool + + .. warning:: + + This function is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(not disabled) + + +def get_disabled(): + """ + Return a bool indicating whether validators are currently disabled or not. + + :return: ``True`` if validators are currently disabled. + :rtype: bool + + .. 
versionadded:: 21.3.0 + """ + return not get_run_validators() + + +@contextmanager +def disabled(): + """ + Context manager that disables running validators within its context. + + .. warning:: + + This context manager is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(False) + try: + yield + finally: + set_run_validators(True) + + +@attrs(repr=False, slots=True, hash=True) +class _InstanceOfValidator(object): + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not isinstance(value, self.type): + raise TypeError( + "'{name}' must be {type!r} (got {value!r} that is a " + "{actual!r}).".format( + name=attr.name, + type=self.type, + actual=value.__class__, + value=value, + ), + attr, + self.type, + value, + ) + + def __repr__(self): + return "".format( + type=self.type + ) + + +def instance_of(type): + """ + A validator that raises a `TypeError` if the initializer is called + with a wrong type for this particular attribute (checks are performed using + `isinstance` therefore it's also valid to pass a tuple of types). + + :param type: The type to check for. + :type type: type or tuple of types + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected type, and the value it + got. + """ + return _InstanceOfValidator(type) + + +@attrs(repr=False, frozen=True, slots=True) +class _MatchesReValidator(object): + pattern = attrib() + match_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.match_func(value): + raise ValueError( + "'{name}' must match regex {pattern!r}" + " ({value!r} doesn't)".format( + name=attr.name, pattern=self.pattern.pattern, value=value + ), + attr, + self.pattern, + value, + ) + + def __repr__(self): + return "".format( + pattern=self.pattern + ) + + +def matches_re(regex, flags=0, func=None): + r""" + A validator that raises `ValueError` if the initializer is called + with a string that doesn't match *regex*. + + :param regex: a regex string or precompiled pattern to match against + :param int flags: flags that will be passed to the underlying re function + (default 0) + :param callable func: which underlying `re` function to call (options + are `re.fullmatch`, `re.search`, `re.match`, default + is ``None`` which means either `re.fullmatch` or an emulation of + it on Python 2). For performance reasons, they won't be used directly + but on a pre-`re.compile`\ ed pattern. + + .. versionadded:: 19.2.0 + .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern. 
+ """ + fullmatch = getattr(re, "fullmatch", None) + valid_funcs = (fullmatch, None, re.search, re.match) + if func not in valid_funcs: + raise ValueError( + "'func' must be one of {}.".format( + ", ".join( + sorted( + e and e.__name__ or "None" for e in set(valid_funcs) + ) + ) + ) + ) + + if isinstance(regex, Pattern): + if flags: + raise TypeError( + "'flags' can only be used with a string pattern; " + "pass flags to re.compile() instead" + ) + pattern = regex + else: + pattern = re.compile(regex, flags) + + if func is re.match: + match_func = pattern.match + elif func is re.search: + match_func = pattern.search + elif fullmatch: + match_func = pattern.fullmatch + else: # Python 2 fullmatch emulation (https://bugs.python.org/issue16203) + pattern = re.compile( + r"(?:{})\Z".format(pattern.pattern), pattern.flags + ) + match_func = pattern.match + + return _MatchesReValidator(pattern, match_func) + + +@attrs(repr=False, slots=True, hash=True) +class _ProvidesValidator(object): + interface = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.interface.providedBy(value): + raise TypeError( + "'{name}' must provide {interface!r} which {value!r} " + "doesn't.".format( + name=attr.name, interface=self.interface, value=value + ), + attr, + self.interface, + value, + ) + + def __repr__(self): + return "".format( + interface=self.interface + ) + + +def provides(interface): + """ + A validator that raises a `TypeError` if the initializer is called + with an object that does not provide the requested *interface* (checks are + performed using ``interface.providedBy(value)`` (see `zope.interface + `_). + + :param interface: The interface to check for. + :type interface: ``zope.interface.Interface`` + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected interface, and the + value it got. + """ + return _ProvidesValidator(interface) + + +@attrs(repr=False, slots=True, hash=True) +class _OptionalValidator(object): + validator = attrib() + + def __call__(self, inst, attr, value): + if value is None: + return + + self.validator(inst, attr, value) + + def __repr__(self): + return "".format( + what=repr(self.validator) + ) + + +def optional(validator): + """ + A validator that makes an attribute optional. An optional attribute is one + which can be set to ``None`` in addition to satisfying the requirements of + the sub-validator. + + :param validator: A validator (or a list of validators) that is used for + non-``None`` values. + :type validator: callable or `list` of callables. + + .. versionadded:: 15.1.0 + .. versionchanged:: 17.1.0 *validator* can be a list of validators. + """ + if isinstance(validator, list): + return _OptionalValidator(_AndValidator(validator)) + return _OptionalValidator(validator) + + +@attrs(repr=False, slots=True, hash=True) +class _InValidator(object): + options = attrib() + + def __call__(self, inst, attr, value): + try: + in_options = value in self.options + except TypeError: # e.g. `1 in "abc"` + in_options = False + + if not in_options: + raise ValueError( + "'{name}' must be in {options!r} (got {value!r})".format( + name=attr.name, options=self.options, value=value + ) + ) + + def __repr__(self): + return "".format( + options=self.options + ) + + +def in_(options): + """ + A validator that raises a `ValueError` if the initializer is called + with a value that does not belong in the options provided. 
The check is + performed using ``value in options``. + + :param options: Allowed options. + :type options: list, tuple, `enum.Enum`, ... + + :raises ValueError: With a human readable error message, the attribute (of + type `attrs.Attribute`), the expected options, and the value it + got. + + .. versionadded:: 17.1.0 + """ + return _InValidator(options) + + +@attrs(repr=False, slots=False, hash=True) +class _IsCallableValidator(object): + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not callable(value): + message = ( + "'{name}' must be callable " + "(got {value!r} that is a {actual!r})." + ) + raise NotCallableError( + msg=message.format( + name=attr.name, value=value, actual=value.__class__ + ), + value=value, + ) + + def __repr__(self): + return "" + + +def is_callable(): + """ + A validator that raises a `attr.exceptions.NotCallableError` if the + initializer is called with a value for this particular attribute + that is not callable. + + .. versionadded:: 19.1.0 + + :raises `attr.exceptions.NotCallableError`: With a human readable error + message containing the attribute (`attrs.Attribute`) name, + and the value it got. + """ + return _IsCallableValidator() + + +@attrs(repr=False, slots=True, hash=True) +class _DeepIterable(object): + member_validator = attrib(validator=is_callable()) + iterable_validator = attrib( + default=None, validator=optional(is_callable()) + ) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.iterable_validator is not None: + self.iterable_validator(inst, attr, value) + + for member in value: + self.member_validator(inst, attr, member) + + def __repr__(self): + iterable_identifier = ( + "" + if self.iterable_validator is None + else " {iterable!r}".format(iterable=self.iterable_validator) + ) + return ( + "" + ).format( + iterable_identifier=iterable_identifier, + member=self.member_validator, + ) + + +def deep_iterable(member_validator, iterable_validator=None): + """ + A validator that performs deep validation of an iterable. + + :param member_validator: Validator to apply to iterable members + :param iterable_validator: Validator to apply to iterable itself + (optional) + + .. versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepIterable(member_validator, iterable_validator) + + +@attrs(repr=False, slots=True, hash=True) +class _DeepMapping(object): + key_validator = attrib(validator=is_callable()) + value_validator = attrib(validator=is_callable()) + mapping_validator = attrib(default=None, validator=optional(is_callable())) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.mapping_validator is not None: + self.mapping_validator(inst, attr, value) + + for key in value: + self.key_validator(inst, attr, key) + self.value_validator(inst, attr, value[key]) + + def __repr__(self): + return ( + "" + ).format(key=self.key_validator, value=self.value_validator) + + +def deep_mapping(key_validator, value_validator, mapping_validator=None): + """ + A validator that performs deep validation of a dictionary. + + :param key_validator: Validator to apply to dictionary keys + :param value_validator: Validator to apply to dictionary values + :param mapping_validator: Validator to apply to top-level mapping + attribute (optional) + + .. 
versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepMapping(key_validator, value_validator, mapping_validator) + + +@attrs(repr=False, frozen=True, slots=True) +class _NumberValidator(object): + bound = attrib() + compare_op = attrib() + compare_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.compare_func(value, self.bound): + raise ValueError( + "'{name}' must be {op} {bound}: {value}".format( + name=attr.name, + op=self.compare_op, + bound=self.bound, + value=value, + ) + ) + + def __repr__(self): + return "".format( + op=self.compare_op, bound=self.bound + ) + + +def lt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number larger or equal to *val*. + + :param val: Exclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<", operator.lt) + + +def le(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number greater than *val*. + + :param val: Inclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<=", operator.le) + + +def ge(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller than *val*. + + :param val: Inclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">=", operator.ge) + + +def gt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller or equal to *val*. + + :param val: Exclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">", operator.gt) + + +@attrs(repr=False, frozen=True, slots=True) +class _MaxLengthValidator(object): + max_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) > self.max_length: + raise ValueError( + "Length of '{name}' must be <= {max}: {len}".format( + name=attr.name, max=self.max_length, len=len(value) + ) + ) + + def __repr__(self): + return "".format(max=self.max_length) + + +def max_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is longer than *length*. + + :param int length: Maximum length of the string or iterable + + .. versionadded:: 21.3.0 + """ + return _MaxLengthValidator(length) diff --git a/dbt-env/lib/python3.8/site-packages/attr/validators.pyi b/dbt-env/lib/python3.8/site-packages/attr/validators.pyi new file mode 100644 index 0000000..5e00b85 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attr/validators.pyi @@ -0,0 +1,78 @@ +from typing import ( + Any, + AnyStr, + Callable, + Container, + ContextManager, + Iterable, + List, + Mapping, + Match, + Optional, + Pattern, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +from . import _ValidatorType + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_I = TypeVar("_I", bound=Iterable) +_K = TypeVar("_K") +_V = TypeVar("_V") +_M = TypeVar("_M", bound=Mapping) + +def set_disabled(run: bool) -> None: ... +def get_disabled() -> bool: ... +def disabled() -> ContextManager[None]: ... + +# To be more precise on instance_of use some overloads. +# If there are more than 3 items in the tuple then we fall back to Any +@overload +def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ... 
+@overload +def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2]] +) -> _ValidatorType[Union[_T1, _T2]]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2], Type[_T3]] +) -> _ValidatorType[Union[_T1, _T2, _T3]]: ... +@overload +def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ... +def provides(interface: Any) -> _ValidatorType[Any]: ... +def optional( + validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]] +) -> _ValidatorType[Optional[_T]]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... +def matches_re( + regex: Union[Pattern[AnyStr], AnyStr], + flags: int = ..., + func: Optional[ + Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]] + ] = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorType[_T], + iterable_validator: Optional[_ValidatorType[_I]] = ..., +) -> _ValidatorType[_I]: ... +def deep_mapping( + key_validator: _ValidatorType[_K], + value_validator: _ValidatorType[_V], + mapping_validator: Optional[_ValidatorType[_M]] = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... +def lt(val: _T) -> _ValidatorType[_T]: ... +def le(val: _T) -> _ValidatorType[_T]: ... +def ge(val: _T) -> _ValidatorType[_T]: ... +def gt(val: _T) -> _ValidatorType[_T]: ... +def max_len(length: int) -> _ValidatorType[_T]: ... diff --git a/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/AUTHORS.rst b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/AUTHORS.rst new file mode 100644 index 0000000..f14ef6c --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/AUTHORS.rst @@ -0,0 +1,11 @@ +Credits +======= + +``attrs`` is written and maintained by `Hynek Schlawack `_. + +The development is kindly supported by `Variomedia AG `_. + +A full list of contributors can be found in `GitHub's overview `_. + +It’s the spiritual successor of `characteristic `_ and aspires to fix some of it clunkiness and unfortunate decisions. +Both were inspired by Twisted’s `FancyEqMixin `_ but both are implemented using class decorators because `subclassing is bad for you `_, m’kay? diff --git a/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/INSTALLER b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/LICENSE b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/LICENSE new file mode 100644 index 0000000..7ae3df9 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/METADATA b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/METADATA new file mode 100644 index 0000000..aa327d5 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/METADATA @@ -0,0 +1,232 @@ +Metadata-Version: 2.1 +Name: attrs +Version: 21.4.0 +Summary: Classes Without Boilerplate +Home-page: https://www.attrs.org/ +Author: Hynek Schlawack +Author-email: hs@ox.cx +Maintainer: Hynek Schlawack +Maintainer-email: hs@ox.cx +License: MIT +Project-URL: Documentation, https://www.attrs.org/ +Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html +Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues +Project-URL: Source Code, https://github.com/python-attrs/attrs +Project-URL: Funding, https://github.com/sponsors/hynek +Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi +Project-URL: Ko-fi, https://ko-fi.com/the_hynek +Keywords: class,attribute,boilerplate +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: AUTHORS.rst +Provides-Extra: dev +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'dev' +Requires-Dist: hypothesis ; extra == 'dev' +Requires-Dist: pympler ; extra == 'dev' +Requires-Dist: pytest (>=4.3.0) ; extra == 'dev' +Requires-Dist: six ; extra == 'dev' +Requires-Dist: mypy ; extra == 'dev' +Requires-Dist: pytest-mypy-plugins ; extra == 'dev' +Requires-Dist: zope.interface ; extra == 'dev' +Requires-Dist: furo ; extra == 'dev' +Requires-Dist: sphinx ; extra == 'dev' +Requires-Dist: sphinx-notfound-page ; extra == 'dev' +Requires-Dist: pre-commit ; extra == 'dev' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'dev' +Provides-Extra: docs +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: zope.interface ; extra == 'docs' +Requires-Dist: sphinx-notfound-page ; extra == 'docs' +Provides-Extra: 
tests +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests' +Requires-Dist: hypothesis ; extra == 'tests' +Requires-Dist: pympler ; extra == 'tests' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests' +Requires-Dist: six ; extra == 'tests' +Requires-Dist: mypy ; extra == 'tests' +Requires-Dist: pytest-mypy-plugins ; extra == 'tests' +Requires-Dist: zope.interface ; extra == 'tests' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests' +Provides-Extra: tests_no_zope +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests_no_zope' +Requires-Dist: hypothesis ; extra == 'tests_no_zope' +Requires-Dist: pympler ; extra == 'tests_no_zope' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests_no_zope' +Requires-Dist: six ; extra == 'tests_no_zope' +Requires-Dist: mypy ; extra == 'tests_no_zope' +Requires-Dist: pytest-mypy-plugins ; extra == 'tests_no_zope' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests_no_zope' + + +.. image:: https://www.attrs.org/en/stable/_static/attrs_logo.png + :alt: attrs logo + :align: center + + +``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder methods `_). +`Trusted by NASA `_ for Mars missions since 2020! + +Its main goal is to help you to write **concise** and **correct** software without slowing down your code. + +.. teaser-end + +For that, it gives you a class decorator and a way to declaratively define the attributes on that class: + +.. -code-begin- + +.. code-block:: pycon + + >>> from attrs import asdict, define, make_class, Factory + + >>> @define + ... class SomeClass: + ... a_number: int = 42 + ... list_of_numbers: list[int] = Factory(list) + ... + ... def hard_math(self, another_number): + ... return self.a_number + sum(self.list_of_numbers) * another_number + + + >>> sc = SomeClass(1, [1, 2, 3]) + >>> sc + SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) + + >>> sc.hard_math(3) + 19 + >>> sc == SomeClass(1, [1, 2, 3]) + True + >>> sc != SomeClass(2, [3, 2, 1]) + True + + >>> asdict(sc) + {'a_number': 1, 'list_of_numbers': [1, 2, 3]} + + >>> SomeClass() + SomeClass(a_number=42, list_of_numbers=[]) + + >>> C = make_class("C", ["a", "b"]) + >>> C("foo", "bar") + C(a='foo', b='bar') + + +After *declaring* your attributes ``attrs`` gives you: + +- a concise and explicit overview of the class's attributes, +- a nice human-readable ``__repr__``, +- a equality-checking methods, +- an initializer, +- and much more, + +*without* writing dull boilerplate code again and again and *without* runtime performance penalties. + +**Hate type annotations**!? +No problem! +Types are entirely **optional** with ``attrs``. +Simply assign ``attrs.field()`` to the attributes instead of annotating them with types. + +---- + +This example uses ``attrs``'s modern APIs that have been introduced in version 20.1.0, and the ``attrs`` package import name that has been added in version 21.3.0. +The classic APIs (``@attr.s``, ``attr.ib``, plus their serious business aliases) and the ``attr`` package import name will remain **indefinitely**. + +Please check out `On The Core API Names `_ for a more in-depth explanation. + + +Data Classes +============ + +On the tin, ``attrs`` might remind you of ``dataclasses`` (and indeed, ``dataclasses`` are a descendant of ``attrs``). +In practice it does a lot more and is more flexible. 
+For instance it allows you to define `special handling of NumPy arrays for equality checks `_, or allows more ways to `plug into the initialization process `_. + +For more details, please refer to our `comparison page `_. + + +.. -getting-help- + +Getting Help +============ + +Please use the ``python-attrs`` tag on `Stack Overflow `_ to get help. + +Answering questions of your fellow developers is also a great way to help the project! + + +.. -project-information- + +Project Information +=================== + +``attrs`` is released under the `MIT `_ license, +its documentation lives at `Read the Docs `_, +the code on `GitHub `_, +and the latest release on `PyPI `_. +It’s rigorously tested on Python 2.7, 3.5+, and PyPy. + +We collect information on **third-party extensions** in our `wiki `_. +Feel free to browse and add your own! + +If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide `_ to get you started! + + +``attrs`` for Enterprise +------------------------ + +Available as part of the Tidelift Subscription. + +The maintainers of ``attrs`` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. +`Learn more. `_ + + +Release Information +=================== + +21.4.0 (2021-12-29) +------------------- + +Changes +^^^^^^^ + +- Fixed the test suite on PyPy3.8 where ``cloudpickle`` does not work. + `#892 `_ +- Fixed ``coverage report`` for projects that use ``attrs`` and don't set a ``--source``. + `#895 `_, + `#896 `_ + +`Full changelog `_. + +Credits +======= + +``attrs`` is written and maintained by `Hynek Schlawack `_. + +The development is kindly supported by `Variomedia AG `_. + +A full list of contributors can be found in `GitHub's overview `_. + +It’s the spiritual successor of `characteristic `_ and aspires to fix some of it clunkiness and unfortunate decisions. +Both were inspired by Twisted’s `FancyEqMixin `_ but both are implemented using class decorators because `subclassing is bad for you `_, m’kay? 
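# Editorial note (not part of the upstream diff): the converter and validator
# helpers documented in the vendored attr/converters.py and attr/validators.py
# above are meant to be passed to attr.ib()/attrs.field(). A minimal,
# illustrative sketch of that wiring follows; the class and attribute names
# here are hypothetical examples, not anything defined in this repository.
import attr
from attr import converters, validators


@attr.s
class Connection(object):
    # to_bool normalizes strings such as "yes"/"0" (e.g. from env vars) to real bools.
    use_tls = attr.ib(converter=converters.to_bool, default=True)
    # default_if_none substitutes the given default when None is passed explicitly.
    retries = attr.ib(converter=converters.default_if_none(3), default=3)
    # optional() wraps a validator so that None is also accepted.
    host = attr.ib(
        default=None,
        validator=validators.optional(validators.instance_of(str)),
    )
    # in_() restricts values to a fixed set of options.
    mode = attr.ib(default="read", validator=validators.in_(["read", "write"]))
    # matches_re() validates strings against a raw or pre-compiled regex.
    version = attr.ib(default="1.0", validator=validators.matches_re(r"\d+\.\d+"))


conn = Connection(use_tls="yes", retries=None, host="db.internal", mode="write")
assert conn.use_tls is True and conn.retries == 3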
+ + diff --git a/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/RECORD b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/RECORD new file mode 100644 index 0000000..e2d00cf --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/RECORD @@ -0,0 +1,56 @@ +attr/__init__.py,sha256=_zhJ4O8Q5KR5gaIrjX73vkR5nA6NjfpMGXQChEdNljI,1667 +attr/__init__.pyi,sha256=ubRkstoRHPpQN17iA0OCh8waIwZ5NeJgbz0lwI8XUjY,15100 +attr/__pycache__/__init__.cpython-38.pyc,, +attr/__pycache__/_cmp.cpython-38.pyc,, +attr/__pycache__/_compat.cpython-38.pyc,, +attr/__pycache__/_config.cpython-38.pyc,, +attr/__pycache__/_funcs.cpython-38.pyc,, +attr/__pycache__/_make.cpython-38.pyc,, +attr/__pycache__/_next_gen.cpython-38.pyc,, +attr/__pycache__/_version_info.cpython-38.pyc,, +attr/__pycache__/converters.cpython-38.pyc,, +attr/__pycache__/exceptions.cpython-38.pyc,, +attr/__pycache__/filters.cpython-38.pyc,, +attr/__pycache__/setters.cpython-38.pyc,, +attr/__pycache__/validators.cpython-38.pyc,, +attr/_cmp.py,sha256=JP0N7OIyTqIR3prUDfMZOR4DV4tlV_xXf39-bQg7xOo,4165 +attr/_cmp.pyi,sha256=oyjJVytrwwkUJOoe332IiYzp6pCVZEKKcKveH-ev604,317 +attr/_compat.py,sha256=i8u27AAK_4SzQnmTf3aliGV27UdYbJxdZ-O0tOHbLU8,8396 +attr/_config.py,sha256=aj1Lh8t2CuVa5nSxgCrLQtg_ZSdO8ZKeNJQd6RvpIp8,892 +attr/_funcs.py,sha256=sm_D12y2IyRW_bCnR7M-O7U5qHaieXr0BzINwJ7_K38,14753 +attr/_make.py,sha256=D05j0_ckcVIRFn2xHch5SPUCwh3t7WpeFj-3Ku9SocQ,102736 +attr/_next_gen.py,sha256=s5jCsVEQ4IhOjAykP4N0ETaWpg0RsgQttMvEZErUrhQ,5752 +attr/_version_info.py,sha256=sxD9yNai0jGbur_-RGEQHbgV2YX5_5G9PhrhBA5pA54,2194 +attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 +attr/converters.py,sha256=uiiWTz8GLJe8I1Ty7UICK1DegVUnqHTXbOSnar7g7Nk,4078 +attr/converters.pyi,sha256=MQo7iEzPNVoFpKqD30sVwgVpdNoIeSCF2nsXvoxLZ-Y,416 +attr/exceptions.py,sha256=BMg7AljkJnvG-irMwL2TBHYlaLBXhSKnzoEWo4e42Zw,1981 +attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 +attr/filters.py,sha256=JGZgvPGkdOfttkoL6XhXS6ZCoaVV5nZ8GCYeZNUN_mE,1124 +attr/filters.pyi,sha256=_Sm80jGySETX_Clzdkon5NHVjQWRl3Y3liQKZX1czXc,215 +attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attr/setters.py,sha256=rH_UtQuHgQEC7hfZyMO_SJW0R1Gus7-a83U8igZfqs8,1466 +attr/setters.pyi,sha256=7dM10rqpQVDW0y-iJUnq8rabdO5Wx2Sbo5LwNa0IXl0,573 +attr/validators.py,sha256=jVE9roaSOmTf0dJNSLHNaQNilkrlzc3pNNBKmv0g7pk,15966 +attr/validators.pyi,sha256=adn6rNbIXmRXlg_FKrTmWj0dOX0vKTsGG82Jd3YcJbQ,2268 +attrs-21.4.0.dist-info/AUTHORS.rst,sha256=wsqCNbGz_mklcJrt54APIZHZpoTIJLkXqEhhn4Nd8hc,752 +attrs-21.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +attrs-21.4.0.dist-info/LICENSE,sha256=v2WaKLSSQGAvVrvfSQy-LsUJsVuY-Z17GaUsdA4yeGM,1082 +attrs-21.4.0.dist-info/METADATA,sha256=WwgR4MfxE55PpGGv21UOEOEtXZGCqwekfXYg-JgA5HY,9810 +attrs-21.4.0.dist-info/RECORD,, +attrs-21.4.0.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110 +attrs-21.4.0.dist-info/top_level.txt,sha256=AGbmKnOtYpdkLRsDRQVSBIwfL32pAQ6BSo1mt-BxI7M,11 +attrs/__init__.py,sha256=CeyxLGVViAEKKsLOLaif8vF3vs1a28vsrRVLv7eMEgM,1109 +attrs/__init__.pyi,sha256=57aCxUJukK9lZlrUgk9RuWiBiPY5DzDKJAJkhbrStYw,1982 +attrs/__pycache__/__init__.cpython-38.pyc,, +attrs/__pycache__/converters.cpython-38.pyc,, +attrs/__pycache__/exceptions.cpython-38.pyc,, +attrs/__pycache__/filters.cpython-38.pyc,, +attrs/__pycache__/setters.cpython-38.pyc,, +attrs/__pycache__/validators.cpython-38.pyc,, 
+attrs/converters.py,sha256=fCBEdlYWcmI3sCnpUk2pz22GYtXzqTkp6NeOpdI64PY,70 +attrs/exceptions.py,sha256=SlDli6AY77f6ny-H7oy98OkQjsrw-D_supEuErIVYkE,70 +attrs/filters.py,sha256=dc_dNey29kH6KLU1mT2Dakq7tZ3kBfzEGwzOmDzw1F8,67 +attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs/setters.py,sha256=oKw51C72Hh45wTwYvDHJP9kbicxiMhMR4Y5GvdpKdHQ,67 +attrs/validators.py,sha256=4ag1SyVD2Hm3PYKiNG_NOtR_e7f81Hr6GiNl4YvXo4Q,70 diff --git a/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/WHEEL b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/WHEEL new file mode 100644 index 0000000..0b18a28 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/top_level.txt b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/top_level.txt new file mode 100644 index 0000000..eca8ba9 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs-21.4.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +attr +attrs diff --git a/dbt-env/lib/python3.8/site-packages/attrs/__init__.py b/dbt-env/lib/python3.8/site-packages/attrs/__init__.py new file mode 100644 index 0000000..a704b8b --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs/__init__.py @@ -0,0 +1,70 @@ +# SPDX-License-Identifier: MIT + +from attr import ( + NOTHING, + Attribute, + Factory, + __author__, + __copyright__, + __description__, + __doc__, + __email__, + __license__, + __title__, + __url__, + __version__, + __version_info__, + assoc, + cmp_using, + define, + evolve, + field, + fields, + fields_dict, + frozen, + has, + make_class, + mutable, + resolve_types, + validate, +) +from attr._next_gen import asdict, astuple + +from . import converters, exceptions, filters, setters, validators + + +__all__ = [ + "__author__", + "__copyright__", + "__description__", + "__doc__", + "__email__", + "__license__", + "__title__", + "__url__", + "__version__", + "__version_info__", + "asdict", + "assoc", + "astuple", + "Attribute", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "Factory", + "field", + "fields_dict", + "fields", + "filters", + "frozen", + "has", + "make_class", + "mutable", + "NOTHING", + "resolve_types", + "setters", + "validate", + "validators", +] diff --git a/dbt-env/lib/python3.8/site-packages/attrs/__init__.pyi b/dbt-env/lib/python3.8/site-packages/attrs/__init__.pyi new file mode 100644 index 0000000..7426fa5 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs/__init__.pyi @@ -0,0 +1,63 @@ +from typing import ( + Any, + Callable, + Dict, + Mapping, + Optional, + Sequence, + Tuple, + Type, +) + +# Because we need to type our own stuff, we have to make everything from +# attr explicitly public too. 
+from attr import __author__ as __author__ +from attr import __copyright__ as __copyright__ +from attr import __description__ as __description__ +from attr import __email__ as __email__ +from attr import __license__ as __license__ +from attr import __title__ as __title__ +from attr import __url__ as __url__ +from attr import __version__ as __version__ +from attr import __version_info__ as __version_info__ +from attr import _FilterType +from attr import assoc as assoc +from attr import Attribute as Attribute +from attr import define as define +from attr import evolve as evolve +from attr import Factory as Factory +from attr import exceptions as exceptions +from attr import field as field +from attr import fields as fields +from attr import fields_dict as fields_dict +from attr import frozen as frozen +from attr import has as has +from attr import make_class as make_class +from attr import mutable as mutable +from attr import NOTHING as NOTHING +from attr import resolve_types as resolve_types +from attr import setters as setters +from attr import validate as validate +from attr import validators as validators + +# TODO: see definition of attr.asdict/astuple +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: bool = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... diff --git a/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..d9accff Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/converters.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/converters.cpython-38.pyc new file mode 100644 index 0000000..1daae36 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/converters.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/exceptions.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000..a9800a4 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/exceptions.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/filters.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/filters.cpython-38.pyc new file mode 100644 index 0000000..5eadf5e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/filters.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/setters.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/setters.cpython-38.pyc new file mode 100644 index 0000000..7d1b09d Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/setters.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/validators.cpython-38.pyc 
b/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/validators.cpython-38.pyc new file mode 100644 index 0000000..e8d8a7a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/attrs/__pycache__/validators.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/attrs/converters.py b/dbt-env/lib/python3.8/site-packages/attrs/converters.py new file mode 100644 index 0000000..edfa8d3 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs/converters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.converters import * # noqa diff --git a/dbt-env/lib/python3.8/site-packages/attrs/exceptions.py b/dbt-env/lib/python3.8/site-packages/attrs/exceptions.py new file mode 100644 index 0000000..bd9efed --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs/exceptions.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.exceptions import * # noqa diff --git a/dbt-env/lib/python3.8/site-packages/attrs/filters.py b/dbt-env/lib/python3.8/site-packages/attrs/filters.py new file mode 100644 index 0000000..5295900 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs/filters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.filters import * # noqa diff --git a/dbt-env/lib/python3.8/site-packages/attrs/py.typed b/dbt-env/lib/python3.8/site-packages/attrs/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/dbt-env/lib/python3.8/site-packages/attrs/setters.py b/dbt-env/lib/python3.8/site-packages/attrs/setters.py new file mode 100644 index 0000000..9b50770 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs/setters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.setters import * # noqa diff --git a/dbt-env/lib/python3.8/site-packages/attrs/validators.py b/dbt-env/lib/python3.8/site-packages/attrs/validators.py new file mode 100644 index 0000000..ab2c9b3 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/attrs/validators.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.validators import * # noqa diff --git a/dbt-env/lib/python3.8/site-packages/babel/__init__.py b/dbt-env/lib/python3.8/site-packages/babel/__init__.py new file mode 100644 index 0000000..3e20e4b --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +""" + babel + ~~~~~ + + Integrated collection of utilities that assist in internationalizing and + localizing applications. + + This package is basically composed of two major parts: + + * tools to build and work with ``gettext`` message catalogs + * a Python interface to the CLDR (Common Locale Data Repository), providing + access to various locale display names, localized number and date + formatting, etc. + + :copyright: (c) 2013-2021 by the Babel Team. + :license: BSD, see LICENSE for more details. 
+""" + +from babel.core import UnknownLocaleError, Locale, default_locale, \ + negotiate_locale, parse_locale, get_locale_identifier + + +__version__ = '2.9.1' diff --git a/dbt-env/lib/python3.8/site-packages/babel/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..b42bac1 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/__pycache__/_compat.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/_compat.cpython-38.pyc new file mode 100644 index 0000000..9f923d4 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/_compat.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/__pycache__/core.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/core.cpython-38.pyc new file mode 100644 index 0000000..bcb8149 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/core.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/__pycache__/dates.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/dates.cpython-38.pyc new file mode 100644 index 0000000..e7c404d Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/dates.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/__pycache__/languages.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/languages.cpython-38.pyc new file mode 100644 index 0000000..358e773 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/languages.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/__pycache__/lists.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/lists.cpython-38.pyc new file mode 100644 index 0000000..2229006 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/lists.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/__pycache__/localedata.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/localedata.cpython-38.pyc new file mode 100644 index 0000000..b71409a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/localedata.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/__pycache__/numbers.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/numbers.cpython-38.pyc new file mode 100644 index 0000000..7c91a89 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/numbers.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/__pycache__/plural.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/plural.cpython-38.pyc new file mode 100644 index 0000000..783c722 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/plural.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/__pycache__/support.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/support.cpython-38.pyc new file mode 100644 index 0000000..ced0e05 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/support.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/__pycache__/units.cpython-38.pyc 
b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/units.cpython-38.pyc new file mode 100644 index 0000000..39d037e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/units.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/__pycache__/util.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/util.cpython-38.pyc new file mode 100644 index 0000000..dd80daf Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/__pycache__/util.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/_compat.py b/dbt-env/lib/python3.8/site-packages/babel/_compat.py new file mode 100644 index 0000000..11b4d7a --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/_compat.py @@ -0,0 +1,79 @@ +import sys +import array + +PY2 = sys.version_info[0] == 2 + +_identity = lambda x: x + + +if not PY2: + text_type = str + string_types = (str,) + integer_types = (int, ) + + text_to_native = lambda s, enc: s + unichr = chr + + iterkeys = lambda d: iter(d.keys()) + itervalues = lambda d: iter(d.values()) + iteritems = lambda d: iter(d.items()) + + from io import StringIO, BytesIO + import pickle + + izip = zip + imap = map + range_type = range + + cmp = lambda a, b: (a > b) - (a < b) + + array_tobytes = array.array.tobytes + from collections import abc + +else: + text_type = unicode + string_types = (str, unicode) + integer_types = (int, long) + + text_to_native = lambda s, enc: s.encode(enc) + unichr = unichr + + iterkeys = lambda d: d.iterkeys() + itervalues = lambda d: d.itervalues() + iteritems = lambda d: d.iteritems() + + from cStringIO import StringIO as BytesIO + from StringIO import StringIO + import cPickle as pickle + + from itertools import imap + from itertools import izip + range_type = xrange + + cmp = cmp + + array_tobytes = array.array.tostring + import collections as abc + +number_types = integer_types + (float,) + + +def force_text(s, encoding='utf-8', errors='strict'): + if isinstance(s, text_type): + return s + if isinstance(s, bytes): + return s.decode(encoding, errors) + return text_type(s) + + +# +# Since Python 3.3, a fast decimal implementation is already included in the +# standard library. Otherwise use cdecimal when available +# +if sys.version_info[:2] >= (3, 3): + import decimal +else: + try: + import cdecimal as decimal + except ImportError: + import decimal diff --git a/dbt-env/lib/python3.8/site-packages/babel/core.py b/dbt-env/lib/python3.8/site-packages/babel/core.py new file mode 100644 index 0000000..a323a72 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/core.py @@ -0,0 +1,1133 @@ +# -*- coding: utf-8 -*- +""" + babel.core + ~~~~~~~~~~ + + Core locale representation and locale data access. + + :copyright: (c) 2013-2021 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +import os + +from babel import localedata +from babel._compat import pickle, string_types +from babel.plural import PluralRule + +__all__ = ['UnknownLocaleError', 'Locale', 'default_locale', 'negotiate_locale', + 'parse_locale'] + + +_global_data = None +_default_plural_rule = PluralRule({}) + + +def _raise_no_data_error(): + raise RuntimeError('The babel data files are not available. ' + 'This usually happens because you are using ' + 'a source checkout from Babel and you did ' + 'not build the data files. 
Just make sure ' + 'to run "python setup.py import_cldr" before ' + 'installing the library.') + + +def get_global(key): + """Return the dictionary for the given key in the global data. + + The global data is stored in the ``babel/global.dat`` file and contains + information independent of individual locales. + + >>> get_global('zone_aliases')['UTC'] + u'Etc/UTC' + >>> get_global('zone_territories')['Europe/Berlin'] + u'DE' + + The keys available are: + + - ``all_currencies`` + - ``currency_fractions`` + - ``language_aliases`` + - ``likely_subtags`` + - ``parent_exceptions`` + - ``script_aliases`` + - ``territory_aliases`` + - ``territory_currencies`` + - ``territory_languages`` + - ``territory_zones`` + - ``variant_aliases`` + - ``windows_zone_mapping`` + - ``zone_aliases`` + - ``zone_territories`` + + .. note:: The internal structure of the data may change between versions. + + .. versionadded:: 0.9 + + :param key: the data key + """ + global _global_data + if _global_data is None: + dirname = os.path.join(os.path.dirname(__file__)) + filename = os.path.join(dirname, 'global.dat') + if not os.path.isfile(filename): + _raise_no_data_error() + with open(filename, 'rb') as fileobj: + _global_data = pickle.load(fileobj) + return _global_data.get(key, {}) + + +LOCALE_ALIASES = { + 'ar': 'ar_SY', 'bg': 'bg_BG', 'bs': 'bs_BA', 'ca': 'ca_ES', 'cs': 'cs_CZ', + 'da': 'da_DK', 'de': 'de_DE', 'el': 'el_GR', 'en': 'en_US', 'es': 'es_ES', + 'et': 'et_EE', 'fa': 'fa_IR', 'fi': 'fi_FI', 'fr': 'fr_FR', 'gl': 'gl_ES', + 'he': 'he_IL', 'hu': 'hu_HU', 'id': 'id_ID', 'is': 'is_IS', 'it': 'it_IT', + 'ja': 'ja_JP', 'km': 'km_KH', 'ko': 'ko_KR', 'lt': 'lt_LT', 'lv': 'lv_LV', + 'mk': 'mk_MK', 'nl': 'nl_NL', 'nn': 'nn_NO', 'no': 'nb_NO', 'pl': 'pl_PL', + 'pt': 'pt_PT', 'ro': 'ro_RO', 'ru': 'ru_RU', 'sk': 'sk_SK', 'sl': 'sl_SI', + 'sv': 'sv_SE', 'th': 'th_TH', 'tr': 'tr_TR', 'uk': 'uk_UA' +} + + +class UnknownLocaleError(Exception): + """Exception thrown when a locale is requested for which no locale data + is available. + """ + + def __init__(self, identifier): + """Create the exception. + + :param identifier: the identifier string of the unsupported locale + """ + Exception.__init__(self, 'unknown locale %r' % identifier) + + #: The identifier of the locale that could not be found. + self.identifier = identifier + + +class Locale(object): + """Representation of a specific locale. + + >>> locale = Locale('en', 'US') + >>> repr(locale) + "Locale('en', territory='US')" + >>> locale.display_name + u'English (United States)' + + A `Locale` object can also be instantiated from a raw locale string: + + >>> locale = Locale.parse('en-US', sep='-') + >>> repr(locale) + "Locale('en', territory='US')" + + `Locale` objects provide access to a collection of locale data, such as + territory and language names, number and date format patterns, and more: + + >>> locale.number_symbols['decimal'] + u'.' + + If a locale is requested for which no locale data is available, an + `UnknownLocaleError` is raised: + + >>> Locale.parse('en_XX') + Traceback (most recent call last): + ... + UnknownLocaleError: unknown locale 'en_XX' + + For more information see :rfc:`3066`. + """ + + def __init__(self, language, territory=None, script=None, variant=None): + """Initialize the locale object from the given identifier components. 
+ + >>> locale = Locale('en', 'US') + >>> locale.language + 'en' + >>> locale.territory + 'US' + + :param language: the language code + :param territory: the territory (country or region) code + :param script: the script code + :param variant: the variant code + :raise `UnknownLocaleError`: if no locale data is available for the + requested locale + """ + #: the language code + self.language = language + #: the territory (country or region) code + self.territory = territory + #: the script code + self.script = script + #: the variant code + self.variant = variant + self.__data = None + + identifier = str(self) + if not localedata.exists(identifier): + raise UnknownLocaleError(identifier) + + @classmethod + def default(cls, category=None, aliases=LOCALE_ALIASES): + """Return the system default locale for the specified category. + + >>> for name in ['LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LC_MESSAGES']: + ... os.environ[name] = '' + >>> os.environ['LANG'] = 'fr_FR.UTF-8' + >>> Locale.default('LC_MESSAGES') + Locale('fr', territory='FR') + + The following fallbacks to the variable are always considered: + + - ``LANGUAGE`` + - ``LC_ALL`` + - ``LC_CTYPE`` + - ``LANG`` + + :param category: one of the ``LC_XXX`` environment variable names + :param aliases: a dictionary of aliases for locale identifiers + """ + # XXX: use likely subtag expansion here instead of the + # aliases dictionary. + locale_string = default_locale(category, aliases=aliases) + return cls.parse(locale_string) + + @classmethod + def negotiate(cls, preferred, available, sep='_', aliases=LOCALE_ALIASES): + """Find the best match between available and requested locale strings. + + >>> Locale.negotiate(['de_DE', 'en_US'], ['de_DE', 'de_AT']) + Locale('de', territory='DE') + >>> Locale.negotiate(['de_DE', 'en_US'], ['en', 'de']) + Locale('de') + >>> Locale.negotiate(['de_DE', 'de'], ['en_US']) + + You can specify the character used in the locale identifiers to separate + the differnet components. This separator is applied to both lists. Also, + case is ignored in the comparison: + + >>> Locale.negotiate(['de-DE', 'de'], ['en-us', 'de-de'], sep='-') + Locale('de', territory='DE') + + :param preferred: the list of locale identifers preferred by the user + :param available: the list of locale identifiers available + :param aliases: a dictionary of aliases for locale identifiers + """ + identifier = negotiate_locale(preferred, available, sep=sep, + aliases=aliases) + if identifier: + return Locale.parse(identifier, sep=sep) + + @classmethod + def parse(cls, identifier, sep='_', resolve_likely_subtags=True): + """Create a `Locale` instance for the given locale identifier. + + >>> l = Locale.parse('de-DE', sep='-') + >>> l.display_name + u'Deutsch (Deutschland)' + + If the `identifier` parameter is not a string, but actually a `Locale` + object, that object is returned: + + >>> Locale.parse(l) + Locale('de', territory='DE') + + This also can perform resolving of likely subtags which it does + by default. This is for instance useful to figure out the most + likely locale for a territory you can use ``'und'`` as the + language tag: + + >>> Locale.parse('und_AT') + Locale('de', territory='AT') + + :param identifier: the locale identifier string + :param sep: optional component separator + :param resolve_likely_subtags: if this is specified then a locale will + have its likely subtag resolved if the + locale otherwise does not exist. 
For + instance ``zh_TW`` by itself is not a + locale that exists but Babel can + automatically expand it to the full + form of ``zh_hant_TW``. Note that this + expansion is only taking place if no + locale exists otherwise. For instance + there is a locale ``en`` that can exist + by itself. + :raise `ValueError`: if the string does not appear to be a valid locale + identifier + :raise `UnknownLocaleError`: if no locale data is available for the + requested locale + """ + if identifier is None: + return None + elif isinstance(identifier, Locale): + return identifier + elif not isinstance(identifier, string_types): + raise TypeError('Unexpected value for identifier: %r' % (identifier,)) + + parts = parse_locale(identifier, sep=sep) + input_id = get_locale_identifier(parts) + + def _try_load(parts): + try: + return cls(*parts) + except UnknownLocaleError: + return None + + def _try_load_reducing(parts): + # Success on first hit, return it. + locale = _try_load(parts) + if locale is not None: + return locale + + # Now try without script and variant + locale = _try_load(parts[:2]) + if locale is not None: + return locale + + locale = _try_load(parts) + if locale is not None: + return locale + if not resolve_likely_subtags: + raise UnknownLocaleError(input_id) + + # From here onwards is some very bad likely subtag resolving. This + # whole logic is not entirely correct but good enough (tm) for the + # time being. This has been added so that zh_TW does not cause + # errors for people when they upgrade. Later we should properly + # implement ICU like fuzzy locale objects and provide a way to + # maximize and minimize locale tags. + + language, territory, script, variant = parts + language = get_global('language_aliases').get(language, language) + territory = get_global('territory_aliases').get(territory, (territory,))[0] + script = get_global('script_aliases').get(script, script) + variant = get_global('variant_aliases').get(variant, variant) + + if territory == 'ZZ': + territory = None + if script == 'Zzzz': + script = None + + parts = language, territory, script, variant + + # First match: try the whole identifier + new_id = get_locale_identifier(parts) + likely_subtag = get_global('likely_subtags').get(new_id) + if likely_subtag is not None: + locale = _try_load_reducing(parse_locale(likely_subtag)) + if locale is not None: + return locale + + # If we did not find anything so far, try again with a + # simplified identifier that is just the language + likely_subtag = get_global('likely_subtags').get(language) + if likely_subtag is not None: + language2, _, script2, variant2 = parse_locale(likely_subtag) + locale = _try_load_reducing((language2, territory, script2, variant2)) + if locale is not None: + return locale + + raise UnknownLocaleError(input_id) + + def __eq__(self, other): + for key in ('language', 'territory', 'script', 'variant'): + if not hasattr(other, key): + return False + return (self.language == other.language) and \ + (self.territory == other.territory) and \ + (self.script == other.script) and \ + (self.variant == other.variant) + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash((self.language, self.territory, self.script, self.variant)) + + def __repr__(self): + parameters = [''] + for key in ('territory', 'script', 'variant'): + value = getattr(self, key) + if value is not None: + parameters.append('%s=%r' % (key, value)) + parameter_string = '%r' % self.language + ', '.join(parameters) + return 'Locale(%s)' % parameter_string 
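# --- Illustrative usage sketch; not part of the vendored babel/core.py hunk above.
# A minimal example of the Locale API, based only on the behaviour documented in
# the docstrings/doctests shown in this hunk.
from babel.core import Locale, UnknownLocaleError

# Parse a raw identifier; a custom separator is supported via `sep`.
locale = Locale.parse('de-DE', sep='-')
print(repr(locale))            # Locale('de', territory='DE')
print(locale.display_name)     # Deutsch (Deutschland)

# Likely-subtag resolution: 'und' (undetermined language) plus a territory is
# expanded to the most likely full locale.
print(Locale.parse('und_AT'))  # de_AT

# Identifiers with no locale data raise UnknownLocaleError.
try:
    Locale.parse('en_XX')
except UnknownLocaleError as exc:
    print(exc)                 # unknown locale 'en_XX'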
+ + def __str__(self): + return get_locale_identifier((self.language, self.territory, + self.script, self.variant)) + + @property + def _data(self): + if self.__data is None: + self.__data = localedata.LocaleDataDict(localedata.load(str(self))) + return self.__data + + def get_display_name(self, locale=None): + """Return the display name of the locale using the given locale. + + The display name will include the language, territory, script, and + variant, if those are specified. + + >>> Locale('zh', 'CN', script='Hans').get_display_name('en') + u'Chinese (Simplified, China)' + + :param locale: the locale to use + """ + if locale is None: + locale = self + locale = Locale.parse(locale) + retval = locale.languages.get(self.language) + if retval and (self.territory or self.script or self.variant): + details = [] + if self.script: + details.append(locale.scripts.get(self.script)) + if self.territory: + details.append(locale.territories.get(self.territory)) + if self.variant: + details.append(locale.variants.get(self.variant)) + details = filter(None, details) + if details: + retval += ' (%s)' % u', '.join(details) + return retval + + display_name = property(get_display_name, doc="""\ + The localized display name of the locale. + + >>> Locale('en').display_name + u'English' + >>> Locale('en', 'US').display_name + u'English (United States)' + >>> Locale('sv').display_name + u'svenska' + + :type: `unicode` + """) + + def get_language_name(self, locale=None): + """Return the language of this locale in the given locale. + + >>> Locale('zh', 'CN', script='Hans').get_language_name('de') + u'Chinesisch' + + .. versionadded:: 1.0 + + :param locale: the locale to use + """ + if locale is None: + locale = self + locale = Locale.parse(locale) + return locale.languages.get(self.language) + + language_name = property(get_language_name, doc="""\ + The localized language name of the locale. + + >>> Locale('en', 'US').language_name + u'English' + """) + + def get_territory_name(self, locale=None): + """Return the territory name in the given locale.""" + if locale is None: + locale = self + locale = Locale.parse(locale) + return locale.territories.get(self.territory) + + territory_name = property(get_territory_name, doc="""\ + The localized territory name of the locale if available. + + >>> Locale('de', 'DE').territory_name + u'Deutschland' + """) + + def get_script_name(self, locale=None): + """Return the script name in the given locale.""" + if locale is None: + locale = self + locale = Locale.parse(locale) + return locale.scripts.get(self.script) + + script_name = property(get_script_name, doc="""\ + The localized script name of the locale if available. + + >>> Locale('sr', 'ME', script='Latn').script_name + u'latinica' + """) + + @property + def english_name(self): + """The english display name of the locale. + + >>> Locale('de').english_name + u'German' + >>> Locale('de', 'DE').english_name + u'German (Germany)' + + :type: `unicode`""" + return self.get_display_name(Locale('en')) + + # { General Locale Display Names + + @property + def languages(self): + """Mapping of language codes to translated language names. + + >>> Locale('de', 'DE').languages['ja'] + u'Japanisch' + + See `ISO 639 `_ for + more information. + """ + return self._data['languages'] + + @property + def scripts(self): + """Mapping of script codes to translated script names. + + >>> Locale('en', 'US').scripts['Hira'] + u'Hiragana' + + See `ISO 15924 `_ + for more information. 
+ """ + return self._data['scripts'] + + @property + def territories(self): + """Mapping of script codes to translated script names. + + >>> Locale('es', 'CO').territories['DE'] + u'Alemania' + + See `ISO 3166 `_ + for more information. + """ + return self._data['territories'] + + @property + def variants(self): + """Mapping of script codes to translated script names. + + >>> Locale('de', 'DE').variants['1901'] + u'Alte deutsche Rechtschreibung' + """ + return self._data['variants'] + + # { Number Formatting + + @property + def currencies(self): + """Mapping of currency codes to translated currency names. This + only returns the generic form of the currency name, not the count + specific one. If an actual number is requested use the + :func:`babel.numbers.get_currency_name` function. + + >>> Locale('en').currencies['COP'] + u'Colombian Peso' + >>> Locale('de', 'DE').currencies['COP'] + u'Kolumbianischer Peso' + """ + return self._data['currency_names'] + + @property + def currency_symbols(self): + """Mapping of currency codes to symbols. + + >>> Locale('en', 'US').currency_symbols['USD'] + u'$' + >>> Locale('es', 'CO').currency_symbols['USD'] + u'US$' + """ + return self._data['currency_symbols'] + + @property + def number_symbols(self): + """Symbols used in number formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('fr', 'FR').number_symbols['decimal'] + u',' + """ + return self._data['number_symbols'] + + @property + def decimal_formats(self): + """Locale patterns for decimal number formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').decimal_formats[None] + + """ + return self._data['decimal_formats'] + + @property + def currency_formats(self): + """Locale patterns for currency number formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').currency_formats['standard'] + + >>> Locale('en', 'US').currency_formats['accounting'] + + """ + return self._data['currency_formats'] + + @property + def percent_formats(self): + """Locale patterns for percent number formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').percent_formats[None] + + """ + return self._data['percent_formats'] + + @property + def scientific_formats(self): + """Locale patterns for scientific number formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').scientific_formats[None] + + """ + return self._data['scientific_formats'] + + # { Calendar Information and Date Formatting + + @property + def periods(self): + """Locale display names for day periods (AM/PM). + + >>> Locale('en', 'US').periods['am'] + u'AM' + """ + try: + return self._data['day_periods']['stand-alone']['wide'] + except KeyError: + return {} + + @property + def day_periods(self): + """Locale display names for various day periods (not necessarily only AM/PM). + + These are not meant to be used without the relevant `day_period_rules`. + """ + return self._data['day_periods'] + + @property + def day_period_rules(self): + """Day period rules for the locale. Used by `get_period_id`. + """ + return self._data.get('day_period_rules', {}) + + @property + def days(self): + """Locale display names for weekdays. 
+ + >>> Locale('de', 'DE').days['format']['wide'][3] + u'Donnerstag' + """ + return self._data['days'] + + @property + def months(self): + """Locale display names for months. + + >>> Locale('de', 'DE').months['format']['wide'][10] + u'Oktober' + """ + return self._data['months'] + + @property + def quarters(self): + """Locale display names for quarters. + + >>> Locale('de', 'DE').quarters['format']['wide'][1] + u'1. Quartal' + """ + return self._data['quarters'] + + @property + def eras(self): + """Locale display names for eras. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').eras['wide'][1] + u'Anno Domini' + >>> Locale('en', 'US').eras['abbreviated'][0] + u'BC' + """ + return self._data['eras'] + + @property + def time_zones(self): + """Locale display names for time zones. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').time_zones['Europe/London']['long']['daylight'] + u'British Summer Time' + >>> Locale('en', 'US').time_zones['America/St_Johns']['city'] + u'St. John\u2019s' + """ + return self._data['time_zones'] + + @property + def meta_zones(self): + """Locale display names for meta time zones. + + Meta time zones are basically groups of different Olson time zones that + have the same GMT offset and daylight savings time. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').meta_zones['Europe_Central']['long']['daylight'] + u'Central European Summer Time' + + .. versionadded:: 0.9 + """ + return self._data['meta_zones'] + + @property + def zone_formats(self): + """Patterns related to the formatting of time zones. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').zone_formats['fallback'] + u'%(1)s (%(0)s)' + >>> Locale('pt', 'BR').zone_formats['region'] + u'Hor\\xe1rio %s' + + .. versionadded:: 0.9 + """ + return self._data['zone_formats'] + + @property + def first_week_day(self): + """The first day of a week, with 0 being Monday. + + >>> Locale('de', 'DE').first_week_day + 0 + >>> Locale('en', 'US').first_week_day + 6 + """ + return self._data['week_data']['first_day'] + + @property + def weekend_start(self): + """The day the weekend starts, with 0 being Monday. + + >>> Locale('de', 'DE').weekend_start + 5 + """ + return self._data['week_data']['weekend_start'] + + @property + def weekend_end(self): + """The day the weekend ends, with 0 being Monday. + + >>> Locale('de', 'DE').weekend_end + 6 + """ + return self._data['week_data']['weekend_end'] + + @property + def min_week_days(self): + """The minimum number of days in a week so that the week is counted as + the first week of a year or month. + + >>> Locale('de', 'DE').min_week_days + 4 + """ + return self._data['week_data']['min_days'] + + @property + def date_formats(self): + """Locale patterns for date formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en', 'US').date_formats['short'] + + >>> Locale('fr', 'FR').date_formats['long'] + + """ + return self._data['date_formats'] + + @property + def time_formats(self): + """Locale patterns for time formatting. + + .. note:: The format of the value returned may change between + Babel versions. 
+ + >>> Locale('en', 'US').time_formats['short'] + + >>> Locale('fr', 'FR').time_formats['long'] + + """ + return self._data['time_formats'] + + @property + def datetime_formats(self): + """Locale patterns for datetime formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en').datetime_formats['full'] + u"{1} 'at' {0}" + >>> Locale('th').datetime_formats['medium'] + u'{1} {0}' + """ + return self._data['datetime_formats'] + + @property + def datetime_skeletons(self): + """Locale patterns for formatting parts of a datetime. + + >>> Locale('en').datetime_skeletons['MEd'] + + >>> Locale('fr').datetime_skeletons['MEd'] + + >>> Locale('fr').datetime_skeletons['H'] + + """ + return self._data['datetime_skeletons'] + + @property + def interval_formats(self): + """Locale patterns for interval formatting. + + .. note:: The format of the value returned may change between + Babel versions. + + How to format date intervals in Finnish when the day is the + smallest changing component: + + >>> Locale('fi_FI').interval_formats['MEd']['d'] + [u'E d. \u2013 ', u'E d.M.'] + + .. seealso:: + + The primary API to use this data is :py:func:`babel.dates.format_interval`. + + + :rtype: dict[str, dict[str, list[str]]] + """ + return self._data['interval_formats'] + + @property + def plural_form(self): + """Plural rules for the locale. + + >>> Locale('en').plural_form(1) + 'one' + >>> Locale('en').plural_form(0) + 'other' + >>> Locale('fr').plural_form(0) + 'one' + >>> Locale('ru').plural_form(100) + 'many' + """ + return self._data.get('plural_form', _default_plural_rule) + + @property + def list_patterns(self): + """Patterns for generating lists + + .. note:: The format of the value returned may change between + Babel versions. + + >>> Locale('en').list_patterns['standard']['start'] + u'{0}, {1}' + >>> Locale('en').list_patterns['standard']['end'] + u'{0}, and {1}' + >>> Locale('en_GB').list_patterns['standard']['end'] + u'{0} and {1}' + """ + return self._data['list_patterns'] + + @property + def ordinal_form(self): + """Plural rules for the locale. + + >>> Locale('en').ordinal_form(1) + 'one' + >>> Locale('en').ordinal_form(2) + 'two' + >>> Locale('en').ordinal_form(3) + 'few' + >>> Locale('fr').ordinal_form(2) + 'other' + >>> Locale('ru').ordinal_form(100) + 'other' + """ + return self._data.get('ordinal_form', _default_plural_rule) + + @property + def measurement_systems(self): + """Localized names for various measurement systems. + + >>> Locale('fr', 'FR').measurement_systems['US'] + u'am\\xe9ricain' + >>> Locale('en', 'US').measurement_systems['US'] + u'US' + + """ + return self._data['measurement_systems'] + + @property + def character_order(self): + """The text direction for the language. + + >>> Locale('de', 'DE').character_order + 'left-to-right' + >>> Locale('ar', 'SA').character_order + 'right-to-left' + """ + return self._data['character_order'] + + @property + def text_direction(self): + """The text direction for the language in CSS short-hand form. + + >>> Locale('de', 'DE').text_direction + 'ltr' + >>> Locale('ar', 'SA').text_direction + 'rtl' + """ + return ''.join(word[0] for word in self.character_order.split('-')) + + @property + def unit_display_names(self): + """Display names for units of measurement. + + .. seealso:: + + You may want to use :py:func:`babel.units.get_unit_name` instead. + + .. note:: The format of the value returned may change between + Babel versions. 
+ + """ + return self._data['unit_display_names'] + + +def default_locale(category=None, aliases=LOCALE_ALIASES): + """Returns the system default locale for a given category, based on + environment variables. + + >>> for name in ['LANGUAGE', 'LC_ALL', 'LC_CTYPE']: + ... os.environ[name] = '' + >>> os.environ['LANG'] = 'fr_FR.UTF-8' + >>> default_locale('LC_MESSAGES') + 'fr_FR' + + The "C" or "POSIX" pseudo-locales are treated as aliases for the + "en_US_POSIX" locale: + + >>> os.environ['LC_MESSAGES'] = 'POSIX' + >>> default_locale('LC_MESSAGES') + 'en_US_POSIX' + + The following fallbacks to the variable are always considered: + + - ``LANGUAGE`` + - ``LC_ALL`` + - ``LC_CTYPE`` + - ``LANG`` + + :param category: one of the ``LC_XXX`` environment variable names + :param aliases: a dictionary of aliases for locale identifiers + """ + varnames = (category, 'LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LANG') + for name in filter(None, varnames): + locale = os.getenv(name) + if locale: + if name == 'LANGUAGE' and ':' in locale: + # the LANGUAGE variable may contain a colon-separated list of + # language codes; we just pick the language on the list + locale = locale.split(':')[0] + if locale.split('.')[0] in ('C', 'POSIX'): + locale = 'en_US_POSIX' + elif aliases and locale in aliases: + locale = aliases[locale] + try: + return get_locale_identifier(parse_locale(locale)) + except ValueError: + pass + + +def negotiate_locale(preferred, available, sep='_', aliases=LOCALE_ALIASES): + """Find the best match between available and requested locale strings. + + >>> negotiate_locale(['de_DE', 'en_US'], ['de_DE', 'de_AT']) + 'de_DE' + >>> negotiate_locale(['de_DE', 'en_US'], ['en', 'de']) + 'de' + + Case is ignored by the algorithm, the result uses the case of the preferred + locale identifier: + + >>> negotiate_locale(['de_DE', 'en_US'], ['de_de', 'de_at']) + 'de_DE' + + >>> negotiate_locale(['de_DE', 'en_US'], ['de_de', 'de_at']) + 'de_DE' + + By default, some web browsers unfortunately do not include the territory + in the locale identifier for many locales, and some don't even allow the + user to easily add the territory. So while you may prefer using qualified + locale identifiers in your web-application, they would not normally match + the language-only locale sent by such browsers. To workaround that, this + function uses a default mapping of commonly used langauge-only locale + identifiers to identifiers including the territory: + + >>> negotiate_locale(['ja', 'en_US'], ['ja_JP', 'en_US']) + 'ja_JP' + + Some browsers even use an incorrect or outdated language code, such as "no" + for Norwegian, where the correct locale identifier would actually be "nb_NO" + (Bokmål) or "nn_NO" (Nynorsk). The aliases are intended to take care of + such cases, too: + + >>> negotiate_locale(['no', 'sv'], ['nb_NO', 'sv_SE']) + 'nb_NO' + + You can override this default mapping by passing a different `aliases` + dictionary to this function, or you can bypass the behavior althogher by + setting the `aliases` parameter to `None`. 
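# --- Illustrative usage sketch; not part of the vendored babel/core.py hunk.
# Locale negotiation as documented in the doctests above: matching is
# case-insensitive and the built-in alias mapping fills in missing territories.
from babel.core import Locale, negotiate_locale

print(negotiate_locale(['de_DE', 'en_US'], ['de_DE', 'de_AT']))   # de_DE
print(negotiate_locale(['de_DE', 'en_US'], ['en', 'de']))         # de
print(negotiate_locale(['ja', 'en_US'], ['ja_JP', 'en_US']))      # ja_JP  (alias ja -> ja_JP)
print(negotiate_locale(['no', 'sv'], ['nb_NO', 'sv_SE']))         # nb_NO  (alias no -> nb_NO)

# The same negotiation is also exposed as a Locale classmethod.
print(Locale.negotiate(['de_DE', 'en_US'], ['de_DE', 'de_AT']))   # de_DE (a Locale object)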
+ + :param preferred: the list of locale strings preferred by the user + :param available: the list of locale strings available + :param sep: character that separates the different parts of the locale + strings + :param aliases: a dictionary of aliases for locale identifiers + """ + available = [a.lower() for a in available if a] + for locale in preferred: + ll = locale.lower() + if ll in available: + return locale + if aliases: + alias = aliases.get(ll) + if alias: + alias = alias.replace('_', sep) + if alias.lower() in available: + return alias + parts = locale.split(sep) + if len(parts) > 1 and parts[0].lower() in available: + return parts[0] + return None + + +def parse_locale(identifier, sep='_'): + """Parse a locale identifier into a tuple of the form ``(language, + territory, script, variant)``. + + >>> parse_locale('zh_CN') + ('zh', 'CN', None, None) + >>> parse_locale('zh_Hans_CN') + ('zh', 'CN', 'Hans', None) + + The default component separator is "_", but a different separator can be + specified using the `sep` parameter: + + >>> parse_locale('zh-CN', sep='-') + ('zh', 'CN', None, None) + + If the identifier cannot be parsed into a locale, a `ValueError` exception + is raised: + + >>> parse_locale('not_a_LOCALE_String') + Traceback (most recent call last): + ... + ValueError: 'not_a_LOCALE_String' is not a valid locale identifier + + Encoding information and locale modifiers are removed from the identifier: + + >>> parse_locale('it_IT@euro') + ('it', 'IT', None, None) + >>> parse_locale('en_US.UTF-8') + ('en', 'US', None, None) + >>> parse_locale('de_DE.iso885915@euro') + ('de', 'DE', None, None) + + See :rfc:`4646` for more information. + + :param identifier: the locale identifier string + :param sep: character that separates the different components of the locale + identifier + :raise `ValueError`: if the string does not appear to be a valid locale + identifier + """ + if '.' in identifier: + # this is probably the charset/encoding, which we don't care about + identifier = identifier.split('.', 1)[0] + if '@' in identifier: + # this is a locale modifier such as @euro, which we don't care about + # either + identifier = identifier.split('@', 1)[0] + + parts = identifier.split(sep) + lang = parts.pop(0).lower() + if not lang.isalpha(): + raise ValueError('expected only letters, got %r' % lang) + + script = territory = variant = None + if parts: + if len(parts[0]) == 4 and parts[0].isalpha(): + script = parts.pop(0).title() + + if parts: + if len(parts[0]) == 2 and parts[0].isalpha(): + territory = parts.pop(0).upper() + elif len(parts[0]) == 3 and parts[0].isdigit(): + territory = parts.pop(0) + + if parts: + if len(parts[0]) == 4 and parts[0][0].isdigit() or \ + len(parts[0]) >= 5 and parts[0][0].isalpha(): + variant = parts.pop() + + if parts: + raise ValueError('%r is not a valid locale identifier' % identifier) + + return lang, territory, script, variant + + +def get_locale_identifier(tup, sep='_'): + """The reverse of :func:`parse_locale`. It creates a locale identifier out + of a ``(language, territory, script, variant)`` tuple. Items can be set to + ``None`` and trailing ``None``\\s can also be left out of the tuple. + + >>> get_locale_identifier(('de', 'DE', None, '1999')) + 'de_DE_1999' + + .. versionadded:: 1.0 + + :param tup: the tuple as returned by :func:`parse_locale`. + :param sep: the separator for the identifier. 
+ """ + tup = tuple(tup[:4]) + lang, territory, script, variant = tup + (None,) * (4 - len(tup)) + return sep.join(filter(None, (lang, script, territory, variant))) diff --git a/dbt-env/lib/python3.8/site-packages/babel/dates.py b/dbt-env/lib/python3.8/site-packages/babel/dates.py new file mode 100644 index 0000000..75e8f35 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/dates.py @@ -0,0 +1,1796 @@ +# -*- coding: utf-8 -*- +""" + babel.dates + ~~~~~~~~~~~ + + Locale dependent formatting and parsing of dates and times. + + The default locale for the functions in this module is determined by the + following environment variables, in that order: + + * ``LC_TIME``, + * ``LC_ALL``, and + * ``LANG`` + + :copyright: (c) 2013-2021 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +from __future__ import division + +import re +import warnings +import pytz as _pytz + +from datetime import date, datetime, time, timedelta +from bisect import bisect_right + +from babel.core import default_locale, get_global, Locale +from babel.util import UTC, LOCALTZ +from babel._compat import string_types, integer_types, number_types, PY2 + +# "If a given short metazone form is known NOT to be understood in a given +# locale and the parent locale has this value such that it would normally +# be inherited, the inheritance of this value can be explicitly disabled by +# use of the 'no inheritance marker' as the value, which is 3 simultaneous [sic] +# empty set characters ( U+2205 )." +# - https://www.unicode.org/reports/tr35/tr35-dates.html#Metazone_Names + +NO_INHERITANCE_MARKER = u'\u2205\u2205\u2205' + + +LC_TIME = default_locale('LC_TIME') + +# Aliases for use in scopes where the modules are shadowed by local variables +date_ = date +datetime_ = datetime +time_ = time + + +def _get_dt_and_tzinfo(dt_or_tzinfo): + """ + Parse a `dt_or_tzinfo` value into a datetime and a tzinfo. + + See the docs for this function's callers for semantics. + + :rtype: tuple[datetime, tzinfo] + """ + if dt_or_tzinfo is None: + dt = datetime.now() + tzinfo = LOCALTZ + elif isinstance(dt_or_tzinfo, string_types): + dt = None + tzinfo = get_timezone(dt_or_tzinfo) + elif isinstance(dt_or_tzinfo, integer_types): + dt = None + tzinfo = UTC + elif isinstance(dt_or_tzinfo, (datetime, time)): + dt = _get_datetime(dt_or_tzinfo) + if dt.tzinfo is not None: + tzinfo = dt.tzinfo + else: + tzinfo = UTC + else: + dt = None + tzinfo = dt_or_tzinfo + return dt, tzinfo + + +def _get_tz_name(dt_or_tzinfo): + """ + Get the timezone name out of a time, datetime, or tzinfo object. + + :rtype: str + """ + dt, tzinfo = _get_dt_and_tzinfo(dt_or_tzinfo) + if hasattr(tzinfo, 'zone'): # pytz object + return tzinfo.zone + elif hasattr(tzinfo, 'key') and tzinfo.key is not None: # ZoneInfo object + return tzinfo.key + else: + return tzinfo.tzname(dt or datetime.utcnow()) + + +def _get_datetime(instant): + """ + Get a datetime out of an "instant" (date, time, datetime, number). + + .. warning:: The return values of this function may depend on the system clock. + + If the instant is None, the current moment is used. + If the instant is a time, it's augmented with today's date. + + Dates are converted to naive datetimes with midnight as the time component. + + >>> _get_datetime(date(2015, 1, 1)) + datetime.datetime(2015, 1, 1, 0, 0) + + UNIX timestamps are converted to datetimes. + + >>> _get_datetime(1400000000) + datetime.datetime(2014, 5, 13, 16, 53, 20) + + Other values are passed through as-is. 
+ + >>> x = datetime(2015, 1, 1) + >>> _get_datetime(x) is x + True + + :param instant: date, time, datetime, integer, float or None + :type instant: date|time|datetime|int|float|None + :return: a datetime + :rtype: datetime + """ + if instant is None: + return datetime_.utcnow() + elif isinstance(instant, integer_types) or isinstance(instant, float): + return datetime_.utcfromtimestamp(instant) + elif isinstance(instant, time): + return datetime_.combine(date.today(), instant) + elif isinstance(instant, date) and not isinstance(instant, datetime): + return datetime_.combine(instant, time()) + # TODO (3.x): Add an assertion/type check for this fallthrough branch: + return instant + + +def _ensure_datetime_tzinfo(datetime, tzinfo=None): + """ + Ensure the datetime passed has an attached tzinfo. + + If the datetime is tz-naive to begin with, UTC is attached. + + If a tzinfo is passed in, the datetime is normalized to that timezone. + + >>> _ensure_datetime_tzinfo(datetime(2015, 1, 1)).tzinfo.zone + 'UTC' + + >>> tz = get_timezone("Europe/Stockholm") + >>> _ensure_datetime_tzinfo(datetime(2015, 1, 1, 13, 15, tzinfo=UTC), tzinfo=tz).hour + 14 + + :param datetime: Datetime to augment. + :param tzinfo: Optional tznfo. + :return: datetime with tzinfo + :rtype: datetime + """ + if datetime.tzinfo is None: + datetime = datetime.replace(tzinfo=UTC) + if tzinfo is not None: + datetime = datetime.astimezone(get_timezone(tzinfo)) + if hasattr(tzinfo, 'normalize'): # pytz + datetime = tzinfo.normalize(datetime) + return datetime + + +def _get_time(time, tzinfo=None): + """ + Get a timezoned time from a given instant. + + .. warning:: The return values of this function may depend on the system clock. + + :param time: time, datetime or None + :rtype: time + """ + if time is None: + time = datetime.utcnow() + elif isinstance(time, number_types): + time = datetime.utcfromtimestamp(time) + if time.tzinfo is None: + time = time.replace(tzinfo=UTC) + if isinstance(time, datetime): + if tzinfo is not None: + time = time.astimezone(tzinfo) + if hasattr(tzinfo, 'normalize'): # pytz + time = tzinfo.normalize(time) + time = time.timetz() + elif tzinfo is not None: + time = time.replace(tzinfo=tzinfo) + return time + + +def get_timezone(zone=None): + """Looks up a timezone by name and returns it. The timezone object + returned comes from ``pytz`` and corresponds to the `tzinfo` interface and + can be used with all of the functions of Babel that operate with dates. + + If a timezone is not known a :exc:`LookupError` is raised. If `zone` + is ``None`` a local zone object is returned. + + :param zone: the name of the timezone to look up. If a timezone object + itself is passed in, mit's returned unchanged. + """ + if zone is None: + return LOCALTZ + if not isinstance(zone, string_types): + return zone + try: + return _pytz.timezone(zone) + except _pytz.UnknownTimeZoneError: + raise LookupError('Unknown timezone %s' % zone) + + +def get_next_timezone_transition(zone=None, dt=None): + """Given a timezone it will return a :class:`TimezoneTransition` object + that holds the information about the next timezone transition that's going + to happen. For instance this can be used to detect when the next DST + change is going to happen and how it looks like. + + The transition is calculated relative to the given datetime object. The + next transition that follows the date is used. If a transition cannot + be found the return value will be `None`. 
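# --- Illustrative usage sketch; not part of the vendored babel/dates.py hunk.
# get_timezone() as documented above: it looks up a pytz timezone by name,
# returns the local zone for None, passes tzinfo objects through unchanged,
# and raises LookupError for unknown names. 'Mars/Olympus_Mons' below is a
# deliberately invalid name used only to trigger the error path.
from babel.dates import get_timezone

tz = get_timezone('Europe/Berlin')
print(tz)                        # Europe/Berlin

local_tz = get_timezone(None)    # the local zone object

try:
    get_timezone('Mars/Olympus_Mons')
except LookupError as exc:
    print(exc)                   # Unknown timezone Mars/Olympus_Mons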
+ + Transition information can only be provided for timezones returned by + the :func:`get_timezone` function. + + :param zone: the timezone for which the transition should be looked up. + If not provided the local timezone is used. + :param dt: the date after which the next transition should be found. + If not given the current time is assumed. + """ + zone = get_timezone(zone) + dt = _get_datetime(dt).replace(tzinfo=None) + + if not hasattr(zone, '_utc_transition_times'): + raise TypeError('Given timezone does not have UTC transition ' + 'times. This can happen because the operating ' + 'system fallback local timezone is used or a ' + 'custom timezone object') + + try: + idx = max(0, bisect_right(zone._utc_transition_times, dt)) + old_trans = zone._transition_info[idx - 1] + new_trans = zone._transition_info[idx] + old_tz = zone._tzinfos[old_trans] + new_tz = zone._tzinfos[new_trans] + except (LookupError, ValueError): + return None + + return TimezoneTransition( + activates=zone._utc_transition_times[idx], + from_tzinfo=old_tz, + to_tzinfo=new_tz, + reference_date=dt + ) + + +class TimezoneTransition(object): + """A helper object that represents the return value from + :func:`get_next_timezone_transition`. + """ + + def __init__(self, activates, from_tzinfo, to_tzinfo, reference_date=None): + #: the time of the activation of the timezone transition in UTC. + self.activates = activates + #: the timezone from where the transition starts. + self.from_tzinfo = from_tzinfo + #: the timezone for after the transition. + self.to_tzinfo = to_tzinfo + #: the reference date that was provided. This is the `dt` parameter + #: to the :func:`get_next_timezone_transition`. + self.reference_date = reference_date + + @property + def from_tz(self): + """The name of the timezone before the transition.""" + return self.from_tzinfo._tzname + + @property + def to_tz(self): + """The name of the timezone after the transition.""" + return self.to_tzinfo._tzname + + @property + def from_offset(self): + """The UTC offset in seconds before the transition.""" + return int(self.from_tzinfo._utcoffset.total_seconds()) + + @property + def to_offset(self): + """The UTC offset in seconds after the transition.""" + return int(self.to_tzinfo._utcoffset.total_seconds()) + + def __repr__(self): + return ' %s (%s)>' % ( + self.from_tz, + self.to_tz, + self.activates, + ) + + +def get_period_names(width='wide', context='stand-alone', locale=LC_TIME): + """Return the names for day periods (AM/PM) used by the locale. + + >>> get_period_names(locale='en_US')['am'] + u'AM' + + :param width: the width to use, one of "abbreviated", "narrow", or "wide" + :param context: the context, either "format" or "stand-alone" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).day_periods[context][width] + + +def get_day_names(width='wide', context='format', locale=LC_TIME): + """Return the day names used by the locale for the specified format. + + >>> get_day_names('wide', locale='en_US')[1] + u'Tuesday' + >>> get_day_names('short', locale='en_US')[1] + u'Tu' + >>> get_day_names('abbreviated', locale='es')[1] + u'mar.' 
+ >>> get_day_names('narrow', context='stand-alone', locale='de_DE')[1] + u'D' + + :param width: the width to use, one of "wide", "abbreviated", "short" or "narrow" + :param context: the context, either "format" or "stand-alone" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).days[context][width] + + +def get_month_names(width='wide', context='format', locale=LC_TIME): + """Return the month names used by the locale for the specified format. + + >>> get_month_names('wide', locale='en_US')[1] + u'January' + >>> get_month_names('abbreviated', locale='es')[1] + u'ene.' + >>> get_month_names('narrow', context='stand-alone', locale='de_DE')[1] + u'J' + + :param width: the width to use, one of "wide", "abbreviated", or "narrow" + :param context: the context, either "format" or "stand-alone" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).months[context][width] + + +def get_quarter_names(width='wide', context='format', locale=LC_TIME): + """Return the quarter names used by the locale for the specified format. + + >>> get_quarter_names('wide', locale='en_US')[1] + u'1st quarter' + >>> get_quarter_names('abbreviated', locale='de_DE')[1] + u'Q1' + >>> get_quarter_names('narrow', locale='de_DE')[1] + u'1' + + :param width: the width to use, one of "wide", "abbreviated", or "narrow" + :param context: the context, either "format" or "stand-alone" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).quarters[context][width] + + +def get_era_names(width='wide', locale=LC_TIME): + """Return the era names used by the locale for the specified format. + + >>> get_era_names('wide', locale='en_US')[1] + u'Anno Domini' + >>> get_era_names('abbreviated', locale='de_DE')[1] + u'n. Chr.' + + :param width: the width to use, either "wide", "abbreviated", or "narrow" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).eras[width] + + +def get_date_format(format='medium', locale=LC_TIME): + """Return the date formatting patterns used by the locale for the specified + format. + + >>> get_date_format(locale='en_US') + + >>> get_date_format('full', locale='de_DE') + + + :param format: the format to use, one of "full", "long", "medium", or + "short" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).date_formats[format] + + +def get_datetime_format(format='medium', locale=LC_TIME): + """Return the datetime formatting patterns used by the locale for the + specified format. + + >>> get_datetime_format(locale='en_US') + u'{1}, {0}' + + :param format: the format to use, one of "full", "long", "medium", or + "short" + :param locale: the `Locale` object, or a locale string + """ + patterns = Locale.parse(locale).datetime_formats + if format not in patterns: + format = None + return patterns[format] + + +def get_time_format(format='medium', locale=LC_TIME): + """Return the time formatting patterns used by the locale for the specified + format. 
+ + >>> get_time_format(locale='en_US') + + >>> get_time_format('full', locale='de_DE') + + + :param format: the format to use, one of "full", "long", "medium", or + "short" + :param locale: the `Locale` object, or a locale string + """ + return Locale.parse(locale).time_formats[format] + + +def get_timezone_gmt(datetime=None, width='long', locale=LC_TIME, return_z=False): + """Return the timezone associated with the given `datetime` object formatted + as string indicating the offset from GMT. + + >>> dt = datetime(2007, 4, 1, 15, 30) + >>> get_timezone_gmt(dt, locale='en') + u'GMT+00:00' + >>> get_timezone_gmt(dt, locale='en', return_z=True) + 'Z' + >>> get_timezone_gmt(dt, locale='en', width='iso8601_short') + u'+00' + >>> tz = get_timezone('America/Los_Angeles') + >>> dt = tz.localize(datetime(2007, 4, 1, 15, 30)) + >>> get_timezone_gmt(dt, locale='en') + u'GMT-07:00' + >>> get_timezone_gmt(dt, 'short', locale='en') + u'-0700' + >>> get_timezone_gmt(dt, locale='en', width='iso8601_short') + u'-07' + + The long format depends on the locale, for example in France the acronym + UTC string is used instead of GMT: + + >>> get_timezone_gmt(dt, 'long', locale='fr_FR') + u'UTC-07:00' + + .. versionadded:: 0.9 + + :param datetime: the ``datetime`` object; if `None`, the current date and + time in UTC is used + :param width: either "long" or "short" or "iso8601" or "iso8601_short" + :param locale: the `Locale` object, or a locale string + :param return_z: True or False; Function returns indicator "Z" + when local time offset is 0 + """ + datetime = _ensure_datetime_tzinfo(_get_datetime(datetime)) + locale = Locale.parse(locale) + + offset = datetime.tzinfo.utcoffset(datetime) + seconds = offset.days * 24 * 60 * 60 + offset.seconds + hours, seconds = divmod(seconds, 3600) + if return_z and hours == 0 and seconds == 0: + return 'Z' + elif seconds == 0 and width == 'iso8601_short': + return u'%+03d' % hours + elif width == 'short' or width == 'iso8601_short': + pattern = u'%+03d%02d' + elif width == 'iso8601': + pattern = u'%+03d:%02d' + else: + pattern = locale.zone_formats['gmt'] % '%+03d:%02d' + return pattern % (hours, seconds // 60) + + +def get_timezone_location(dt_or_tzinfo=None, locale=LC_TIME, return_city=False): + u"""Return a representation of the given timezone using "location format". + + The result depends on both the local display name of the country and the + city associated with the time zone: + + >>> tz = get_timezone('America/St_Johns') + >>> print(get_timezone_location(tz, locale='de_DE')) + Kanada (St. John’s) Zeit + >>> print(get_timezone_location(tz, locale='en')) + Canada (St. John’s) Time + >>> print(get_timezone_location(tz, locale='en', return_city=True)) + St. John’s + >>> tz = get_timezone('America/Mexico_City') + >>> get_timezone_location(tz, locale='de_DE') + u'Mexiko (Mexiko-Stadt) Zeit' + + If the timezone is associated with a country that uses only a single + timezone, just the localized country name is returned: + + >>> tz = get_timezone('Europe/Berlin') + >>> get_timezone_name(tz, locale='de_DE') + u'Mitteleurop\\xe4ische Zeit' + + .. 
versionadded:: 0.9 + + :param dt_or_tzinfo: the ``datetime`` or ``tzinfo`` object that determines + the timezone; if `None`, the current date and time in + UTC is assumed + :param locale: the `Locale` object, or a locale string + :param return_city: True or False, if True then return exemplar city (location) + for the time zone + :return: the localized timezone name using location format + + """ + locale = Locale.parse(locale) + + zone = _get_tz_name(dt_or_tzinfo) + + # Get the canonical time-zone code + zone = get_global('zone_aliases').get(zone, zone) + + info = locale.time_zones.get(zone, {}) + + # Otherwise, if there is only one timezone for the country, return the + # localized country name + region_format = locale.zone_formats['region'] + territory = get_global('zone_territories').get(zone) + if territory not in locale.territories: + territory = 'ZZ' # invalid/unknown + territory_name = locale.territories[territory] + if not return_city and territory and len(get_global('territory_zones').get(territory, [])) == 1: + return region_format % territory_name + + # Otherwise, include the city in the output + fallback_format = locale.zone_formats['fallback'] + if 'city' in info: + city_name = info['city'] + else: + metazone = get_global('meta_zones').get(zone) + metazone_info = locale.meta_zones.get(metazone, {}) + if 'city' in metazone_info: + city_name = metazone_info['city'] + elif '/' in zone: + city_name = zone.split('/', 1)[1].replace('_', ' ') + else: + city_name = zone.replace('_', ' ') + + if return_city: + return city_name + return region_format % (fallback_format % { + '0': city_name, + '1': territory_name + }) + + +def get_timezone_name(dt_or_tzinfo=None, width='long', uncommon=False, + locale=LC_TIME, zone_variant=None, return_zone=False): + r"""Return the localized display name for the given timezone. The timezone + may be specified using a ``datetime`` or `tzinfo` object. + + >>> dt = time(15, 30, tzinfo=get_timezone('America/Los_Angeles')) + >>> get_timezone_name(dt, locale='en_US') + u'Pacific Standard Time' + >>> get_timezone_name(dt, locale='en_US', return_zone=True) + 'America/Los_Angeles' + >>> get_timezone_name(dt, width='short', locale='en_US') + u'PST' + + If this function gets passed only a `tzinfo` object and no concrete + `datetime`, the returned display name is indenpendent of daylight savings + time. This can be used for example for selecting timezones, or to set the + time of events that recur across DST changes: + + >>> tz = get_timezone('America/Los_Angeles') + >>> get_timezone_name(tz, locale='en_US') + u'Pacific Time' + >>> get_timezone_name(tz, 'short', locale='en_US') + u'PT' + + If no localized display name for the timezone is available, and the timezone + is associated with a country that uses only a single timezone, the name of + that country is returned, formatted according to the locale: + + >>> tz = get_timezone('Europe/Berlin') + >>> get_timezone_name(tz, locale='de_DE') + u'Mitteleurop\xe4ische Zeit' + >>> get_timezone_name(tz, locale='pt_BR') + u'Hor\xe1rio da Europa Central' + + On the other hand, if the country uses multiple timezones, the city is also + included in the representation: + + >>> tz = get_timezone('America/St_Johns') + >>> get_timezone_name(tz, locale='de_DE') + u'Neufundland-Zeit' + + Note that short format is currently not supported for all timezones and + all locales. This is partially because not every timezone has a short + code in every locale. In that case it currently falls back to the long + format. 
+ + For more information see `LDML Appendix J: Time Zone Display Names + `_ + + .. versionadded:: 0.9 + + .. versionchanged:: 1.0 + Added `zone_variant` support. + + :param dt_or_tzinfo: the ``datetime`` or ``tzinfo`` object that determines + the timezone; if a ``tzinfo`` object is used, the + resulting display name will be generic, i.e. + independent of daylight savings time; if `None`, the + current date in UTC is assumed + :param width: either "long" or "short" + :param uncommon: deprecated and ignored + :param zone_variant: defines the zone variation to return. By default the + variation is defined from the datetime object + passed in. If no datetime object is passed in, the + ``'generic'`` variation is assumed. The following + values are valid: ``'generic'``, ``'daylight'`` and + ``'standard'``. + :param locale: the `Locale` object, or a locale string + :param return_zone: True or False. If true then function + returns long time zone ID + """ + dt, tzinfo = _get_dt_and_tzinfo(dt_or_tzinfo) + locale = Locale.parse(locale) + + zone = _get_tz_name(dt_or_tzinfo) + + if zone_variant is None: + if dt is None: + zone_variant = 'generic' + else: + dst = tzinfo.dst(dt) + if dst: + zone_variant = 'daylight' + else: + zone_variant = 'standard' + else: + if zone_variant not in ('generic', 'standard', 'daylight'): + raise ValueError('Invalid zone variation') + + # Get the canonical time-zone code + zone = get_global('zone_aliases').get(zone, zone) + if return_zone: + return zone + info = locale.time_zones.get(zone, {}) + # Try explicitly translated zone names first + if width in info: + if zone_variant in info[width]: + return info[width][zone_variant] + + metazone = get_global('meta_zones').get(zone) + if metazone: + metazone_info = locale.meta_zones.get(metazone, {}) + if width in metazone_info: + name = metazone_info[width].get(zone_variant) + if width == 'short' and name == NO_INHERITANCE_MARKER: + # If the short form is marked no-inheritance, + # try to fall back to the long name instead. + name = metazone_info.get('long', {}).get(zone_variant) + if name: + return name + + # If we have a concrete datetime, we assume that the result can't be + # independent of daylight savings time, so we return the GMT offset + if dt is not None: + return get_timezone_gmt(dt, width=width, locale=locale) + + return get_timezone_location(dt_or_tzinfo, locale=locale) + + +def format_date(date=None, format='medium', locale=LC_TIME): + """Return a date formatted according to the given pattern. + + >>> d = date(2007, 4, 1) + >>> format_date(d, locale='en_US') + u'Apr 1, 2007' + >>> format_date(d, format='full', locale='de_DE') + u'Sonntag, 1. 
April 2007' + + If you don't want to use the locale default formats, you can specify a + custom date pattern: + + >>> format_date(d, "EEE, MMM d, ''yy", locale='en') + u"Sun, Apr 1, '07" + + :param date: the ``date`` or ``datetime`` object; if `None`, the current + date is used + :param format: one of "full", "long", "medium", or "short", or a custom + date/time pattern + :param locale: a `Locale` object or a locale identifier + """ + if date is None: + date = date_.today() + elif isinstance(date, datetime): + date = date.date() + + locale = Locale.parse(locale) + if format in ('full', 'long', 'medium', 'short'): + format = get_date_format(format, locale=locale) + pattern = parse_pattern(format) + return pattern.apply(date, locale) + + +def format_datetime(datetime=None, format='medium', tzinfo=None, + locale=LC_TIME): + r"""Return a date formatted according to the given pattern. + + >>> dt = datetime(2007, 4, 1, 15, 30) + >>> format_datetime(dt, locale='en_US') + u'Apr 1, 2007, 3:30:00 PM' + + For any pattern requiring the display of the time-zone, the third-party + ``pytz`` package is needed to explicitly specify the time-zone: + + >>> format_datetime(dt, 'full', tzinfo=get_timezone('Europe/Paris'), + ... locale='fr_FR') + u'dimanche 1 avril 2007 \xe0 17:30:00 heure d\u2019\xe9t\xe9 d\u2019Europe centrale' + >>> format_datetime(dt, "yyyy.MM.dd G 'at' HH:mm:ss zzz", + ... tzinfo=get_timezone('US/Eastern'), locale='en') + u'2007.04.01 AD at 11:30:00 EDT' + + :param datetime: the `datetime` object; if `None`, the current date and + time is used + :param format: one of "full", "long", "medium", or "short", or a custom + date/time pattern + :param tzinfo: the timezone to apply to the time for display + :param locale: a `Locale` object or a locale identifier + """ + datetime = _ensure_datetime_tzinfo(_get_datetime(datetime), tzinfo) + + locale = Locale.parse(locale) + if format in ('full', 'long', 'medium', 'short'): + return get_datetime_format(format, locale=locale) \ + .replace("'", "") \ + .replace('{0}', format_time(datetime, format, tzinfo=None, + locale=locale)) \ + .replace('{1}', format_date(datetime, format, locale=locale)) + else: + return parse_pattern(format).apply(datetime, locale) + + +def format_time(time=None, format='medium', tzinfo=None, locale=LC_TIME): + r"""Return a time formatted according to the given pattern. + + >>> t = time(15, 30) + >>> format_time(t, locale='en_US') + u'3:30:00 PM' + >>> format_time(t, format='short', locale='de_DE') + u'15:30' + + If you don't want to use the locale default formats, you can specify a + custom time pattern: + + >>> format_time(t, "hh 'o''clock' a", locale='en') + u"03 o'clock PM" + + For any pattern requiring the display of the time-zone a + timezone has to be specified explicitly: + + >>> t = datetime(2007, 4, 1, 15, 30) + >>> tzinfo = get_timezone('Europe/Paris') + >>> t = tzinfo.localize(t) + >>> format_time(t, format='full', tzinfo=tzinfo, locale='fr_FR') + u'15:30:00 heure d\u2019\xe9t\xe9 d\u2019Europe centrale' + >>> format_time(t, "hh 'o''clock' a, zzzz", tzinfo=get_timezone('US/Eastern'), + ... locale='en') + u"09 o'clock AM, Eastern Daylight Time" + + As that example shows, when this function gets passed a + ``datetime.datetime`` value, the actual time in the formatted string is + adjusted to the timezone specified by the `tzinfo` parameter. If the + ``datetime`` is "naive" (i.e. it has no associated timezone information), + it is assumed to be in UTC. 
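# --- Illustrative usage sketch; not part of the vendored babel/dates.py hunk.
# Date and datetime formatting as documented in the doctests above, including
# a locale-default format, a full format, and an explicit timezone.
from datetime import date, datetime
from babel.dates import format_date, format_datetime, get_timezone

d = date(2007, 4, 1)
print(format_date(d, locale='en_US'))                  # Apr 1, 2007
print(format_date(d, format='full', locale='de_DE'))   # Sonntag, 1. April 2007

dt = datetime(2007, 4, 1, 15, 30)
print(format_datetime(dt, locale='en_US'))             # Apr 1, 2007, 3:30:00 PM
print(format_datetime(dt, 'full',
                      tzinfo=get_timezone('Europe/Paris'),
                      locale='fr_FR'))
# dimanche 1 avril 2007 à 17:30:00 heure d'été d'Europe centrale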
+ + These timezone calculations are **not** performed if the value is of type + ``datetime.time``, as without date information there's no way to determine + what a given time would translate to in a different timezone without + information about whether daylight savings time is in effect or not. This + means that time values are left as-is, and the value of the `tzinfo` + parameter is only used to display the timezone name if needed: + + >>> t = time(15, 30) + >>> format_time(t, format='full', tzinfo=get_timezone('Europe/Paris'), + ... locale='fr_FR') + u'15:30:00 heure normale d\u2019Europe centrale' + >>> format_time(t, format='full', tzinfo=get_timezone('US/Eastern'), + ... locale='en_US') + u'3:30:00 PM Eastern Standard Time' + + :param time: the ``time`` or ``datetime`` object; if `None`, the current + time in UTC is used + :param format: one of "full", "long", "medium", or "short", or a custom + date/time pattern + :param tzinfo: the time-zone to apply to the time for display + :param locale: a `Locale` object or a locale identifier + """ + time = _get_time(time, tzinfo) + + locale = Locale.parse(locale) + if format in ('full', 'long', 'medium', 'short'): + format = get_time_format(format, locale=locale) + return parse_pattern(format).apply(time, locale) + + +def format_skeleton(skeleton, datetime=None, tzinfo=None, fuzzy=True, locale=LC_TIME): + r"""Return a time and/or date formatted according to the given pattern. + + The skeletons are defined in the CLDR data and provide more flexibility + than the simple short/long/medium formats, but are a bit harder to use. + The are defined using the date/time symbols without order or punctuation + and map to a suitable format for the given locale. + + >>> t = datetime(2007, 4, 1, 15, 30) + >>> format_skeleton('MMMEd', t, locale='fr') + u'dim. 1 avr.' + >>> format_skeleton('MMMEd', t, locale='en') + u'Sun, Apr 1' + >>> format_skeleton('yMMd', t, locale='fi') # yMMd is not in the Finnish locale; yMd gets used + u'1.4.2007' + >>> format_skeleton('yMMd', t, fuzzy=False, locale='fi') # yMMd is not in the Finnish locale, an error is thrown + Traceback (most recent call last): + ... + KeyError: yMMd + + After the skeleton is resolved to a pattern `format_datetime` is called so + all timezone processing etc is the same as for that. + + :param skeleton: A date time skeleton as defined in the cldr data. + :param datetime: the ``time`` or ``datetime`` object; if `None`, the current + time in UTC is used + :param tzinfo: the time-zone to apply to the time for display + :param fuzzy: If the skeleton is not found, allow choosing a skeleton that's + close enough to it. + :param locale: a `Locale` object or a locale identifier + """ + locale = Locale.parse(locale) + if fuzzy and skeleton not in locale.datetime_skeletons: + skeleton = match_skeleton(skeleton, locale.datetime_skeletons) + format = locale.datetime_skeletons[skeleton] + return format_datetime(datetime, format, tzinfo, locale) + + +TIMEDELTA_UNITS = ( + ('year', 3600 * 24 * 365), + ('month', 3600 * 24 * 30), + ('week', 3600 * 24 * 7), + ('day', 3600 * 24), + ('hour', 3600), + ('minute', 60), + ('second', 1) +) + + +def format_timedelta(delta, granularity='second', threshold=.85, + add_direction=False, format='long', + locale=LC_TIME): + """Return a time delta according to the rules of the given locale. 
+ + >>> format_timedelta(timedelta(weeks=12), locale='en_US') + u'3 months' + >>> format_timedelta(timedelta(seconds=1), locale='es') + u'1 segundo' + + The granularity parameter can be provided to alter the lowest unit + presented, which defaults to a second. + + >>> format_timedelta(timedelta(hours=3), granularity='day', + ... locale='en_US') + u'1 day' + + The threshold parameter can be used to determine at which value the + presentation switches to the next higher unit. A higher threshold factor + means the presentation will switch later. For example: + + >>> format_timedelta(timedelta(hours=23), threshold=0.9, locale='en_US') + u'1 day' + >>> format_timedelta(timedelta(hours=23), threshold=1.1, locale='en_US') + u'23 hours' + + In addition directional information can be provided that informs + the user if the date is in the past or in the future: + + >>> format_timedelta(timedelta(hours=1), add_direction=True, locale='en') + u'in 1 hour' + >>> format_timedelta(timedelta(hours=-1), add_direction=True, locale='en') + u'1 hour ago' + + The format parameter controls how compact or wide the presentation is: + + >>> format_timedelta(timedelta(hours=3), format='short', locale='en') + u'3 hr' + >>> format_timedelta(timedelta(hours=3), format='narrow', locale='en') + u'3h' + + :param delta: a ``timedelta`` object representing the time difference to + format, or the delta in seconds as an `int` value + :param granularity: determines the smallest unit that should be displayed, + the value can be one of "year", "month", "week", "day", + "hour", "minute" or "second" + :param threshold: factor that determines at which point the presentation + switches to the next higher unit + :param add_direction: if this flag is set to `True` the return value will + include directional information. For instance a + positive timedelta will include the information about + it being in the future, a negative will be information + about the value being in the past. + :param format: the format, can be "narrow", "short" or "long". ( + "medium" is deprecated, currently converted to "long" to + maintain compatibility) + :param locale: a `Locale` object or a locale identifier + """ + if format not in ('narrow', 'short', 'medium', 'long'): + raise TypeError('Format must be one of "narrow", "short" or "long"') + if format == 'medium': + warnings.warn('"medium" value for format param of format_timedelta' + ' is deprecated. 
Use "long" instead', + category=DeprecationWarning) + format = 'long' + if isinstance(delta, timedelta): + seconds = int((delta.days * 86400) + delta.seconds) + else: + seconds = delta + locale = Locale.parse(locale) + + def _iter_patterns(a_unit): + if add_direction: + unit_rel_patterns = locale._data['date_fields'][a_unit] + if seconds >= 0: + yield unit_rel_patterns['future'] + else: + yield unit_rel_patterns['past'] + a_unit = 'duration-' + a_unit + yield locale._data['unit_patterns'].get(a_unit, {}).get(format) + + for unit, secs_per_unit in TIMEDELTA_UNITS: + value = abs(seconds) / secs_per_unit + if value >= threshold or unit == granularity: + if unit == granularity and value > 0: + value = max(1, value) + value = int(round(value)) + plural_form = locale.plural_form(value) + pattern = None + for patterns in _iter_patterns(unit): + if patterns is not None: + pattern = patterns[plural_form] + break + # This really should not happen + if pattern is None: + return u'' + return pattern.replace('{0}', str(value)) + + return u'' + + +def _format_fallback_interval(start, end, skeleton, tzinfo, locale): + if skeleton in locale.datetime_skeletons: # Use the given skeleton + format = lambda dt: format_skeleton(skeleton, dt, tzinfo, locale=locale) + elif all((isinstance(d, date) and not isinstance(d, datetime)) for d in (start, end)): # Both are just dates + format = lambda dt: format_date(dt, locale=locale) + elif all((isinstance(d, time) and not isinstance(d, date)) for d in (start, end)): # Both are times + format = lambda dt: format_time(dt, tzinfo=tzinfo, locale=locale) + else: + format = lambda dt: format_datetime(dt, tzinfo=tzinfo, locale=locale) + + formatted_start = format(start) + formatted_end = format(end) + + if formatted_start == formatted_end: + return format(start) + + return ( + locale.interval_formats.get(None, "{0}-{1}"). + replace("{0}", formatted_start). + replace("{1}", formatted_end) + ) + + +def format_interval(start, end, skeleton=None, tzinfo=None, fuzzy=True, locale=LC_TIME): + """ + Format an interval between two instants according to the locale's rules. + + >>> format_interval(date(2016, 1, 15), date(2016, 1, 17), "yMd", locale="fi") + u'15.\u201317.1.2016' + + >>> format_interval(time(12, 12), time(16, 16), "Hm", locale="en_GB") + '12:12\u201316:16' + + >>> format_interval(time(5, 12), time(16, 16), "hm", locale="en_US") + '5:12 AM \u2013 4:16 PM' + + >>> format_interval(time(16, 18), time(16, 24), "Hm", locale="it") + '16:18\u201316:24' + + If the start instant equals the end instant, the interval is formatted like the instant. + + >>> format_interval(time(16, 18), time(16, 18), "Hm", locale="it") + '16:18' + + Unknown skeletons fall back to "default" formatting. + + >>> format_interval(date(2015, 1, 1), date(2017, 1, 1), "wzq", locale="ja") + '2015/01/01\uff5e2017/01/01' + + >>> format_interval(time(16, 18), time(16, 24), "xxx", locale="ja") + '16:18:00\uff5e16:24:00' + + >>> format_interval(date(2016, 1, 15), date(2016, 1, 17), "xxx", locale="de") + '15.01.2016 \u2013 17.01.2016' + + :param start: First instant (datetime/date/time) + :param end: Second instant (datetime/date/time) + :param skeleton: The "skeleton format" to use for formatting. + :param tzinfo: tzinfo to use (if none is already attached) + :param fuzzy: If the skeleton is not found, allow choosing a skeleton that's + close enough to it. + :param locale: A locale object or identifier. 
+ :return: Formatted interval + """ + locale = Locale.parse(locale) + + # NB: The quote comments below are from the algorithm description in + # https://www.unicode.org/reports/tr35/tr35-dates.html#intervalFormats + + # > Look for the intervalFormatItem element that matches the "skeleton", + # > starting in the current locale and then following the locale fallback + # > chain up to, but not including root. + + interval_formats = locale.interval_formats + + if skeleton not in interval_formats or not skeleton: + # > If no match was found from the previous step, check what the closest + # > match is in the fallback locale chain, as in availableFormats. That + # > is, this allows for adjusting the string value field's width, + # > including adjusting between "MMM" and "MMMM", and using different + # > variants of the same field, such as 'v' and 'z'. + if skeleton and fuzzy: + skeleton = match_skeleton(skeleton, interval_formats) + else: + skeleton = None + if not skeleton: # Still no match whatsoever? + # > Otherwise, format the start and end datetime using the fallback pattern. + return _format_fallback_interval(start, end, skeleton, tzinfo, locale) + + skel_formats = interval_formats[skeleton] + + if start == end: + return format_skeleton(skeleton, start, tzinfo, fuzzy=fuzzy, locale=locale) + + start = _ensure_datetime_tzinfo(_get_datetime(start), tzinfo=tzinfo) + end = _ensure_datetime_tzinfo(_get_datetime(end), tzinfo=tzinfo) + + start_fmt = DateTimeFormat(start, locale=locale) + end_fmt = DateTimeFormat(end, locale=locale) + + # > If a match is found from previous steps, compute the calendar field + # > with the greatest difference between start and end datetime. If there + # > is no difference among any of the fields in the pattern, format as a + # > single date using availableFormats, and return. + + for field in PATTERN_CHAR_ORDER: # These are in largest-to-smallest order + if field in skel_formats: + if start_fmt.extract(field) != end_fmt.extract(field): + # > If there is a match, use the pieces of the corresponding pattern to + # > format the start and end datetime, as above. + return "".join( + parse_pattern(pattern).apply(instant, locale) + for pattern, instant + in zip(skel_formats[field], (start, end)) + ) + + # > Otherwise, format the start and end datetime using the fallback pattern. + + return _format_fallback_interval(start, end, skeleton, tzinfo, locale) + + +def get_period_id(time, tzinfo=None, type=None, locale=LC_TIME): + """ + Get the day period ID for a given time. + + This ID can be used as a key for the period name dictionary. + + >>> get_period_names(locale="de")[get_period_id(time(7, 42), locale="de")] + u'Morgen' + + :param time: The time to inspect. + :param tzinfo: The timezone for the time. See ``format_time``. + :param type: The period type to use. Either "selection" or None. + The selection type is used for selecting among phrases such as + “Your email arrived yesterday evening” or “Your email arrived last night”. + :param locale: the `Locale` object, or a locale string + :return: period ID. Something is always returned -- even if it's just "am" or "pm". + """ + time = _get_time(time, tzinfo) + seconds_past_midnight = int(time.hour * 60 * 60 + time.minute * 60 + time.second) + locale = Locale.parse(locale) + + # The LDML rules state that the rules may not overlap, so iterating in arbitrary + # order should be alright, though `at` periods should be preferred. 
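The loops below test each CLDR day-period rule against the seconds elapsed since midnight: an "at" rule matches one exact instant, while range rules match via "from" together with "before" (exclusive) or "to" (inclusive). A standalone sketch of that check with hypothetical rule data (the real rules come from locale.day_period_rules):

    # Hypothetical day-period rules keyed by rule ID; real values are loaded
    # from the CLDR-derived locale data, not hard-coded like this.
    rules = {
        "noon": [{"at": 12 * 3600}],
        "morning1": [{"from": 6 * 3600, "before": 12 * 3600}],
    }

    def matches(rule, seconds_past_midnight):
        # "at" rules match a single instant; range rules match [from, before).
        if "at" in rule:
            return rule["at"] == seconds_past_midnight
        start_ok = "from" in rule and seconds_past_midnight >= rule["from"]
        end_ok = "before" in rule and seconds_past_midnight < rule["before"]
        return start_ok and end_ok

    seconds = 7 * 3600 + 42 * 60  # 07:42
    print([rid for rid, rs in rules.items() if any(matches(r, seconds) for r in rs)])
    # ['morning1']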
+ rulesets = locale.day_period_rules.get(type, {}).items() + + for rule_id, rules in rulesets: + for rule in rules: + if "at" in rule and rule["at"] == seconds_past_midnight: + return rule_id + + for rule_id, rules in rulesets: + for rule in rules: + start_ok = end_ok = False + + if "from" in rule and seconds_past_midnight >= rule["from"]: + start_ok = True + if "to" in rule and seconds_past_midnight <= rule["to"]: + # This rule type does not exist in the present CLDR data; + # excuse the lack of test coverage. + end_ok = True + if "before" in rule and seconds_past_midnight < rule["before"]: + end_ok = True + if "after" in rule: + raise NotImplementedError("'after' is deprecated as of CLDR 29.") + + if start_ok and end_ok: + return rule_id + + if seconds_past_midnight < 43200: + return "am" + else: + return "pm" + + +def parse_date(string, locale=LC_TIME): + """Parse a date from a string. + + This function uses the date format for the locale as a hint to determine + the order in which the date fields appear in the string. + + >>> parse_date('4/1/04', locale='en_US') + datetime.date(2004, 4, 1) + >>> parse_date('01.04.2004', locale='de_DE') + datetime.date(2004, 4, 1) + + :param string: the string containing the date + :param locale: a `Locale` object or a locale identifier + """ + # TODO: try ISO format first? + format = get_date_format(locale=locale).pattern.lower() + year_idx = format.index('y') + month_idx = format.index('m') + if month_idx < 0: + month_idx = format.index('l') + day_idx = format.index('d') + + indexes = [(year_idx, 'Y'), (month_idx, 'M'), (day_idx, 'D')] + indexes.sort() + indexes = dict([(item[1], idx) for idx, item in enumerate(indexes)]) + + # FIXME: this currently only supports numbers, but should also support month + # names, both in the requested locale, and english + + numbers = re.findall(r'(\d+)', string) + year = numbers[indexes['Y']] + if len(year) == 2: + year = 2000 + int(year) + else: + year = int(year) + month = int(numbers[indexes['M']]) + day = int(numbers[indexes['D']]) + if month > 12: + month, day = day, month + return date(year, month, day) + + +def parse_time(string, locale=LC_TIME): + """Parse a time from a string. + + This function uses the time format for the locale as a hint to determine + the order in which the time fields appear in the string. + + >>> parse_time('15:30:00', locale='en_US') + datetime.time(15, 30) + + :param string: the string containing the time + :param locale: a `Locale` object or a locale identifier + :return: the parsed time + :rtype: `time` + """ + # TODO: try ISO format first? 
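parse_date above, and parse_time below, share the same heuristic: lower-case the locale's pattern, note the relative order in which the year/month/day (or hour/minute/second) fields appear, and assign the numbers found in the input string in that order. A self-contained sketch of the idea, with a hard-coded pattern standing in for get_date_format (the helper name sketch_parse_date is hypothetical, not a Babel API):

    import re

    def sketch_parse_date(string, pattern="M/d/yy"):
        # The order of the y/m/d fields in the lower-cased locale pattern
        # decides which captured number is the year, the month, or the day.
        pattern = pattern.lower()
        order = sorted([(pattern.index('y'), 'Y'),
                        (pattern.index('m'), 'M'),
                        (pattern.index('d'), 'D')])
        indexes = {field: idx for idx, (_, field) in enumerate(order)}
        numbers = re.findall(r'\d+', string)
        year = int(numbers[indexes['Y']])
        if year < 100:  # two-digit years are assumed to mean 20xx here
            year += 2000
        return year, int(numbers[indexes['M']]), int(numbers[indexes['D']])

    print(sketch_parse_date("4/1/04"))                  # (2004, 4, 1)
    print(sketch_parse_date("01.04.2004", "dd.MM.y"))   # (2004, 4, 1)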
+ format = get_time_format(locale=locale).pattern.lower() + hour_idx = format.index('h') + if hour_idx < 0: + hour_idx = format.index('k') + min_idx = format.index('m') + sec_idx = format.index('s') + + indexes = [(hour_idx, 'H'), (min_idx, 'M'), (sec_idx, 'S')] + indexes.sort() + indexes = dict([(item[1], idx) for idx, item in enumerate(indexes)]) + + # FIXME: support 12 hour clock, and 0-based hour specification + # and seconds should be optional, maybe minutes too + # oh, and time-zones, of course + + numbers = re.findall(r'(\d+)', string) + hour = int(numbers[indexes['H']]) + minute = int(numbers[indexes['M']]) + second = int(numbers[indexes['S']]) + return time(hour, minute, second) + + +class DateTimePattern(object): + + def __init__(self, pattern, format): + self.pattern = pattern + self.format = format + + def __repr__(self): + return '<%s %r>' % (type(self).__name__, self.pattern) + + def __unicode__(self): + return self.pattern + + def __str__(self): + pat = self.pattern + if PY2: + pat = pat.encode('utf-8') + return pat + + def __mod__(self, other): + if type(other) is not DateTimeFormat: + return NotImplemented + return self.format % other + + def apply(self, datetime, locale): + return self % DateTimeFormat(datetime, locale) + + +class DateTimeFormat(object): + + def __init__(self, value, locale): + assert isinstance(value, (date, datetime, time)) + if isinstance(value, (datetime, time)) and value.tzinfo is None: + value = value.replace(tzinfo=UTC) + self.value = value + self.locale = Locale.parse(locale) + + def __getitem__(self, name): + char = name[0] + num = len(name) + if char == 'G': + return self.format_era(char, num) + elif char in ('y', 'Y', 'u'): + return self.format_year(char, num) + elif char in ('Q', 'q'): + return self.format_quarter(char, num) + elif char in ('M', 'L'): + return self.format_month(char, num) + elif char in ('w', 'W'): + return self.format_week(char, num) + elif char == 'd': + return self.format(self.value.day, num) + elif char == 'D': + return self.format_day_of_year(num) + elif char == 'F': + return self.format_day_of_week_in_month() + elif char in ('E', 'e', 'c'): + return self.format_weekday(char, num) + elif char == 'a': + # TODO: Add support for the rest of the period formats (a*, b*, B*) + return self.format_period(char) + elif char == 'h': + if self.value.hour % 12 == 0: + return self.format(12, num) + else: + return self.format(self.value.hour % 12, num) + elif char == 'H': + return self.format(self.value.hour, num) + elif char == 'K': + return self.format(self.value.hour % 12, num) + elif char == 'k': + if self.value.hour == 0: + return self.format(24, num) + else: + return self.format(self.value.hour, num) + elif char == 'm': + return self.format(self.value.minute, num) + elif char == 's': + return self.format(self.value.second, num) + elif char == 'S': + return self.format_frac_seconds(num) + elif char == 'A': + return self.format_milliseconds_in_day(num) + elif char in ('z', 'Z', 'v', 'V', 'x', 'X', 'O'): + return self.format_timezone(char, num) + else: + raise KeyError('Unsupported date/time field %r' % char) + + def extract(self, char): + char = str(char)[0] + if char == 'y': + return self.value.year + elif char == 'M': + return self.value.month + elif char == 'd': + return self.value.day + elif char == 'H': + return self.value.hour + elif char == 'h': + return self.value.hour % 12 or 12 + elif char == 'm': + return self.value.minute + elif char == 'a': + return int(self.value.hour >= 12) # 0 for am, 1 for pm + else: + raise 
NotImplementedError("Not implemented: extracting %r from %r" % (char, self.value)) + + def format_era(self, char, num): + width = {3: 'abbreviated', 4: 'wide', 5: 'narrow'}[max(3, num)] + era = int(self.value.year >= 0) + return get_era_names(width, self.locale)[era] + + def format_year(self, char, num): + value = self.value.year + if char.isupper(): + value = self.value.isocalendar()[0] + year = self.format(value, num) + if num == 2: + year = year[-2:] + return year + + def format_quarter(self, char, num): + quarter = (self.value.month - 1) // 3 + 1 + if num <= 2: + return '%0*d' % (num, quarter) + width = {3: 'abbreviated', 4: 'wide', 5: 'narrow'}[num] + context = {'Q': 'format', 'q': 'stand-alone'}[char] + return get_quarter_names(width, context, self.locale)[quarter] + + def format_month(self, char, num): + if num <= 2: + return '%0*d' % (num, self.value.month) + width = {3: 'abbreviated', 4: 'wide', 5: 'narrow'}[num] + context = {'M': 'format', 'L': 'stand-alone'}[char] + return get_month_names(width, context, self.locale)[self.value.month] + + def format_week(self, char, num): + if char.islower(): # week of year + day_of_year = self.get_day_of_year() + week = self.get_week_number(day_of_year) + if week == 0: + date = self.value - timedelta(days=day_of_year) + week = self.get_week_number(self.get_day_of_year(date), + date.weekday()) + return self.format(week, num) + else: # week of month + week = self.get_week_number(self.value.day) + if week == 0: + date = self.value - timedelta(days=self.value.day) + week = self.get_week_number(date.day, date.weekday()) + return '%d' % week + + def format_weekday(self, char='E', num=4): + """ + Return weekday from parsed datetime according to format pattern. + + >>> format = DateTimeFormat(date(2016, 2, 28), Locale.parse('en_US')) + >>> format.format_weekday() + u'Sunday' + + 'E': Day of week - Use one through three letters for the abbreviated day name, four for the full (wide) name, + five for the narrow name, or six for the short name. + >>> format.format_weekday('E',2) + u'Sun' + + 'e': Local day of week. Same as E except adds a numeric value that will depend on the local starting day of the + week, using one or two letters. For this example, Monday is the first day of the week. + >>> format.format_weekday('e',2) + '01' + + 'c': Stand-Alone local day of week - Use one letter for the local numeric value (same as 'e'), three for the + abbreviated day name, four for the full (wide) name, five for the narrow name, or six for the short name. 
+ >>> format.format_weekday('c',1) + '1' + + :param char: pattern format character ('e','E','c') + :param num: count of format character + + """ + if num < 3: + if char.islower(): + value = 7 - self.locale.first_week_day + self.value.weekday() + return self.format(value % 7 + 1, num) + num = 3 + weekday = self.value.weekday() + width = {3: 'abbreviated', 4: 'wide', 5: 'narrow', 6: 'short'}[num] + if char == 'c': + context = 'stand-alone' + else: + context = 'format' + return get_day_names(width, context, self.locale)[weekday] + + def format_day_of_year(self, num): + return self.format(self.get_day_of_year(), num) + + def format_day_of_week_in_month(self): + return '%d' % ((self.value.day - 1) // 7 + 1) + + def format_period(self, char): + period = {0: 'am', 1: 'pm'}[int(self.value.hour >= 12)] + for width in ('wide', 'narrow', 'abbreviated'): + period_names = get_period_names(context='format', width=width, locale=self.locale) + if period in period_names: + return period_names[period] + raise ValueError('Could not format period %s in %s' % (period, self.locale)) + + def format_frac_seconds(self, num): + """ Return fractional seconds. + + Rounds the time's microseconds to the precision given by the number \ + of digits passed in. + """ + value = self.value.microsecond / 1000000 + return self.format(round(value, num) * 10**num, num) + + def format_milliseconds_in_day(self, num): + msecs = self.value.microsecond // 1000 + self.value.second * 1000 + \ + self.value.minute * 60000 + self.value.hour * 3600000 + return self.format(msecs, num) + + def format_timezone(self, char, num): + width = {3: 'short', 4: 'long', 5: 'iso8601'}[max(3, num)] + if char == 'z': + return get_timezone_name(self.value, width, locale=self.locale) + elif char == 'Z': + if num == 5: + return get_timezone_gmt(self.value, width, locale=self.locale, return_z=True) + return get_timezone_gmt(self.value, width, locale=self.locale) + elif char == 'O': + if num == 4: + return get_timezone_gmt(self.value, width, locale=self.locale) + # TODO: To add support for O:1 + elif char == 'v': + return get_timezone_name(self.value.tzinfo, width, + locale=self.locale) + elif char == 'V': + if num == 1: + return get_timezone_name(self.value.tzinfo, width, + uncommon=True, locale=self.locale) + elif num == 2: + return get_timezone_name(self.value.tzinfo, locale=self.locale, return_zone=True) + elif num == 3: + return get_timezone_location(self.value.tzinfo, locale=self.locale, return_city=True) + return get_timezone_location(self.value.tzinfo, locale=self.locale) + # Included additional elif condition to add support for 'Xx' in timezone format + elif char == 'X': + if num == 1: + return get_timezone_gmt(self.value, width='iso8601_short', locale=self.locale, + return_z=True) + elif num in (2, 4): + return get_timezone_gmt(self.value, width='short', locale=self.locale, + return_z=True) + elif num in (3, 5): + return get_timezone_gmt(self.value, width='iso8601', locale=self.locale, + return_z=True) + elif char == 'x': + if num == 1: + return get_timezone_gmt(self.value, width='iso8601_short', locale=self.locale) + elif num in (2, 4): + return get_timezone_gmt(self.value, width='short', locale=self.locale) + elif num in (3, 5): + return get_timezone_gmt(self.value, width='iso8601', locale=self.locale) + + def format(self, value, length): + return '%0*d' % (length, value) + + def get_day_of_year(self, date=None): + if date is None: + date = self.value + return (date - date.replace(month=1, day=1)).days + 1 + + def get_week_number(self, 
day_of_period, day_of_week=None): + """Return the number of the week of a day within a period. This may be + the week number in a year or the week number in a month. + + Usually this will return a value equal to or greater than 1, but if the + first week of the period is so short that it actually counts as the last + week of the previous period, this function will return 0. + + >>> format = DateTimeFormat(date(2006, 1, 8), Locale.parse('de_DE')) + >>> format.get_week_number(6) + 1 + + >>> format = DateTimeFormat(date(2006, 1, 8), Locale.parse('en_US')) + >>> format.get_week_number(6) + 2 + + :param day_of_period: the number of the day in the period (usually + either the day of month or the day of year) + :param day_of_week: the week day; if ommitted, the week day of the + current date is assumed + """ + if day_of_week is None: + day_of_week = self.value.weekday() + first_day = (day_of_week - self.locale.first_week_day - + day_of_period + 1) % 7 + if first_day < 0: + first_day += 7 + week_number = (day_of_period + first_day - 1) // 7 + + if 7 - first_day >= self.locale.min_week_days: + week_number += 1 + + if self.locale.first_week_day == 0: + # Correct the weeknumber in case of iso-calendar usage (first_week_day=0). + # If the weeknumber exceeds the maximum number of weeks for the given year + # we must count from zero.For example the above calculation gives week 53 + # for 2018-12-31. By iso-calender definition 2018 has a max of 52 + # weeks, thus the weeknumber must be 53-52=1. + max_weeks = date(year=self.value.year, day=28, month=12).isocalendar()[1] + if week_number > max_weeks: + week_number -= max_weeks + + return week_number + + +PATTERN_CHARS = { + 'G': [1, 2, 3, 4, 5], # era + 'y': None, 'Y': None, 'u': None, # year + 'Q': [1, 2, 3, 4, 5], 'q': [1, 2, 3, 4, 5], # quarter + 'M': [1, 2, 3, 4, 5], 'L': [1, 2, 3, 4, 5], # month + 'w': [1, 2], 'W': [1], # week + 'd': [1, 2], 'D': [1, 2, 3], 'F': [1], 'g': None, # day + 'E': [1, 2, 3, 4, 5, 6], 'e': [1, 2, 3, 4, 5, 6], 'c': [1, 3, 4, 5, 6], # week day + 'a': [1], # period + 'h': [1, 2], 'H': [1, 2], 'K': [1, 2], 'k': [1, 2], # hour + 'm': [1, 2], # minute + 's': [1, 2], 'S': None, 'A': None, # second + 'z': [1, 2, 3, 4], 'Z': [1, 2, 3, 4, 5], 'O': [1, 4], 'v': [1, 4], # zone + 'V': [1, 2, 3, 4], 'x': [1, 2, 3, 4, 5], 'X': [1, 2, 3, 4, 5] # zone +} + +#: The pattern characters declared in the Date Field Symbol Table +#: (https://www.unicode.org/reports/tr35/tr35-dates.html#Date_Field_Symbol_Table) +#: in order of decreasing magnitude. +PATTERN_CHAR_ORDER = "GyYuUQqMLlwWdDFgEecabBChHKkjJmsSAzZOvVXx" + +_pattern_cache = {} + + +def parse_pattern(pattern): + """Parse date, time, and datetime format patterns. 
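Each field's repetition count is validated against the PATTERN_CHARS table above; a quick illustration of both an accepted pattern and one rejected for an invalid field width (assuming the babel package is importable):

    from babel.dates import parse_pattern

    print(parse_pattern("EEE, MMM d").format)   # %(EEE)s, %(MMM)s %(d)s
    try:
        parse_pattern("ddd")                    # 'd' only allows widths 1-2
    except ValueError as exc:
        print(exc)                              # Invalid length for field: 'ddd'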
+ + >>> parse_pattern("MMMMd").format + u'%(MMMM)s%(d)s' + >>> parse_pattern("MMM d, yyyy").format + u'%(MMM)s %(d)s, %(yyyy)s' + + Pattern can contain literal strings in single quotes: + + >>> parse_pattern("H:mm' Uhr 'z").format + u'%(H)s:%(mm)s Uhr %(z)s' + + An actual single quote can be used by using two adjacent single quote + characters: + + >>> parse_pattern("hh' o''clock'").format + u"%(hh)s o'clock" + + :param pattern: the formatting pattern to parse + """ + if type(pattern) is DateTimePattern: + return pattern + + if pattern in _pattern_cache: + return _pattern_cache[pattern] + + result = [] + + for tok_type, tok_value in tokenize_pattern(pattern): + if tok_type == "chars": + result.append(tok_value.replace('%', '%%')) + elif tok_type == "field": + fieldchar, fieldnum = tok_value + limit = PATTERN_CHARS[fieldchar] + if limit and fieldnum not in limit: + raise ValueError('Invalid length for field: %r' + % (fieldchar * fieldnum)) + result.append('%%(%s)s' % (fieldchar * fieldnum)) + else: + raise NotImplementedError("Unknown token type: %s" % tok_type) + + _pattern_cache[pattern] = pat = DateTimePattern(pattern, u''.join(result)) + return pat + + +def tokenize_pattern(pattern): + """ + Tokenize date format patterns. + + Returns a list of (token_type, token_value) tuples. + + ``token_type`` may be either "chars" or "field". + + For "chars" tokens, the value is the literal value. + + For "field" tokens, the value is a tuple of (field character, repetition count). + + :param pattern: Pattern string + :type pattern: str + :rtype: list[tuple] + """ + result = [] + quotebuf = None + charbuf = [] + fieldchar = [''] + fieldnum = [0] + + def append_chars(): + result.append(('chars', ''.join(charbuf).replace('\0', "'"))) + del charbuf[:] + + def append_field(): + result.append(('field', (fieldchar[0], fieldnum[0]))) + fieldchar[0] = '' + fieldnum[0] = 0 + + for idx, char in enumerate(pattern.replace("''", '\0')): + if quotebuf is None: + if char == "'": # quote started + if fieldchar[0]: + append_field() + elif charbuf: + append_chars() + quotebuf = [] + elif char in PATTERN_CHARS: + if charbuf: + append_chars() + if char == fieldchar[0]: + fieldnum[0] += 1 + else: + if fieldchar[0]: + append_field() + fieldchar[0] = char + fieldnum[0] = 1 + else: + if fieldchar[0]: + append_field() + charbuf.append(char) + + elif quotebuf is not None: + if char == "'": # end of quote + charbuf.extend(quotebuf) + quotebuf = None + else: # inside quote + quotebuf.append(char) + + if fieldchar[0]: + append_field() + elif charbuf: + append_chars() + + return result + + +def untokenize_pattern(tokens): + """ + Turn a date format pattern token stream back into a string. + + This is the reverse operation of ``tokenize_pattern``. + + :type tokens: Iterable[tuple] + :rtype: str + """ + output = [] + for tok_type, tok_value in tokens: + if tok_type == "field": + output.append(tok_value[0] * tok_value[1]) + elif tok_type == "chars": + if not any(ch in PATTERN_CHARS for ch in tok_value): # No need to quote + output.append(tok_value) + else: + output.append("'%s'" % tok_value.replace("'", "''")) + return "".join(output) + + +def split_interval_pattern(pattern): + """ + Split an interval-describing datetime pattern into multiple pieces. + + > The pattern is then designed to be broken up into two pieces by determining the first repeating field. + - https://www.unicode.org/reports/tr35/tr35-dates.html#intervalFormats + + >>> split_interval_pattern(u'E d.M. \u2013 E d.M.') + [u'E d.M. 
\u2013 ', 'E d.M.'] + >>> split_interval_pattern("Y 'text' Y 'more text'") + ["Y 'text '", "Y 'more text'"] + >>> split_interval_pattern(u"E, MMM d \u2013 E") + [u'E, MMM d \u2013 ', u'E'] + >>> split_interval_pattern("MMM d") + ['MMM d'] + >>> split_interval_pattern("y G") + ['y G'] + >>> split_interval_pattern(u"MMM d \u2013 d") + [u'MMM d \u2013 ', u'd'] + + :param pattern: Interval pattern string + :return: list of "subpatterns" + """ + + seen_fields = set() + parts = [[]] + + for tok_type, tok_value in tokenize_pattern(pattern): + if tok_type == "field": + if tok_value[0] in seen_fields: # Repeated field + parts.append([]) + seen_fields.clear() + seen_fields.add(tok_value[0]) + parts[-1].append((tok_type, tok_value)) + + return [untokenize_pattern(tokens) for tokens in parts] + + +def match_skeleton(skeleton, options, allow_different_fields=False): + """ + Find the closest match for the given datetime skeleton among the options given. + + This uses the rules outlined in the TR35 document. + + >>> match_skeleton('yMMd', ('yMd', 'yMMMd')) + 'yMd' + + >>> match_skeleton('yMMd', ('jyMMd',), allow_different_fields=True) + 'jyMMd' + + >>> match_skeleton('yMMd', ('qyMMd',), allow_different_fields=False) + + >>> match_skeleton('hmz', ('hmv',)) + 'hmv' + + :param skeleton: The skeleton to match + :type skeleton: str + :param options: An iterable of other skeletons to match against + :type options: Iterable[str] + :return: The closest skeleton match, or if no match was found, None. + :rtype: str|None + """ + + # TODO: maybe implement pattern expansion? + + # Based on the implementation in + # http://source.icu-project.org/repos/icu/icu4j/trunk/main/classes/core/src/com/ibm/icu/text/DateIntervalInfo.java + + # Filter out falsy values and sort for stability; when `interval_formats` is passed in, there may be a None key. + options = sorted(option for option in options if option) + + if 'z' in skeleton and not any('z' in option for option in options): + skeleton = skeleton.replace('z', 'v') + + get_input_field_width = dict(t[1] for t in tokenize_pattern(skeleton) if t[0] == "field").get + best_skeleton = None + best_distance = None + for option in options: + get_opt_field_width = dict(t[1] for t in tokenize_pattern(option) if t[0] == "field").get + distance = 0 + for field in PATTERN_CHARS: + input_width = get_input_field_width(field, 0) + opt_width = get_opt_field_width(field, 0) + if input_width == opt_width: + continue + if opt_width == 0 or input_width == 0: + if not allow_different_fields: # This one is not okay + option = None + break + distance += 0x1000 # Magic weight constant for "entirely different fields" + elif field == 'M' and ((input_width > 2 and opt_width <= 2) or (input_width <= 2 and opt_width > 2)): + distance += 0x100 # Magic weight for "text turns into a number" + else: + distance += abs(input_width - opt_width) + + if not option: # We lost the option along the way (probably due to "allow_different_fields") + continue + + if not best_skeleton or distance < best_distance: + best_skeleton = option + best_distance = distance + + if distance == 0: # Found a perfect match! 
+ break + + return best_skeleton diff --git a/dbt-env/lib/python3.8/site-packages/babel/global.dat b/dbt-env/lib/python3.8/site-packages/babel/global.dat new file mode 100644 index 0000000..0ce2e98 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/global.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/languages.py b/dbt-env/lib/python3.8/site-packages/babel/languages.py new file mode 100644 index 0000000..0974367 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/languages.py @@ -0,0 +1,71 @@ +# -- encoding: UTF-8 -- +from babel.core import get_global + + +def get_official_languages(territory, regional=False, de_facto=False): + """ + Get the official language(s) for the given territory. + + The language codes, if any are known, are returned in order of descending popularity. + + If the `regional` flag is set, then languages which are regionally official are also returned. + + If the `de_facto` flag is set, then languages which are "de facto" official are also returned. + + .. warning:: Note that the data is as up to date as the current version of the CLDR used + by Babel. If you need scientifically accurate information, use another source! + + :param territory: Territory code + :type territory: str + :param regional: Whether to return regionally official languages too + :type regional: bool + :param de_facto: Whether to return de-facto official languages too + :type de_facto: bool + :return: Tuple of language codes + :rtype: tuple[str] + """ + + territory = str(territory).upper() + allowed_stati = {"official"} + if regional: + allowed_stati.add("official_regional") + if de_facto: + allowed_stati.add("de_facto_official") + + languages = get_global("territory_languages").get(territory, {}) + pairs = [ + (info['population_percent'], language) + for language, info in languages.items() + if info.get('official_status') in allowed_stati + ] + pairs.sort(reverse=True) + return tuple(lang for _, lang in pairs) + + +def get_territory_language_info(territory): + """ + Get a dictionary of language information for a territory. + + The dictionary is keyed by language code; the values are dicts with more information. + + The following keys are currently known for the values: + + * `population_percent`: The percentage of the territory's population speaking the + language. + * `official_status`: An optional string describing the officiality status of the language. + Known values are "official", "official_regional" and "de_facto_official". + + .. warning:: Note that the data is as up to date as the current version of the CLDR used + by Babel. If you need scientifically accurate information, use another source! + + .. note:: Note that the format of the dict returned may change between Babel versions. + + See https://www.unicode.org/cldr/charts/latest/supplemental/territory_language_information.html + + :param territory: Territory code + :type territory: str + :return: Language information dictionary + :rtype: dict[str, dict] + """ + territory = str(territory).upper() + return get_global("territory_languages").get(territory, {}).copy() diff --git a/dbt-env/lib/python3.8/site-packages/babel/lists.py b/dbt-env/lib/python3.8/site-packages/babel/lists.py new file mode 100644 index 0000000..8368b27 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/lists.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +""" + babel.lists + ~~~~~~~~~~~ + + Locale dependent formatting of lists. 
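A brief usage sketch of the two helpers in languages.py above (assuming the babel package is installed; the exact values depend on the CLDR data bundled with it):

    from babel.languages import get_official_languages, get_territory_language_info

    # Official languages only, most widely spoken first.
    print(get_official_languages("CH"))                 # e.g. ('de', 'fr', 'it')
    # Regionally official languages are included on request.
    print(get_official_languages("CH", regional=True))  # e.g. (..., 'rm')
    # Per-language population share and officiality status for the territory.
    info = get_territory_language_info("CH")
    print(info.get("de", {}).get("official_status"))    # e.g. 'official'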
+ + The default locale for the functions in this module is determined by the + following environment variables, in that order: + + * ``LC_ALL``, and + * ``LANG`` + + :copyright: (c) 2015-2021 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +from babel.core import Locale, default_locale + +DEFAULT_LOCALE = default_locale() + + +def format_list(lst, style='standard', locale=DEFAULT_LOCALE): + """ + Format the items in `lst` as a list. + + >>> format_list(['apples', 'oranges', 'pears'], locale='en') + u'apples, oranges, and pears' + >>> format_list(['apples', 'oranges', 'pears'], locale='zh') + u'apples\u3001oranges\u548cpears' + >>> format_list(['omena', 'peruna', 'aplari'], style='or', locale='fi') + u'omena, peruna tai aplari' + + These styles are defined, but not all are necessarily available in all locales. + The following text is verbatim from the Unicode TR35-49 spec [1]. + + * standard: + A typical 'and' list for arbitrary placeholders. + eg. "January, February, and March" + * standard-short: + A short version of a 'and' list, suitable for use with short or abbreviated placeholder values. + eg. "Jan., Feb., and Mar." + * or: + A typical 'or' list for arbitrary placeholders. + eg. "January, February, or March" + * or-short: + A short version of an 'or' list. + eg. "Jan., Feb., or Mar." + * unit: + A list suitable for wide units. + eg. "3 feet, 7 inches" + * unit-short: + A list suitable for short units + eg. "3 ft, 7 in" + * unit-narrow: + A list suitable for narrow units, where space on the screen is very limited. + eg. "3′ 7″" + + [1]: https://www.unicode.org/reports/tr35/tr35-49/tr35-general.html#ListPatterns + + :param lst: a sequence of items to format in to a list + :param style: the style to format the list with. See above for description. 
+ :param locale: the locale + """ + locale = Locale.parse(locale) + if not lst: + return '' + if len(lst) == 1: + return lst[0] + + if style not in locale.list_patterns: + raise ValueError('Locale %s does not support list formatting style %r (supported are %s)' % ( + locale, + style, + list(sorted(locale.list_patterns)), + )) + patterns = locale.list_patterns[style] + + if len(lst) == 2: + return patterns['2'].format(*lst) + + result = patterns['start'].format(lst[0], lst[1]) + for elem in lst[2:-1]: + result = patterns['middle'].format(result, elem) + result = patterns['end'].format(result, lst[-1]) + + return result diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/af.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/af.dat new file mode 100644 index 0000000..196d401 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/af.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/af_NA.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/af_NA.dat new file mode 100644 index 0000000..0ab9dba Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/af_NA.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/af_ZA.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/af_ZA.dat new file mode 100644 index 0000000..b7487d7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/af_ZA.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/agq.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/agq.dat new file mode 100644 index 0000000..3b55d88 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/agq.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/agq_CM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/agq_CM.dat new file mode 100644 index 0000000..4c968ef Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/agq_CM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ak.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ak.dat new file mode 100644 index 0000000..1a8cdcd Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ak.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ak_GH.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ak_GH.dat new file mode 100644 index 0000000..c2b4455 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ak_GH.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/am.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/am.dat new file mode 100644 index 0000000..b325481 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/am.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/am_ET.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/am_ET.dat new file mode 100644 index 0000000..2fc6bd4 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/am_ET.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar.dat new file mode 100644 index 0000000..f3b53fa Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_001.dat 
b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_001.dat new file mode 100644 index 0000000..b459a96 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_001.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_AE.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_AE.dat new file mode 100644 index 0000000..3f83344 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_AE.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_BH.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_BH.dat new file mode 100644 index 0000000..d5208f0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_BH.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_DJ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_DJ.dat new file mode 100644 index 0000000..e268200 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_DJ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_DZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_DZ.dat new file mode 100644 index 0000000..6f28eb7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_DZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_EG.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_EG.dat new file mode 100644 index 0000000..c3b60c8 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_EG.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_EH.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_EH.dat new file mode 100644 index 0000000..73bec13 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_EH.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_ER.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_ER.dat new file mode 100644 index 0000000..03e0803 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_ER.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_IL.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_IL.dat new file mode 100644 index 0000000..f4b3466 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_IL.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_IQ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_IQ.dat new file mode 100644 index 0000000..f44af3a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_IQ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_JO.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_JO.dat new file mode 100644 index 0000000..4fc364c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_JO.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_KM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_KM.dat new file mode 100644 index 0000000..9d40c49 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_KM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_KW.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_KW.dat new file mode 100644 index 
0000000..c75957c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_KW.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_LB.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_LB.dat new file mode 100644 index 0000000..6877e4c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_LB.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_LY.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_LY.dat new file mode 100644 index 0000000..adbdec2 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_LY.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_MA.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_MA.dat new file mode 100644 index 0000000..e4d6466 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_MA.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_MR.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_MR.dat new file mode 100644 index 0000000..a79d117 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_MR.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_OM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_OM.dat new file mode 100644 index 0000000..2e9eb80 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_OM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_PS.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_PS.dat new file mode 100644 index 0000000..322f0f1 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_PS.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_QA.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_QA.dat new file mode 100644 index 0000000..609074c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_QA.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SA.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SA.dat new file mode 100644 index 0000000..104f84f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SA.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SD.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SD.dat new file mode 100644 index 0000000..dab6415 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SD.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SO.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SO.dat new file mode 100644 index 0000000..e2d462e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SO.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SS.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SS.dat new file mode 100644 index 0000000..d4d7bdf Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SS.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SY.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SY.dat new file mode 100644 index 0000000..018cccb Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_SY.dat 
differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_TD.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_TD.dat new file mode 100644 index 0000000..3ee8f73 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_TD.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_TN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_TN.dat new file mode 100644 index 0000000..1b4d7b5 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_TN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_YE.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_YE.dat new file mode 100644 index 0000000..e135658 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ar_YE.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/as.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/as.dat new file mode 100644 index 0000000..e12c4cd Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/as.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/as_IN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/as_IN.dat new file mode 100644 index 0000000..647a92a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/as_IN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/asa.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/asa.dat new file mode 100644 index 0000000..8406a4c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/asa.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/asa_TZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/asa_TZ.dat new file mode 100644 index 0000000..190a6dd Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/asa_TZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ast.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ast.dat new file mode 100644 index 0000000..bd94f16 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ast.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ast_ES.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ast_ES.dat new file mode 100644 index 0000000..3a30d74 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ast_ES.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/az.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/az.dat new file mode 100644 index 0000000..b5c17aa Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/az.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/az_Cyrl.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/az_Cyrl.dat new file mode 100644 index 0000000..4396344 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/az_Cyrl.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/az_Cyrl_AZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/az_Cyrl_AZ.dat new file mode 100644 index 0000000..8fc93a3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/az_Cyrl_AZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/az_Latn.dat 
b/dbt-env/lib/python3.8/site-packages/babel/locale-data/az_Latn.dat new file mode 100644 index 0000000..f2b58c7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/az_Latn.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/az_Latn_AZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/az_Latn_AZ.dat new file mode 100644 index 0000000..8fc93a3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/az_Latn_AZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/bas.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bas.dat new file mode 100644 index 0000000..f10acc9 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bas.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/bas_CM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bas_CM.dat new file mode 100644 index 0000000..2936508 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bas_CM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/be.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/be.dat new file mode 100644 index 0000000..eba5f87 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/be.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/be_BY.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/be_BY.dat new file mode 100644 index 0000000..3a9f989 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/be_BY.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/bem.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bem.dat new file mode 100644 index 0000000..96b49d1 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bem.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/bem_ZM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bem_ZM.dat new file mode 100644 index 0000000..26b6697 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bem_ZM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/bez.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bez.dat new file mode 100644 index 0000000..9741a55 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bez.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/bez_TZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bez_TZ.dat new file mode 100644 index 0000000..dc04ff7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bez_TZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/bg.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bg.dat new file mode 100644 index 0000000..3645c67 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bg.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/bg_BG.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bg_BG.dat new file mode 100644 index 0000000..0ccd492 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bg_BG.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/bm.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bm.dat new file mode 100644 index 0000000..ca223c0 Binary files 
/dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/bm.dat differ
[... ~270 further binary-file stanzas omitted: new babel locale-data files from bm_ML.dat through fr_BF.dat, all added under dbt-env/lib/python3.8/site-packages/babel/locale-data/ as part of the committed dbt-env virtual environment; each stanza follows the same pattern "diff --git a/<path> b/<path>" / "new file mode 100644" / "index 0000000..<hash>" / "Binary files /dev/null and b/<path> differ" ...]
diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_BI.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_BI.dat
new file mode 100644
index 0000000..160ca05
Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_BI.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_BJ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_BJ.dat new file mode 100644 index 0000000..fed31e2 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_BJ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_BL.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_BL.dat new file mode 100644 index 0000000..46bbabe Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_BL.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CA.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CA.dat new file mode 100644 index 0000000..b23c036 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CA.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CD.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CD.dat new file mode 100644 index 0000000..7194c67 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CD.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CF.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CF.dat new file mode 100644 index 0000000..ff84dd9 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CF.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CG.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CG.dat new file mode 100644 index 0000000..94ae980 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CG.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CH.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CH.dat new file mode 100644 index 0000000..eca3a8f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CH.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CI.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CI.dat new file mode 100644 index 0000000..504d6dd Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CI.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CM.dat new file mode 100644 index 0000000..e8afefd Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_CM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_DJ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_DJ.dat new file mode 100644 index 0000000..d75a6d8 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_DJ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_DZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_DZ.dat new file mode 100644 index 0000000..8bada3c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_DZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_FR.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_FR.dat new file mode 100644 index 0000000..312e8c8 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_FR.dat differ diff --git 
a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GA.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GA.dat new file mode 100644 index 0000000..42b9a40 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GA.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GF.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GF.dat new file mode 100644 index 0000000..d546df1 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GF.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GN.dat new file mode 100644 index 0000000..258a1e3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GP.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GP.dat new file mode 100644 index 0000000..0a2a572 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GP.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GQ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GQ.dat new file mode 100644 index 0000000..d7acf13 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_GQ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_HT.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_HT.dat new file mode 100644 index 0000000..10a6021 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_HT.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_KM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_KM.dat new file mode 100644 index 0000000..11aa921 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_KM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_LU.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_LU.dat new file mode 100644 index 0000000..2d6b00a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_LU.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MA.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MA.dat new file mode 100644 index 0000000..f7e55a9 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MA.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MC.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MC.dat new file mode 100644 index 0000000..a794670 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MC.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MF.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MF.dat new file mode 100644 index 0000000..7ce27e1 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MF.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MG.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MG.dat new file mode 100644 index 0000000..fd3f370 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MG.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_ML.dat 
b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_ML.dat new file mode 100644 index 0000000..22de67c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_ML.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MQ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MQ.dat new file mode 100644 index 0000000..9f2676b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MQ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MR.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MR.dat new file mode 100644 index 0000000..f3f8672 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MR.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MU.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MU.dat new file mode 100644 index 0000000..83d9aa2 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_MU.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_NC.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_NC.dat new file mode 100644 index 0000000..082d1c6 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_NC.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_NE.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_NE.dat new file mode 100644 index 0000000..2b73188 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_NE.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_PF.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_PF.dat new file mode 100644 index 0000000..a953a11 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_PF.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_PM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_PM.dat new file mode 100644 index 0000000..002bd6b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_PM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_RE.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_RE.dat new file mode 100644 index 0000000..b485ea3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_RE.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_RW.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_RW.dat new file mode 100644 index 0000000..648bf3c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_RW.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_SC.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_SC.dat new file mode 100644 index 0000000..d4ee8fd Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_SC.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_SN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_SN.dat new file mode 100644 index 0000000..de9817b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_SN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_SY.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_SY.dat new file mode 100644 index 0000000..2621f57 
Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_SY.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_TD.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_TD.dat new file mode 100644 index 0000000..c106229 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_TD.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_TG.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_TG.dat new file mode 100644 index 0000000..8e05c88 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_TG.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_TN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_TN.dat new file mode 100644 index 0000000..3ceb26a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_TN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_VU.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_VU.dat new file mode 100644 index 0000000..38e6dcb Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_VU.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_WF.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_WF.dat new file mode 100644 index 0000000..15cff37 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_WF.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_YT.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_YT.dat new file mode 100644 index 0000000..701676b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fr_YT.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fur.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fur.dat new file mode 100644 index 0000000..ee394da Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fur.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fur_IT.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fur_IT.dat new file mode 100644 index 0000000..b900672 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fur_IT.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fy.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fy.dat new file mode 100644 index 0000000..3b5abb5 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fy.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/fy_NL.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fy_NL.dat new file mode 100644 index 0000000..3578521 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/fy_NL.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ga.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ga.dat new file mode 100644 index 0000000..af17e55 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ga.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ga_GB.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ga_GB.dat new file mode 100644 index 0000000..27cc2b9 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ga_GB.dat differ diff --git 
a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ga_IE.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ga_IE.dat new file mode 100644 index 0000000..ef52839 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ga_IE.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/gd.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gd.dat new file mode 100644 index 0000000..b4138ac Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gd.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/gd_GB.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gd_GB.dat new file mode 100644 index 0000000..75c010f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gd_GB.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/gl.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gl.dat new file mode 100644 index 0000000..6024e30 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gl.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/gl_ES.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gl_ES.dat new file mode 100644 index 0000000..ca376aa Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gl_ES.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/gsw.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gsw.dat new file mode 100644 index 0000000..81bc905 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gsw.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/gsw_CH.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gsw_CH.dat new file mode 100644 index 0000000..f334d3b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gsw_CH.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/gsw_FR.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gsw_FR.dat new file mode 100644 index 0000000..15b10b8 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gsw_FR.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/gsw_LI.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gsw_LI.dat new file mode 100644 index 0000000..08995c0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gsw_LI.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/gu.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gu.dat new file mode 100644 index 0000000..0feb66a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gu.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/gu_IN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gu_IN.dat new file mode 100644 index 0000000..f6c2931 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gu_IN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/guz.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/guz.dat new file mode 100644 index 0000000..e12bba5 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/guz.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/guz_KE.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/guz_KE.dat new 
file mode 100644 index 0000000..4e4255d Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/guz_KE.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/gv.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gv.dat new file mode 100644 index 0000000..7862003 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gv.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/gv_IM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gv_IM.dat new file mode 100644 index 0000000..744d6e6 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/gv_IM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ha.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ha.dat new file mode 100644 index 0000000..389df22 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ha.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ha_GH.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ha_GH.dat new file mode 100644 index 0000000..258618a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ha_GH.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ha_NE.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ha_NE.dat new file mode 100644 index 0000000..79fde7f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ha_NE.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ha_NG.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ha_NG.dat new file mode 100644 index 0000000..3209c59 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ha_NG.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/haw.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/haw.dat new file mode 100644 index 0000000..cf4a6e3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/haw.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/haw_US.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/haw_US.dat new file mode 100644 index 0000000..1e2f497 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/haw_US.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/he.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/he.dat new file mode 100644 index 0000000..44817ad Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/he.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/he_IL.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/he_IL.dat new file mode 100644 index 0000000..c51973b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/he_IL.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/hi.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hi.dat new file mode 100644 index 0000000..76ae47f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hi.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/hi_IN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hi_IN.dat new file mode 100644 index 0000000..f63c615 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hi_IN.dat differ diff 
--git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/hr.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hr.dat new file mode 100644 index 0000000..d076429 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hr.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/hr_BA.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hr_BA.dat new file mode 100644 index 0000000..8b96551 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hr_BA.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/hr_HR.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hr_HR.dat new file mode 100644 index 0000000..a6affba Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hr_HR.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/hsb.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hsb.dat new file mode 100644 index 0000000..d43df47 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hsb.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/hsb_DE.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hsb_DE.dat new file mode 100644 index 0000000..56322e2 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hsb_DE.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/hu.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hu.dat new file mode 100644 index 0000000..e6c7c66 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hu.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/hu_HU.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hu_HU.dat new file mode 100644 index 0000000..8bedfed Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hu_HU.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/hy.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hy.dat new file mode 100644 index 0000000..12ebe0c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hy.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/hy_AM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hy_AM.dat new file mode 100644 index 0000000..43e5574 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/hy_AM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ia.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ia.dat new file mode 100644 index 0000000..8fcab92 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ia.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ia_001.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ia_001.dat new file mode 100644 index 0000000..070647f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ia_001.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/id.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/id.dat new file mode 100644 index 0000000..d7a13f1 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/id.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/id_ID.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/id_ID.dat new file mode 
100644 index 0000000..d22d10f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/id_ID.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ig.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ig.dat new file mode 100644 index 0000000..b0990d5 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ig.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ig_NG.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ig_NG.dat new file mode 100644 index 0000000..3ea7e4e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ig_NG.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ii.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ii.dat new file mode 100644 index 0000000..6128acd Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ii.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ii_CN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ii_CN.dat new file mode 100644 index 0000000..bf1b89d Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ii_CN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/is.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/is.dat new file mode 100644 index 0000000..6fac62f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/is.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/is_IS.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/is_IS.dat new file mode 100644 index 0000000..7e77fb1 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/is_IS.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/it.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/it.dat new file mode 100644 index 0000000..8849fb0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/it.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/it_CH.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/it_CH.dat new file mode 100644 index 0000000..14c5966 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/it_CH.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/it_IT.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/it_IT.dat new file mode 100644 index 0000000..8a5f277 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/it_IT.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/it_SM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/it_SM.dat new file mode 100644 index 0000000..6c4c23c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/it_SM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/it_VA.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/it_VA.dat new file mode 100644 index 0000000..3e308d3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/it_VA.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ja.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ja.dat new file mode 100644 index 0000000..5a5fd28 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ja.dat differ diff --git 
a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ja_JP.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ja_JP.dat new file mode 100644 index 0000000..be93fcc Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ja_JP.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/jgo.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/jgo.dat new file mode 100644 index 0000000..eab305c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/jgo.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/jgo_CM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/jgo_CM.dat new file mode 100644 index 0000000..c012c50 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/jgo_CM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/jmc.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/jmc.dat new file mode 100644 index 0000000..0204546 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/jmc.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/jmc_TZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/jmc_TZ.dat new file mode 100644 index 0000000..2a1942f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/jmc_TZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/jv.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/jv.dat new file mode 100644 index 0000000..9d751c0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/jv.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/jv_ID.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/jv_ID.dat new file mode 100644 index 0000000..42926ef Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/jv_ID.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ka.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ka.dat new file mode 100644 index 0000000..f87e05c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ka.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ka_GE.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ka_GE.dat new file mode 100644 index 0000000..f21bf11 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ka_GE.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kab.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kab.dat new file mode 100644 index 0000000..017ea5e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kab.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kab_DZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kab_DZ.dat new file mode 100644 index 0000000..d36214b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kab_DZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kam.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kam.dat new file mode 100644 index 0000000..38d8c47 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kam.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kam_KE.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kam_KE.dat new file 
mode 100644 index 0000000..dd7f5b4 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kam_KE.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kde.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kde.dat new file mode 100644 index 0000000..8a07f63 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kde.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kde_TZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kde_TZ.dat new file mode 100644 index 0000000..3361937 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kde_TZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kea.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kea.dat new file mode 100644 index 0000000..bd23b8c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kea.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kea_CV.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kea_CV.dat new file mode 100644 index 0000000..a605c75 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kea_CV.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/khq.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/khq.dat new file mode 100644 index 0000000..8c7cd7c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/khq.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/khq_ML.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/khq_ML.dat new file mode 100644 index 0000000..fb52fbe Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/khq_ML.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ki.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ki.dat new file mode 100644 index 0000000..d6bf8fa Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ki.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ki_KE.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ki_KE.dat new file mode 100644 index 0000000..0019c6f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ki_KE.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kk.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kk.dat new file mode 100644 index 0000000..b45ca06 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kk.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kk_KZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kk_KZ.dat new file mode 100644 index 0000000..463855c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kk_KZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kkj.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kkj.dat new file mode 100644 index 0000000..c0b9e4f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kkj.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kkj_CM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kkj_CM.dat new file mode 100644 index 0000000..707a86d Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kkj_CM.dat differ diff 
--git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kl.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kl.dat new file mode 100644 index 0000000..f440afc Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kl.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kl_GL.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kl_GL.dat new file mode 100644 index 0000000..222744f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kl_GL.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kln.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kln.dat new file mode 100644 index 0000000..7d92e85 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kln.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kln_KE.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kln_KE.dat new file mode 100644 index 0000000..479542f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kln_KE.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/km.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/km.dat new file mode 100644 index 0000000..02de1b7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/km.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/km_KH.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/km_KH.dat new file mode 100644 index 0000000..db172fe Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/km_KH.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kn.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kn.dat new file mode 100644 index 0000000..6fceb4c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kn.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kn_IN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kn_IN.dat new file mode 100644 index 0000000..bbf978f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kn_IN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ko.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ko.dat new file mode 100644 index 0000000..29a9a4b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ko.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ko_KP.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ko_KP.dat new file mode 100644 index 0000000..79b8c4c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ko_KP.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ko_KR.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ko_KR.dat new file mode 100644 index 0000000..fc89679 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ko_KR.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kok.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kok.dat new file mode 100644 index 0000000..29d49a0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kok.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kok_IN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kok_IN.dat new file mode 
100644 index 0000000..3cb3a25 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kok_IN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ks.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ks.dat new file mode 100644 index 0000000..5f33975 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ks.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ks_Arab.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ks_Arab.dat new file mode 100644 index 0000000..fcf3d30 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ks_Arab.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ks_Arab_IN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ks_Arab_IN.dat new file mode 100644 index 0000000..f068e1f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ks_Arab_IN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksb.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksb.dat new file mode 100644 index 0000000..0d1ec3d Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksb.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksb_TZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksb_TZ.dat new file mode 100644 index 0000000..f16d8e6 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksb_TZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksf.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksf.dat new file mode 100644 index 0000000..f72fe15 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksf.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksf_CM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksf_CM.dat new file mode 100644 index 0000000..7b9c44d Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksf_CM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksh.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksh.dat new file mode 100644 index 0000000..a77468f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksh.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksh_DE.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksh_DE.dat new file mode 100644 index 0000000..94684c2 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ksh_DE.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ku.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ku.dat new file mode 100644 index 0000000..1a26aa9 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ku.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ku_TR.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ku_TR.dat new file mode 100644 index 0000000..bb52a64 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ku_TR.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kw.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kw.dat new file mode 100644 index 0000000..2f4da7b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kw.dat 
differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/kw_GB.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kw_GB.dat new file mode 100644 index 0000000..a9a68be Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/kw_GB.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ky.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ky.dat new file mode 100644 index 0000000..a143f9a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ky.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ky_KG.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ky_KG.dat new file mode 100644 index 0000000..df59b77 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ky_KG.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/lag.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lag.dat new file mode 100644 index 0000000..11434a6 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lag.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/lag_TZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lag_TZ.dat new file mode 100644 index 0000000..87c8338 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lag_TZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/lb.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lb.dat new file mode 100644 index 0000000..a9e0e2a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lb.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/lb_LU.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lb_LU.dat new file mode 100644 index 0000000..c986288 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lb_LU.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/lg.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lg.dat new file mode 100644 index 0000000..2d9eb4b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lg.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/lg_UG.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lg_UG.dat new file mode 100644 index 0000000..b3922af Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lg_UG.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/lkt.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lkt.dat new file mode 100644 index 0000000..e903df7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lkt.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/lkt_US.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lkt_US.dat new file mode 100644 index 0000000..ebfe5ee Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/lkt_US.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ln.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ln.dat new file mode 100644 index 0000000..320786e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ln.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/ln_AO.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ln_AO.dat 
new file mode 100644
index 0000000..b4c748e
Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/ln_AO.dat differ
[Binary-file stanzas for the remaining Babel locale-data shipped with the committed dbt-env virtualenv: dbt-env/lib/python3.8/site-packages/babel/locale-data/ln_CD.dat through ur_PK.dat, each a new .dat file recorded as "new file mode 100644", "index 0000000..<hash>", "Binary files /dev/null and b/<path> differ".]
diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz.dat
new file mode 100644
index 0000000..b49b9b4
Binary files /dev/null and
b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Arab.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Arab.dat new file mode 100644 index 0000000..800ec14 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Arab.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Arab_AF.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Arab_AF.dat new file mode 100644 index 0000000..4c08cf0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Arab_AF.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Cyrl.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Cyrl.dat new file mode 100644 index 0000000..b4c3294 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Cyrl.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Cyrl_UZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Cyrl_UZ.dat new file mode 100644 index 0000000..fa25c53 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Cyrl_UZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Latn.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Latn.dat new file mode 100644 index 0000000..47a4033 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Latn.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Latn_UZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Latn_UZ.dat new file mode 100644 index 0000000..fa25c53 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/uz_Latn_UZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai.dat new file mode 100644 index 0000000..4f39ac1 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai_Latn.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai_Latn.dat new file mode 100644 index 0000000..a07b666 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai_Latn.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai_Latn_LR.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai_Latn_LR.dat new file mode 100644 index 0000000..dc7489c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai_Latn_LR.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai_Vaii.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai_Vaii.dat new file mode 100644 index 0000000..331a1f0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai_Vaii.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai_Vaii_LR.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai_Vaii_LR.dat new file mode 100644 index 0000000..dc7489c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vai_Vaii_LR.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/vi.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vi.dat new file mode 100644 index 0000000..dcdf9d4 Binary files /dev/null and 
b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vi.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/vi_VN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vi_VN.dat new file mode 100644 index 0000000..c8accbd Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vi_VN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/vo.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vo.dat new file mode 100644 index 0000000..42c0d64 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vo.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/vo_001.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vo_001.dat new file mode 100644 index 0000000..87cf0dc Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vo_001.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/vun.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vun.dat new file mode 100644 index 0000000..a87cfc0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vun.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/vun_TZ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vun_TZ.dat new file mode 100644 index 0000000..4988c17 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/vun_TZ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/wae.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/wae.dat new file mode 100644 index 0000000..b072772 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/wae.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/wae_CH.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/wae_CH.dat new file mode 100644 index 0000000..99e337c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/wae_CH.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/wo.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/wo.dat new file mode 100644 index 0000000..7d02b1c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/wo.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/wo_SN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/wo_SN.dat new file mode 100644 index 0000000..5a51e8f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/wo_SN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/xh.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/xh.dat new file mode 100644 index 0000000..54a04fd Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/xh.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/xh_ZA.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/xh_ZA.dat new file mode 100644 index 0000000..7bcbf8b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/xh_ZA.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/xog.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/xog.dat new file mode 100644 index 0000000..8f9b656 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/xog.dat differ diff --git 
a/dbt-env/lib/python3.8/site-packages/babel/locale-data/xog_UG.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/xog_UG.dat new file mode 100644 index 0000000..2c2f265 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/xog_UG.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/yav.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yav.dat new file mode 100644 index 0000000..7e192ad Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yav.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/yav_CM.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yav_CM.dat new file mode 100644 index 0000000..28aa403 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yav_CM.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/yi.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yi.dat new file mode 100644 index 0000000..a6e2052 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yi.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/yi_001.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yi_001.dat new file mode 100644 index 0000000..09fb4cc Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yi_001.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/yo.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yo.dat new file mode 100644 index 0000000..36c3e08 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yo.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/yo_BJ.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yo_BJ.dat new file mode 100644 index 0000000..e0b0971 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yo_BJ.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/yo_NG.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yo_NG.dat new file mode 100644 index 0000000..fc84627 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yo_NG.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue.dat new file mode 100644 index 0000000..b10f1e8 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue_Hans.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue_Hans.dat new file mode 100644 index 0000000..5101fea Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue_Hans.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue_Hans_CN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue_Hans_CN.dat new file mode 100644 index 0000000..6f0af0e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue_Hans_CN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue_Hant.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue_Hant.dat new file mode 100644 index 0000000..a6f177a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue_Hant.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue_Hant_HK.dat 
b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue_Hant_HK.dat new file mode 100644 index 0000000..88f0913 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/yue_Hant_HK.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zgh.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zgh.dat new file mode 100644 index 0000000..3a56767 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zgh.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zgh_MA.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zgh_MA.dat new file mode 100644 index 0000000..6acec16 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zgh_MA.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh.dat new file mode 100644 index 0000000..a0af130 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans.dat new file mode 100644 index 0000000..cb30aaf Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans_CN.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans_CN.dat new file mode 100644 index 0000000..2e9d742 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans_CN.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans_HK.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans_HK.dat new file mode 100644 index 0000000..2f89a86 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans_HK.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans_MO.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans_MO.dat new file mode 100644 index 0000000..99f8d38 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans_MO.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans_SG.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans_SG.dat new file mode 100644 index 0000000..bd67a4a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hans_SG.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hant.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hant.dat new file mode 100644 index 0000000..914b6b0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hant.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hant_HK.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hant_HK.dat new file mode 100644 index 0000000..a082e43 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hant_HK.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hant_MO.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hant_MO.dat new file mode 100644 index 0000000..305cbe5 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hant_MO.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hant_TW.dat 
b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hant_TW.dat new file mode 100644 index 0000000..62221bc Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zh_Hant_TW.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zu.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zu.dat new file mode 100644 index 0000000..4325f03 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zu.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/locale-data/zu_ZA.dat b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zu_ZA.dat new file mode 100644 index 0000000..35cff0a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/locale-data/zu_ZA.dat differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/localedata.py b/dbt-env/lib/python3.8/site-packages/babel/localedata.py new file mode 100644 index 0000000..438afb6 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/localedata.py @@ -0,0 +1,258 @@ +# -*- coding: utf-8 -*- +""" + babel.localedata + ~~~~~~~~~~~~~~~~ + + Low-level locale data access. + + :note: The `Locale` class, which uses this module under the hood, provides a + more convenient interface for accessing the locale data. + + :copyright: (c) 2013-2021 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +import os +import re +import sys +import threading +from itertools import chain + +from babel._compat import pickle, string_types, abc + + +_cache = {} +_cache_lock = threading.RLock() +_dirname = os.path.join(os.path.dirname(__file__), 'locale-data') +_windows_reserved_name_re = re.compile("^(con|prn|aux|nul|com[0-9]|lpt[0-9])$", re.I) + + +def normalize_locale(name): + """Normalize a locale ID by stripping spaces and apply proper casing. + + Returns the normalized locale ID string or `None` if the ID is not + recognized. + """ + if not name or not isinstance(name, string_types): + return None + name = name.strip().lower() + for locale_id in chain.from_iterable([_cache, locale_identifiers()]): + if name == locale_id.lower(): + return locale_id + + +def resolve_locale_filename(name): + """ + Resolve a locale identifier to a `.dat` path on disk. + """ + + # Clean up any possible relative paths. + name = os.path.basename(name) + + # Ensure we're not left with one of the Windows reserved names. + if sys.platform == "win32" and _windows_reserved_name_re.match(os.path.splitext(name)[0]): + raise ValueError("Name %s is invalid on Windows" % name) + + # Build the path. + return os.path.join(_dirname, '%s.dat' % name) + + +def exists(name): + """Check whether locale data is available for the given locale. + + Returns `True` if it exists, `False` otherwise. + + :param name: the locale identifier string + """ + if not name or not isinstance(name, string_types): + return False + if name in _cache: + return True + file_found = os.path.exists(resolve_locale_filename(name)) + return True if file_found else bool(normalize_locale(name)) + + +def locale_identifiers(): + """Return a list of all locale identifiers for which locale data is + available. + + This data is cached after the first invocation in `locale_identifiers.cache`. + + Removing the `locale_identifiers.cache` attribute or setting it to `None` + will cause this function to re-read the list from disk. + + .. 
versionadded:: 0.8.1 + + :return: a list of locale identifiers (strings) + """ + data = getattr(locale_identifiers, 'cache', None) + if data is None: + locale_identifiers.cache = data = [ + stem + for stem, extension in + (os.path.splitext(filename) for filename in os.listdir(_dirname)) + if extension == '.dat' and stem != 'root' + ] + return data + + +def load(name, merge_inherited=True): + """Load the locale data for the given locale. + + The locale data is a dictionary that contains much of the data defined by + the Common Locale Data Repository (CLDR). This data is stored as a + collection of pickle files inside the ``babel`` package. + + >>> d = load('en_US') + >>> d['languages']['sv'] + u'Swedish' + + Note that the results are cached, and subsequent requests for the same + locale return the same dictionary: + + >>> d1 = load('en_US') + >>> d2 = load('en_US') + >>> d1 is d2 + True + + :param name: the locale identifier string (or "root") + :param merge_inherited: whether the inherited data should be merged into + the data of the requested locale + :raise `IOError`: if no locale data file is found for the given locale + identifer, or one of the locales it inherits from + """ + name = os.path.basename(name) + _cache_lock.acquire() + try: + data = _cache.get(name) + if not data: + # Load inherited data + if name == 'root' or not merge_inherited: + data = {} + else: + from babel.core import get_global + parent = get_global('parent_exceptions').get(name) + if not parent: + parts = name.split('_') + if len(parts) == 1: + parent = 'root' + else: + parent = '_'.join(parts[:-1]) + data = load(parent).copy() + filename = resolve_locale_filename(name) + with open(filename, 'rb') as fileobj: + if name != 'root' and merge_inherited: + merge(data, pickle.load(fileobj)) + else: + data = pickle.load(fileobj) + _cache[name] = data + return data + finally: + _cache_lock.release() + + +def merge(dict1, dict2): + """Merge the data from `dict2` into the `dict1` dictionary, making copies + of nested dictionaries. + + >>> d = {1: 'foo', 3: 'baz'} + >>> merge(d, {1: 'Foo', 2: 'Bar'}) + >>> sorted(d.items()) + [(1, 'Foo'), (2, 'Bar'), (3, 'baz')] + + :param dict1: the dictionary to merge into + :param dict2: the dictionary containing the data that should be merged + """ + for key, val2 in dict2.items(): + if val2 is not None: + val1 = dict1.get(key) + if isinstance(val2, dict): + if val1 is None: + val1 = {} + if isinstance(val1, Alias): + val1 = (val1, val2) + elif isinstance(val1, tuple): + alias, others = val1 + others = others.copy() + merge(others, val2) + val1 = (alias, others) + else: + val1 = val1.copy() + merge(val1, val2) + else: + val1 = val2 + dict1[key] = val1 + + +class Alias(object): + """Representation of an alias in the locale data. + + An alias is a value that refers to some other part of the locale data, + as specified by the `keys`. + """ + + def __init__(self, keys): + self.keys = tuple(keys) + + def __repr__(self): + return '<%s %r>' % (type(self).__name__, self.keys) + + def resolve(self, data): + """Resolve the alias based on the given data. + + This is done recursively, so if one alias resolves to a second alias, + that second alias will also be resolved. 
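The low-level helpers added above (exists, locale_identifiers, load, merge) are what babel.Locale uses under the hood. A minimal usage sketch, assuming this vendored babel package is importable; the higher-level Locale class is normally the better entry point:

    from babel import localedata

    assert localedata.exists('en_US')            # file lookup plus normalize_locale fallback
    print(len(localedata.locale_identifiers()))  # every *.dat stem except 'root'

    data = localedata.load('en_US')              # root -> en -> en_US, merged and cached
    print(data['languages']['sv'])               # 'Swedish', as in the doctest above
    assert localedata.load('en_US') is data      # a second call returns the cached dict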
+ + :param data: the locale data + :type data: `dict` + """ + base = data + for key in self.keys: + data = data[key] + if isinstance(data, Alias): + data = data.resolve(base) + elif isinstance(data, tuple): + alias, others = data + data = alias.resolve(base) + return data + + +class LocaleDataDict(abc.MutableMapping): + """Dictionary wrapper that automatically resolves aliases to the actual + values. + """ + + def __init__(self, data, base=None): + self._data = data + if base is None: + base = data + self.base = base + + def __len__(self): + return len(self._data) + + def __iter__(self): + return iter(self._data) + + def __getitem__(self, key): + orig = val = self._data[key] + if isinstance(val, Alias): # resolve an alias + val = val.resolve(self.base) + if isinstance(val, tuple): # Merge a partial dict with an alias + alias, others = val + val = alias.resolve(self.base).copy() + merge(val, others) + if type(val) is dict: # Return a nested alias-resolving dict + val = LocaleDataDict(val, base=self.base) + if val is not orig: + self._data[key] = val + return val + + def __setitem__(self, key, value): + self._data[key] = value + + def __delitem__(self, key): + del self._data[key] + + def copy(self): + return LocaleDataDict(self._data.copy(), base=self.base) diff --git a/dbt-env/lib/python3.8/site-packages/babel/localtime/__init__.py b/dbt-env/lib/python3.8/site-packages/babel/localtime/__init__.py new file mode 100644 index 0000000..bd39549 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/localtime/__init__.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +""" + babel.localtime + ~~~~~~~~~~~~~~~ + + Babel specific fork of tzlocal to determine the local timezone + of the system. + + :copyright: (c) 2013-2021 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +import sys +import pytz +import time +from datetime import timedelta +from datetime import tzinfo +from threading import RLock + +if sys.platform == 'win32': + from babel.localtime._win32 import _get_localzone +else: + from babel.localtime._unix import _get_localzone + + +_cached_tz = None +_cache_lock = RLock() + +STDOFFSET = timedelta(seconds=-time.timezone) +if time.daylight: + DSTOFFSET = timedelta(seconds=-time.altzone) +else: + DSTOFFSET = STDOFFSET + +DSTDIFF = DSTOFFSET - STDOFFSET +ZERO = timedelta(0) + + +class _FallbackLocalTimezone(tzinfo): + + def utcoffset(self, dt): + if self._isdst(dt): + return DSTOFFSET + else: + return STDOFFSET + + def dst(self, dt): + if self._isdst(dt): + return DSTDIFF + else: + return ZERO + + def tzname(self, dt): + return time.tzname[self._isdst(dt)] + + def _isdst(self, dt): + tt = (dt.year, dt.month, dt.day, + dt.hour, dt.minute, dt.second, + dt.weekday(), 0, -1) + stamp = time.mktime(tt) + tt = time.localtime(stamp) + return tt.tm_isdst > 0 + + +def get_localzone(): + """Returns the current underlying local timezone object. + Generally this function does not need to be used, it's a + better idea to use the :data:`LOCALTZ` singleton instead. 
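The Alias and LocaleDataDict classes above exist so that CLDR aliases (entries that point at other parts of the locale data) are resolved only when they are actually read. A small sketch, again assuming the vendored package is importable; 'date_formats' is just one example key present in the CLDR data:

    from babel.localedata import load, LocaleDataDict

    raw = load('en')                        # plain dict; may still contain Alias objects
    wrapped = LocaleDataDict(raw)           # resolves aliases lazily on item access
    date_formats = wrapped['date_formats']  # nested dicts come back wrapped as well
    print(date_formats['medium'])           # e.g. the medium date pattern ('MMM d, y' for English)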
+ """ + return _get_localzone() + + +try: + LOCALTZ = get_localzone() +except pytz.UnknownTimeZoneError: + LOCALTZ = _FallbackLocalTimezone() diff --git a/dbt-env/lib/python3.8/site-packages/babel/localtime/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/localtime/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..27a93e2 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/localtime/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/localtime/__pycache__/_unix.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/localtime/__pycache__/_unix.cpython-38.pyc new file mode 100644 index 0000000..b5e5a0e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/localtime/__pycache__/_unix.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/localtime/__pycache__/_win32.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/localtime/__pycache__/_win32.cpython-38.pyc new file mode 100644 index 0000000..b9896b5 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/localtime/__pycache__/_win32.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/localtime/_unix.py b/dbt-env/lib/python3.8/site-packages/babel/localtime/_unix.py new file mode 100644 index 0000000..c219469 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/localtime/_unix.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- +from __future__ import with_statement +import os +import re +import sys +import pytz +import subprocess + +_systemconfig_tz = re.compile(r'^Time Zone: (.*)$', re.MULTILINE) + + +def _tz_from_env(tzenv): + if tzenv[0] == ':': + tzenv = tzenv[1:] + + # TZ specifies a file + if os.path.exists(tzenv): + with open(tzenv, 'rb') as tzfile: + return pytz.tzfile.build_tzinfo('local', tzfile) + + # TZ specifies a zoneinfo zone. + try: + tz = pytz.timezone(tzenv) + # That worked, so we return this: + return tz + except pytz.UnknownTimeZoneError: + raise pytz.UnknownTimeZoneError( + "tzlocal() does not support non-zoneinfo timezones like %s. \n" + "Please use a timezone in the form of Continent/City") + + +def _get_localzone(_root='/'): + """Tries to find the local timezone configuration. + This method prefers finding the timezone name and passing that to pytz, + over passing in the localtime file, as in the later case the zoneinfo + name is unknown. + The parameter _root makes the function look for files like /etc/localtime + beneath the _root directory. This is primarily used by the tests. + In normal usage you call the function without parameters. + """ + + tzenv = os.environ.get('TZ') + if tzenv: + return _tz_from_env(tzenv) + + # This is actually a pretty reliable way to test for the local time + # zone on operating systems like OS X. On OS X especially this is the + # only one that actually works. + try: + link_dst = os.readlink('/etc/localtime') + except OSError: + pass + else: + pos = link_dst.find('/zoneinfo/') + if pos >= 0: + zone_name = link_dst[pos + 10:] + try: + return pytz.timezone(zone_name) + except pytz.UnknownTimeZoneError: + pass + + # If we are on OS X now we are pretty sure that the rest of the + # code will fail and just fall through until it hits the reading + # of /etc/localtime and using it without name. At this point we + # can invoke systemconfig which internally invokes ICU. 
ICU itself + # does the same thing we do (readlink + compare file contents) but + # since it knows where the zone files are that should be a bit + # better than reimplementing the logic here. + if sys.platform == 'darwin': + c = subprocess.Popen(['systemsetup', '-gettimezone'], + stdout=subprocess.PIPE) + sys_result = c.communicate()[0] + c.wait() + tz_match = _systemconfig_tz.search(sys_result) + if tz_match is not None: + zone_name = tz_match.group(1) + try: + return pytz.timezone(zone_name) + except pytz.UnknownTimeZoneError: + pass + + # Now look for distribution specific configuration files + # that contain the timezone name. + tzpath = os.path.join(_root, 'etc/timezone') + if os.path.exists(tzpath): + with open(tzpath, 'rb') as tzfile: + data = tzfile.read() + + # Issue #3 in tzlocal was that /etc/timezone was a zoneinfo file. + # That's a misconfiguration, but we need to handle it gracefully: + if data[:5] != b'TZif2': + etctz = data.strip().decode() + # Get rid of host definitions and comments: + if ' ' in etctz: + etctz, dummy = etctz.split(' ', 1) + if '#' in etctz: + etctz, dummy = etctz.split('#', 1) + return pytz.timezone(etctz.replace(' ', '_')) + + # CentOS has a ZONE setting in /etc/sysconfig/clock, + # OpenSUSE has a TIMEZONE setting in /etc/sysconfig/clock and + # Gentoo has a TIMEZONE setting in /etc/conf.d/clock + # We look through these files for a timezone: + timezone_re = re.compile(r'\s*(TIME)?ZONE\s*=\s*"(?P.+)"') + + for filename in ('etc/sysconfig/clock', 'etc/conf.d/clock'): + tzpath = os.path.join(_root, filename) + if not os.path.exists(tzpath): + continue + with open(tzpath, 'rt') as tzfile: + for line in tzfile: + match = timezone_re.match(line) + if match is not None: + # We found a timezone + etctz = match.group("etctz") + return pytz.timezone(etctz.replace(' ', '_')) + + # No explicit setting existed. Use localtime + for filename in ('etc/localtime', 'usr/local/etc/localtime'): + tzpath = os.path.join(_root, filename) + + if not os.path.exists(tzpath): + continue + + with open(tzpath, 'rb') as tzfile: + return pytz.tzfile.build_tzinfo('local', tzfile) + + raise pytz.UnknownTimeZoneError('Can not find any timezone configuration') diff --git a/dbt-env/lib/python3.8/site-packages/babel/localtime/_win32.py b/dbt-env/lib/python3.8/site-packages/babel/localtime/_win32.py new file mode 100644 index 0000000..65cc088 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/localtime/_win32.py @@ -0,0 +1,96 @@ +try: + import _winreg as winreg +except ImportError: + try: + import winreg + except ImportError: + winreg = None + +from babel.core import get_global +import pytz + + +# When building the cldr data on windows this module gets imported. +# Because at that point there is no global.dat yet this call will +# fail. We want to catch it down in that case then and just assume +# the mapping was empty. +try: + tz_names = get_global('windows_zone_mapping') +except RuntimeError: + tz_names = {} + + +def valuestodict(key): + """Convert a registry key's values to a dictionary.""" + dict = {} + size = winreg.QueryInfoKey(key)[1] + for i in range(size): + data = winreg.EnumValue(key, i) + dict[data[0]] = data[1] + return dict + + +def get_localzone_name(): + # Windows is special. It has unique time zone names (in several + # meanings of the word) available, but unfortunately, they can be + # translated to the language of the operating system, so we need to + # do a backwards lookup, by going through all time zones and see which + # one matches. 
+ handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + + TZLOCALKEYNAME = r'SYSTEM\CurrentControlSet\Control\TimeZoneInformation' + localtz = winreg.OpenKey(handle, TZLOCALKEYNAME) + keyvalues = valuestodict(localtz) + localtz.Close() + if 'TimeZoneKeyName' in keyvalues: + # Windows 7 (and Vista?) + + # For some reason this returns a string with loads of NUL bytes at + # least on some systems. I don't know if this is a bug somewhere, I + # just work around it. + tzkeyname = keyvalues['TimeZoneKeyName'].split('\x00', 1)[0] + else: + # Windows 2000 or XP + + # This is the localized name: + tzwin = keyvalues['StandardName'] + + # Open the list of timezones to look up the real name: + TZKEYNAME = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones' + tzkey = winreg.OpenKey(handle, TZKEYNAME) + + # Now, match this value to Time Zone information + tzkeyname = None + for i in range(winreg.QueryInfoKey(tzkey)[0]): + subkey = winreg.EnumKey(tzkey, i) + sub = winreg.OpenKey(tzkey, subkey) + data = valuestodict(sub) + sub.Close() + if data.get('Std', None) == tzwin: + tzkeyname = subkey + break + + tzkey.Close() + handle.Close() + + if tzkeyname is None: + raise LookupError('Can not find Windows timezone configuration') + + timezone = tz_names.get(tzkeyname) + if timezone is None: + # Nope, that didn't work. Try adding 'Standard Time', + # it seems to work a lot of times: + timezone = tz_names.get(tzkeyname + ' Standard Time') + + # Return what we have. + if timezone is None: + raise pytz.UnknownTimeZoneError('Can not find timezone ' + tzkeyname) + + return timezone + + +def _get_localzone(): + if winreg is None: + raise pytz.UnknownTimeZoneError( + 'Runtime support not available') + return pytz.timezone(get_localzone_name()) diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/__init__.py b/dbt-env/lib/python3.8/site-packages/babel/messages/__init__.py new file mode 100644 index 0000000..7d2587f --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/messages/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +""" + babel.messages + ~~~~~~~~~~~~~~ + + Support for ``gettext`` message catalogs. + + :copyright: (c) 2013-2021 by the Babel Team. + :license: BSD, see LICENSE for more details. 
+""" + +from babel.messages.catalog import * diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..9790b1b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/catalog.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/catalog.cpython-38.pyc new file mode 100644 index 0000000..74c7fc2 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/catalog.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/checkers.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/checkers.cpython-38.pyc new file mode 100644 index 0000000..b914941 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/checkers.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/extract.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/extract.cpython-38.pyc new file mode 100644 index 0000000..08aad3a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/extract.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/frontend.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/frontend.cpython-38.pyc new file mode 100644 index 0000000..97c1686 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/frontend.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/jslexer.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/jslexer.cpython-38.pyc new file mode 100644 index 0000000..fbe1e67 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/jslexer.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/mofile.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/mofile.cpython-38.pyc new file mode 100644 index 0000000..ac554f0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/mofile.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/plurals.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/plurals.cpython-38.pyc new file mode 100644 index 0000000..d9b454f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/plurals.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/pofile.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/pofile.cpython-38.pyc new file mode 100644 index 0000000..7577ee1 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/babel/messages/__pycache__/pofile.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/catalog.py b/dbt-env/lib/python3.8/site-packages/babel/messages/catalog.py new file mode 100644 index 0000000..a19a3e6 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/messages/catalog.py @@ -0,0 +1,851 @@ +# -*- coding: utf-8 -*- +""" + 
babel.messages.catalog + ~~~~~~~~~~~~~~~~~~~~~~ + + Data structures for message catalogs. + + :copyright: (c) 2013-2021 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +import re +import time + +from cgi import parse_header +from collections import OrderedDict +from datetime import datetime, time as time_ +from difflib import get_close_matches +from email import message_from_string +from copy import copy + +from babel import __version__ as VERSION +from babel.core import Locale, UnknownLocaleError +from babel.dates import format_datetime +from babel.messages.plurals import get_plural +from babel.util import distinct, LOCALTZ, FixedOffsetTimezone +from babel._compat import string_types, number_types, PY2, cmp, text_type, force_text + +__all__ = ['Message', 'Catalog', 'TranslationError'] + + +PYTHON_FORMAT = re.compile(r''' + \% + (?:\(([\w]*)\))? + ( + [-#0\ +]?(?:\*|[\d]+)? + (?:\.(?:\*|[\d]+))? + [hlL]? + ) + ([diouxXeEfFgGcrs%]) +''', re.VERBOSE) + + +def _parse_datetime_header(value): + match = re.match(r'^(?P.*?)(?P[+-]\d{4})?$', value) + + tt = time.strptime(match.group('datetime'), '%Y-%m-%d %H:%M') + ts = time.mktime(tt) + dt = datetime.fromtimestamp(ts) + + # Separate the offset into a sign component, hours, and # minutes + tzoffset = match.group('tzoffset') + if tzoffset is not None: + plus_minus_s, rest = tzoffset[0], tzoffset[1:] + hours_offset_s, mins_offset_s = rest[:2], rest[2:] + + # Make them all integers + plus_minus = int(plus_minus_s + '1') + hours_offset = int(hours_offset_s) + mins_offset = int(mins_offset_s) + + # Calculate net offset + net_mins_offset = hours_offset * 60 + net_mins_offset += mins_offset + net_mins_offset *= plus_minus + + # Create an offset object + tzoffset = FixedOffsetTimezone(net_mins_offset) + + # Store the offset in a datetime object + dt = dt.replace(tzinfo=tzoffset) + + return dt + + +class Message(object): + """Representation of a single message in a catalog.""" + + def __init__(self, id, string=u'', locations=(), flags=(), auto_comments=(), + user_comments=(), previous_id=(), lineno=None, context=None): + """Create the message object. 
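The PYTHON_FORMAT regular expression above is what Message.python_format (further down) uses to decide whether a message id contains printf-style placeholders and should carry the python-format flag. A quick check by hand; PYTHON_FORMAT is a module-level name in babel.messages.catalog rather than public API, so treat this purely as illustration:

    from babel.messages.catalog import PYTHON_FORMAT

    assert PYTHON_FORMAT.search('Hello %(name)s, you have %d new messages')
    assert not PYTHON_FORMAT.search('Hello {name}')   # str.format-style placeholders are not matched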
+ + :param id: the message ID, or a ``(singular, plural)`` tuple for + pluralizable messages + :param string: the translated message string, or a + ``(singular, plural)`` tuple for pluralizable messages + :param locations: a sequence of ``(filename, lineno)`` tuples + :param flags: a set or sequence of flags + :param auto_comments: a sequence of automatic comments for the message + :param user_comments: a sequence of user comments for the message + :param previous_id: the previous message ID, or a ``(singular, plural)`` + tuple for pluralizable messages + :param lineno: the line number on which the msgid line was found in the + PO file, if any + :param context: the message context + """ + self.id = id + if not string and self.pluralizable: + string = (u'', u'') + self.string = string + self.locations = list(distinct(locations)) + self.flags = set(flags) + if id and self.python_format: + self.flags.add('python-format') + else: + self.flags.discard('python-format') + self.auto_comments = list(distinct(auto_comments)) + self.user_comments = list(distinct(user_comments)) + if isinstance(previous_id, string_types): + self.previous_id = [previous_id] + else: + self.previous_id = list(previous_id) + self.lineno = lineno + self.context = context + + def __repr__(self): + return '<%s %r (flags: %r)>' % (type(self).__name__, self.id, + list(self.flags)) + + def __cmp__(self, other): + """Compare Messages, taking into account plural ids""" + def values_to_compare(obj): + if isinstance(obj, Message) and obj.pluralizable: + return obj.id[0], obj.context or '' + return obj.id, obj.context or '' + return cmp(values_to_compare(self), values_to_compare(other)) + + def __gt__(self, other): + return self.__cmp__(other) > 0 + + def __lt__(self, other): + return self.__cmp__(other) < 0 + + def __ge__(self, other): + return self.__cmp__(other) >= 0 + + def __le__(self, other): + return self.__cmp__(other) <= 0 + + def __eq__(self, other): + return self.__cmp__(other) == 0 + + def __ne__(self, other): + return self.__cmp__(other) != 0 + + def clone(self): + return Message(*map(copy, (self.id, self.string, self.locations, + self.flags, self.auto_comments, + self.user_comments, self.previous_id, + self.lineno, self.context))) + + def check(self, catalog=None): + """Run various validation checks on the message. Some validations + are only performed if the catalog is provided. This method returns + a sequence of `TranslationError` objects. + + :rtype: ``iterator`` + :param catalog: A catalog instance that is passed to the checkers + :see: `Catalog.check` for a way to perform checks for all messages + in a catalog. + """ + from babel.messages.checkers import checkers + errors = [] + for checker in checkers: + try: + checker(catalog, self) + except TranslationError as e: + errors.append(e) + return errors + + @property + def fuzzy(self): + """Whether the translation is fuzzy. + + >>> Message('foo').fuzzy + False + >>> msg = Message('foo', 'foo', flags=['fuzzy']) + >>> msg.fuzzy + True + >>> msg + + + :type: `bool`""" + return 'fuzzy' in self.flags + + @property + def pluralizable(self): + """Whether the message is plurizable. + + >>> Message('foo').pluralizable + False + >>> Message(('foo', 'bar')).pluralizable + True + + :type: `bool`""" + return isinstance(self.id, (list, tuple)) + + @property + def python_format(self): + """Whether the message contains Python-style parameters. 
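Putting the Message properties above together, a short sketch that mirrors the doctests (assuming the vendored package is importable):

    from babel.messages.catalog import Message

    msg = Message('foo %(name)s', locations=[('main.py', 3)], flags=['fuzzy'])
    print(msg.fuzzy)            # True  -- 'fuzzy' is in the flag set
    print(msg.python_format)    # True  -- detected via PYTHON_FORMAT; flag added automatically
    print(msg.pluralizable)     # False -- id is a plain string

    plural = Message(('apple', 'apples'), string=('Apfel', 'Äpfel'))
    print(plural.pluralizable)  # True  -- (singular, plural) tuple id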
+ + >>> Message('foo %(name)s bar').python_format + True + >>> Message(('foo %(name)s', 'foo %(name)s')).python_format + True + + :type: `bool`""" + ids = self.id + if not isinstance(ids, (list, tuple)): + ids = [ids] + return any(PYTHON_FORMAT.search(id) for id in ids) + + +class TranslationError(Exception): + """Exception thrown by translation checkers when invalid message + translations are encountered.""" + + +DEFAULT_HEADER = u"""\ +# Translations template for PROJECT. +# Copyright (C) YEAR ORGANIZATION +# This file is distributed under the same license as the PROJECT project. +# FIRST AUTHOR , YEAR. +#""" + + +if PY2: + def _parse_header(header_string): + # message_from_string only works for str, not for unicode + headers = message_from_string(header_string.encode('utf8')) + decoded_headers = {} + for name, value in headers.items(): + name = name.decode('utf8') + value = value.decode('utf8') + decoded_headers[name] = value + return decoded_headers + +else: + _parse_header = message_from_string + + +class Catalog(object): + """Representation of a message catalog.""" + + def __init__(self, locale=None, domain=None, header_comment=DEFAULT_HEADER, + project=None, version=None, copyright_holder=None, + msgid_bugs_address=None, creation_date=None, + revision_date=None, last_translator=None, language_team=None, + charset=None, fuzzy=True): + """Initialize the catalog object. + + :param locale: the locale identifier or `Locale` object, or `None` + if the catalog is not bound to a locale (which basically + means it's a template) + :param domain: the message domain + :param header_comment: the header comment as string, or `None` for the + default header + :param project: the project's name + :param version: the project's version + :param copyright_holder: the copyright holder of the catalog + :param msgid_bugs_address: the email address or URL to submit bug + reports to + :param creation_date: the date the catalog was created + :param revision_date: the date the catalog was revised + :param last_translator: the name and email of the last translator + :param language_team: the name and email of the language team + :param charset: the encoding to use in the output (defaults to utf-8) + :param fuzzy: the fuzzy bit on the catalog header + """ + self.domain = domain + self.locale = locale + self._header_comment = header_comment + self._messages = OrderedDict() + + self.project = project or 'PROJECT' + self.version = version or 'VERSION' + self.copyright_holder = copyright_holder or 'ORGANIZATION' + self.msgid_bugs_address = msgid_bugs_address or 'EMAIL@ADDRESS' + + self.last_translator = last_translator or 'FULL NAME ' + """Name and email address of the last translator.""" + self.language_team = language_team or 'LANGUAGE ' + """Name and email address of the language team.""" + + self.charset = charset or 'utf-8' + + if creation_date is None: + creation_date = datetime.now(LOCALTZ) + elif isinstance(creation_date, datetime) and not creation_date.tzinfo: + creation_date = creation_date.replace(tzinfo=LOCALTZ) + self.creation_date = creation_date + if revision_date is None: + revision_date = 'YEAR-MO-DA HO:MI+ZONE' + elif isinstance(revision_date, datetime) and not revision_date.tzinfo: + revision_date = revision_date.replace(tzinfo=LOCALTZ) + self.revision_date = revision_date + self.fuzzy = fuzzy + + self.obsolete = OrderedDict() # Dictionary of obsolete messages + self._num_plurals = None + self._plural_expr = None + + def _set_locale(self, locale): + if locale is None: + self._locale_identifier = 
None + self._locale = None + return + + if isinstance(locale, Locale): + self._locale_identifier = text_type(locale) + self._locale = locale + return + + if isinstance(locale, string_types): + self._locale_identifier = text_type(locale) + try: + self._locale = Locale.parse(locale) + except UnknownLocaleError: + self._locale = None + return + + raise TypeError('`locale` must be a Locale, a locale identifier string, or None; got %r' % locale) + + def _get_locale(self): + return self._locale + + def _get_locale_identifier(self): + return self._locale_identifier + + locale = property(_get_locale, _set_locale) + locale_identifier = property(_get_locale_identifier) + + def _get_header_comment(self): + comment = self._header_comment + year = datetime.now(LOCALTZ).strftime('%Y') + if hasattr(self.revision_date, 'strftime'): + year = self.revision_date.strftime('%Y') + comment = comment.replace('PROJECT', self.project) \ + .replace('VERSION', self.version) \ + .replace('YEAR', year) \ + .replace('ORGANIZATION', self.copyright_holder) + locale_name = (self.locale.english_name if self.locale else self.locale_identifier) + if locale_name: + comment = comment.replace('Translations template', '%s translations' % locale_name) + return comment + + def _set_header_comment(self, string): + self._header_comment = string + + header_comment = property(_get_header_comment, _set_header_comment, doc="""\ + The header comment for the catalog. + + >>> catalog = Catalog(project='Foobar', version='1.0', + ... copyright_holder='Foo Company') + >>> print(catalog.header_comment) #doctest: +ELLIPSIS + # Translations template for Foobar. + # Copyright (C) ... Foo Company + # This file is distributed under the same license as the Foobar project. + # FIRST AUTHOR , .... + # + + The header can also be set from a string. Any known upper-case variables + will be replaced when the header is retrieved again: + + >>> catalog = Catalog(project='Foobar', version='1.0', + ... copyright_holder='Foo Company') + >>> catalog.header_comment = '''\\ + ... # The POT for my really cool PROJECT project. + ... # Copyright (C) 1990-2003 ORGANIZATION + ... # This file is distributed under the same license as the PROJECT + ... # project. + ... #''' + >>> print(catalog.header_comment) + # The POT for my really cool Foobar project. + # Copyright (C) 1990-2003 Foo Company + # This file is distributed under the same license as the Foobar + # project. 
+ # + + :type: `unicode` + """) + + def _get_mime_headers(self): + headers = [] + headers.append(('Project-Id-Version', + '%s %s' % (self.project, self.version))) + headers.append(('Report-Msgid-Bugs-To', self.msgid_bugs_address)) + headers.append(('POT-Creation-Date', + format_datetime(self.creation_date, 'yyyy-MM-dd HH:mmZ', + locale='en'))) + if isinstance(self.revision_date, (datetime, time_) + number_types): + headers.append(('PO-Revision-Date', + format_datetime(self.revision_date, + 'yyyy-MM-dd HH:mmZ', locale='en'))) + else: + headers.append(('PO-Revision-Date', self.revision_date)) + headers.append(('Last-Translator', self.last_translator)) + if self.locale_identifier: + headers.append(('Language', str(self.locale_identifier))) + if self.locale_identifier and ('LANGUAGE' in self.language_team): + headers.append(('Language-Team', + self.language_team.replace('LANGUAGE', + str(self.locale_identifier)))) + else: + headers.append(('Language-Team', self.language_team)) + if self.locale is not None: + headers.append(('Plural-Forms', self.plural_forms)) + headers.append(('MIME-Version', '1.0')) + headers.append(('Content-Type', + 'text/plain; charset=%s' % self.charset)) + headers.append(('Content-Transfer-Encoding', '8bit')) + headers.append(('Generated-By', 'Babel %s\n' % VERSION)) + return headers + + def _set_mime_headers(self, headers): + for name, value in headers: + name = force_text(name.lower(), encoding=self.charset) + value = force_text(value, encoding=self.charset) + if name == 'project-id-version': + parts = value.split(' ') + self.project = u' '.join(parts[:-1]) + self.version = parts[-1] + elif name == 'report-msgid-bugs-to': + self.msgid_bugs_address = value + elif name == 'last-translator': + self.last_translator = value + elif name == 'language': + value = value.replace('-', '_') + self._set_locale(value) + elif name == 'language-team': + self.language_team = value + elif name == 'content-type': + mimetype, params = parse_header(value) + if 'charset' in params: + self.charset = params['charset'].lower() + elif name == 'plural-forms': + _, params = parse_header(' ;' + value) + self._num_plurals = int(params.get('nplurals', 2)) + self._plural_expr = params.get('plural', '(n != 1)') + elif name == 'pot-creation-date': + self.creation_date = _parse_datetime_header(value) + elif name == 'po-revision-date': + # Keep the value if it's not the default one + if 'YEAR' not in value: + self.revision_date = _parse_datetime_header(value) + + mime_headers = property(_get_mime_headers, _set_mime_headers, doc="""\ + The MIME headers of the catalog, used for the special ``msgid ""`` entry. + + The behavior of this property changes slightly depending on whether a locale + is set or not, the latter indicating that the catalog is actually a template + for actual translations. + + Here's an example of the output for such a catalog template: + + >>> from babel.dates import UTC + >>> created = datetime(1990, 4, 1, 15, 30, tzinfo=UTC) + >>> catalog = Catalog(project='Foobar', version='1.0', + ... creation_date=created) + >>> for name, value in catalog.mime_headers: + ... print('%s: %s' % (name, value)) + Project-Id-Version: Foobar 1.0 + Report-Msgid-Bugs-To: EMAIL@ADDRESS + POT-Creation-Date: 1990-04-01 15:30+0000 + PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE + Last-Translator: FULL NAME + Language-Team: LANGUAGE + MIME-Version: 1.0 + Content-Type: text/plain; charset=utf-8 + Content-Transfer-Encoding: 8bit + Generated-By: Babel ... 
+ + And here's an example of the output when the locale is set: + + >>> revised = datetime(1990, 8, 3, 12, 0, tzinfo=UTC) + >>> catalog = Catalog(locale='de_DE', project='Foobar', version='1.0', + ... creation_date=created, revision_date=revised, + ... last_translator='John Doe ', + ... language_team='de_DE ') + >>> for name, value in catalog.mime_headers: + ... print('%s: %s' % (name, value)) + Project-Id-Version: Foobar 1.0 + Report-Msgid-Bugs-To: EMAIL@ADDRESS + POT-Creation-Date: 1990-04-01 15:30+0000 + PO-Revision-Date: 1990-08-03 12:00+0000 + Last-Translator: John Doe + Language: de_DE + Language-Team: de_DE + Plural-Forms: nplurals=2; plural=(n != 1) + MIME-Version: 1.0 + Content-Type: text/plain; charset=utf-8 + Content-Transfer-Encoding: 8bit + Generated-By: Babel ... + + :type: `list` + """) + + @property + def num_plurals(self): + """The number of plurals used by the catalog or locale. + + >>> Catalog(locale='en').num_plurals + 2 + >>> Catalog(locale='ga').num_plurals + 5 + + :type: `int`""" + if self._num_plurals is None: + num = 2 + if self.locale: + num = get_plural(self.locale)[0] + self._num_plurals = num + return self._num_plurals + + @property + def plural_expr(self): + """The plural expression used by the catalog or locale. + + >>> Catalog(locale='en').plural_expr + '(n != 1)' + >>> Catalog(locale='ga').plural_expr + '(n==1 ? 0 : n==2 ? 1 : n>=3 && n<=6 ? 2 : n>=7 && n<=10 ? 3 : 4)' + >>> Catalog(locale='ding').plural_expr # unknown locale + '(n != 1)' + + :type: `string_types`""" + if self._plural_expr is None: + expr = '(n != 1)' + if self.locale: + expr = get_plural(self.locale)[1] + self._plural_expr = expr + return self._plural_expr + + @property + def plural_forms(self): + """Return the plural forms declaration for the locale. + + >>> Catalog(locale='en').plural_forms + 'nplurals=2; plural=(n != 1)' + >>> Catalog(locale='pt_BR').plural_forms + 'nplurals=2; plural=(n > 1)' + + :type: `str`""" + return 'nplurals=%s; plural=%s' % (self.num_plurals, self.plural_expr) + + def __contains__(self, id): + """Return whether the catalog has a message with the specified ID.""" + return self._key_for(id) in self._messages + + def __len__(self): + """The number of messages in the catalog. + + This does not include the special ``msgid ""`` entry.""" + return len(self._messages) + + def __iter__(self): + """Iterates through all the entries in the catalog, in the order they + were added, yielding a `Message` object for every entry. + + :rtype: ``iterator``""" + buf = [] + for name, value in self.mime_headers: + buf.append('%s: %s' % (name, value)) + flags = set() + if self.fuzzy: + flags |= {'fuzzy'} + yield Message(u'', '\n'.join(buf), flags=flags) + for key in self._messages: + yield self._messages[key] + + def __repr__(self): + locale = '' + if self.locale: + locale = ' %s' % self.locale + return '<%s %r%s>' % (type(self).__name__, self.domain, locale) + + def __delitem__(self, id): + """Delete the message with the specified ID.""" + self.delete(id) + + def __getitem__(self, id): + """Return the message with the specified ID. + + :param id: the message ID + """ + return self.get(id) + + def __setitem__(self, id, message): + """Add or update the message with the specified ID. + + >>> catalog = Catalog() + >>> catalog[u'foo'] = Message(u'foo') + >>> catalog[u'foo'] + + + If a message with that ID is already in the catalog, it is updated + to include the locations and flags of the new message. 
+ + >>> catalog = Catalog() + >>> catalog[u'foo'] = Message(u'foo', locations=[('main.py', 1)]) + >>> catalog[u'foo'].locations + [('main.py', 1)] + >>> catalog[u'foo'] = Message(u'foo', locations=[('utils.py', 5)]) + >>> catalog[u'foo'].locations + [('main.py', 1), ('utils.py', 5)] + + :param id: the message ID + :param message: the `Message` object + """ + assert isinstance(message, Message), 'expected a Message object' + key = self._key_for(id, message.context) + current = self._messages.get(key) + if current: + if message.pluralizable and not current.pluralizable: + # The new message adds pluralization + current.id = message.id + current.string = message.string + current.locations = list(distinct(current.locations + + message.locations)) + current.auto_comments = list(distinct(current.auto_comments + + message.auto_comments)) + current.user_comments = list(distinct(current.user_comments + + message.user_comments)) + current.flags |= message.flags + message = current + elif id == '': + # special treatment for the header message + self.mime_headers = _parse_header(message.string).items() + self.header_comment = '\n'.join([('# %s' % c).rstrip() for c + in message.user_comments]) + self.fuzzy = message.fuzzy + else: + if isinstance(id, (list, tuple)): + assert isinstance(message.string, (list, tuple)), \ + 'Expected sequence but got %s' % type(message.string) + self._messages[key] = message + + def add(self, id, string=None, locations=(), flags=(), auto_comments=(), + user_comments=(), previous_id=(), lineno=None, context=None): + """Add or update the message with the specified ID. + + >>> catalog = Catalog() + >>> catalog.add(u'foo') + + >>> catalog[u'foo'] + + + This method simply constructs a `Message` object with the given + arguments and invokes `__setitem__` with that object. + + :param id: the message ID, or a ``(singular, plural)`` tuple for + pluralizable messages + :param string: the translated message string, or a + ``(singular, plural)`` tuple for pluralizable messages + :param locations: a sequence of ``(filename, lineno)`` tuples + :param flags: a set or sequence of flags + :param auto_comments: a sequence of automatic comments + :param user_comments: a sequence of user comments + :param previous_id: the previous message ID, or a ``(singular, plural)`` + tuple for pluralizable messages + :param lineno: the line number on which the msgid line was found in the + PO file, if any + :param context: the message context + """ + message = Message(id, string, list(locations), flags, auto_comments, + user_comments, previous_id, lineno=lineno, + context=context) + self[id] = message + return message + + def check(self): + """Run various validation checks on the translations in the catalog. + + For every message which fails validation, this method yield a + ``(message, errors)`` tuple, where ``message`` is the `Message` object + and ``errors`` is a sequence of `TranslationError` objects. + + :rtype: ``iterator`` + """ + for message in self._messages.values(): + errors = message.check(catalog=self) + if errors: + yield message, errors + + def get(self, id, context=None): + """Return the message with the specified ID and context. + + :param id: the message ID + :param context: the message context, or ``None`` for no context + """ + return self._messages.get(self._key_for(id, context)) + + def delete(self, id, context=None): + """Delete the message with the specified ID and context. 
+ + :param id: the message ID + :param context: the message context, or ``None`` for no context + """ + key = self._key_for(id, context) + if key in self._messages: + del self._messages[key] + + def update(self, template, no_fuzzy_matching=False, update_header_comment=False, keep_user_comments=True): + """Update the catalog based on the given template catalog. + + >>> from babel.messages import Catalog + >>> template = Catalog() + >>> template.add('green', locations=[('main.py', 99)]) + + >>> template.add('blue', locations=[('main.py', 100)]) + + >>> template.add(('salad', 'salads'), locations=[('util.py', 42)]) + + >>> catalog = Catalog(locale='de_DE') + >>> catalog.add('blue', u'blau', locations=[('main.py', 98)]) + + >>> catalog.add('head', u'Kopf', locations=[('util.py', 33)]) + + >>> catalog.add(('salad', 'salads'), (u'Salat', u'Salate'), + ... locations=[('util.py', 38)]) + + + >>> catalog.update(template) + >>> len(catalog) + 3 + + >>> msg1 = catalog['green'] + >>> msg1.string + >>> msg1.locations + [('main.py', 99)] + + >>> msg2 = catalog['blue'] + >>> msg2.string + u'blau' + >>> msg2.locations + [('main.py', 100)] + + >>> msg3 = catalog['salad'] + >>> msg3.string + (u'Salat', u'Salate') + >>> msg3.locations + [('util.py', 42)] + + Messages that are in the catalog but not in the template are removed + from the main collection, but can still be accessed via the `obsolete` + member: + + >>> 'head' in catalog + False + >>> list(catalog.obsolete.values()) + [] + + :param template: the reference catalog, usually read from a POT file + :param no_fuzzy_matching: whether to use fuzzy matching of message IDs + """ + messages = self._messages + remaining = messages.copy() + self._messages = OrderedDict() + + # Prepare for fuzzy matching + fuzzy_candidates = [] + if not no_fuzzy_matching: + fuzzy_candidates = dict([ + (self._key_for(msgid), messages[msgid].context) + for msgid in messages if msgid and messages[msgid].string + ]) + fuzzy_matches = set() + + def _merge(message, oldkey, newkey): + message = message.clone() + fuzzy = False + if oldkey != newkey: + fuzzy = True + fuzzy_matches.add(oldkey) + oldmsg = messages.get(oldkey) + if isinstance(oldmsg.id, string_types): + message.previous_id = [oldmsg.id] + else: + message.previous_id = list(oldmsg.id) + else: + oldmsg = remaining.pop(oldkey, None) + message.string = oldmsg.string + + if keep_user_comments: + message.user_comments = list(distinct(oldmsg.user_comments)) + + if isinstance(message.id, (list, tuple)): + if not isinstance(message.string, (list, tuple)): + fuzzy = True + message.string = tuple( + [message.string] + ([u''] * (len(message.id) - 1)) + ) + elif len(message.string) != self.num_plurals: + fuzzy = True + message.string = tuple(message.string[:len(oldmsg.string)]) + elif isinstance(message.string, (list, tuple)): + fuzzy = True + message.string = message.string[0] + message.flags |= oldmsg.flags + if fuzzy: + message.flags |= {u'fuzzy'} + self[message.id] = message + + for message in template: + if message.id: + key = self._key_for(message.id, message.context) + if key in messages: + _merge(message, key, key) + else: + if not no_fuzzy_matching: + # do some fuzzy matching with difflib + if isinstance(key, tuple): + matchkey = key[0] # just the msgid, no context + else: + matchkey = key + matches = get_close_matches(matchkey.lower().strip(), + fuzzy_candidates.keys(), 1) + if matches: + newkey = matches[0] + newctxt = fuzzy_candidates[newkey] + if newctxt is not None: + newkey = newkey, newctxt + _merge(message, newkey, 
key) + continue + + self[message.id] = message + + for msgid in remaining: + if no_fuzzy_matching or msgid not in fuzzy_matches: + self.obsolete[msgid] = remaining[msgid] + + if update_header_comment: + # Allow the updated catalog's header to be rewritten based on the + # template's header + self.header_comment = template.header_comment + + # Make updated catalog's POT-Creation-Date equal to the template + # used to update the catalog + self.creation_date = template.creation_date + + def _key_for(self, id, context=None): + """The key for a message is just the singular ID even for pluralizable + messages, but is a ``(msgid, msgctxt)`` tuple for context-specific + messages. + """ + key = id + if isinstance(key, (list, tuple)): + key = id[0] + if context is not None: + key = (key, context) + return key diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/checkers.py b/dbt-env/lib/python3.8/site-packages/babel/messages/checkers.py new file mode 100644 index 0000000..cba911d --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/messages/checkers.py @@ -0,0 +1,173 @@ +# -*- coding: utf-8 -*- +""" + babel.messages.checkers + ~~~~~~~~~~~~~~~~~~~~~~~ + + Various routines that help with validation of translations. + + :since: version 0.9 + + :copyright: (c) 2013-2021 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +from babel.messages.catalog import TranslationError, PYTHON_FORMAT +from babel._compat import string_types, izip + + +#: list of format chars that are compatible to each other +_string_format_compatibilities = [ + {'i', 'd', 'u'}, + {'x', 'X'}, + {'f', 'F', 'g', 'G'} +] + + +def num_plurals(catalog, message): + """Verify the number of plurals in the translation.""" + if not message.pluralizable: + if not isinstance(message.string, string_types): + raise TranslationError("Found plural forms for non-pluralizable " + "message") + return + + # skip further tests if no catalog is provided. + elif catalog is None: + return + + msgstrs = message.string + if not isinstance(msgstrs, (list, tuple)): + msgstrs = (msgstrs,) + if len(msgstrs) != catalog.num_plurals: + raise TranslationError("Wrong number of plural forms (expected %d)" % + catalog.num_plurals) + + +def python_format(catalog, message): + """Verify the format string placeholders in the translation.""" + if 'python-format' not in message.flags: + return + msgids = message.id + if not isinstance(msgids, (list, tuple)): + msgids = (msgids,) + msgstrs = message.string + if not isinstance(msgstrs, (list, tuple)): + msgstrs = (msgstrs,) + + for msgid, msgstr in izip(msgids, msgstrs): + if msgstr: + _validate_format(msgid, msgstr) + + +def _validate_format(format, alternative): + """Test format string `alternative` against `format`. `format` can be the + msgid of a message and `alternative` one of the `msgstr`\\s. The two + arguments are not interchangeable as `alternative` may contain less + placeholders if `format` uses named placeholders. + + The behavior of this function is undefined if the string does not use + string formattings. + + If the string formatting of `alternative` is compatible to `format` the + function returns `None`, otherwise a `TranslationError` is raised. + + Examples for compatible format strings: + + >>> _validate_format('Hello %s!', 'Hallo %s!') + >>> _validate_format('Hello %i!', 'Hallo %d!') + + Example for an incompatible format strings: + + >>> _validate_format('Hello %(name)s!', 'Hallo %s!') + Traceback (most recent call last): + ... 
+ TranslationError: the format strings are of different kinds + + This function is used by the `python_format` checker. + + :param format: The original format string + :param alternative: The alternative format string that should be checked + against format + :raises TranslationError: on formatting errors + """ + + def _parse(string): + result = [] + for match in PYTHON_FORMAT.finditer(string): + name, format, typechar = match.groups() + if typechar == '%' and name is None: + continue + result.append((name, str(typechar))) + return result + + def _compatible(a, b): + if a == b: + return True + for set in _string_format_compatibilities: + if a in set and b in set: + return True + return False + + def _check_positional(results): + positional = None + for name, char in results: + if positional is None: + positional = name is None + else: + if (name is None) != positional: + raise TranslationError('format string mixes positional ' + 'and named placeholders') + return bool(positional) + + a, b = map(_parse, (format, alternative)) + + # now check if both strings are positional or named + a_positional, b_positional = map(_check_positional, (a, b)) + if a_positional and not b_positional and not b: + raise TranslationError('placeholders are incompatible') + elif a_positional != b_positional: + raise TranslationError('the format strings are of different kinds') + + # if we are operating on positional strings both must have the + # same number of format chars and those must be compatible + if a_positional: + if len(a) != len(b): + raise TranslationError('positional format placeholders are ' + 'unbalanced') + for idx, ((_, first), (_, second)) in enumerate(izip(a, b)): + if not _compatible(first, second): + raise TranslationError('incompatible format for placeholder ' + '%d: %r and %r are not compatible' % + (idx + 1, first, second)) + + # otherwise the second string must not have names the first one + # doesn't have and the types of those included must be compatible + else: + type_map = dict(a) + for name, typechar in b: + if name not in type_map: + raise TranslationError('unknown named placeholder %r' % name) + elif not _compatible(typechar, type_map[name]): + raise TranslationError('incompatible format for ' + 'placeholder %r: ' + '%r and %r are not compatible' % + (name, typechar, type_map[name])) + + +def _find_checkers(): + checkers = [] + try: + from pkg_resources import working_set + except ImportError: + pass + else: + for entry_point in working_set.iter_entry_points('babel.checkers'): + checkers.append(entry_point.load()) + if len(checkers) == 0: + # if pkg_resources is not available or no usable egg-info was found + # (see #230), just resort to hard-coded checkers + return [num_plurals, python_format] + return checkers + + +checkers = _find_checkers() diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/extract.py b/dbt-env/lib/python3.8/site-packages/babel/messages/extract.py new file mode 100644 index 0000000..6449776 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/messages/extract.py @@ -0,0 +1,645 @@ +# -*- coding: utf-8 -*- +""" + babel.messages.extract + ~~~~~~~~~~~~~~~~~~~~~~ + + Basic infrastructure for extracting localizable messages from source files. + + This module defines an extensible system for collecting localizable message + strings from a variety of sources. A native extractor for Python source + files is builtin, extractors for other sources can be added using very + simple plugins. 
+ + The main entry points into the extraction functionality are the functions + `extract_from_dir` and `extract_from_file`. + + :copyright: (c) 2013-2021 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" + +import os +from os.path import relpath +import sys +from tokenize import generate_tokens, COMMENT, NAME, OP, STRING + +from babel.util import parse_encoding, parse_future_flags, pathmatch +from babel._compat import PY2, text_type +from textwrap import dedent + + +GROUP_NAME = 'babel.extractors' + +DEFAULT_KEYWORDS = { + '_': None, + 'gettext': None, + 'ngettext': (1, 2), + 'ugettext': None, + 'ungettext': (1, 2), + 'dgettext': (2,), + 'dngettext': (2, 3), + 'N_': None, + 'pgettext': ((1, 'c'), 2), + 'npgettext': ((1, 'c'), 2, 3) +} + +DEFAULT_MAPPING = [('**.py', 'python')] + +empty_msgid_warning = ( + '%s: warning: Empty msgid. It is reserved by GNU gettext: gettext("") ' + 'returns the header entry with meta information, not the empty string.') + + +def _strip_comment_tags(comments, tags): + """Helper function for `extract` that strips comment tags from strings + in a list of comment lines. This functions operates in-place. + """ + def _strip(line): + for tag in tags: + if line.startswith(tag): + return line[len(tag):].strip() + return line + comments[:] = map(_strip, comments) + + +def extract_from_dir(dirname=None, method_map=DEFAULT_MAPPING, + options_map=None, keywords=DEFAULT_KEYWORDS, + comment_tags=(), callback=None, strip_comment_tags=False): + """Extract messages from any source files found in the given directory. + + This function generates tuples of the form ``(filename, lineno, message, + comments, context)``. + + Which extraction method is used per file is determined by the `method_map` + parameter, which maps extended glob patterns to extraction method names. + For example, the following is the default mapping: + + >>> method_map = [ + ... ('**.py', 'python') + ... ] + + This basically says that files with the filename extension ".py" at any + level inside the directory should be processed by the "python" extraction + method. Files that don't match any of the mapping patterns are ignored. See + the documentation of the `pathmatch` function for details on the pattern + syntax. + + The following extended mapping would also use the "genshi" extraction + method on any file in "templates" subdirectory: + + >>> method_map = [ + ... ('**/templates/**.*', 'genshi'), + ... ('**.py', 'python') + ... ] + + The dictionary provided by the optional `options_map` parameter augments + these mappings. It uses extended glob patterns as keys, and the values are + dictionaries mapping options names to option values (both strings). + + The glob patterns of the `options_map` do not necessarily need to be the + same as those used in the method mapping. For example, while all files in + the ``templates`` folders in an application may be Genshi applications, the + options for those files may differ based on extension: + + >>> options_map = { + ... '**/templates/**.txt': { + ... 'template_class': 'genshi.template:TextTemplate', + ... 'encoding': 'latin-1' + ... }, + ... '**/templates/**.html': { + ... 'include_attrs': '' + ... } + ... } + + :param dirname: the path to the directory to extract messages from. If + not given the current working directory is used. 
+ :param method_map: a list of ``(pattern, method)`` tuples that maps of + extraction method names to extended glob patterns + :param options_map: a dictionary of additional options (optional) + :param keywords: a dictionary mapping keywords (i.e. names of functions + that should be recognized as translation functions) to + tuples that specify which of their arguments contain + localizable strings + :param comment_tags: a list of tags of translator comments to search for + and include in the results + :param callback: a function that is called for every file that message are + extracted from, just before the extraction itself is + performed; the function is passed the filename, the name + of the extraction method and and the options dictionary as + positional arguments, in that order + :param strip_comment_tags: a flag that if set to `True` causes all comment + tags to be removed from the collected comments. + :see: `pathmatch` + """ + if dirname is None: + dirname = os.getcwd() + if options_map is None: + options_map = {} + + absname = os.path.abspath(dirname) + for root, dirnames, filenames in os.walk(absname): + dirnames[:] = [ + subdir for subdir in dirnames + if not (subdir.startswith('.') or subdir.startswith('_')) + ] + dirnames.sort() + filenames.sort() + for filename in filenames: + filepath = os.path.join(root, filename).replace(os.sep, '/') + + for message_tuple in check_and_call_extract_file( + filepath, + method_map, + options_map, + callback, + keywords, + comment_tags, + strip_comment_tags, + dirpath=absname, + ): + yield message_tuple + + +def check_and_call_extract_file(filepath, method_map, options_map, + callback, keywords, comment_tags, + strip_comment_tags, dirpath=None): + """Checks if the given file matches an extraction method mapping, and if so, calls extract_from_file. + + Note that the extraction method mappings are based relative to dirpath. + So, given an absolute path to a file `filepath`, we want to check using + just the relative path from `dirpath` to `filepath`. + + Yields 5-tuples (filename, lineno, messages, comments, context). + + :param filepath: An absolute path to a file that exists. + :param method_map: a list of ``(pattern, method)`` tuples that maps of + extraction method names to extended glob patterns + :param options_map: a dictionary of additional options (optional) + :param callback: a function that is called for every file that message are + extracted from, just before the extraction itself is + performed; the function is passed the filename, the name + of the extraction method and and the options dictionary as + positional arguments, in that order + :param keywords: a dictionary mapping keywords (i.e. names of functions + that should be recognized as translation functions) to + tuples that specify which of their arguments contain + localizable strings + :param comment_tags: a list of tags of translator comments to search for + and include in the results + :param strip_comment_tags: a flag that if set to `True` causes all comment + tags to be removed from the collected comments. + :param dirpath: the path to the directory to extract messages from. 
+ :return: iterable of 5-tuples (filename, lineno, messages, comments, context) + :rtype: Iterable[tuple[str, int, str|tuple[str], list[str], str|None] + """ + # filename is the relative path from dirpath to the actual file + filename = relpath(filepath, dirpath) + + for pattern, method in method_map: + if not pathmatch(pattern, filename): + continue + + options = {} + for opattern, odict in options_map.items(): + if pathmatch(opattern, filename): + options = odict + if callback: + callback(filename, method, options) + for message_tuple in extract_from_file( + method, filepath, + keywords=keywords, + comment_tags=comment_tags, + options=options, + strip_comment_tags=strip_comment_tags + ): + yield (filename, ) + message_tuple + + break + + +def extract_from_file(method, filename, keywords=DEFAULT_KEYWORDS, + comment_tags=(), options=None, strip_comment_tags=False): + """Extract messages from a specific file. + + This function returns a list of tuples of the form ``(lineno, message, comments, context)``. + + :param filename: the path to the file to extract messages from + :param method: a string specifying the extraction method (.e.g. "python") + :param keywords: a dictionary mapping keywords (i.e. names of functions + that should be recognized as translation functions) to + tuples that specify which of their arguments contain + localizable strings + :param comment_tags: a list of translator tags to search for and include + in the results + :param strip_comment_tags: a flag that if set to `True` causes all comment + tags to be removed from the collected comments. + :param options: a dictionary of additional options (optional) + :returns: list of tuples of the form ``(lineno, message, comments, context)`` + :rtype: list[tuple[int, str|tuple[str], list[str], str|None] + """ + if method == 'ignore': + return [] + + with open(filename, 'rb') as fileobj: + return list(extract(method, fileobj, keywords, comment_tags, + options, strip_comment_tags)) + + +def extract(method, fileobj, keywords=DEFAULT_KEYWORDS, comment_tags=(), + options=None, strip_comment_tags=False): + """Extract messages from the given file-like object using the specified + extraction method. + + This function returns tuples of the form ``(lineno, message, comments, context)``. + + The implementation dispatches the actual extraction to plugins, based on the + value of the ``method`` parameter. + + >>> source = b'''# foo module + ... def run(argv): + ... print(_('Hello, world!')) + ... ''' + + >>> from babel._compat import BytesIO + >>> for message in extract('python', BytesIO(source)): + ... print(message) + (3, u'Hello, world!', [], None) + + :param method: an extraction method (a callable), or + a string specifying the extraction method (.e.g. "python"); + if this is a simple name, the extraction function will be + looked up by entry point; if it is an explicit reference + to a function (of the form ``package.module:funcname`` or + ``package.module.funcname``), the corresponding function + will be imported and used + :param fileobj: the file-like object the messages should be extracted from + :param keywords: a dictionary mapping keywords (i.e. 
names of functions + that should be recognized as translation functions) to + tuples that specify which of their arguments contain + localizable strings + :param comment_tags: a list of translator tags to search for and include + in the results + :param options: a dictionary of additional options (optional) + :param strip_comment_tags: a flag that if set to `True` causes all comment + tags to be removed from the collected comments. + :raise ValueError: if the extraction method is not registered + :returns: iterable of tuples of the form ``(lineno, message, comments, context)`` + :rtype: Iterable[tuple[int, str|tuple[str], list[str], str|None] + """ + func = None + if callable(method): + func = method + elif ':' in method or '.' in method: + if ':' not in method: + lastdot = method.rfind('.') + module, attrname = method[:lastdot], method[lastdot + 1:] + else: + module, attrname = method.split(':', 1) + func = getattr(__import__(module, {}, {}, [attrname]), attrname) + else: + try: + from pkg_resources import working_set + except ImportError: + pass + else: + for entry_point in working_set.iter_entry_points(GROUP_NAME, + method): + func = entry_point.load(require=True) + break + if func is None: + # if pkg_resources is not available or no usable egg-info was found + # (see #230), we resort to looking up the builtin extractors + # directly + builtin = { + 'ignore': extract_nothing, + 'python': extract_python, + 'javascript': extract_javascript + } + func = builtin.get(method) + + if func is None: + raise ValueError('Unknown extraction method %r' % method) + + results = func(fileobj, keywords.keys(), comment_tags, + options=options or {}) + + for lineno, funcname, messages, comments in results: + if funcname: + spec = keywords[funcname] or (1,) + else: + spec = (1,) + if not isinstance(messages, (list, tuple)): + messages = [messages] + if not messages: + continue + + # Validate the messages against the keyword's specification + context = None + msgs = [] + invalid = False + # last_index is 1 based like the keyword spec + last_index = len(messages) + for index in spec: + if isinstance(index, tuple): + context = messages[index[0] - 1] + continue + if last_index < index: + # Not enough arguments + invalid = True + break + message = messages[index - 1] + if message is None: + invalid = True + break + msgs.append(message) + if invalid: + continue + + # keyword spec indexes are 1 based, therefore '-1' + if isinstance(spec[0], tuple): + # context-aware *gettext method + first_msg_index = spec[1] - 1 + else: + first_msg_index = spec[0] - 1 + if not messages[first_msg_index]: + # An empty string msgid isn't valid, emit a warning + where = '%s:%i' % (hasattr(fileobj, 'name') and + fileobj.name or '(unknown)', lineno) + sys.stderr.write((empty_msgid_warning % where) + '\n') + continue + + messages = tuple(msgs) + if len(messages) == 1: + messages = messages[0] + + if strip_comment_tags: + _strip_comment_tags(comments, comment_tags) + yield lineno, messages, comments, context + + +def extract_nothing(fileobj, keywords, comment_tags, options): + """Pseudo extractor that does not actually extract anything, but simply + returns an empty list. + """ + return [] + + +def extract_python(fileobj, keywords, comment_tags, options): + """Extract messages from Python source code. + + It returns an iterator yielding tuples in the following form ``(lineno, + funcname, message, comments)``. 
+ + :param fileobj: the seekable, file-like object the messages should be + extracted from + :param keywords: a list of keywords (i.e. function names) that should be + recognized as translation functions + :param comment_tags: a list of translator tags to search for and include + in the results + :param options: a dictionary of additional options (optional) + :rtype: ``iterator`` + """ + funcname = lineno = message_lineno = None + call_stack = -1 + buf = [] + messages = [] + translator_comments = [] + in_def = in_translator_comments = False + comment_tag = None + + encoding = parse_encoding(fileobj) or options.get('encoding', 'UTF-8') + future_flags = parse_future_flags(fileobj, encoding) + + if PY2: + next_line = fileobj.readline + else: + next_line = lambda: fileobj.readline().decode(encoding) + + tokens = generate_tokens(next_line) + for tok, value, (lineno, _), _, _ in tokens: + if call_stack == -1 and tok == NAME and value in ('def', 'class'): + in_def = True + elif tok == OP and value == '(': + if in_def: + # Avoid false positives for declarations such as: + # def gettext(arg='message'): + in_def = False + continue + if funcname: + message_lineno = lineno + call_stack += 1 + elif in_def and tok == OP and value == ':': + # End of a class definition without parens + in_def = False + continue + elif call_stack == -1 and tok == COMMENT: + # Strip the comment token from the line + if PY2: + value = value.decode(encoding) + value = value[1:].strip() + if in_translator_comments and \ + translator_comments[-1][0] == lineno - 1: + # We're already inside a translator comment, continue appending + translator_comments.append((lineno, value)) + continue + # If execution reaches this point, let's see if comment line + # starts with one of the comment tags + for comment_tag in comment_tags: + if value.startswith(comment_tag): + in_translator_comments = True + translator_comments.append((lineno, value)) + break + elif funcname and call_stack == 0: + nested = (tok == NAME and value in keywords) + if (tok == OP and value == ')') or nested: + if buf: + messages.append(''.join(buf)) + del buf[:] + else: + messages.append(None) + + if len(messages) > 1: + messages = tuple(messages) + else: + messages = messages[0] + # Comments don't apply unless they immediately preceed the + # message + if translator_comments and \ + translator_comments[-1][0] < message_lineno - 1: + translator_comments = [] + + yield (message_lineno, funcname, messages, + [comment[1] for comment in translator_comments]) + + funcname = lineno = message_lineno = None + call_stack = -1 + messages = [] + translator_comments = [] + in_translator_comments = False + if nested: + funcname = value + elif tok == STRING: + # Unwrap quotes in a safe manner, maintaining the string's + # encoding + # https://sourceforge.net/tracker/?func=detail&atid=355470& + # aid=617979&group_id=5470 + code = compile('# coding=%s\n%s' % (str(encoding), value), + '', 'eval', future_flags) + value = eval(code, {'__builtins__': {}}, {}) + if PY2 and not isinstance(value, text_type): + value = value.decode(encoding) + buf.append(value) + elif tok == OP and value == ',': + if buf: + messages.append(''.join(buf)) + del buf[:] + else: + messages.append(None) + if translator_comments: + # We have translator comments, and since we're on a + # comma(,) user is allowed to break into a new line + # Let's increase the last comment's lineno in order + # for the comment to still be a valid one + old_lineno, old_comment = translator_comments.pop() + 
translator_comments.append((old_lineno + 1, old_comment)) + elif call_stack > 0 and tok == OP and value == ')': + call_stack -= 1 + elif funcname and call_stack == -1: + funcname = None + elif tok == NAME and value in keywords: + funcname = value + + +def extract_javascript(fileobj, keywords, comment_tags, options): + """Extract messages from JavaScript source code. + + :param fileobj: the seekable, file-like object the messages should be + extracted from + :param keywords: a list of keywords (i.e. function names) that should be + recognized as translation functions + :param comment_tags: a list of translator tags to search for and include + in the results + :param options: a dictionary of additional options (optional) + Supported options are: + * `jsx` -- set to false to disable JSX/E4X support. + * `template_string` -- set to false to disable ES6 + template string support. + """ + from babel.messages.jslexer import Token, tokenize, unquote_string + funcname = message_lineno = None + messages = [] + last_argument = None + translator_comments = [] + concatenate_next = False + encoding = options.get('encoding', 'utf-8') + last_token = None + call_stack = -1 + dotted = any('.' in kw for kw in keywords) + + for token in tokenize( + fileobj.read().decode(encoding), + jsx=options.get("jsx", True), + template_string=options.get("template_string", True), + dotted=dotted + ): + if ( # Turn keyword`foo` expressions into keyword("foo") calls: + funcname and # have a keyword... + (last_token and last_token.type == 'name') and # we've seen nothing after the keyword... + token.type == 'template_string' # this is a template string + ): + message_lineno = token.lineno + messages = [unquote_string(token.value)] + call_stack = 0 + token = Token('operator', ')', token.lineno) + + if token.type == 'operator' and token.value == '(': + if funcname: + message_lineno = token.lineno + call_stack += 1 + + elif call_stack == -1 and token.type == 'linecomment': + value = token.value[2:].strip() + if translator_comments and \ + translator_comments[-1][0] == token.lineno - 1: + translator_comments.append((token.lineno, value)) + continue + + for comment_tag in comment_tags: + if value.startswith(comment_tag): + translator_comments.append((token.lineno, value.strip())) + break + + elif token.type == 'multilinecomment': + # only one multi-line comment may preceed a translation + translator_comments = [] + value = token.value[2:-2].strip() + for comment_tag in comment_tags: + if value.startswith(comment_tag): + lines = value.splitlines() + if lines: + lines[0] = lines[0].strip() + lines[1:] = dedent('\n'.join(lines[1:])).splitlines() + for offset, line in enumerate(lines): + translator_comments.append((token.lineno + offset, + line)) + break + + elif funcname and call_stack == 0: + if token.type == 'operator' and token.value == ')': + if last_argument is not None: + messages.append(last_argument) + if len(messages) > 1: + messages = tuple(messages) + elif messages: + messages = messages[0] + else: + messages = None + + # Comments don't apply unless they immediately precede the + # message + if translator_comments and \ + translator_comments[-1][0] < message_lineno - 1: + translator_comments = [] + + if messages is not None: + yield (message_lineno, funcname, messages, + [comment[1] for comment in translator_comments]) + + funcname = message_lineno = last_argument = None + concatenate_next = False + translator_comments = [] + messages = [] + call_stack = -1 + + elif token.type in ('string', 'template_string'): + new_value 
= unquote_string(token.value) + if concatenate_next: + last_argument = (last_argument or '') + new_value + concatenate_next = False + else: + last_argument = new_value + + elif token.type == 'operator': + if token.value == ',': + if last_argument is not None: + messages.append(last_argument) + last_argument = None + else: + messages.append(None) + concatenate_next = False + elif token.value == '+': + concatenate_next = True + + elif call_stack > 0 and token.type == 'operator' \ + and token.value == ')': + call_stack -= 1 + + elif funcname and call_stack == -1: + funcname = None + + elif call_stack == -1 and token.type == 'name' and \ + token.value in keywords and \ + (last_token is None or last_token.type != 'name' or + last_token.value != 'function'): + funcname = token.value + + last_token = token diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/frontend.py b/dbt-env/lib/python3.8/site-packages/babel/messages/frontend.py new file mode 100644 index 0000000..c5eb1de --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/messages/frontend.py @@ -0,0 +1,1041 @@ +# -*- coding: utf-8 -*- +""" + babel.messages.frontend + ~~~~~~~~~~~~~~~~~~~~~~~ + + Frontends for the message extraction functionality. + + :copyright: (c) 2013-2021 by the Babel Team. + :license: BSD, see LICENSE for more details. +""" +from __future__ import print_function + +import logging +import optparse +import os +import re +import shutil +import sys +import tempfile +from collections import OrderedDict +from datetime import datetime +from locale import getpreferredencoding + +from babel import __version__ as VERSION +from babel import Locale, localedata +from babel._compat import StringIO, string_types, text_type, PY2 +from babel.core import UnknownLocaleError +from babel.messages.catalog import Catalog +from babel.messages.extract import DEFAULT_KEYWORDS, DEFAULT_MAPPING, check_and_call_extract_file, extract_from_dir +from babel.messages.mofile import write_mo +from babel.messages.pofile import read_po, write_po +from babel.util import LOCALTZ +from distutils import log as distutils_log +from distutils.cmd import Command as _Command +from distutils.errors import DistutilsOptionError, DistutilsSetupError + +try: + from ConfigParser import RawConfigParser +except ImportError: + from configparser import RawConfigParser + + +po_file_read_mode = ('rU' if PY2 else 'r') + + +def listify_value(arg, split=None): + """ + Make a list out of an argument. + + Values from `distutils` argument parsing are always single strings; + values from `optparse` parsing may be lists of strings that may need + to be further split. + + No matter the input, this function returns a flat list of whitespace-trimmed + strings, with `None` values filtered out. + + >>> listify_value("foo bar") + ['foo', 'bar'] + >>> listify_value(["foo bar"]) + ['foo', 'bar'] + >>> listify_value([["foo"], "bar"]) + ['foo', 'bar'] + >>> listify_value([["foo"], ["bar", None, "foo"]]) + ['foo', 'bar', 'foo'] + >>> listify_value("foo, bar, quux", ",") + ['foo', 'bar', 'quux'] + + :param arg: A string or a list of strings + :param split: The argument to pass to `str.split()`. 
+ :return: + """ + out = [] + + if not isinstance(arg, (list, tuple)): + arg = [arg] + + for val in arg: + if val is None: + continue + if isinstance(val, (list, tuple)): + out.extend(listify_value(val, split=split)) + continue + out.extend(s.strip() for s in text_type(val).split(split)) + assert all(isinstance(val, string_types) for val in out) + return out + + +class Command(_Command): + # This class is a small shim between Distutils commands and + # optparse option parsing in the frontend command line. + + #: Option name to be input as `args` on the script command line. + as_args = None + + #: Options which allow multiple values. + #: This is used by the `optparse` transmogrification code. + multiple_value_options = () + + #: Options which are booleans. + #: This is used by the `optparse` transmogrification code. + # (This is actually used by distutils code too, but is never + # declared in the base class.) + boolean_options = () + + #: Option aliases, to retain standalone command compatibility. + #: Distutils does not support option aliases, but optparse does. + #: This maps the distutils argument name to an iterable of aliases + #: that are usable with optparse. + option_aliases = {} + + #: Choices for options that needed to be restricted to specific + #: list of choices. + option_choices = {} + + #: Log object. To allow replacement in the script command line runner. + log = distutils_log + + def __init__(self, dist=None): + # A less strict version of distutils' `__init__`. + self.distribution = dist + self.initialize_options() + self._dry_run = None + self.verbose = False + self.force = None + self.help = 0 + self.finalized = 0 + + +class compile_catalog(Command): + """Catalog compilation command for use in ``setup.py`` scripts. + + If correctly installed, this command is available to Setuptools-using + setup scripts automatically. For projects using plain old ``distutils``, + the command needs to be registered explicitly in ``setup.py``:: + + from babel.messages.frontend import compile_catalog + + setup( + ... + cmdclass = {'compile_catalog': compile_catalog} + ) + + .. versionadded:: 0.9 + """ + + description = 'compile message catalogs to binary MO files' + user_options = [ + ('domain=', 'D', + "domains of PO files (space separated list, default 'messages')"), + ('directory=', 'd', + 'path to base directory containing the catalogs'), + ('input-file=', 'i', + 'name of the input file'), + ('output-file=', 'o', + "name of the output file (default " + "'//LC_MESSAGES/.mo')"), + ('locale=', 'l', + 'locale of the catalog to compile'), + ('use-fuzzy', 'f', + 'also include fuzzy translations'), + ('statistics', None, + 'print statistics about translations') + ] + boolean_options = ['use-fuzzy', 'statistics'] + + def initialize_options(self): + self.domain = 'messages' + self.directory = None + self.input_file = None + self.output_file = None + self.locale = None + self.use_fuzzy = False + self.statistics = False + + def finalize_options(self): + self.domain = listify_value(self.domain) + if not self.input_file and not self.directory: + raise DistutilsOptionError('you must specify either the input file ' + 'or the base directory') + if not self.output_file and not self.directory: + raise DistutilsOptionError('you must specify either the output file ' + 'or the base directory') + + def run(self): + n_errors = 0 + for domain in self.domain: + for catalog, errors in self._run_domain(domain).items(): + n_errors += len(errors) + if n_errors: + self.log.error('%d errors encountered.' 
% n_errors) + return (1 if n_errors else 0) + + def _run_domain(self, domain): + po_files = [] + mo_files = [] + + if not self.input_file: + if self.locale: + po_files.append((self.locale, + os.path.join(self.directory, self.locale, + 'LC_MESSAGES', + domain + '.po'))) + mo_files.append(os.path.join(self.directory, self.locale, + 'LC_MESSAGES', + domain + '.mo')) + else: + for locale in os.listdir(self.directory): + po_file = os.path.join(self.directory, locale, + 'LC_MESSAGES', domain + '.po') + if os.path.exists(po_file): + po_files.append((locale, po_file)) + mo_files.append(os.path.join(self.directory, locale, + 'LC_MESSAGES', + domain + '.mo')) + else: + po_files.append((self.locale, self.input_file)) + if self.output_file: + mo_files.append(self.output_file) + else: + mo_files.append(os.path.join(self.directory, self.locale, + 'LC_MESSAGES', + domain + '.mo')) + + if not po_files: + raise DistutilsOptionError('no message catalogs found') + + catalogs_and_errors = {} + + for idx, (locale, po_file) in enumerate(po_files): + mo_file = mo_files[idx] + with open(po_file, 'rb') as infile: + catalog = read_po(infile, locale) + + if self.statistics: + translated = 0 + for message in list(catalog)[1:]: + if message.string: + translated += 1 + percentage = 0 + if len(catalog): + percentage = translated * 100 // len(catalog) + self.log.info( + '%d of %d messages (%d%%) translated in %s', + translated, len(catalog), percentage, po_file + ) + + if catalog.fuzzy and not self.use_fuzzy: + self.log.info('catalog %s is marked as fuzzy, skipping', po_file) + continue + + catalogs_and_errors[catalog] = catalog_errors = list(catalog.check()) + for message, errors in catalog_errors: + for error in errors: + self.log.error( + 'error: %s:%d: %s', po_file, message.lineno, error + ) + + self.log.info('compiling catalog %s to %s', po_file, mo_file) + + with open(mo_file, 'wb') as outfile: + write_mo(outfile, catalog, use_fuzzy=self.use_fuzzy) + + return catalogs_and_errors + + +class extract_messages(Command): + """Message extraction command for use in ``setup.py`` scripts. + + If correctly installed, this command is available to Setuptools-using + setup scripts automatically. For projects using plain old ``distutils``, + the command needs to be registered explicitly in ``setup.py``:: + + from babel.messages.frontend import extract_messages + + setup( + ... + cmdclass = {'extract_messages': extract_messages} + ) + """ + + description = 'extract localizable strings from the project code' + user_options = [ + ('charset=', None, + 'charset to use in the output file (default "utf-8")'), + ('keywords=', 'k', + 'space-separated list of keywords to look for in addition to the ' + 'defaults (may be repeated multiple times)'), + ('no-default-keywords', None, + 'do not include the default keywords'), + ('mapping-file=', 'F', + 'path to the mapping configuration file'), + ('no-location', None, + 'do not include location comments with filename and line number'), + ('add-location=', None, + 'location lines format. If it is not given or "full", it generates ' + 'the lines with both file name and line number. If it is "file", ' + 'the line number part is omitted. 
If it is "never", it completely ' + 'suppresses the lines (same as --no-location).'), + ('omit-header', None, + 'do not include msgid "" entry in header'), + ('output-file=', 'o', + 'name of the output file'), + ('width=', 'w', + 'set output line width (default 76)'), + ('no-wrap', None, + 'do not break long message lines, longer than the output line width, ' + 'into several lines'), + ('sort-output', None, + 'generate sorted output (default False)'), + ('sort-by-file', None, + 'sort output by file location (default False)'), + ('msgid-bugs-address=', None, + 'set report address for msgid'), + ('copyright-holder=', None, + 'set copyright holder in output'), + ('project=', None, + 'set project name in output'), + ('version=', None, + 'set project version in output'), + ('add-comments=', 'c', + 'place comment block with TAG (or those preceding keyword lines) in ' + 'output file. Separate multiple TAGs with commas(,)'), # TODO: Support repetition of this argument + ('strip-comments', 's', + 'strip the comment TAGs from the comments.'), + ('input-paths=', None, + 'files or directories that should be scanned for messages. Separate multiple ' + 'files or directories with commas(,)'), # TODO: Support repetition of this argument + ('input-dirs=', None, # TODO (3.x): Remove me. + 'alias for input-paths (does allow files as well as directories).'), + ] + boolean_options = [ + 'no-default-keywords', 'no-location', 'omit-header', 'no-wrap', + 'sort-output', 'sort-by-file', 'strip-comments' + ] + as_args = 'input-paths' + multiple_value_options = ('add-comments', 'keywords') + option_aliases = { + 'keywords': ('--keyword',), + 'mapping-file': ('--mapping',), + 'output-file': ('--output',), + 'strip-comments': ('--strip-comment-tags',), + } + option_choices = { + 'add-location': ('full', 'file', 'never',), + } + + def initialize_options(self): + self.charset = 'utf-8' + self.keywords = None + self.no_default_keywords = False + self.mapping_file = None + self.no_location = False + self.add_location = None + self.omit_header = False + self.output_file = None + self.input_dirs = None + self.input_paths = None + self.width = None + self.no_wrap = False + self.sort_output = False + self.sort_by_file = False + self.msgid_bugs_address = None + self.copyright_holder = None + self.project = None + self.version = None + self.add_comments = None + self.strip_comments = False + self.include_lineno = True + + def finalize_options(self): + if self.input_dirs: + if not self.input_paths: + self.input_paths = self.input_dirs + else: + raise DistutilsOptionError( + 'input-dirs and input-paths are mutually exclusive' + ) + + if self.no_default_keywords: + keywords = {} + else: + keywords = DEFAULT_KEYWORDS.copy() + + keywords.update(parse_keywords(listify_value(self.keywords))) + + self.keywords = keywords + + if not self.keywords: + raise DistutilsOptionError('you must specify new keywords if you ' + 'disable the default ones') + + if not self.output_file: + raise DistutilsOptionError('no output file specified') + if self.no_wrap and self.width: + raise DistutilsOptionError("'--no-wrap' and '--width' are mutually " + "exclusive") + if not self.no_wrap and not self.width: + self.width = 76 + elif self.width is not None: + self.width = int(self.width) + + if self.sort_output and self.sort_by_file: + raise DistutilsOptionError("'--sort-output' and '--sort-by-file' " + "are mutually exclusive") + + if self.input_paths: + if isinstance(self.input_paths, string_types): + self.input_paths = re.split(r',\s*', self.input_paths) + 
elif self.distribution is not None: + self.input_paths = dict.fromkeys([ + k.split('.', 1)[0] + for k in (self.distribution.packages or ()) + ]).keys() + else: + self.input_paths = [] + + if not self.input_paths: + raise DistutilsOptionError("no input files or directories specified") + + for path in self.input_paths: + if not os.path.exists(path): + raise DistutilsOptionError("Input path: %s does not exist" % path) + + self.add_comments = listify_value(self.add_comments or (), ",") + + if self.distribution: + if not self.project: + self.project = self.distribution.get_name() + if not self.version: + self.version = self.distribution.get_version() + + if self.add_location == 'never': + self.no_location = True + elif self.add_location == 'file': + self.include_lineno = False + + def run(self): + mappings = self._get_mappings() + with open(self.output_file, 'wb') as outfile: + catalog = Catalog(project=self.project, + version=self.version, + msgid_bugs_address=self.msgid_bugs_address, + copyright_holder=self.copyright_holder, + charset=self.charset) + + for path, method_map, options_map in mappings: + def callback(filename, method, options): + if method == 'ignore': + return + + # If we explicitly provide a full filepath, just use that. + # Otherwise, path will be the directory path and filename + # is the relative path from that dir to the file. + # So we can join those to get the full filepath. + if os.path.isfile(path): + filepath = path + else: + filepath = os.path.normpath(os.path.join(path, filename)) + + optstr = '' + if options: + optstr = ' (%s)' % ', '.join(['%s="%s"' % (k, v) for + k, v in options.items()]) + self.log.info('extracting messages from %s%s', filepath, optstr) + + if os.path.isfile(path): + current_dir = os.getcwd() + extracted = check_and_call_extract_file( + path, method_map, options_map, + callback, self.keywords, self.add_comments, + self.strip_comments, current_dir + ) + else: + extracted = extract_from_dir( + path, method_map, options_map, + keywords=self.keywords, + comment_tags=self.add_comments, + callback=callback, + strip_comment_tags=self.strip_comments + ) + for filename, lineno, message, comments, context in extracted: + if os.path.isfile(path): + filepath = filename # already normalized + else: + filepath = os.path.normpath(os.path.join(path, filename)) + + catalog.add(message, None, [(filepath, lineno)], + auto_comments=comments, context=context) + + self.log.info('writing PO template file to %s', self.output_file) + write_po(outfile, catalog, width=self.width, + no_location=self.no_location, + omit_header=self.omit_header, + sort_output=self.sort_output, + sort_by_file=self.sort_by_file, + include_lineno=self.include_lineno) + + def _get_mappings(self): + mappings = [] + + if self.mapping_file: + with open(self.mapping_file, po_file_read_mode) as fileobj: + method_map, options_map = parse_mapping(fileobj) + for path in self.input_paths: + mappings.append((path, method_map, options_map)) + + elif getattr(self.distribution, 'message_extractors', None): + message_extractors = self.distribution.message_extractors + for path, mapping in message_extractors.items(): + if isinstance(mapping, string_types): + method_map, options_map = parse_mapping(StringIO(mapping)) + else: + method_map, options_map = [], {} + for pattern, method, options in mapping: + method_map.append((pattern, method)) + options_map[pattern] = options or {} + mappings.append((path, method_map, options_map)) + + else: + for path in self.input_paths: + mappings.append((path, DEFAULT_MAPPING, 
{})) + + return mappings + + +def check_message_extractors(dist, name, value): + """Validate the ``message_extractors`` keyword argument to ``setup()``. + + :param dist: the distutils/setuptools ``Distribution`` object + :param name: the name of the keyword argument (should always be + "message_extractors") + :param value: the value of the keyword argument + :raise `DistutilsSetupError`: if the value is not valid + """ + assert name == 'message_extractors' + if not isinstance(value, dict): + raise DistutilsSetupError('the value of the "message_extractors" ' + 'parameter must be a dictionary') + + +class init_catalog(Command): + """New catalog initialization command for use in ``setup.py`` scripts. + + If correctly installed, this command is available to Setuptools-using + setup scripts automatically. For projects using plain old ``distutils``, + the command needs to be registered explicitly in ``setup.py``:: + + from babel.messages.frontend import init_catalog + + setup( + ... + cmdclass = {'init_catalog': init_catalog} + ) + """ + + description = 'create a new catalog based on a POT file' + user_options = [ + ('domain=', 'D', + "domain of PO file (default 'messages')"), + ('input-file=', 'i', + 'name of the input file'), + ('output-dir=', 'd', + 'path to output directory'), + ('output-file=', 'o', + "name of the output file (default " + "'//LC_MESSAGES/.po')"), + ('locale=', 'l', + 'locale for the new localized catalog'), + ('width=', 'w', + 'set output line width (default 76)'), + ('no-wrap', None, + 'do not break long message lines, longer than the output line width, ' + 'into several lines'), + ] + boolean_options = ['no-wrap'] + + def initialize_options(self): + self.output_dir = None + self.output_file = None + self.input_file = None + self.locale = None + self.domain = 'messages' + self.no_wrap = False + self.width = None + + def finalize_options(self): + if not self.input_file: + raise DistutilsOptionError('you must specify the input file') + + if not self.locale: + raise DistutilsOptionError('you must provide a locale for the ' + 'new catalog') + try: + self._locale = Locale.parse(self.locale) + except UnknownLocaleError as e: + raise DistutilsOptionError(e) + + if not self.output_file and not self.output_dir: + raise DistutilsOptionError('you must specify the output directory') + if not self.output_file: + self.output_file = os.path.join(self.output_dir, self.locale, + 'LC_MESSAGES', self.domain + '.po') + + if not os.path.exists(os.path.dirname(self.output_file)): + os.makedirs(os.path.dirname(self.output_file)) + if self.no_wrap and self.width: + raise DistutilsOptionError("'--no-wrap' and '--width' are mutually " + "exclusive") + if not self.no_wrap and not self.width: + self.width = 76 + elif self.width is not None: + self.width = int(self.width) + + def run(self): + self.log.info( + 'creating catalog %s based on %s', self.output_file, self.input_file + ) + + with open(self.input_file, 'rb') as infile: + # Although reading from the catalog template, read_po must be fed + # the locale in order to correctly calculate plurals + catalog = read_po(infile, locale=self.locale) + + catalog.locale = self._locale + catalog.revision_date = datetime.now(LOCALTZ) + catalog.fuzzy = False + + with open(self.output_file, 'wb') as outfile: + write_po(outfile, catalog, width=self.width) + + +class update_catalog(Command): + """Catalog merging command for use in ``setup.py`` scripts. + + If correctly installed, this command is available to Setuptools-using + setup scripts automatically. 
For projects using plain old ``distutils``, + the command needs to be registered explicitly in ``setup.py``:: + + from babel.messages.frontend import update_catalog + + setup( + ... + cmdclass = {'update_catalog': update_catalog} + ) + + .. versionadded:: 0.9 + """ + + description = 'update message catalogs from a POT file' + user_options = [ + ('domain=', 'D', + "domain of PO file (default 'messages')"), + ('input-file=', 'i', + 'name of the input file'), + ('output-dir=', 'd', + 'path to base directory containing the catalogs'), + ('output-file=', 'o', + "name of the output file (default " + "'//LC_MESSAGES/.po')"), + ('omit-header', None, + "do not include msgid "" entry in header"), + ('locale=', 'l', + 'locale of the catalog to compile'), + ('width=', 'w', + 'set output line width (default 76)'), + ('no-wrap', None, + 'do not break long message lines, longer than the output line width, ' + 'into several lines'), + ('ignore-obsolete=', None, + 'whether to omit obsolete messages from the output'), + ('no-fuzzy-matching', 'N', + 'do not use fuzzy matching'), + ('update-header-comment', None, + 'update target header comment'), + ('previous', None, + 'keep previous msgids of translated messages'), + ] + boolean_options = [ + 'omit-header', 'no-wrap', 'ignore-obsolete', 'no-fuzzy-matching', + 'previous', 'update-header-comment', + ] + + def initialize_options(self): + self.domain = 'messages' + self.input_file = None + self.output_dir = None + self.output_file = None + self.omit_header = False + self.locale = None + self.width = None + self.no_wrap = False + self.ignore_obsolete = False + self.no_fuzzy_matching = False + self.update_header_comment = False + self.previous = False + + def finalize_options(self): + if not self.input_file: + raise DistutilsOptionError('you must specify the input file') + if not self.output_file and not self.output_dir: + raise DistutilsOptionError('you must specify the output file or ' + 'directory') + if self.output_file and not self.locale: + raise DistutilsOptionError('you must specify the locale') + if self.no_wrap and self.width: + raise DistutilsOptionError("'--no-wrap' and '--width' are mutually " + "exclusive") + if not self.no_wrap and not self.width: + self.width = 76 + elif self.width is not None: + self.width = int(self.width) + if self.no_fuzzy_matching and self.previous: + self.previous = False + + def run(self): + po_files = [] + if not self.output_file: + if self.locale: + po_files.append((self.locale, + os.path.join(self.output_dir, self.locale, + 'LC_MESSAGES', + self.domain + '.po'))) + else: + for locale in os.listdir(self.output_dir): + po_file = os.path.join(self.output_dir, locale, + 'LC_MESSAGES', + self.domain + '.po') + if os.path.exists(po_file): + po_files.append((locale, po_file)) + else: + po_files.append((self.locale, self.output_file)) + + if not po_files: + raise DistutilsOptionError('no message catalogs found') + + domain = self.domain + if not domain: + domain = os.path.splitext(os.path.basename(self.input_file))[0] + + with open(self.input_file, 'rb') as infile: + template = read_po(infile) + + for locale, filename in po_files: + self.log.info('updating catalog %s based on %s', filename, self.input_file) + with open(filename, 'rb') as infile: + catalog = read_po(infile, locale=locale, domain=domain) + + catalog.update( + template, self.no_fuzzy_matching, + update_header_comment=self.update_header_comment + ) + + tmpname = os.path.join(os.path.dirname(filename), + tempfile.gettempprefix() + + os.path.basename(filename)) + try: 
+ with open(tmpname, 'wb') as tmpfile: + write_po(tmpfile, catalog, + omit_header=self.omit_header, + ignore_obsolete=self.ignore_obsolete, + include_previous=self.previous, width=self.width) + except: + os.remove(tmpname) + raise + + try: + os.rename(tmpname, filename) + except OSError: + # We're probably on Windows, which doesn't support atomic + # renames, at least not through Python + # If the error is in fact due to a permissions problem, that + # same error is going to be raised from one of the following + # operations + os.remove(filename) + shutil.copy(tmpname, filename) + os.remove(tmpname) + + +class CommandLineInterface(object): + """Command-line interface. + + This class provides a simple command-line interface to the message + extraction and PO file generation functionality. + """ + + usage = '%%prog %s [options] %s' + version = '%%prog %s' % VERSION + commands = { + 'compile': 'compile message catalogs to MO files', + 'extract': 'extract messages from source files and generate a POT file', + 'init': 'create new message catalogs from a POT file', + 'update': 'update existing message catalogs from a POT file' + } + + command_classes = { + 'compile': compile_catalog, + 'extract': extract_messages, + 'init': init_catalog, + 'update': update_catalog, + } + + log = None # Replaced on instance level + + def run(self, argv=None): + """Main entry point of the command-line interface. + + :param argv: list of arguments passed on the command-line + """ + + if argv is None: + argv = sys.argv + + self.parser = optparse.OptionParser(usage=self.usage % ('command', '[args]'), + version=self.version) + self.parser.disable_interspersed_args() + self.parser.print_help = self._help + self.parser.add_option('--list-locales', dest='list_locales', + action='store_true', + help="print all known locales and exit") + self.parser.add_option('-v', '--verbose', action='store_const', + dest='loglevel', const=logging.DEBUG, + help='print as much as possible') + self.parser.add_option('-q', '--quiet', action='store_const', + dest='loglevel', const=logging.ERROR, + help='print as little as possible') + self.parser.set_defaults(list_locales=False, loglevel=logging.INFO) + + options, args = self.parser.parse_args(argv[1:]) + + self._configure_logging(options.loglevel) + if options.list_locales: + identifiers = localedata.locale_identifiers() + longest = max([len(identifier) for identifier in identifiers]) + identifiers.sort() + format = u'%%-%ds %%s' % (longest + 1) + for identifier in identifiers: + locale = Locale.parse(identifier) + output = format % (identifier, locale.english_name) + print(output.encode(sys.stdout.encoding or + getpreferredencoding() or + 'ascii', 'replace')) + return 0 + + if not args: + self.parser.error('no valid command or option passed. ' + 'Try the -h/--help option for more information.') + + cmdname = args[0] + if cmdname not in self.commands: + self.parser.error('unknown command "%s"' % cmdname) + + cmdinst = self._configure_command(cmdname, args[1:]) + return cmdinst.run() + + def _configure_logging(self, loglevel): + self.log = logging.getLogger('babel') + self.log.setLevel(loglevel) + # Don't add a new handler for every instance initialization (#227), this + # would cause duplicated output when the CommandLineInterface as an + # normal Python class. 
+ if self.log.handlers: + handler = self.log.handlers[0] + else: + handler = logging.StreamHandler() + self.log.addHandler(handler) + handler.setLevel(loglevel) + formatter = logging.Formatter('%(message)s') + handler.setFormatter(formatter) + + def _help(self): + print(self.parser.format_help()) + print("commands:") + longest = max([len(command) for command in self.commands]) + format = " %%-%ds %%s" % max(8, longest + 1) + commands = sorted(self.commands.items()) + for name, description in commands: + print(format % (name, description)) + + def _configure_command(self, cmdname, argv): + """ + :type cmdname: str + :type argv: list[str] + """ + cmdclass = self.command_classes[cmdname] + cmdinst = cmdclass() + if self.log: + cmdinst.log = self.log # Use our logger, not distutils'. + assert isinstance(cmdinst, Command) + cmdinst.initialize_options() + + parser = optparse.OptionParser( + usage=self.usage % (cmdname, ''), + description=self.commands[cmdname] + ) + as_args = getattr(cmdclass, "as_args", ()) + for long, short, help in cmdclass.user_options: + name = long.strip("=") + default = getattr(cmdinst, name.replace('-', '_')) + strs = ["--%s" % name] + if short: + strs.append("-%s" % short) + strs.extend(cmdclass.option_aliases.get(name, ())) + choices = cmdclass.option_choices.get(name, None) + if name == as_args: + parser.usage += "<%s>" % name + elif name in cmdclass.boolean_options: + parser.add_option(*strs, action="store_true", help=help) + elif name in cmdclass.multiple_value_options: + parser.add_option(*strs, action="append", help=help, choices=choices) + else: + parser.add_option(*strs, help=help, default=default, choices=choices) + options, args = parser.parse_args(argv) + + if as_args: + setattr(options, as_args.replace('-', '_'), args) + + for key, value in vars(options).items(): + setattr(cmdinst, key, value) + + try: + cmdinst.ensure_finalized() + except DistutilsOptionError as err: + parser.error(str(err)) + + return cmdinst + + +def main(): + return CommandLineInterface().run(sys.argv) + + +def parse_mapping(fileobj, filename=None): + """Parse an extraction method mapping from a file-like object. + + >>> buf = StringIO(''' + ... [extractors] + ... custom = mypackage.module:myfunc + ... + ... # Python source files + ... [python: **.py] + ... + ... # Genshi templates + ... [genshi: **/templates/**.html] + ... include_attrs = + ... [genshi: **/templates/**.txt] + ... template_class = genshi.template:TextTemplate + ... encoding = latin-1 + ... + ... # Some custom extractor + ... [custom: **/custom/*.*] + ... 
''') + + >>> method_map, options_map = parse_mapping(buf) + >>> len(method_map) + 4 + + >>> method_map[0] + ('**.py', 'python') + >>> options_map['**.py'] + {} + >>> method_map[1] + ('**/templates/**.html', 'genshi') + >>> options_map['**/templates/**.html']['include_attrs'] + '' + >>> method_map[2] + ('**/templates/**.txt', 'genshi') + >>> options_map['**/templates/**.txt']['template_class'] + 'genshi.template:TextTemplate' + >>> options_map['**/templates/**.txt']['encoding'] + 'latin-1' + + >>> method_map[3] + ('**/custom/*.*', 'mypackage.module:myfunc') + >>> options_map['**/custom/*.*'] + {} + + :param fileobj: a readable file-like object containing the configuration + text to parse + :see: `extract_from_directory` + """ + extractors = {} + method_map = [] + options_map = {} + + parser = RawConfigParser() + parser._sections = OrderedDict(parser._sections) # We need ordered sections + + if PY2: + parser.readfp(fileobj, filename) + else: + parser.read_file(fileobj, filename) + + for section in parser.sections(): + if section == 'extractors': + extractors = dict(parser.items(section)) + else: + method, pattern = [part.strip() for part in section.split(':', 1)] + method_map.append((pattern, method)) + options_map[pattern] = dict(parser.items(section)) + + if extractors: + for idx, (pattern, method) in enumerate(method_map): + if method in extractors: + method = extractors[method] + method_map[idx] = (pattern, method) + + return method_map, options_map + + +def parse_keywords(strings=[]): + """Parse keywords specifications from the given list of strings. + + >>> kw = sorted(parse_keywords(['_', 'dgettext:2', 'dngettext:2,3', 'pgettext:1c,2']).items()) + >>> for keyword, indices in kw: + ... print((keyword, indices)) + ('_', None) + ('dgettext', (2,)) + ('dngettext', (2, 3)) + ('pgettext', ((1, 'c'), 2)) + """ + keywords = {} + for string in strings: + if ':' in string: + funcname, indices = string.split(':') + else: + funcname, indices = string, None + if funcname not in keywords: + if indices: + inds = [] + for x in indices.split(','): + if x[-1] == 'c': + inds.append((int(x[:-1]), 'c')) + else: + inds.append(int(x)) + indices = tuple(inds) + keywords[funcname] = indices + return keywords + + +if __name__ == '__main__': + main() diff --git a/dbt-env/lib/python3.8/site-packages/babel/messages/jslexer.py b/dbt-env/lib/python3.8/site-packages/babel/messages/jslexer.py new file mode 100644 index 0000000..c57b121 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/babel/messages/jslexer.py @@ -0,0 +1,185 @@ +# -*- coding: utf-8 -*- +""" + babel.messages.jslexer + ~~~~~~~~~~~~~~~~~~~~~~ + + A simple JavaScript 1.5 lexer which is used for the JavaScript + extractor. + + :copyright: (c) 2013-2021 by the Babel Team. + :license: BSD, see LICENSE for more details. 
+""" +from collections import namedtuple +import re +from babel._compat import unichr + +operators = sorted([ + '+', '-', '*', '%', '!=', '==', '<', '>', '<=', '>=', '=', + '+=', '-=', '*=', '%=', '<<', '>>', '>>>', '<<=', '>>=', + '>>>=', '&', '&=', '|', '|=', '&&', '||', '^', '^=', '(', ')', + '[', ']', '{', '}', '!', '--', '++', '~', ',', ';', '.', ':' +], key=len, reverse=True) + +escapes = {'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t'} + +name_re = re.compile(r'[\w$_][\w\d$_]*', re.UNICODE) +dotted_name_re = re.compile(r'[\w$_][\w\d$_.]*[\w\d$_.]', re.UNICODE) +division_re = re.compile(r'/=?') +regex_re = re.compile(r'/(?:[^/\\]*(?:\\.[^/\\]*)*)/[a-zA-Z]*', re.DOTALL) +line_re = re.compile(r'(\r\n|\n|\r)') +line_join_re = re.compile(r'\\' + line_re.pattern) +uni_escape_re = re.compile(r'[a-fA-F0-9]{1,4}') + +Token = namedtuple('Token', 'type value lineno') + +_rules = [ + (None, re.compile(r'\s+', re.UNICODE)), + (None, re.compile(r' Use Coherence + if chaos_difference < 0.01 and coherence_difference > 0.02: + # When having a tough decision, use the result that decoded as many multi-byte as possible. + if chaos_difference == 0.0 and self.coherence == other.coherence: + return self.multi_byte_usage > other.multi_byte_usage + return self.coherence > other.coherence + + return self.chaos < other.chaos + + @property + def multi_byte_usage(self) -> float: + return 1.0 - len(str(self)) / len(self.raw) + + @property + def chaos_secondary_pass(self) -> float: + """ + Check once again chaos in decoded text, except this time, with full content. + Use with caution, this can be very slow. + Notice: Will be removed in 3.0 + """ + warnings.warn( + "chaos_secondary_pass is deprecated and will be removed in 3.0", + DeprecationWarning, + ) + return mess_ratio(str(self), 1.0) + + @property + def coherence_non_latin(self) -> float: + """ + Coherence ratio on the first non-latin language detected if ANY. + Notice: Will be removed in 3.0 + """ + warnings.warn( + "coherence_non_latin is deprecated and will be removed in 3.0", + DeprecationWarning, + ) + return 0.0 + + @property + def w_counter(self) -> Counter: + """ + Word counter instance on decoded text. + Notice: Will be removed in 3.0 + """ + warnings.warn( + "w_counter is deprecated and will be removed in 3.0", DeprecationWarning + ) + + string_printable_only = sub(NOT_PRINTABLE_PATTERN, " ", str(self).lower()) + + return Counter(string_printable_only.split()) + + def __str__(self) -> str: + # Lazy Str Loading + if self._string is None: + self._string = str(self._payload, self._encoding, "strict") + return self._string + + def __repr__(self) -> str: + return "".format(self.encoding, self.fingerprint) + + def add_submatch(self, other: "CharsetMatch") -> None: + if not isinstance(other, CharsetMatch) or other == self: + raise ValueError( + "Unable to add instance <{}> as a submatch of a CharsetMatch".format( + other.__class__ + ) + ) + + other._string = None # Unload RAM usage; dirty trick. + self._leaves.append(other) + + @property + def encoding(self) -> str: + return self._encoding + + @property + def encoding_aliases(self) -> List[str]: + """ + Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. 
+ """ + also_known_as = [] # type: List[str] + for u, p in aliases.items(): + if self.encoding == u: + also_known_as.append(p) + elif self.encoding == p: + also_known_as.append(u) + return also_known_as + + @property + def bom(self) -> bool: + return self._has_sig_or_bom + + @property + def byte_order_mark(self) -> bool: + return self._has_sig_or_bom + + @property + def languages(self) -> List[str]: + """ + Return the complete list of possible languages found in decoded sequence. + Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. + """ + return [e[0] for e in self._languages] + + @property + def language(self) -> str: + """ + Most probable language found in decoded sequence. If none were detected or inferred, the property will return + "Unknown". + """ + if not self._languages: + # Trying to infer the language based on the given encoding + # Its either English or we should not pronounce ourselves in certain cases. + if "ascii" in self.could_be_from_charset: + return "English" + + # doing it there to avoid circular import + from charset_normalizer.cd import encoding_languages, mb_encoding_languages + + languages = ( + mb_encoding_languages(self.encoding) + if is_multi_byte_encoding(self.encoding) + else encoding_languages(self.encoding) + ) + + if len(languages) == 0 or "Latin Based" in languages: + return "Unknown" + + return languages[0] + + return self._languages[0][0] + + @property + def chaos(self) -> float: + return self._mean_mess_ratio + + @property + def coherence(self) -> float: + if not self._languages: + return 0.0 + return self._languages[0][1] + + @property + def percent_chaos(self) -> float: + return round(self.chaos * 100, ndigits=3) + + @property + def percent_coherence(self) -> float: + return round(self.coherence * 100, ndigits=3) + + @property + def raw(self) -> bytes: + """ + Original untouched bytes. + """ + return self._payload + + @property + def submatch(self) -> List["CharsetMatch"]: + return self._leaves + + @property + def has_submatch(self) -> bool: + return len(self._leaves) > 0 + + @property + def alphabets(self) -> List[str]: + if self._unicode_ranges is not None: + return self._unicode_ranges + # list detected ranges + detected_ranges = [ + unicode_range(char) for char in str(self) + ] # type: List[Optional[str]] + # filter and sort + self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) + return self._unicode_ranges + + @property + def could_be_from_charset(self) -> List[str]: + """ + The complete list of encoding that output the exact SAME str result and therefore could be the originating + encoding. + This list does include the encoding available in property 'encoding'. + """ + return [self._encoding] + [m.encoding for m in self._leaves] + + def first(self) -> "CharsetMatch": + """ + Kept for BC reasons. Will be removed in 3.0. + """ + return self + + def best(self) -> "CharsetMatch": + """ + Kept for BC reasons. Will be removed in 3.0. + """ + return self + + def output(self, encoding: str = "utf_8") -> bytes: + """ + Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. + Any errors will be simply ignored by the encoder NOT replaced. 
+ """ + if self._output_encoding is None or self._output_encoding != encoding: + self._output_encoding = encoding + self._output_payload = str(self).encode(encoding, "replace") + + return self._output_payload # type: ignore + + @property + def fingerprint(self) -> str: + """ + Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. + """ + return sha256(self.output()).hexdigest() + + +class CharsetMatches: + """ + Container with every CharsetMatch items ordered by default from most probable to the less one. + Act like a list(iterable) but does not implements all related methods. + """ + + def __init__(self, results: List[CharsetMatch] = None): + self._results = sorted(results) if results else [] # type: List[CharsetMatch] + + def __iter__(self) -> Iterator[CharsetMatch]: + yield from self._results + + def __getitem__(self, item: Union[int, str]) -> CharsetMatch: + """ + Retrieve a single item either by its position or encoding name (alias may be used here). + Raise KeyError upon invalid index or encoding not present in results. + """ + if isinstance(item, int): + return self._results[item] + if isinstance(item, str): + item = iana_name(item, False) + for result in self._results: + if item in result.could_be_from_charset: + return result + raise KeyError + + def __len__(self) -> int: + return len(self._results) + + def __bool__(self) -> bool: + return len(self._results) > 0 + + def append(self, item: CharsetMatch) -> None: + """ + Insert a single match. Will be inserted accordingly to preserve sort. + Can be inserted as a submatch. + """ + if not isinstance(item, CharsetMatch): + raise ValueError( + "Cannot append instance '{}' to CharsetMatches".format( + str(item.__class__) + ) + ) + # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) + if len(item.raw) <= TOO_BIG_SEQUENCE: + for match in self._results: + if match.fingerprint == item.fingerprint and match.chaos == item.chaos: + match.add_submatch(item) + return + self._results.append(item) + self._results = sorted(self._results) + + def best(self) -> Optional["CharsetMatch"]: + """ + Simply return the first match. Strict equivalent to matches[0]. + """ + if not self._results: + return None + return self._results[0] + + def first(self) -> Optional["CharsetMatch"]: + """ + Redundant method, call the method best(). Kept for BC reasons. 
+ """ + return self.best() + + +CoherenceMatch = Tuple[str, float] +CoherenceMatches = List[CoherenceMatch] + + +class CliDetectionResult: + def __init__( + self, + path: str, + encoding: Optional[str], + encoding_aliases: List[str], + alternative_encodings: List[str], + language: str, + alphabets: List[str], + has_sig_or_bom: bool, + chaos: float, + coherence: float, + unicode_path: Optional[str], + is_preferred: bool, + ): + self.path = path # type: str + self.unicode_path = unicode_path # type: Optional[str] + self.encoding = encoding # type: Optional[str] + self.encoding_aliases = encoding_aliases # type: List[str] + self.alternative_encodings = alternative_encodings # type: List[str] + self.language = language # type: str + self.alphabets = alphabets # type: List[str] + self.has_sig_or_bom = has_sig_or_bom # type: bool + self.chaos = chaos # type: float + self.coherence = coherence # type: float + self.is_preferred = is_preferred # type: bool + + @property + def __dict__(self) -> Dict[str, Any]: # type: ignore + return { + "path": self.path, + "encoding": self.encoding, + "encoding_aliases": self.encoding_aliases, + "alternative_encodings": self.alternative_encodings, + "language": self.language, + "alphabets": self.alphabets, + "has_sig_or_bom": self.has_sig_or_bom, + "chaos": self.chaos, + "coherence": self.coherence, + "unicode_path": self.unicode_path, + "is_preferred": self.is_preferred, + } + + def to_json(self) -> str: + return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/dbt-env/lib/python3.8/site-packages/charset_normalizer/py.typed b/dbt-env/lib/python3.8/site-packages/charset_normalizer/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/dbt-env/lib/python3.8/site-packages/charset_normalizer/utils.py b/dbt-env/lib/python3.8/site-packages/charset_normalizer/utils.py new file mode 100644 index 0000000..dcb14df --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/charset_normalizer/utils.py @@ -0,0 +1,342 @@ +try: + import unicodedata2 as unicodedata +except ImportError: + import unicodedata # type: ignore[no-redef] + +import importlib +import logging +from codecs import IncrementalDecoder +from encodings.aliases import aliases +from functools import lru_cache +from re import findall +from typing import List, Optional, Set, Tuple, Union + +from _multibytecodec import MultibyteIncrementalDecoder # type: ignore + +from .constant import ( + ENCODING_MARKS, + IANA_SUPPORTED_SIMILAR, + RE_POSSIBLE_ENCODING_INDICATION, + UNICODE_RANGES_COMBINED, + UNICODE_SECONDARY_RANGE_KEYWORD, + UTF8_MAXIMAL_ALLOCATION, +) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_accentuated(character: str) -> bool: + try: + description = unicodedata.name(character) # type: str + except ValueError: + return False + return ( + "WITH GRAVE" in description + or "WITH ACUTE" in description + or "WITH CEDILLA" in description + or "WITH DIAERESIS" in description + or "WITH CIRCUMFLEX" in description + or "WITH TILDE" in description + ) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def remove_accent(character: str) -> str: + decomposed = unicodedata.decomposition(character) # type: str + if not decomposed: + return character + + codes = decomposed.split(" ") # type: List[str] + + return chr(int(codes[0], 16)) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def unicode_range(character: str) -> Optional[str]: + """ + Retrieve the Unicode range official name from a single character. 
+ """ + character_ord = ord(character) # type: int + + for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): + if character_ord in ord_range: + return range_name + + return None + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_latin(character: str) -> bool: + try: + description = unicodedata.name(character) # type: str + except ValueError: + return False + return "LATIN" in description + + +def is_ascii(character: str) -> bool: + try: + character.encode("ascii") + except UnicodeEncodeError: + return False + return True + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_punctuation(character: str) -> bool: + character_category = unicodedata.category(character) # type: str + + if "P" in character_category: + return True + + character_range = unicode_range(character) # type: Optional[str] + + if character_range is None: + return False + + return "Punctuation" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_symbol(character: str) -> bool: + character_category = unicodedata.category(character) # type: str + + if "S" in character_category or "N" in character_category: + return True + + character_range = unicode_range(character) # type: Optional[str] + + if character_range is None: + return False + + return "Forms" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_emoticon(character: str) -> bool: + character_range = unicode_range(character) # type: Optional[str] + + if character_range is None: + return False + + return "Emoticons" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_separator(character: str) -> bool: + if character.isspace() or character in {"|", "+", ",", ";", "<", ">"}: + return True + + character_category = unicodedata.category(character) # type: str + + return "Z" in character_category + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_case_variable(character: str) -> bool: + return character.islower() != character.isupper() + + +def is_private_use_only(character: str) -> bool: + character_category = unicodedata.category(character) # type: str + + return character_category == "Co" + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_cjk(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "CJK" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hiragana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HIRAGANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_katakana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "KATAKANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hangul(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HANGUL" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_thai(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "THAI" in character_name + + +@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) +def is_unicode_range_secondary(range_name: str) -> bool: + return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) + + +def any_specified_encoding(sequence: bytes, search_zone: int = 4096) -> Optional[str]: + """ + Extract using ASCII-only decoder any 
specified encoding in the first n-bytes. + """ + if not isinstance(sequence, bytes): + raise TypeError + + seq_len = len(sequence) # type: int + + results = findall( + RE_POSSIBLE_ENCODING_INDICATION, + sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), + ) # type: List[str] + + if len(results) == 0: + return None + + for specified_encoding in results: + specified_encoding = specified_encoding.lower().replace("-", "_") + + for encoding_alias, encoding_iana in aliases.items(): + if encoding_alias == specified_encoding: + return encoding_iana + if encoding_iana == specified_encoding: + return encoding_iana + + return None + + +@lru_cache(maxsize=128) +def is_multi_byte_encoding(name: str) -> bool: + """ + Verify is a specific encoding is a multi byte one based on it IANA name + """ + return name in { + "utf_8", + "utf_8_sig", + "utf_16", + "utf_16_be", + "utf_16_le", + "utf_32", + "utf_32_le", + "utf_32_be", + "utf_7", + } or issubclass( + importlib.import_module("encodings.{}".format(name)).IncrementalDecoder, # type: ignore + MultibyteIncrementalDecoder, + ) + + +def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]: + """ + Identify and extract SIG/BOM in given sequence. + """ + + for iana_encoding in ENCODING_MARKS: + marks = ENCODING_MARKS[iana_encoding] # type: Union[bytes, List[bytes]] + + if isinstance(marks, bytes): + marks = [marks] + + for mark in marks: + if sequence.startswith(mark): + return iana_encoding, mark + + return None, b"" + + +def should_strip_sig_or_bom(iana_encoding: str) -> bool: + return iana_encoding not in {"utf_16", "utf_32"} + + +def iana_name(cp_name: str, strict: bool = True) -> str: + cp_name = cp_name.lower().replace("-", "_") + + for encoding_alias, encoding_iana in aliases.items(): + if cp_name in [encoding_alias, encoding_iana]: + return encoding_iana + + if strict: + raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name)) + + return cp_name + + +def range_scan(decoded_sequence: str) -> List[str]: + ranges = set() # type: Set[str] + + for character in decoded_sequence: + character_range = unicode_range(character) # type: Optional[str] + + if character_range is None: + continue + + ranges.add(character_range) + + return list(ranges) + + +def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: + + if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): + return 0.0 + + decoder_a = importlib.import_module("encodings.{}".format(iana_name_a)).IncrementalDecoder # type: ignore + decoder_b = importlib.import_module("encodings.{}".format(iana_name_b)).IncrementalDecoder # type: ignore + + id_a = decoder_a(errors="ignore") # type: IncrementalDecoder + id_b = decoder_b(errors="ignore") # type: IncrementalDecoder + + character_match_count = 0 # type: int + + for i in range(255): + to_be_decoded = bytes([i]) # type: bytes + if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): + character_match_count += 1 + + return character_match_count / 254 + + +def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: + """ + Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using + the function cp_similarity. 
+ """ + return ( + iana_name_a in IANA_SUPPORTED_SIMILAR + and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] + ) + + +def set_logging_handler( + name: str = "charset_normalizer", + level: int = logging.INFO, + format_string: str = "%(asctime)s | %(levelname)s | %(message)s", +) -> None: + + logger = logging.getLogger(name) + logger.setLevel(level) + + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter(format_string)) + logger.addHandler(handler) diff --git a/dbt-env/lib/python3.8/site-packages/charset_normalizer/version.py b/dbt-env/lib/python3.8/site-packages/charset_normalizer/version.py new file mode 100644 index 0000000..77cfff2 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/charset_normalizer/version.py @@ -0,0 +1,6 @@ +""" +Expose version +""" + +__version__ = "2.0.12" +VERSION = __version__.split(".") diff --git a/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/INSTALLER b/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/LICENSE.txt b/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/LICENSE.txt new file mode 100644 index 0000000..3105888 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/LICENSE.txt @@ -0,0 +1,27 @@ +Copyright (c) 2010 Jonathan Hartley +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holders, nor those of its contributors + may be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/METADATA b/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/METADATA new file mode 100644 index 0000000..2a175c2 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/METADATA @@ -0,0 +1,415 @@ +Metadata-Version: 2.1 +Name: colorama +Version: 0.4.4 +Summary: Cross-platform colored terminal text. 
+Home-page: https://github.com/tartley/colorama +Author: Jonathan Hartley +Author-email: tartley@tartley.com +Maintainer: Arnon Yaari +License: BSD +Keywords: color colour terminal text ansi windows crossplatform xplatform +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Terminals +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* + +.. image:: https://img.shields.io/pypi/v/colorama.svg + :target: https://pypi.org/project/colorama/ + :alt: Latest Version + +.. image:: https://img.shields.io/pypi/pyversions/colorama.svg + :target: https://pypi.org/project/colorama/ + :alt: Supported Python versions + +.. image:: https://travis-ci.org/tartley/colorama.svg?branch=master + :target: https://travis-ci.org/tartley/colorama + :alt: Build Status + +Colorama +======== + +Makes ANSI escape character sequences (for producing colored terminal text and +cursor positioning) work under MS Windows. + +.. |donate| image:: https://www.paypalobjects.com/en_US/i/btn/btn_donate_SM.gif + :target: https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=2MZ9D2GMLYCUJ&item_name=Colorama¤cy_code=USD + :alt: Donate with Paypal + +`PyPI for releases `_ · +`Github for source `_ · +`Colorama for enterprise on Tidelift `_ + +If you find Colorama useful, please |donate| to the authors. Thank you! + + +Installation +------------ + +.. code-block:: bash + + pip install colorama + # or + conda install -c anaconda colorama + + +Description +----------- + +ANSI escape character sequences have long been used to produce colored terminal +text and cursor positioning on Unix and Macs. Colorama makes this work on +Windows, too, by wrapping ``stdout``, stripping ANSI sequences it finds (which +would appear as gobbledygook in the output), and converting them into the +appropriate win32 calls to modify the state of the terminal. On other platforms, +Colorama does nothing. + +This has the upshot of providing a simple cross-platform API for printing +colored terminal text from Python, and has the happy side-effect that existing +applications or libraries which use ANSI sequences to produce colored output on +Linux or Macs can now also work on Windows, simply by calling +``colorama.init()``. + +An alternative approach is to install ``ansi.sys`` on Windows machines, which +provides the same behaviour for all applications running in terminals. Colorama +is intended for situations where that isn't easy (e.g., maybe your app doesn't +have an installer.) + +Demo scripts in the source code repository print some colored text using +ANSI sequences. Compare their output under Gnome-terminal's built in ANSI +handling, versus on Windows Command-Prompt using Colorama: + +.. 
image:: https://github.com/tartley/colorama/raw/master/screenshots/ubuntu-demo.png + :width: 661 + :height: 357 + :alt: ANSI sequences on Ubuntu under gnome-terminal. + +.. image:: https://github.com/tartley/colorama/raw/master/screenshots/windows-demo.png + :width: 668 + :height: 325 + :alt: Same ANSI sequences on Windows, using Colorama. + +These screenshots show that, on Windows, Colorama does not support ANSI 'dim +text'; it looks the same as 'normal text'. + +Usage +----- + +Initialisation +.............. + +Applications should initialise Colorama using: + +.. code-block:: python + + from colorama import init + init() + +On Windows, calling ``init()`` will filter ANSI escape sequences out of any +text sent to ``stdout`` or ``stderr``, and replace them with equivalent Win32 +calls. + +On other platforms, calling ``init()`` has no effect (unless you request other +optional functionality; see "Init Keyword Args", below). By design, this permits +applications to call ``init()`` unconditionally on all platforms, after which +ANSI output should just work. + +To stop using Colorama before your program exits, simply call ``deinit()``. +This will restore ``stdout`` and ``stderr`` to their original values, so that +Colorama is disabled. To resume using Colorama again, call ``reinit()``; it is +cheaper than calling ``init()`` again (but does the same thing). + + +Colored Output +.............. + +Cross-platform printing of colored text can then be done using Colorama's +constant shorthand for ANSI escape sequences: + +.. code-block:: python + + from colorama import Fore, Back, Style + print(Fore.RED + 'some red text') + print(Back.GREEN + 'and with a green background') + print(Style.DIM + 'and in dim text') + print(Style.RESET_ALL) + print('back to normal now') + +...or simply by manually printing ANSI sequences from your own code: + +.. code-block:: python + + print('\033[31m' + 'some red text') + print('\033[39m') # and reset to default color + +...or, Colorama can be used in conjunction with existing ANSI libraries +such as the venerable `Termcolor `_ +or the fabulous `Blessings `_. +This is highly recommended for anything more than trivial coloring: + +.. code-block:: python + + from colorama import init + from termcolor import colored + + # use Colorama to make Termcolor work on Windows too + init() + + # then use Termcolor for all colored text output + print(colored('Hello, World!', 'green', 'on_red')) + +Available formatting constants are:: + + Fore: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET. + Back: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET. + Style: DIM, NORMAL, BRIGHT, RESET_ALL + +``Style.RESET_ALL`` resets foreground, background, and brightness. Colorama will +perform this reset automatically on program exit. + + +Cursor Positioning +.................. + +ANSI codes to reposition the cursor are supported. See ``demos/demo06.py`` for +an example of how to generate them. + + +Init Keyword Args +................. + +``init()`` accepts some ``**kwargs`` to override default behaviour. + +init(autoreset=False): + If you find yourself repeatedly sending reset sequences to turn off color + changes at the end of every print, then ``init(autoreset=True)`` will + automate that: + + .. 
code-block:: python + + from colorama import init + init(autoreset=True) + print(Fore.RED + 'some red text') + print('automatically back to default color again') + +init(strip=None): + Pass ``True`` or ``False`` to override whether ANSI codes should be + stripped from the output. The default behaviour is to strip if on Windows + or if output is redirected (not a tty). + +init(convert=None): + Pass ``True`` or ``False`` to override whether to convert ANSI codes in the + output into win32 calls. The default behaviour is to convert if on Windows + and output is to a tty (terminal). + +init(wrap=True): + On Windows, Colorama works by replacing ``sys.stdout`` and ``sys.stderr`` + with proxy objects, which override the ``.write()`` method to do their work. + If this wrapping causes you problems, then this can be disabled by passing + ``init(wrap=False)``. The default behaviour is to wrap if ``autoreset`` or + ``strip`` or ``convert`` are True. + + When wrapping is disabled, colored printing on non-Windows platforms will + continue to work as normal. To do cross-platform colored output, you can + use Colorama's ``AnsiToWin32`` proxy directly: + + .. code-block:: python + + import sys + from colorama import init, AnsiToWin32 + init(wrap=False) + stream = AnsiToWin32(sys.stderr).stream + + # Python 2 + print >>stream, Fore.BLUE + 'blue text on stderr' + + # Python 3 + print(Fore.BLUE + 'blue text on stderr', file=stream) + + +Recognised ANSI Sequences +......................... + +ANSI sequences generally take the form:: + + ESC [ ; ... + +Where ```` is an integer, and ```` is a single letter. Zero or +more params are passed to a ````. If no params are passed, it is +generally synonymous with passing a single zero. No spaces exist in the +sequence; they have been inserted here simply to read more easily. + +The only ANSI sequences that Colorama converts into win32 calls are:: + + ESC [ 0 m # reset all (colors and brightness) + ESC [ 1 m # bright + ESC [ 2 m # dim (looks same as normal brightness) + ESC [ 22 m # normal brightness + + # FOREGROUND: + ESC [ 30 m # black + ESC [ 31 m # red + ESC [ 32 m # green + ESC [ 33 m # yellow + ESC [ 34 m # blue + ESC [ 35 m # magenta + ESC [ 36 m # cyan + ESC [ 37 m # white + ESC [ 39 m # reset + + # BACKGROUND + ESC [ 40 m # black + ESC [ 41 m # red + ESC [ 42 m # green + ESC [ 43 m # yellow + ESC [ 44 m # blue + ESC [ 45 m # magenta + ESC [ 46 m # cyan + ESC [ 47 m # white + ESC [ 49 m # reset + + # cursor positioning + ESC [ y;x H # position cursor at x across, y down + ESC [ y;x f # position cursor at x across, y down + ESC [ n A # move cursor n lines up + ESC [ n B # move cursor n lines down + ESC [ n C # move cursor n characters forward + ESC [ n D # move cursor n characters backward + + # clear the screen + ESC [ mode J # clear the screen + + # clear the line + ESC [ mode K # clear the line + +Multiple numeric params to the ``'m'`` command can be combined into a single +sequence:: + + ESC [ 36 ; 45 ; 1 m # bright cyan text on magenta background + +All other ANSI sequences of the form ``ESC [ ; ... `` +are silently stripped from the output on Windows. + +Any other form of ANSI sequence, such as single-character codes or alternative +initial characters, are not recognised or stripped. It would be cool to add +them though. Let me know if it would be useful for you, via the Issues on +GitHub. + + +Status & Known Problems +----------------------- + +I've personally only tested it on Windows XP (CMD, Console2), Ubuntu +(gnome-terminal, xterm), and OS X. 
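A quick way to spot-check a terminal yourself is a short script along the lines of the sketch below. It is a hypothetical example (not one of colorama's bundled demos) that exercises the sequences documented above, once as raw escapes and once via the shorthand constants:

.. code-block:: python

    # Hypothetical spot-check, not part of colorama's bundled demos: prints the
    # combined-parameter example from the list above as a raw escape sequence and
    # again using colorama's shorthand constants, plus a simple cursor move.
    from colorama import init, Fore, Back, Style

    init()  # wraps stdout/stderr on Windows; effectively a no-op elsewhere

    # ESC [ 36 ; 45 ; 1 m -> bright cyan text on a magenta background
    print('\033[36;45;1m' + 'raw combined sequence' + '\033[0m')

    # The same effect via the constants, then reset colors and brightness
    print(Fore.CYAN + Back.MAGENTA + Style.BRIGHT + 'shorthand constants' + Style.RESET_ALL)

    # ESC [ n A / ESC [ n B -> move the cursor one line up, then back down
    print('\033[1A' + 'moved up one line' + '\033[1B')

Once ``init()`` has been called, the raw escapes and the constants should render identically on Windows; on other platforms both simply pass through to the terminal.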
+ +Some presumably valid ANSI sequences aren't recognised (see details below), +but to my knowledge nobody has yet complained about this. Puzzling. + +See outstanding issues and wish-list: +https://github.com/tartley/colorama/issues + +If anything doesn't work for you, or doesn't do what you expected or hoped for, +I'd love to hear about it on that issues list, would be delighted by patches, +and would be happy to grant commit access to anyone who submits a working patch +or two. + + +License +------- + +Copyright Jonathan Hartley & Arnon Yaari, 2013-2020. BSD 3-Clause license; see +LICENSE file. + + +Development +----------- + +Help and fixes welcome! + +Tested on CPython 2.7, 3.5, 3.6, 3.7 and 3.8. + +No requirements other than the standard library. +Development requirements are captured in requirements-dev.txt. + +To create and populate a virtual environment:: + + ./bootstrap.ps1 # Windows + make bootstrap # Linux + +To run tests:: + + ./test.ps1 # Windows + make test # Linux + +If you use nose to run the tests, you must pass the ``-s`` flag; otherwise, +``nosetests`` applies its own proxy to ``stdout``, which confuses the unit +tests. + + +Professional support +-------------------- + +.. |tideliftlogo| image:: https://cdn2.hubspot.net/hubfs/4008838/website/logos/logos_for_download/Tidelift_primary-shorthand-logo.png + :alt: Tidelift + :target: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme + +.. list-table:: + :widths: 10 100 + + * - |tideliftlogo| + - Professional support for colorama is available as part of the + `Tidelift Subscription`_. + Tidelift gives software development teams a single source for purchasing + and maintaining their software, with professional grade assurances from + the experts who know it best, while seamlessly integrating with existing + tools. + +.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme + + +Thanks +------ + +* Marc Schlaich (schlamar) for a ``setup.py`` fix for Python2.5. +* Marc Abramowitz, reported & fixed a crash on exit with closed ``stdout``, + providing a solution to issue #7's setuptools/distutils debate, + and other fixes. +* User 'eryksun', for guidance on correctly instantiating ``ctypes.windll``. +* Matthew McCormick for politely pointing out a longstanding crash on non-Win. +* Ben Hoyt, for a magnificent fix under 64-bit Windows. +* Jesse at Empty Square for submitting a fix for examples in the README. +* User 'jamessp', an observant documentation fix for cursor positioning. +* User 'vaal1239', Dave Mckee & Lackner Kristof for a tiny but much-needed Win7 + fix. +* Julien Stuyck, for wisely suggesting Python3 compatible updates to README. +* Daniel Griffith for multiple fabulous patches. +* Oscar Lesta for a valuable fix to stop ANSI chars being sent to non-tty + output. +* Roger Binns, for many suggestions, valuable feedback, & bug reports. +* Tim Golden for thought and much appreciated feedback on the initial idea. +* User 'Zearin' for updates to the README file. 
+* John Szakmeister for adding support for light colors +* Charles Merriam for adding documentation to demos +* Jurko for a fix on 64-bit Windows CPython2.5 w/o ctypes +* Florian Bruhin for a fix when stdout or stderr are None +* Thomas Weininger for fixing ValueError on Windows +* Remi Rampin for better Github integration and fixes to the README file +* Simeon Visser for closing a file handle using 'with' and updating classifiers + to include Python 3.3 and 3.4 +* Andy Neff for fixing RESET of LIGHT_EX colors. +* Jonathan Hartley for the initial idea and implementation. + + + diff --git a/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/RECORD b/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/RECORD new file mode 100644 index 0000000..dd51b6a --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/RECORD @@ -0,0 +1,18 @@ +colorama-0.4.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +colorama-0.4.4.dist-info/LICENSE.txt,sha256=ysNcAmhuXQSlpxQL-zs25zrtSWZW6JEQLkKIhteTAxg,1491 +colorama-0.4.4.dist-info/METADATA,sha256=JmU7ePpEh1xcqZV0JKcrrlU7cp5o4InDlHJXbo_FTQw,14551 +colorama-0.4.4.dist-info/RECORD,, +colorama-0.4.4.dist-info/WHEEL,sha256=gxPaqcqKPLUXaSAKwmfHO7_iAOlVvmp33DewnUluBB8,116 +colorama-0.4.4.dist-info/top_level.txt,sha256=_Kx6-Cni2BT1PEATPhrSRxo0d7kSgfBbHf5o7IF1ABw,9 +colorama/__init__.py,sha256=pCdErryzLSzDW5P-rRPBlPLqbBtIRNJB6cMgoeJns5k,239 +colorama/__pycache__/__init__.cpython-38.pyc,, +colorama/__pycache__/ansi.cpython-38.pyc,, +colorama/__pycache__/ansitowin32.cpython-38.pyc,, +colorama/__pycache__/initialise.cpython-38.pyc,, +colorama/__pycache__/win32.cpython-38.pyc,, +colorama/__pycache__/winterm.cpython-38.pyc,, +colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522 +colorama/ansitowin32.py,sha256=yV7CEmCb19MjnJKODZEEvMH_fnbJhwnpzo4sxZuGXmA,10517 +colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915 +colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404 +colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438 diff --git a/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/WHEEL b/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/WHEEL new file mode 100644 index 0000000..ecd4e92 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/top_level.txt b/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/top_level.txt new file mode 100644 index 0000000..3fcfb51 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/colorama-0.4.4.dist-info/top_level.txt @@ -0,0 +1 @@ +colorama diff --git a/dbt-env/lib/python3.8/site-packages/colorama/__init__.py b/dbt-env/lib/python3.8/site-packages/colorama/__init__.py new file mode 100644 index 0000000..b149ed7 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/colorama/__init__.py @@ -0,0 +1,6 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
+from .initialise import init, deinit, reinit, colorama_text +from .ansi import Fore, Back, Style, Cursor +from .ansitowin32 import AnsiToWin32 + +__version__ = '0.4.4' diff --git a/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..56b1224 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/ansi.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/ansi.cpython-38.pyc new file mode 100644 index 0000000..47d56f3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/ansi.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/ansitowin32.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/ansitowin32.cpython-38.pyc new file mode 100644 index 0000000..14ca35e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/ansitowin32.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/initialise.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/initialise.cpython-38.pyc new file mode 100644 index 0000000..2aca494 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/initialise.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/win32.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/win32.cpython-38.pyc new file mode 100644 index 0000000..16d2577 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/win32.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/winterm.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/winterm.cpython-38.pyc new file mode 100644 index 0000000..8fa56c9 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/colorama/__pycache__/winterm.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/colorama/ansi.py b/dbt-env/lib/python3.8/site-packages/colorama/ansi.py new file mode 100644 index 0000000..11ec695 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/colorama/ansi.py @@ -0,0 +1,102 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +''' +This module generates ANSI character codes to printing colors to terminals. +See: http://en.wikipedia.org/wiki/ANSI_escape_code +''' + +CSI = '\033[' +OSC = '\033]' +BEL = '\a' + + +def code_to_chars(code): + return CSI + str(code) + 'm' + +def set_title(title): + return OSC + '2;' + title + BEL + +def clear_screen(mode=2): + return CSI + str(mode) + 'J' + +def clear_line(mode=2): + return CSI + str(mode) + 'K' + + +class AnsiCodes(object): + def __init__(self): + # the subclasses declare class attributes which are numbers. 
+ # Upon instantiation we define instance attributes, which are the same + # as the class attributes but wrapped with the ANSI escape sequence + for name in dir(self): + if not name.startswith('_'): + value = getattr(self, name) + setattr(self, name, code_to_chars(value)) + + +class AnsiCursor(object): + def UP(self, n=1): + return CSI + str(n) + 'A' + def DOWN(self, n=1): + return CSI + str(n) + 'B' + def FORWARD(self, n=1): + return CSI + str(n) + 'C' + def BACK(self, n=1): + return CSI + str(n) + 'D' + def POS(self, x=1, y=1): + return CSI + str(y) + ';' + str(x) + 'H' + + +class AnsiFore(AnsiCodes): + BLACK = 30 + RED = 31 + GREEN = 32 + YELLOW = 33 + BLUE = 34 + MAGENTA = 35 + CYAN = 36 + WHITE = 37 + RESET = 39 + + # These are fairly well supported, but not part of the standard. + LIGHTBLACK_EX = 90 + LIGHTRED_EX = 91 + LIGHTGREEN_EX = 92 + LIGHTYELLOW_EX = 93 + LIGHTBLUE_EX = 94 + LIGHTMAGENTA_EX = 95 + LIGHTCYAN_EX = 96 + LIGHTWHITE_EX = 97 + + +class AnsiBack(AnsiCodes): + BLACK = 40 + RED = 41 + GREEN = 42 + YELLOW = 43 + BLUE = 44 + MAGENTA = 45 + CYAN = 46 + WHITE = 47 + RESET = 49 + + # These are fairly well supported, but not part of the standard. + LIGHTBLACK_EX = 100 + LIGHTRED_EX = 101 + LIGHTGREEN_EX = 102 + LIGHTYELLOW_EX = 103 + LIGHTBLUE_EX = 104 + LIGHTMAGENTA_EX = 105 + LIGHTCYAN_EX = 106 + LIGHTWHITE_EX = 107 + + +class AnsiStyle(AnsiCodes): + BRIGHT = 1 + DIM = 2 + NORMAL = 22 + RESET_ALL = 0 + +Fore = AnsiFore() +Back = AnsiBack() +Style = AnsiStyle() +Cursor = AnsiCursor() diff --git a/dbt-env/lib/python3.8/site-packages/colorama/ansitowin32.py b/dbt-env/lib/python3.8/site-packages/colorama/ansitowin32.py new file mode 100644 index 0000000..6039a05 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/colorama/ansitowin32.py @@ -0,0 +1,258 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import re +import sys +import os + +from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL +from .winterm import WinTerm, WinColor, WinStyle +from .win32 import windll, winapi_test + + +winterm = None +if windll is not None: + winterm = WinTerm() + + +class StreamWrapper(object): + ''' + Wraps a stream (such as stdout), acting as a transparent proxy for all + attribute access apart from method 'write()', which is delegated to our + Converter instance. + ''' + def __init__(self, wrapped, converter): + # double-underscore everything to prevent clashes with names of + # attributes on the wrapped stream object. 
+ self.__wrapped = wrapped + self.__convertor = converter + + def __getattr__(self, name): + return getattr(self.__wrapped, name) + + def __enter__(self, *args, **kwargs): + # special method lookup bypasses __getattr__/__getattribute__, see + # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit + # thus, contextlib magic methods are not proxied via __getattr__ + return self.__wrapped.__enter__(*args, **kwargs) + + def __exit__(self, *args, **kwargs): + return self.__wrapped.__exit__(*args, **kwargs) + + def write(self, text): + self.__convertor.write(text) + + def isatty(self): + stream = self.__wrapped + if 'PYCHARM_HOSTED' in os.environ: + if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__): + return True + try: + stream_isatty = stream.isatty + except AttributeError: + return False + else: + return stream_isatty() + + @property + def closed(self): + stream = self.__wrapped + try: + return stream.closed + except AttributeError: + return True + + +class AnsiToWin32(object): + ''' + Implements a 'write()' method which, on Windows, will strip ANSI character + sequences from the text, and if outputting to a tty, will convert them into + win32 function calls. + ''' + ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer + ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?') # Operating System Command + + def __init__(self, wrapped, convert=None, strip=None, autoreset=False): + # The wrapped stream (normally sys.stdout or sys.stderr) + self.wrapped = wrapped + + # should we reset colors to defaults after every .write() + self.autoreset = autoreset + + # create the proxy wrapping our output stream + self.stream = StreamWrapper(wrapped, self) + + on_windows = os.name == 'nt' + # We test if the WinAPI works, because even if we are on Windows + # we may be using a terminal that doesn't support the WinAPI + # (e.g. Cygwin Terminal). In this case it's up to the terminal + # to support the ANSI codes. + conversion_supported = on_windows and winapi_test() + + # should we strip ANSI sequences from our output? + if strip is None: + strip = conversion_supported or (not self.stream.closed and not self.stream.isatty()) + self.strip = strip + + # should we should convert ANSI sequences into win32 calls? + if convert is None: + convert = conversion_supported and not self.stream.closed and self.stream.isatty() + self.convert = convert + + # dict of ansi codes to win32 functions and parameters + self.win32_calls = self.get_win32_calls() + + # are we wrapping stderr? + self.on_stderr = self.wrapped is sys.stderr + + def should_wrap(self): + ''' + True if this class is actually needed. If false, then the output + stream will not be affected, nor will win32 calls be issued, so + wrapping stdout is not actually required. 
This will generally be + False on non-Windows platforms, unless optional functionality like + autoreset has been requested using kwargs to init() + ''' + return self.convert or self.strip or self.autoreset + + def get_win32_calls(self): + if self.convert and winterm: + return { + AnsiStyle.RESET_ALL: (winterm.reset_all, ), + AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), + AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), + AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), + AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), + AnsiFore.RED: (winterm.fore, WinColor.RED), + AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), + AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), + AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), + AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), + AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), + AnsiFore.WHITE: (winterm.fore, WinColor.GREY), + AnsiFore.RESET: (winterm.fore, ), + AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), + AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), + AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), + AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), + AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), + AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), + AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), + AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), + AnsiBack.BLACK: (winterm.back, WinColor.BLACK), + AnsiBack.RED: (winterm.back, WinColor.RED), + AnsiBack.GREEN: (winterm.back, WinColor.GREEN), + AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), + AnsiBack.BLUE: (winterm.back, WinColor.BLUE), + AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), + AnsiBack.CYAN: (winterm.back, WinColor.CYAN), + AnsiBack.WHITE: (winterm.back, WinColor.GREY), + AnsiBack.RESET: (winterm.back, ), + AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True), + AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), + AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), + AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), + AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), + AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), + AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), + AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), + } + return dict() + + def write(self, text): + if self.strip or self.convert: + self.write_and_convert(text) + else: + self.wrapped.write(text) + self.wrapped.flush() + if self.autoreset: + self.reset_all() + + + def reset_all(self): + if self.convert: + self.call_win32('m', (0,)) + elif not self.strip and not self.stream.closed: + self.wrapped.write(Style.RESET_ALL) + + + def write_and_convert(self, text): + ''' + Write the given text to our wrapped stream, stripping any ANSI + sequences from the text, and optionally converting them into win32 + calls. 
+ ''' + cursor = 0 + text = self.convert_osc(text) + for match in self.ANSI_CSI_RE.finditer(text): + start, end = match.span() + self.write_plain_text(text, cursor, start) + self.convert_ansi(*match.groups()) + cursor = end + self.write_plain_text(text, cursor, len(text)) + + + def write_plain_text(self, text, start, end): + if start < end: + self.wrapped.write(text[start:end]) + self.wrapped.flush() + + + def convert_ansi(self, paramstring, command): + if self.convert: + params = self.extract_params(command, paramstring) + self.call_win32(command, params) + + + def extract_params(self, command, paramstring): + if command in 'Hf': + params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) + while len(params) < 2: + # defaults: + params = params + (1,) + else: + params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) + if len(params) == 0: + # defaults: + if command in 'JKm': + params = (0,) + elif command in 'ABCD': + params = (1,) + + return params + + + def call_win32(self, command, params): + if command == 'm': + for param in params: + if param in self.win32_calls: + func_args = self.win32_calls[param] + func = func_args[0] + args = func_args[1:] + kwargs = dict(on_stderr=self.on_stderr) + func(*args, **kwargs) + elif command in 'J': + winterm.erase_screen(params[0], on_stderr=self.on_stderr) + elif command in 'K': + winterm.erase_line(params[0], on_stderr=self.on_stderr) + elif command in 'Hf': # cursor position - absolute + winterm.set_cursor_position(params, on_stderr=self.on_stderr) + elif command in 'ABCD': # cursor position - relative + n = params[0] + # A - up, B - down, C - forward, D - back + x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] + winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) + + + def convert_osc(self, text): + for match in self.ANSI_OSC_RE.finditer(text): + start, end = match.span() + text = text[:start] + text[end:] + paramstring, command = match.groups() + if command == BEL: + if paramstring.count(";") == 1: + params = paramstring.split(";") + # 0 - change title and icon (we will only change title) + # 1 - change icon (we don't support this) + # 2 - change title + if params[0] in '02': + winterm.set_title(params[1]) + return text diff --git a/dbt-env/lib/python3.8/site-packages/colorama/initialise.py b/dbt-env/lib/python3.8/site-packages/colorama/initialise.py new file mode 100644 index 0000000..430d066 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/colorama/initialise.py @@ -0,0 +1,80 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
+import atexit +import contextlib +import sys + +from .ansitowin32 import AnsiToWin32 + + +orig_stdout = None +orig_stderr = None + +wrapped_stdout = None +wrapped_stderr = None + +atexit_done = False + + +def reset_all(): + if AnsiToWin32 is not None: # Issue #74: objects might become None at exit + AnsiToWin32(orig_stdout).reset_all() + + +def init(autoreset=False, convert=None, strip=None, wrap=True): + + if not wrap and any([autoreset, convert, strip]): + raise ValueError('wrap=False conflicts with any other arg=True') + + global wrapped_stdout, wrapped_stderr + global orig_stdout, orig_stderr + + orig_stdout = sys.stdout + orig_stderr = sys.stderr + + if sys.stdout is None: + wrapped_stdout = None + else: + sys.stdout = wrapped_stdout = \ + wrap_stream(orig_stdout, convert, strip, autoreset, wrap) + if sys.stderr is None: + wrapped_stderr = None + else: + sys.stderr = wrapped_stderr = \ + wrap_stream(orig_stderr, convert, strip, autoreset, wrap) + + global atexit_done + if not atexit_done: + atexit.register(reset_all) + atexit_done = True + + +def deinit(): + if orig_stdout is not None: + sys.stdout = orig_stdout + if orig_stderr is not None: + sys.stderr = orig_stderr + + +@contextlib.contextmanager +def colorama_text(*args, **kwargs): + init(*args, **kwargs) + try: + yield + finally: + deinit() + + +def reinit(): + if wrapped_stdout is not None: + sys.stdout = wrapped_stdout + if wrapped_stderr is not None: + sys.stderr = wrapped_stderr + + +def wrap_stream(stream, convert, strip, autoreset, wrap): + if wrap: + wrapper = AnsiToWin32(stream, + convert=convert, strip=strip, autoreset=autoreset) + if wrapper.should_wrap(): + stream = wrapper.stream + return stream diff --git a/dbt-env/lib/python3.8/site-packages/colorama/win32.py b/dbt-env/lib/python3.8/site-packages/colorama/win32.py new file mode 100644 index 0000000..c2d8360 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/colorama/win32.py @@ -0,0 +1,152 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
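# Editor-added usage sketch, not part of the upstream colorama source,
# illustrating the initialise.py helpers added above: init() replaces
# sys.stdout/sys.stderr with AnsiToWin32 wrappers when wrapping is useful,
# deinit() restores the original streams, and colorama_text() wraps a block
# with an init()/deinit() pair.
from colorama import init, deinit, colorama_text, Fore

init(autoreset=True)      # colors are reset automatically after every write
print(Fore.GREEN + 'ok')  # no explicit Style.RESET_ALL needed with autoreset
deinit()

with colorama_text():     # context-manager form of the same init()/deinit() pair
    print(Fore.CYAN + 'inside the context manager')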
+ +# from winbase.h +STDOUT = -11 +STDERR = -12 + +try: + import ctypes + from ctypes import LibraryLoader + windll = LibraryLoader(ctypes.WinDLL) + from ctypes import wintypes +except (AttributeError, ImportError): + windll = None + SetConsoleTextAttribute = lambda *_: None + winapi_test = lambda *_: None +else: + from ctypes import byref, Structure, c_char, POINTER + + COORD = wintypes._COORD + + class CONSOLE_SCREEN_BUFFER_INFO(Structure): + """struct in wincon.h.""" + _fields_ = [ + ("dwSize", COORD), + ("dwCursorPosition", COORD), + ("wAttributes", wintypes.WORD), + ("srWindow", wintypes.SMALL_RECT), + ("dwMaximumWindowSize", COORD), + ] + def __str__(self): + return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( + self.dwSize.Y, self.dwSize.X + , self.dwCursorPosition.Y, self.dwCursorPosition.X + , self.wAttributes + , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right + , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X + ) + + _GetStdHandle = windll.kernel32.GetStdHandle + _GetStdHandle.argtypes = [ + wintypes.DWORD, + ] + _GetStdHandle.restype = wintypes.HANDLE + + _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo + _GetConsoleScreenBufferInfo.argtypes = [ + wintypes.HANDLE, + POINTER(CONSOLE_SCREEN_BUFFER_INFO), + ] + _GetConsoleScreenBufferInfo.restype = wintypes.BOOL + + _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute + _SetConsoleTextAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + ] + _SetConsoleTextAttribute.restype = wintypes.BOOL + + _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition + _SetConsoleCursorPosition.argtypes = [ + wintypes.HANDLE, + COORD, + ] + _SetConsoleCursorPosition.restype = wintypes.BOOL + + _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA + _FillConsoleOutputCharacterA.argtypes = [ + wintypes.HANDLE, + c_char, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputCharacterA.restype = wintypes.BOOL + + _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute + _FillConsoleOutputAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputAttribute.restype = wintypes.BOOL + + _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW + _SetConsoleTitleW.argtypes = [ + wintypes.LPCWSTR + ] + _SetConsoleTitleW.restype = wintypes.BOOL + + def _winapi_test(handle): + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return bool(success) + + def winapi_test(): + return any(_winapi_test(h) for h in + (_GetStdHandle(STDOUT), _GetStdHandle(STDERR))) + + def GetConsoleScreenBufferInfo(stream_id=STDOUT): + handle = _GetStdHandle(stream_id) + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return csbi + + def SetConsoleTextAttribute(stream_id, attrs): + handle = _GetStdHandle(stream_id) + return _SetConsoleTextAttribute(handle, attrs) + + def SetConsoleCursorPosition(stream_id, position, adjust=True): + position = COORD(*position) + # If the position is out of range, do nothing. + if position.Y <= 0 or position.X <= 0: + return + # Adjust for Windows' SetConsoleCursorPosition: + # 1. being 0-based, while ANSI is 1-based. + # 2. expecting (x,y), while ANSI uses (y,x). 
+ adjusted_position = COORD(position.Y - 1, position.X - 1) + if adjust: + # Adjust for viewport's scroll position + sr = GetConsoleScreenBufferInfo(STDOUT).srWindow + adjusted_position.Y += sr.Top + adjusted_position.X += sr.Left + # Resume normal processing + handle = _GetStdHandle(stream_id) + return _SetConsoleCursorPosition(handle, adjusted_position) + + def FillConsoleOutputCharacter(stream_id, char, length, start): + handle = _GetStdHandle(stream_id) + char = c_char(char.encode()) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + success = _FillConsoleOutputCharacterA( + handle, char, length, start, byref(num_written)) + return num_written.value + + def FillConsoleOutputAttribute(stream_id, attr, length, start): + ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' + handle = _GetStdHandle(stream_id) + attribute = wintypes.WORD(attr) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + return _FillConsoleOutputAttribute( + handle, attribute, length, start, byref(num_written)) + + def SetConsoleTitle(title): + return _SetConsoleTitleW(title) diff --git a/dbt-env/lib/python3.8/site-packages/colorama/winterm.py b/dbt-env/lib/python3.8/site-packages/colorama/winterm.py new file mode 100644 index 0000000..0fdb4ec --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/colorama/winterm.py @@ -0,0 +1,169 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +from . import win32 + + +# from wincon.h +class WinColor(object): + BLACK = 0 + BLUE = 1 + GREEN = 2 + CYAN = 3 + RED = 4 + MAGENTA = 5 + YELLOW = 6 + GREY = 7 + +# from wincon.h +class WinStyle(object): + NORMAL = 0x00 # dim text, dim background + BRIGHT = 0x08 # bright text, dim background + BRIGHT_BACKGROUND = 0x80 # dim text, bright background + +class WinTerm(object): + + def __init__(self): + self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes + self.set_attrs(self._default) + self._default_fore = self._fore + self._default_back = self._back + self._default_style = self._style + # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. + # So that LIGHT_EX colors and BRIGHT style do not clobber each other, + # we track them separately, since LIGHT_EX is overwritten by Fore/Back + # and BRIGHT is overwritten by Style codes. 
+ self._light = 0 + + def get_attrs(self): + return self._fore + self._back * 16 + (self._style | self._light) + + def set_attrs(self, value): + self._fore = value & 7 + self._back = (value >> 4) & 7 + self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) + + def reset_all(self, on_stderr=None): + self.set_attrs(self._default) + self.set_console(attrs=self._default) + self._light = 0 + + def fore(self, fore=None, light=False, on_stderr=False): + if fore is None: + fore = self._default_fore + self._fore = fore + # Emulate LIGHT_EX with BRIGHT Style + if light: + self._light |= WinStyle.BRIGHT + else: + self._light &= ~WinStyle.BRIGHT + self.set_console(on_stderr=on_stderr) + + def back(self, back=None, light=False, on_stderr=False): + if back is None: + back = self._default_back + self._back = back + # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style + if light: + self._light |= WinStyle.BRIGHT_BACKGROUND + else: + self._light &= ~WinStyle.BRIGHT_BACKGROUND + self.set_console(on_stderr=on_stderr) + + def style(self, style=None, on_stderr=False): + if style is None: + style = self._default_style + self._style = style + self.set_console(on_stderr=on_stderr) + + def set_console(self, attrs=None, on_stderr=False): + if attrs is None: + attrs = self.get_attrs() + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleTextAttribute(handle, attrs) + + def get_position(self, handle): + position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition + # Because Windows coordinates are 0-based, + # and win32.SetConsoleCursorPosition expects 1-based. + position.X += 1 + position.Y += 1 + return position + + def set_cursor_position(self, position=None, on_stderr=False): + if position is None: + # I'm not currently tracking the position, so there is no default. + # position = self.get_position() + return + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleCursorPosition(handle, position) + + def cursor_adjust(self, x, y, on_stderr=False): + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + position = self.get_position(handle) + adjusted_position = (position.Y + y, position.X + x) + win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) + + def erase_screen(self, mode=0, on_stderr=False): + # 0 should clear from the cursor to the end of the screen. + # 1 should clear from the cursor to the beginning of the screen. 
+ # 2 should clear the entire screen, and move cursor to (1,1) + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + csbi = win32.GetConsoleScreenBufferInfo(handle) + # get the number of character cells in the current buffer + cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y + # get number of character cells before current cursor position + cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X + if mode == 0: + from_coord = csbi.dwCursorPosition + cells_to_erase = cells_in_screen - cells_before_cursor + elif mode == 1: + from_coord = win32.COORD(0, 0) + cells_to_erase = cells_before_cursor + elif mode == 2: + from_coord = win32.COORD(0, 0) + cells_to_erase = cells_in_screen + else: + # invalid mode + return + # fill the entire screen with blanks + win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) + # now set the buffer's attributes accordingly + win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) + if mode == 2: + # put the cursor where needed + win32.SetConsoleCursorPosition(handle, (1, 1)) + + def erase_line(self, mode=0, on_stderr=False): + # 0 should clear from the cursor to the end of the line. + # 1 should clear from the cursor to the beginning of the line. + # 2 should clear the entire line. + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + csbi = win32.GetConsoleScreenBufferInfo(handle) + if mode == 0: + from_coord = csbi.dwCursorPosition + cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X + elif mode == 1: + from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) + cells_to_erase = csbi.dwCursorPosition.X + elif mode == 2: + from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) + cells_to_erase = csbi.dwSize.X + else: + # invalid mode + return + # fill the entire screen with blanks + win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) + # now set the buffer's attributes accordingly + win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) + + def set_title(self, title): + win32.SetConsoleTitle(title) diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/__init__.py b/dbt-env/lib/python3.8/site-packages/dateutil/__init__.py new file mode 100644 index 0000000..0defb82 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/__init__.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +try: + from ._version import version as __version__ +except ImportError: + __version__ = 'unknown' + +__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz', + 'utils', 'zoneinfo'] diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..063c948 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/_common.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/_common.cpython-38.pyc new file mode 100644 index 0000000..8c5d2e7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/_common.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/_version.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/_version.cpython-38.pyc new file mode 100644 index 0000000..4709002 Binary files /dev/null and 
b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/_version.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/easter.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/easter.cpython-38.pyc new file mode 100644 index 0000000..c44d346 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/easter.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/relativedelta.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/relativedelta.cpython-38.pyc new file mode 100644 index 0000000..77579df Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/relativedelta.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/rrule.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/rrule.cpython-38.pyc new file mode 100644 index 0000000..88a2c74 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/rrule.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/tzwin.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/tzwin.cpython-38.pyc new file mode 100644 index 0000000..b065098 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/tzwin.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/utils.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000..9196b44 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/__pycache__/utils.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/_common.py b/dbt-env/lib/python3.8/site-packages/dateutil/_common.py new file mode 100644 index 0000000..4eb2659 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/_common.py @@ -0,0 +1,43 @@ +""" +Common code used in multiple modules. 
+""" + + +class weekday(object): + __slots__ = ["weekday", "n"] + + def __init__(self, weekday, n=None): + self.weekday = weekday + self.n = n + + def __call__(self, n): + if n == self.n: + return self + else: + return self.__class__(self.weekday, n) + + def __eq__(self, other): + try: + if self.weekday != other.weekday or self.n != other.n: + return False + except AttributeError: + return False + return True + + def __hash__(self): + return hash(( + self.weekday, + self.n, + )) + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday] + if not self.n: + return s + else: + return "%s(%+d)" % (s, self.n) + +# vim:ts=4:sw=4:et diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/_version.py b/dbt-env/lib/python3.8/site-packages/dateutil/_version.py new file mode 100644 index 0000000..b723056 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/_version.py @@ -0,0 +1,5 @@ +# coding: utf-8 +# file generated by setuptools_scm +# don't change, don't track in version control +version = '2.8.2' +version_tuple = (2, 8, 2) diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/easter.py b/dbt-env/lib/python3.8/site-packages/dateutil/easter.py new file mode 100644 index 0000000..f74d1f7 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/easter.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic Easter computing method for any given year, using +Western, Orthodox or Julian algorithms. +""" + +import datetime + +__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"] + +EASTER_JULIAN = 1 +EASTER_ORTHODOX = 2 +EASTER_WESTERN = 3 + + +def easter(year, method=EASTER_WESTERN): + """ + This method was ported from the work done by GM Arts, + on top of the algorithm by Claus Tondering, which was + based in part on the algorithm of Ouding (1940), as + quoted in "Explanatory Supplement to the Astronomical + Almanac", P. Kenneth Seidelmann, editor. + + This algorithm implements three different Easter + calculation methods: + + 1. Original calculation in Julian calendar, valid in + dates after 326 AD + 2. Original method, with date converted to Gregorian + calendar, valid in years 1583 to 4099 + 3. Revised method, in Gregorian calendar, valid in + years 1583 to 4099 as well + + These methods are represented by the constants: + + * ``EASTER_JULIAN = 1`` + * ``EASTER_ORTHODOX = 2`` + * ``EASTER_WESTERN = 3`` + + The default method is method 3. 
+ + More about the algorithm may be found at: + + `GM Arts: Easter Algorithms `_ + + and + + `The Calendar FAQ: Easter `_ + + """ + + if not (1 <= method <= 3): + raise ValueError("invalid method") + + # g - Golden year - 1 + # c - Century + # h - (23 - Epact) mod 30 + # i - Number of days from March 21 to Paschal Full Moon + # j - Weekday for PFM (0=Sunday, etc) + # p - Number of days from March 21 to Sunday on or before PFM + # (-6 to 28 methods 1 & 3, to 56 for method 2) + # e - Extra days to add for method 2 (converting Julian + # date to Gregorian date) + + y = year + g = y % 19 + e = 0 + if method < 3: + # Old method + i = (19*g + 15) % 30 + j = (y + y//4 + i) % 7 + if method == 2: + # Extra dates to convert Julian to Gregorian date + e = 10 + if y > 1600: + e = e + y//100 - 16 - (y//100 - 16)//4 + else: + # New method + c = y//100 + h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30 + i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11)) + j = (y + y//4 + i + 2 - c + c//4) % 7 + + # p can be from -6 to 56 corresponding to dates 22 March to 23 May + # (later dates apply to method 2, although 23 May never actually occurs) + p = i - j + e + d = 1 + (p + 27 + (p + 6)//40) % 31 + m = 3 + (p + 26)//30 + return datetime.date(int(y), int(m), int(d)) diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/parser/__init__.py b/dbt-env/lib/python3.8/site-packages/dateutil/parser/__init__.py new file mode 100644 index 0000000..d174b0e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/parser/__init__.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +from ._parser import parse, parser, parserinfo, ParserError +from ._parser import DEFAULTPARSER, DEFAULTTZPARSER +from ._parser import UnknownTimezoneWarning + +from ._parser import __doc__ + +from .isoparser import isoparser, isoparse + +__all__ = ['parse', 'parser', 'parserinfo', + 'isoparse', 'isoparser', + 'ParserError', + 'UnknownTimezoneWarning'] + + +### +# Deprecate portions of the private interface so that downstream code that +# is improperly relying on it is given *some* notice. 
+ + +def __deprecated_private_func(f): + from functools import wraps + import warnings + + msg = ('{name} is a private function and may break without warning, ' + 'it will be moved and or renamed in future versions.') + msg = msg.format(name=f.__name__) + + @wraps(f) + def deprecated_func(*args, **kwargs): + warnings.warn(msg, DeprecationWarning) + return f(*args, **kwargs) + + return deprecated_func + +def __deprecate_private_class(c): + import warnings + + msg = ('{name} is a private class and may break without warning, ' + 'it will be moved and or renamed in future versions.') + msg = msg.format(name=c.__name__) + + class private_class(c): + __doc__ = c.__doc__ + + def __init__(self, *args, **kwargs): + warnings.warn(msg, DeprecationWarning) + super(private_class, self).__init__(*args, **kwargs) + + private_class.__name__ = c.__name__ + + return private_class + + +from ._parser import _timelex, _resultbase +from ._parser import _tzparser, _parsetz + +_timelex = __deprecate_private_class(_timelex) +_tzparser = __deprecate_private_class(_tzparser) +_resultbase = __deprecate_private_class(_resultbase) +_parsetz = __deprecated_private_func(_parsetz) diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/parser/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/parser/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..081ed89 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/parser/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/parser/__pycache__/_parser.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/parser/__pycache__/_parser.cpython-38.pyc new file mode 100644 index 0000000..b50b32c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/parser/__pycache__/_parser.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/parser/__pycache__/isoparser.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/parser/__pycache__/isoparser.cpython-38.pyc new file mode 100644 index 0000000..90e9b3a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/parser/__pycache__/isoparser.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/parser/_parser.py b/dbt-env/lib/python3.8/site-packages/dateutil/parser/_parser.py new file mode 100644 index 0000000..37d1663 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/parser/_parser.py @@ -0,0 +1,1613 @@ +# -*- coding: utf-8 -*- +""" +This module offers a generic date/time string parser which is able to parse +most known formats to represent a date and/or time. + +This module attempts to be forgiving with regards to unlikely input formats, +returning a datetime object even for dates which are ambiguous. If an element +of a date/time stamp is omitted, the following rules are applied: + +- If AM or PM is left unspecified, a 24-hour clock is assumed, however, an hour + on a 12-hour clock (``0 <= hour <= 12``) *must* be specified if AM or PM is + specified. +- If a time zone is omitted, a timezone-naive datetime is returned. + +If any other elements are missing, they are taken from the +:class:`datetime.datetime` object passed to the parameter ``default``. If this +results in a day number exceeding the valid number of days per month, the +value falls back to the end of the month. 
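# Editor-added usage sketch, not part of the upstream dateutil source,
# illustrating the fallback rules described above: fields missing from the
# string are taken from ``default``, and an impossible day number is clamped
# to the last day of the month.
import datetime
from dateutil import parser

default = datetime.datetime(2021, 1, 31)
# Only the month can be parsed from the string; year and day come from
# ``default``, and since 31 February does not exist the day falls back to 28.
assert parser.parse("February", default=default) == datetime.datetime(2021, 2, 28)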
+ +Additional resources about date/time string formats can be found below: + +- `A summary of the international standard date and time notation + `_ +- `W3C Date and Time Formats `_ +- `Time Formats (Planetary Rings Node) `_ +- `CPAN ParseDate module + `_ +- `Java SimpleDateFormat Class + `_ +""" +from __future__ import unicode_literals + +import datetime +import re +import string +import time +import warnings + +from calendar import monthrange +from io import StringIO + +import six +from six import integer_types, text_type + +from decimal import Decimal + +from warnings import warn + +from .. import relativedelta +from .. import tz + +__all__ = ["parse", "parserinfo", "ParserError"] + + +# TODO: pandas.core.tools.datetimes imports this explicitly. Might be worth +# making public and/or figuring out if there is something we can +# take off their plate. +class _timelex(object): + # Fractional seconds are sometimes split by a comma + _split_decimal = re.compile("([.,])") + + def __init__(self, instream): + if isinstance(instream, (bytes, bytearray)): + instream = instream.decode() + + if isinstance(instream, text_type): + instream = StringIO(instream) + elif getattr(instream, 'read', None) is None: + raise TypeError('Parser must be a string or character stream, not ' + '{itype}'.format(itype=instream.__class__.__name__)) + + self.instream = instream + self.charstack = [] + self.tokenstack = [] + self.eof = False + + def get_token(self): + """ + This function breaks the time string into lexical units (tokens), which + can be parsed by the parser. Lexical units are demarcated by changes in + the character set, so any continuous string of letters is considered + one unit, any continuous string of numbers is considered one unit. + + The main complication arises from the fact that dots ('.') can be used + both as separators (e.g. "Sep.20.2009") or decimal points (e.g. + "4:30:21.447"). As such, it is necessary to read the full context of + any dot-separated strings before breaking it into tokens; as such, this + function maintains a "token stack", for when the ambiguous context + demands that multiple tokens be parsed at once. + """ + if self.tokenstack: + return self.tokenstack.pop(0) + + seenletters = False + token = None + state = None + + while not self.eof: + # We only realize that we've reached the end of a token when we + # find a character that's not part of the current token - since + # that character may be part of the next token, it's stored in the + # charstack. + if self.charstack: + nextchar = self.charstack.pop(0) + else: + nextchar = self.instream.read(1) + while nextchar == '\x00': + nextchar = self.instream.read(1) + + if not nextchar: + self.eof = True + break + elif not state: + # First character of the token - determines if we're starting + # to parse a word, a number or something else. + token = nextchar + if self.isword(nextchar): + state = 'a' + elif self.isnum(nextchar): + state = '0' + elif self.isspace(nextchar): + token = ' ' + break # emit token + else: + break # emit token + elif state == 'a': + # If we've already started reading a word, we keep reading + # letters until we find something that's not part of a word. + seenletters = True + if self.isword(nextchar): + token += nextchar + elif nextchar == '.': + token += nextchar + state = 'a.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0': + # If we've already started reading a number, we keep reading + # numbers until we find something that doesn't fit. 
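# Editor-added sketch, not part of the upstream dateutil source; _timelex is a
# private helper, so this is illustration only. The state machine in get_token
# groups runs of letters and digits, keeps dots attached to the current run,
# and splits them back out afterwards when they turn out to be separators.
from dateutil.parser._parser import _timelex

assert _timelex.split("Sep.20.2009") == ['Sep', '.', '20', '.', '2009']
assert _timelex.split("4:30:21.447") == ['4', ':', '30', ':', '21.447']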
+ if self.isnum(nextchar): + token += nextchar + elif nextchar == '.' or (nextchar == ',' and len(token) >= 2): + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == 'a.': + # If we've seen some letters and a dot separator, continue + # parsing, and the tokens will be broken up later. + seenletters = True + if nextchar == '.' or self.isword(nextchar): + token += nextchar + elif self.isnum(nextchar) and token[-1] == '.': + token += nextchar + state = '0.' + else: + self.charstack.append(nextchar) + break # emit token + elif state == '0.': + # If we've seen at least one dot separator, keep going, we'll + # break up the tokens later. + if nextchar == '.' or self.isnum(nextchar): + token += nextchar + elif self.isword(nextchar) and token[-1] == '.': + token += nextchar + state = 'a.' + else: + self.charstack.append(nextchar) + break # emit token + + if (state in ('a.', '0.') and (seenletters or token.count('.') > 1 or + token[-1] in '.,')): + l = self._split_decimal.split(token) + token = l[0] + for tok in l[1:]: + if tok: + self.tokenstack.append(tok) + + if state == '0.' and token.count('.') == 0: + token = token.replace(',', '.') + + return token + + def __iter__(self): + return self + + def __next__(self): + token = self.get_token() + if token is None: + raise StopIteration + + return token + + def next(self): + return self.__next__() # Python 2.x support + + @classmethod + def split(cls, s): + return list(cls(s)) + + @classmethod + def isword(cls, nextchar): + """ Whether or not the next character is part of a word """ + return nextchar.isalpha() + + @classmethod + def isnum(cls, nextchar): + """ Whether the next character is part of a number """ + return nextchar.isdigit() + + @classmethod + def isspace(cls, nextchar): + """ Whether the next character is whitespace """ + return nextchar.isspace() + + +class _resultbase(object): + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def _repr(self, classname): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (classname, ", ".join(l)) + + def __len__(self): + return (sum(getattr(self, attr) is not None + for attr in self.__slots__)) + + def __repr__(self): + return self._repr(self.__class__.__name__) + + +class parserinfo(object): + """ + Class which handles what inputs are accepted. Subclass this to customize + the language and acceptable values for each parameter. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. Default is ``False``. + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + Default is ``False``. 
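# Editor-added usage sketch, not part of the upstream dateutil source, showing
# how dayfirst/yearfirst resolve the ambiguous date used in the docstring above:
import datetime
from dateutil import parser

assert parser.parse("01/05/09") == datetime.datetime(2009, 1, 5)                  # month first (default)
assert parser.parse("01/05/09", dayfirst=True) == datetime.datetime(2009, 5, 1)   # day first
assert parser.parse("01/05/09", yearfirst=True) == datetime.datetime(2001, 5, 9)  # year first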
+ """ + + # m from a.m/p.m, t from ISO T separator + JUMP = [" ", ".", ",", ";", "-", "/", "'", + "at", "on", "and", "ad", "m", "t", "of", + "st", "nd", "rd", "th"] + + WEEKDAYS = [("Mon", "Monday"), + ("Tue", "Tuesday"), # TODO: "Tues" + ("Wed", "Wednesday"), + ("Thu", "Thursday"), # TODO: "Thurs" + ("Fri", "Friday"), + ("Sat", "Saturday"), + ("Sun", "Sunday")] + MONTHS = [("Jan", "January"), + ("Feb", "February"), # TODO: "Febr" + ("Mar", "March"), + ("Apr", "April"), + ("May", "May"), + ("Jun", "June"), + ("Jul", "July"), + ("Aug", "August"), + ("Sep", "Sept", "September"), + ("Oct", "October"), + ("Nov", "November"), + ("Dec", "December")] + HMS = [("h", "hour", "hours"), + ("m", "minute", "minutes"), + ("s", "second", "seconds")] + AMPM = [("am", "a"), + ("pm", "p")] + UTCZONE = ["UTC", "GMT", "Z", "z"] + PERTAIN = ["of"] + TZOFFSET = {} + # TODO: ERA = ["AD", "BC", "CE", "BCE", "Stardate", + # "Anno Domini", "Year of Our Lord"] + + def __init__(self, dayfirst=False, yearfirst=False): + self._jump = self._convert(self.JUMP) + self._weekdays = self._convert(self.WEEKDAYS) + self._months = self._convert(self.MONTHS) + self._hms = self._convert(self.HMS) + self._ampm = self._convert(self.AMPM) + self._utczone = self._convert(self.UTCZONE) + self._pertain = self._convert(self.PERTAIN) + + self.dayfirst = dayfirst + self.yearfirst = yearfirst + + self._year = time.localtime().tm_year + self._century = self._year // 100 * 100 + + def _convert(self, lst): + dct = {} + for i, v in enumerate(lst): + if isinstance(v, tuple): + for v in v: + dct[v.lower()] = i + else: + dct[v.lower()] = i + return dct + + def jump(self, name): + return name.lower() in self._jump + + def weekday(self, name): + try: + return self._weekdays[name.lower()] + except KeyError: + pass + return None + + def month(self, name): + try: + return self._months[name.lower()] + 1 + except KeyError: + pass + return None + + def hms(self, name): + try: + return self._hms[name.lower()] + except KeyError: + return None + + def ampm(self, name): + try: + return self._ampm[name.lower()] + except KeyError: + return None + + def pertain(self, name): + return name.lower() in self._pertain + + def utczone(self, name): + return name.lower() in self._utczone + + def tzoffset(self, name): + if name in self._utczone: + return 0 + + return self.TZOFFSET.get(name) + + def convertyear(self, year, century_specified=False): + """ + Converts two-digit years to year within [-50, 49] + range of self._year (current local time) + """ + + # Function contract is that the year is always positive + assert year >= 0 + + if year < 100 and not century_specified: + # assume current century to start + year += self._century + + if year >= self._year + 50: # if too far in future + year -= 100 + elif year < self._year - 50: # if too far in past + year += 100 + + return year + + def validate(self, res): + # move to info + if res.year is not None: + res.year = self.convertyear(res.year, res.century_specified) + + if ((res.tzoffset == 0 and not res.tzname) or + (res.tzname == 'Z' or res.tzname == 'z')): + res.tzname = "UTC" + res.tzoffset = 0 + elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname): + res.tzoffset = 0 + return True + + +class _ymd(list): + def __init__(self, *args, **kwargs): + super(self.__class__, self).__init__(*args, **kwargs) + self.century_specified = False + self.dstridx = None + self.mstridx = None + self.ystridx = None + + @property + def has_year(self): + return self.ystridx is not None + + @property + def has_month(self): + 
return self.mstridx is not None + + @property + def has_day(self): + return self.dstridx is not None + + def could_be_day(self, value): + if self.has_day: + return False + elif not self.has_month: + return 1 <= value <= 31 + elif not self.has_year: + # Be permissive, assume leap year + month = self[self.mstridx] + return 1 <= value <= monthrange(2000, month)[1] + else: + month = self[self.mstridx] + year = self[self.ystridx] + return 1 <= value <= monthrange(year, month)[1] + + def append(self, val, label=None): + if hasattr(val, '__len__'): + if val.isdigit() and len(val) > 2: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + elif val > 100: + self.century_specified = True + if label not in [None, 'Y']: # pragma: no cover + raise ValueError(label) + label = 'Y' + + super(self.__class__, self).append(int(val)) + + if label == 'M': + if self.has_month: + raise ValueError('Month is already set') + self.mstridx = len(self) - 1 + elif label == 'D': + if self.has_day: + raise ValueError('Day is already set') + self.dstridx = len(self) - 1 + elif label == 'Y': + if self.has_year: + raise ValueError('Year is already set') + self.ystridx = len(self) - 1 + + def _resolve_from_stridxs(self, strids): + """ + Try to resolve the identities of year/month/day elements using + ystridx, mstridx, and dstridx, if enough of these are specified. + """ + if len(self) == 3 and len(strids) == 2: + # we can back out the remaining stridx value + missing = [x for x in range(3) if x not in strids.values()] + key = [x for x in ['y', 'm', 'd'] if x not in strids] + assert len(missing) == len(key) == 1 + key = key[0] + val = missing[0] + strids[key] = val + + assert len(self) == len(strids) # otherwise this should not be called + out = {key: self[strids[key]] for key in strids} + return (out.get('y'), out.get('m'), out.get('d')) + + def resolve_ymd(self, yearfirst, dayfirst): + len_ymd = len(self) + year, month, day = (None, None, None) + + strids = (('y', self.ystridx), + ('m', self.mstridx), + ('d', self.dstridx)) + + strids = {key: val for key, val in strids if val is not None} + if (len(self) == len(strids) > 0 or + (len(self) == 3 and len(strids) == 2)): + return self._resolve_from_stridxs(strids) + + mstridx = self.mstridx + + if len_ymd > 3: + raise ValueError("More than three YMD values") + elif len_ymd == 1 or (mstridx is not None and len_ymd == 2): + # One member, or two members with a month string + if mstridx is not None: + month = self[mstridx] + # since mstridx is 0 or 1, self[mstridx-1] always + # looks up the other element + other = self[mstridx - 1] + else: + other = self[0] + + if len_ymd > 1 or mstridx is None: + if other > 31: + year = other + else: + day = other + + elif len_ymd == 2: + # Two members with numbers + if self[0] > 31: + # 99-01 + year, month = self + elif self[1] > 31: + # 01-99 + month, year = self + elif dayfirst and self[1] <= 12: + # 13-01 + day, month = self + else: + # 01-13 + month, day = self + + elif len_ymd == 3: + # Three members + if mstridx == 0: + if self[1] > 31: + # Apr-2003-25 + month, year, day = self + else: + month, day, year = self + elif mstridx == 1: + if self[0] > 31 or (yearfirst and self[2] <= 31): + # 99-Jan-01 + year, month, day = self + else: + # 01-Jan-01 + # Give precedence to day-first, since + # two-digit years is usually hand-written. + day, month, year = self + + elif mstridx == 2: + # WTF!? 
+ if self[1] > 31: + # 01-99-Jan + day, year, month = self + else: + # 99-01-Jan + year, day, month = self + + else: + if (self[0] > 31 or + self.ystridx == 0 or + (yearfirst and self[1] <= 12 and self[2] <= 31)): + # 99-01-01 + if dayfirst and self[2] <= 12: + year, day, month = self + else: + year, month, day = self + elif self[0] > 12 or (dayfirst and self[1] <= 12): + # 13-01-01 + day, month, year = self + else: + # 01-13-01 + month, day, year = self + + return year, month, day + + +class parser(object): + def __init__(self, info=None): + self.info = info or parserinfo() + + def parse(self, timestr, default=None, + ignoretz=False, tzinfos=None, **kwargs): + """ + Parse the date/time string into a :class:`datetime.datetime` object. + + :param timestr: + Any date/time string using the supported formats. + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a + naive :class:`datetime.datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param \\*\\*kwargs: + Keyword arguments as passed to ``_parse()``. + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ParserError: + Raised for invalid or unknown string format, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date + would be created. + + :raises TypeError: + Raised for non-string or character stream input. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
+ """ + + if default is None: + default = datetime.datetime.now().replace(hour=0, minute=0, + second=0, microsecond=0) + + res, skipped_tokens = self._parse(timestr, **kwargs) + + if res is None: + raise ParserError("Unknown string format: %s", timestr) + + if len(res) == 0: + raise ParserError("String does not contain a date: %s", timestr) + + try: + ret = self._build_naive(res, default) + except ValueError as e: + six.raise_from(ParserError(str(e) + ": %s", timestr), e) + + if not ignoretz: + ret = self._build_tzaware(ret, res, tzinfos) + + if kwargs.get('fuzzy_with_tokens', False): + return ret, skipped_tokens + else: + return ret + + class _result(_resultbase): + __slots__ = ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond", + "tzname", "tzoffset", "ampm","any_unused_tokens"] + + def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False, + fuzzy_with_tokens=False): + """ + Private method which performs the heavy lifting of parsing, called from + ``parse()``, which passes on its ``kwargs`` to this function. + + :param timestr: + The string to parse. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM + and YMD. If set to ``None``, this value is retrieved from the + current :class:`parserinfo` object (which itself defaults to + ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken + to be the year, otherwise the last number is taken to be the year. + If this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. 
doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + """ + if fuzzy_with_tokens: + fuzzy = True + + info = self.info + + if dayfirst is None: + dayfirst = info.dayfirst + + if yearfirst is None: + yearfirst = info.yearfirst + + res = self._result() + l = _timelex.split(timestr) # Splits the timestr into tokens + + skipped_idxs = [] + + # year/month/day list + ymd = _ymd() + + len_l = len(l) + i = 0 + try: + while i < len_l: + + # Check if it's a number + value_repr = l[i] + try: + value = float(value_repr) + except ValueError: + value = None + + if value is not None: + # Numeric token + i = self._parse_numeric_token(l, i, info, ymd, res, fuzzy) + + # Check weekday + elif info.weekday(l[i]) is not None: + value = info.weekday(l[i]) + res.weekday = value + + # Check month name + elif info.month(l[i]) is not None: + value = info.month(l[i]) + ymd.append(value, 'M') + + if i + 1 < len_l: + if l[i + 1] in ('-', '/'): + # Jan-01[-99] + sep = l[i + 1] + ymd.append(l[i + 2]) + + if i + 3 < len_l and l[i + 3] == sep: + # Jan-01-99 + ymd.append(l[i + 4]) + i += 2 + + i += 2 + + elif (i + 4 < len_l and l[i + 1] == l[i + 3] == ' ' and + info.pertain(l[i + 2])): + # Jan of 01 + # In this case, 01 is clearly year + if l[i + 4].isdigit(): + # Convert it here to become unambiguous + value = int(l[i + 4]) + year = str(info.convertyear(value)) + ymd.append(year, 'Y') + else: + # Wrong guess + pass + # TODO: not hit in tests + i += 4 + + # Check am/pm + elif info.ampm(l[i]) is not None: + value = info.ampm(l[i]) + val_is_ampm = self._ampm_valid(res.hour, res.ampm, fuzzy) + + if val_is_ampm: + res.hour = self._adjust_ampm(res.hour, value) + res.ampm = value + + elif fuzzy: + skipped_idxs.append(i) + + # Check for a timezone name + elif self._could_be_tzname(res.hour, res.tzname, res.tzoffset, l[i]): + res.tzname = l[i] + res.tzoffset = info.tzoffset(res.tzname) + + # Check for something like GMT+3, or BRST+3. Notice + # that it doesn't mean "I am 3 hours after GMT", but + # "my time +3 is GMT". If found, we reverse the + # logic so that timezone parsing code will get it + # right. + if i + 1 < len_l and l[i + 1] in ('+', '-'): + l[i + 1] = ('+', '-')[l[i + 1] == '+'] + res.tzoffset = None + if info.utczone(res.tzname): + # With something like GMT+3, the timezone + # is *not* GMT. + res.tzname = None + + # Check for a numbered timezone + elif res.hour is not None and l[i] in ('+', '-'): + signal = (-1, 1)[l[i] == '+'] + len_li = len(l[i + 1]) + + # TODO: check that l[i + 1] is integer? + if len_li == 4: + # -0300 + hour_offset = int(l[i + 1][:2]) + min_offset = int(l[i + 1][2:]) + elif i + 2 < len_l and l[i + 2] == ':': + # -03:00 + hour_offset = int(l[i + 1]) + min_offset = int(l[i + 3]) # TODO: Check that l[i+3] is minute-like? 
+ i += 2 + elif len_li <= 2: + # -[0]3 + hour_offset = int(l[i + 1][:2]) + min_offset = 0 + else: + raise ValueError(timestr) + + res.tzoffset = signal * (hour_offset * 3600 + min_offset * 60) + + # Look for a timezone name between parenthesis + if (i + 5 < len_l and + info.jump(l[i + 2]) and l[i + 3] == '(' and + l[i + 5] == ')' and + 3 <= len(l[i + 4]) and + self._could_be_tzname(res.hour, res.tzname, + None, l[i + 4])): + # -0300 (BRST) + res.tzname = l[i + 4] + i += 4 + + i += 1 + + # Check jumps + elif not (info.jump(l[i]) or fuzzy): + raise ValueError(timestr) + + else: + skipped_idxs.append(i) + i += 1 + + # Process year/month/day + year, month, day = ymd.resolve_ymd(yearfirst, dayfirst) + + res.century_specified = ymd.century_specified + res.year = year + res.month = month + res.day = day + + except (IndexError, ValueError): + return None, None + + if not info.validate(res): + return None, None + + if fuzzy_with_tokens: + skipped_tokens = self._recombine_skipped(l, skipped_idxs) + return res, tuple(skipped_tokens) + else: + return res, None + + def _parse_numeric_token(self, tokens, idx, info, ymd, res, fuzzy): + # Token is a number + value_repr = tokens[idx] + try: + value = self._to_decimal(value_repr) + except Exception as e: + six.raise_from(ValueError('Unknown numeric token'), e) + + len_li = len(value_repr) + + len_l = len(tokens) + + if (len(ymd) == 3 and len_li in (2, 4) and + res.hour is None and + (idx + 1 >= len_l or + (tokens[idx + 1] != ':' and + info.hms(tokens[idx + 1]) is None))): + # 19990101T23[59] + s = tokens[idx] + res.hour = int(s[:2]) + + if len_li == 4: + res.minute = int(s[2:]) + + elif len_li == 6 or (len_li > 6 and tokens[idx].find('.') == 6): + # YYMMDD or HHMMSS[.ss] + s = tokens[idx] + + if not ymd and '.' not in tokens[idx]: + ymd.append(s[:2]) + ymd.append(s[2:4]) + ymd.append(s[4:]) + else: + # 19990101T235959[.59] + + # TODO: Check if res attributes already set. + res.hour = int(s[:2]) + res.minute = int(s[2:4]) + res.second, res.microsecond = self._parsems(s[4:]) + + elif len_li in (8, 12, 14): + # YYYYMMDD + s = tokens[idx] + ymd.append(s[:4], 'Y') + ymd.append(s[4:6]) + ymd.append(s[6:8]) + + if len_li > 8: + res.hour = int(s[8:10]) + res.minute = int(s[10:12]) + + if len_li > 12: + res.second = int(s[12:]) + + elif self._find_hms_idx(idx, tokens, info, allow_jump=True) is not None: + # HH[ ]h or MM[ ]m or SS[.ss][ ]s + hms_idx = self._find_hms_idx(idx, tokens, info, allow_jump=True) + (idx, hms) = self._parse_hms(idx, tokens, info, hms_idx) + if hms is not None: + # TODO: checking that hour/minute/second are not + # already set? + self._assign_hms(res, value_repr, hms) + + elif idx + 2 < len_l and tokens[idx + 1] == ':': + # HH:MM[:SS[.ss]] + res.hour = int(value) + value = self._to_decimal(tokens[idx + 2]) # TODO: try/except for this? 
+ (res.minute, res.second) = self._parse_min_sec(value) + + if idx + 4 < len_l and tokens[idx + 3] == ':': + res.second, res.microsecond = self._parsems(tokens[idx + 4]) + + idx += 2 + + idx += 2 + + elif idx + 1 < len_l and tokens[idx + 1] in ('-', '/', '.'): + sep = tokens[idx + 1] + ymd.append(value_repr) + + if idx + 2 < len_l and not info.jump(tokens[idx + 2]): + if tokens[idx + 2].isdigit(): + # 01-01[-01] + ymd.append(tokens[idx + 2]) + else: + # 01-Jan[-01] + value = info.month(tokens[idx + 2]) + + if value is not None: + ymd.append(value, 'M') + else: + raise ValueError() + + if idx + 3 < len_l and tokens[idx + 3] == sep: + # We have three members + value = info.month(tokens[idx + 4]) + + if value is not None: + ymd.append(value, 'M') + else: + ymd.append(tokens[idx + 4]) + idx += 2 + + idx += 1 + idx += 1 + + elif idx + 1 >= len_l or info.jump(tokens[idx + 1]): + if idx + 2 < len_l and info.ampm(tokens[idx + 2]) is not None: + # 12 am + hour = int(value) + res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 2])) + idx += 1 + else: + # Year, month or day + ymd.append(value) + idx += 1 + + elif info.ampm(tokens[idx + 1]) is not None and (0 <= value < 24): + # 12am + hour = int(value) + res.hour = self._adjust_ampm(hour, info.ampm(tokens[idx + 1])) + idx += 1 + + elif ymd.could_be_day(value): + ymd.append(value) + + elif not fuzzy: + raise ValueError() + + return idx + + def _find_hms_idx(self, idx, tokens, info, allow_jump): + len_l = len(tokens) + + if idx+1 < len_l and info.hms(tokens[idx+1]) is not None: + # There is an "h", "m", or "s" label following this token. We take + # assign the upcoming label to the current token. + # e.g. the "12" in 12h" + hms_idx = idx + 1 + + elif (allow_jump and idx+2 < len_l and tokens[idx+1] == ' ' and + info.hms(tokens[idx+2]) is not None): + # There is a space and then an "h", "m", or "s" label. + # e.g. the "12" in "12 h" + hms_idx = idx + 2 + + elif idx > 0 and info.hms(tokens[idx-1]) is not None: + # There is a "h", "m", or "s" preceding this token. Since neither + # of the previous cases was hit, there is no label following this + # token, so we use the previous label. + # e.g. the "04" in "12h04" + hms_idx = idx-1 + + elif (1 < idx == len_l-1 and tokens[idx-1] == ' ' and + info.hms(tokens[idx-2]) is not None): + # If we are looking at the final token, we allow for a + # backward-looking check to skip over a space. + # TODO: Are we sure this is the right condition here? + hms_idx = idx - 2 + + else: + hms_idx = None + + return hms_idx + + def _assign_hms(self, res, value_repr, hms): + # See GH issue #427, fixing float rounding + value = self._to_decimal(value_repr) + + if hms == 0: + # Hour + res.hour = int(value) + if value % 1: + res.minute = int(60*(value % 1)) + + elif hms == 1: + (res.minute, res.second) = self._parse_min_sec(value) + + elif hms == 2: + (res.second, res.microsecond) = self._parsems(value_repr) + + def _could_be_tzname(self, hour, tzname, tzoffset, token): + return (hour is not None and + tzname is None and + tzoffset is None and + len(token) <= 5 and + (all(x in string.ascii_uppercase for x in token) + or token in self.info.UTCZONE)) + + def _ampm_valid(self, hour, ampm, fuzzy): + """ + For fuzzy parsing, 'a' or 'am' (both valid English words) + may erroneously trigger the AM/PM flag. Deal with that + here. + """ + val_is_ampm = True + + # If there's already an AM/PM flag, this one isn't one. 
+ if fuzzy and ampm is not None: + val_is_ampm = False + + # If AM/PM is found and hour is not, raise a ValueError + if hour is None: + if fuzzy: + val_is_ampm = False + else: + raise ValueError('No hour specified with AM or PM flag.') + elif not 0 <= hour <= 12: + # If AM/PM is found, it's a 12 hour clock, so raise + # an error for invalid range + if fuzzy: + val_is_ampm = False + else: + raise ValueError('Invalid hour specified for 12-hour clock.') + + return val_is_ampm + + def _adjust_ampm(self, hour, ampm): + if hour < 12 and ampm == 1: + hour += 12 + elif hour == 12 and ampm == 0: + hour = 0 + return hour + + def _parse_min_sec(self, value): + # TODO: Every usage of this function sets res.second to the return + # value. Are there any cases where second will be returned as None and + # we *don't* want to set res.second = None? + minute = int(value) + second = None + + sec_remainder = value % 1 + if sec_remainder: + second = int(60 * sec_remainder) + return (minute, second) + + def _parse_hms(self, idx, tokens, info, hms_idx): + # TODO: Is this going to admit a lot of false-positives for when we + # just happen to have digits and "h", "m" or "s" characters in non-date + # text? I guess hex hashes won't have that problem, but there's plenty + # of random junk out there. + if hms_idx is None: + hms = None + new_idx = idx + elif hms_idx > idx: + hms = info.hms(tokens[hms_idx]) + new_idx = hms_idx + else: + # Looking backwards, increment one. + hms = info.hms(tokens[hms_idx]) + 1 + new_idx = idx + + return (new_idx, hms) + + # ------------------------------------------------------------------ + # Handling for individual tokens. These are kept as methods instead + # of functions for the sake of customizability via subclassing. + + def _parsems(self, value): + """Parse a I[.F] seconds value into (seconds, microseconds).""" + if "." not in value: + return int(value), 0 + else: + i, f = value.split(".") + return int(i), int(f.ljust(6, "0")[:6]) + + def _to_decimal(self, val): + try: + decimal_value = Decimal(val) + # See GH 662, edge case, infinite value should not be converted + # via `_to_decimal` + if not decimal_value.is_finite(): + raise ValueError("Converted decimal value is infinite or NaN") + except Exception as e: + msg = "Could not convert %s to decimal" % val + six.raise_from(ValueError(msg), e) + else: + return decimal_value + + # ------------------------------------------------------------------ + # Post-Parsing construction of datetime output. These are kept as + # methods instead of functions for the sake of customizability via + # subclassing. 
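+    # Reviewer's sketch (not part of upstream dateutil): _build_tzinfo() below
+    # accepts `tzinfos` either as a mapping or as a callable.  Assuming a
+    # mapping such as
+    #
+    #     tzinfos = {"BRST": -7200, "CST": tz.gettz("America/Chicago")}
+    #
+    # an integer value is wrapped in tz.tzoffset(), a tzinfo instance is used
+    # as-is, and a string is passed through tz.tzstr().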
+ + def _build_tzinfo(self, tzinfos, tzname, tzoffset): + if callable(tzinfos): + tzdata = tzinfos(tzname, tzoffset) + else: + tzdata = tzinfos.get(tzname) + # handle case where tzinfo is paased an options that returns None + # eg tzinfos = {'BRST' : None} + if isinstance(tzdata, datetime.tzinfo) or tzdata is None: + tzinfo = tzdata + elif isinstance(tzdata, text_type): + tzinfo = tz.tzstr(tzdata) + elif isinstance(tzdata, integer_types): + tzinfo = tz.tzoffset(tzname, tzdata) + else: + raise TypeError("Offset must be tzinfo subclass, tz string, " + "or int offset.") + return tzinfo + + def _build_tzaware(self, naive, res, tzinfos): + if (callable(tzinfos) or (tzinfos and res.tzname in tzinfos)): + tzinfo = self._build_tzinfo(tzinfos, res.tzname, res.tzoffset) + aware = naive.replace(tzinfo=tzinfo) + aware = self._assign_tzname(aware, res.tzname) + + elif res.tzname and res.tzname in time.tzname: + aware = naive.replace(tzinfo=tz.tzlocal()) + + # Handle ambiguous local datetime + aware = self._assign_tzname(aware, res.tzname) + + # This is mostly relevant for winter GMT zones parsed in the UK + if (aware.tzname() != res.tzname and + res.tzname in self.info.UTCZONE): + aware = aware.replace(tzinfo=tz.UTC) + + elif res.tzoffset == 0: + aware = naive.replace(tzinfo=tz.UTC) + + elif res.tzoffset: + aware = naive.replace(tzinfo=tz.tzoffset(res.tzname, res.tzoffset)) + + elif not res.tzname and not res.tzoffset: + # i.e. no timezone information was found. + aware = naive + + elif res.tzname: + # tz-like string was parsed but we don't know what to do + # with it + warnings.warn("tzname {tzname} identified but not understood. " + "Pass `tzinfos` argument in order to correctly " + "return a timezone-aware datetime. In a future " + "version, this will raise an " + "exception.".format(tzname=res.tzname), + category=UnknownTimezoneWarning) + aware = naive + + return aware + + def _build_naive(self, res, default): + repl = {} + for attr in ("year", "month", "day", "hour", + "minute", "second", "microsecond"): + value = getattr(res, attr) + if value is not None: + repl[attr] = value + + if 'day' not in repl: + # If the default day exceeds the last day of the month, fall back + # to the end of the month. + cyear = default.year if res.year is None else res.year + cmonth = default.month if res.month is None else res.month + cday = default.day if res.day is None else res.day + + if cday > monthrange(cyear, cmonth)[1]: + repl['day'] = monthrange(cyear, cmonth)[1] + + naive = default.replace(**repl) + + if res.weekday is not None and not res.day: + naive = naive + relativedelta.relativedelta(weekday=res.weekday) + + return naive + + def _assign_tzname(self, dt, tzname): + if dt.tzname() != tzname: + new_dt = tz.enfold(dt, fold=1) + if new_dt.tzname() == tzname: + return new_dt + + return dt + + def _recombine_skipped(self, tokens, skipped_idxs): + """ + >>> tokens = ["foo", " ", "bar", " ", "19June2000", "baz"] + >>> skipped_idxs = [0, 1, 2, 5] + >>> _recombine_skipped(tokens, skipped_idxs) + ["foo bar", "baz"] + """ + skipped_tokens = [] + for i, idx in enumerate(sorted(skipped_idxs)): + if i > 0 and idx - 1 == skipped_idxs[i - 1]: + skipped_tokens[-1] = skipped_tokens[-1] + tokens[idx] + else: + skipped_tokens.append(tokens[idx]) + + return skipped_tokens + + +DEFAULTPARSER = parser() + + +def parse(timestr, parserinfo=None, **kwargs): + """ + + Parse a string in one of the supported formats, using the + ``parserinfo`` parameters. + + :param timestr: + A string containing a date/time stamp. 
+ + :param parserinfo: + A :class:`parserinfo` object containing parameters for the parser. + If ``None``, the default arguments to the :class:`parserinfo` + constructor are used. + + The ``**kwargs`` parameter takes the following keyword arguments: + + :param default: + The default datetime object, if this is a datetime object and not + ``None``, elements specified in ``timestr`` replace elements in the + default object. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime` object is returned. + + :param tzinfos: + Additional time zone names / aliases which may be present in the + string. This argument maps time zone names (and optionally offsets + from those time zones) to time zones. This parameter can be a + dictionary with timezone aliases mapping time zone names to time + zones or a function taking two parameters (``tzname`` and + ``tzoffset``) and returning a time zone. + + The timezones to which the names are mapped can be an integer + offset from UTC in seconds or a :class:`tzinfo` object. + + .. doctest:: + :options: +NORMALIZE_WHITESPACE + + >>> from dateutil.parser import parse + >>> from dateutil.tz import gettz + >>> tzinfos = {"BRST": -7200, "CST": gettz("America/Chicago")} + >>> parse("2012-01-19 17:21:00 BRST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, tzinfo=tzoffset(u'BRST', -7200)) + >>> parse("2012-01-19 17:21:00 CST", tzinfos=tzinfos) + datetime.datetime(2012, 1, 19, 17, 21, + tzinfo=tzfile('/usr/share/zoneinfo/America/Chicago')) + + This parameter is ignored if ``ignoretz`` is set. + + :param dayfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the day (``True``) or month (``False``). If + ``yearfirst`` is set to ``True``, this distinguishes between YDM and + YMD. If set to ``None``, this value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param yearfirst: + Whether to interpret the first value in an ambiguous 3-integer date + (e.g. 01/05/09) as the year. If ``True``, the first number is taken to + be the year, otherwise the last number is taken to be the year. If + this is set to ``None``, the value is retrieved from the current + :class:`parserinfo` object (which itself defaults to ``False``). + + :param fuzzy: + Whether to allow fuzzy parsing, allowing for string like "Today is + January 1, 2047 at 8:21:00AM". + + :param fuzzy_with_tokens: + If ``True``, ``fuzzy`` is automatically set to True, and the parser + will return a tuple where the first element is the parsed + :class:`datetime.datetime` datetimestamp and the second element is + a tuple containing the portions of the string which were ignored: + + .. doctest:: + + >>> from dateutil.parser import parse + >>> parse("Today is January 1, 2047 at 8:21:00AM", fuzzy_with_tokens=True) + (datetime.datetime(2047, 1, 1, 8, 21), (u'Today is ', u' ', u'at ')) + + :return: + Returns a :class:`datetime.datetime` object or, if the + ``fuzzy_with_tokens`` option is ``True``, returns a tuple, the + first element being a :class:`datetime.datetime` object, the second + a tuple containing the fuzzy tokens. + + :raises ParserError: + Raised for invalid or unknown string formats, if the provided + :class:`tzinfo` is not in a valid format, or if an invalid date would + be created. + + :raises OverflowError: + Raised if the parsed date exceeds the largest valid C integer on + your system. 
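+    A minimal usage sketch (illustrative only, using the module-level
+    default parser):
+
+    .. doctest::
+
+        >>> from dateutil.parser import parse
+        >>> parse("2003-09-25T10:49:41")
+        datetime.datetime(2003, 9, 25, 10, 49, 41)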
+ """ + if parserinfo: + return parser(parserinfo).parse(timestr, **kwargs) + else: + return DEFAULTPARSER.parse(timestr, **kwargs) + + +class _tzparser(object): + + class _result(_resultbase): + + __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset", + "start", "end"] + + class _attr(_resultbase): + __slots__ = ["month", "week", "weekday", + "yday", "jyday", "day", "time"] + + def __repr__(self): + return self._repr("") + + def __init__(self): + _resultbase.__init__(self) + self.start = self._attr() + self.end = self._attr() + + def parse(self, tzstr): + res = self._result() + l = [x for x in re.split(r'([,:.]|[a-zA-Z]+|[0-9]+)',tzstr) if x] + used_idxs = list() + try: + + len_l = len(l) + + i = 0 + while i < len_l: + # BRST+3[BRDT[+2]] + j = i + while j < len_l and not [x for x in l[j] + if x in "0123456789:,-+"]: + j += 1 + if j != i: + if not res.stdabbr: + offattr = "stdoffset" + res.stdabbr = "".join(l[i:j]) + else: + offattr = "dstoffset" + res.dstabbr = "".join(l[i:j]) + + for ii in range(j): + used_idxs.append(ii) + i = j + if (i < len_l and (l[i] in ('+', '-') or l[i][0] in + "0123456789")): + if l[i] in ('+', '-'): + # Yes, that's right. See the TZ variable + # documentation. + signal = (1, -1)[l[i] == '+'] + used_idxs.append(i) + i += 1 + else: + signal = -1 + len_li = len(l[i]) + if len_li == 4: + # -0300 + setattr(res, offattr, (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) * signal) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + setattr(res, offattr, + (int(l[i]) * 3600 + + int(l[i + 2]) * 60) * signal) + used_idxs.append(i) + i += 2 + elif len_li <= 2: + # -[0]3 + setattr(res, offattr, + int(l[i][:2]) * 3600 * signal) + else: + return None + used_idxs.append(i) + i += 1 + if res.dstabbr: + break + else: + break + + + if i < len_l: + for j in range(i, len_l): + if l[j] == ';': + l[j] = ',' + + assert l[i] == ',' + + i += 1 + + if i >= len_l: + pass + elif (8 <= l.count(',') <= 9 and + not [y for x in l[i:] if x != ',' + for y in x if y not in "0123456789+-"]): + # GMT0BST,3,0,30,3600,10,0,26,7200[,3600] + for x in (res.start, res.end): + x.month = int(l[i]) + used_idxs.append(i) + i += 2 + if l[i] == '-': + value = int(l[i + 1]) * -1 + used_idxs.append(i) + i += 1 + else: + value = int(l[i]) + used_idxs.append(i) + i += 2 + if value: + x.week = value + x.weekday = (int(l[i]) - 1) % 7 + else: + x.day = int(l[i]) + used_idxs.append(i) + i += 2 + x.time = int(l[i]) + used_idxs.append(i) + i += 2 + if i < len_l: + if l[i] in ('-', '+'): + signal = (-1, 1)[l[i] == "+"] + used_idxs.append(i) + i += 1 + else: + signal = 1 + used_idxs.append(i) + res.dstoffset = (res.stdoffset + int(l[i]) * signal) + + # This was a made-up format that is not in normal use + warn(('Parsed time zone "%s"' % tzstr) + + 'is in a non-standard dateutil-specific format, which ' + + 'is now deprecated; support for parsing this format ' + + 'will be removed in future versions. 
It is recommended ' + + 'that you switch to a standard format like the GNU ' + + 'TZ variable format.', tz.DeprecatedTzFormatWarning) + elif (l.count(',') == 2 and l[i:].count('/') <= 2 and + not [y for x in l[i:] if x not in (',', '/', 'J', 'M', + '.', '-', ':') + for y in x if y not in "0123456789"]): + for x in (res.start, res.end): + if l[i] == 'J': + # non-leap year day (1 based) + used_idxs.append(i) + i += 1 + x.jyday = int(l[i]) + elif l[i] == 'M': + # month[-.]week[-.]weekday + used_idxs.append(i) + i += 1 + x.month = int(l[i]) + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.week = int(l[i]) + if x.week == 5: + x.week = -1 + used_idxs.append(i) + i += 1 + assert l[i] in ('-', '.') + used_idxs.append(i) + i += 1 + x.weekday = (int(l[i]) - 1) % 7 + else: + # year day (zero based) + x.yday = int(l[i]) + 1 + + used_idxs.append(i) + i += 1 + + if i < len_l and l[i] == '/': + used_idxs.append(i) + i += 1 + # start time + len_li = len(l[i]) + if len_li == 4: + # -0300 + x.time = (int(l[i][:2]) * 3600 + + int(l[i][2:]) * 60) + elif i + 1 < len_l and l[i + 1] == ':': + # -03:00 + x.time = int(l[i]) * 3600 + int(l[i + 2]) * 60 + used_idxs.append(i) + i += 2 + if i + 1 < len_l and l[i + 1] == ':': + used_idxs.append(i) + i += 2 + x.time += int(l[i]) + elif len_li <= 2: + # -[0]3 + x.time = (int(l[i][:2]) * 3600) + else: + return None + used_idxs.append(i) + i += 1 + + assert i == len_l or l[i] == ',' + + i += 1 + + assert i >= len_l + + except (IndexError, ValueError, AssertionError): + return None + + unused_idxs = set(range(len_l)).difference(used_idxs) + res.any_unused_tokens = not {l[n] for n in unused_idxs}.issubset({",",":"}) + return res + + +DEFAULTTZPARSER = _tzparser() + + +def _parsetz(tzstr): + return DEFAULTTZPARSER.parse(tzstr) + + +class ParserError(ValueError): + """Exception subclass used for any failure to parse a datetime string. + + This is a subclass of :py:exc:`ValueError`, and should be raised any time + earlier versions of ``dateutil`` would have raised ``ValueError``. + + .. versionadded:: 2.8.1 + """ + def __str__(self): + try: + return self.args[0] % self.args[1:] + except (TypeError, IndexError): + return super(ParserError, self).__str__() + + def __repr__(self): + args = ", ".join("'%s'" % arg for arg in self.args) + return "%s(%s)" % (self.__class__.__name__, args) + + +class UnknownTimezoneWarning(RuntimeWarning): + """Raised when the parser finds a timezone it cannot parse into a tzinfo. + + .. versionadded:: 2.7.0 + """ +# vim:ts=4:sw=4:et diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/parser/isoparser.py b/dbt-env/lib/python3.8/site-packages/dateutil/parser/isoparser.py new file mode 100644 index 0000000..5d7bee3 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/parser/isoparser.py @@ -0,0 +1,416 @@ +# -*- coding: utf-8 -*- +""" +This module offers a parser for ISO-8601 strings + +It is intended to support all valid date, time and datetime formats per the +ISO-8601 specification. 
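+As an illustrative example, ``isoparse('2014-01-01T03:30:00Z')`` returns a
+timezone-aware :class:`datetime.datetime` pinned to UTC, while a date-only
+input such as ``'2014-01-01'`` leaves the missing time components at their
+lowest values.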
+ +..versionadded:: 2.7.0 +""" +from datetime import datetime, timedelta, time, date +import calendar +from dateutil import tz + +from functools import wraps + +import re +import six + +__all__ = ["isoparse", "isoparser"] + + +def _takes_ascii(f): + @wraps(f) + def func(self, str_in, *args, **kwargs): + # If it's a stream, read the whole thing + str_in = getattr(str_in, 'read', lambda: str_in)() + + # If it's unicode, turn it into bytes, since ISO-8601 only covers ASCII + if isinstance(str_in, six.text_type): + # ASCII is the same in UTF-8 + try: + str_in = str_in.encode('ascii') + except UnicodeEncodeError as e: + msg = 'ISO-8601 strings should contain only ASCII characters' + six.raise_from(ValueError(msg), e) + + return f(self, str_in, *args, **kwargs) + + return func + + +class isoparser(object): + def __init__(self, sep=None): + """ + :param sep: + A single character that separates date and time portions. If + ``None``, the parser will accept any single character. + For strict ISO-8601 adherence, pass ``'T'``. + """ + if sep is not None: + if (len(sep) != 1 or ord(sep) >= 128 or sep in '0123456789'): + raise ValueError('Separator must be a single, non-numeric ' + + 'ASCII character') + + sep = sep.encode('ascii') + + self._sep = sep + + @_takes_ascii + def isoparse(self, dt_str): + """ + Parse an ISO-8601 datetime string into a :class:`datetime.datetime`. + + An ISO-8601 datetime string consists of a date portion, followed + optionally by a time portion - the date and time portions are separated + by a single character separator, which is ``T`` in the official + standard. Incomplete date formats (such as ``YYYY-MM``) may *not* be + combined with a time portion. + + Supported date formats are: + + Common: + + - ``YYYY`` + - ``YYYY-MM`` or ``YYYYMM`` + - ``YYYY-MM-DD`` or ``YYYYMMDD`` + + Uncommon: + + - ``YYYY-Www`` or ``YYYYWww`` - ISO week (day defaults to 0) + - ``YYYY-Www-D`` or ``YYYYWwwD`` - ISO week and day + + The ISO week and day numbering follows the same logic as + :func:`datetime.date.isocalendar`. + + Supported time formats are: + + - ``hh`` + - ``hh:mm`` or ``hhmm`` + - ``hh:mm:ss`` or ``hhmmss`` + - ``hh:mm:ss.ssssss`` (Up to 6 sub-second digits) + + Midnight is a special case for `hh`, as the standard supports both + 00:00 and 24:00 as a representation. The decimal separator can be + either a dot or a comma. + + + .. caution:: + + Support for fractional components other than seconds is part of the + ISO-8601 standard, but is not currently implemented in this parser. + + Supported time zone offset formats are: + + - `Z` (UTC) + - `±HH:MM` + - `±HHMM` + - `±HH` + + Offsets will be represented as :class:`dateutil.tz.tzoffset` objects, + with the exception of UTC, which will be represented as + :class:`dateutil.tz.tzutc`. Time zone offsets equivalent to UTC (such + as `+00:00`) will also be represented as :class:`dateutil.tz.tzutc`. + + :param dt_str: + A string or stream containing only an ISO-8601 datetime string + + :return: + Returns a :class:`datetime.datetime` representing the string. + Unspecified components default to their lowest value. + + .. warning:: + + As of version 2.7.0, the strictness of the parser should not be + considered a stable part of the contract. Any valid ISO-8601 string + that parses correctly with the default settings will continue to + parse correctly in future versions, but invalid strings that + currently fail (e.g. ``2017-01-01T00:00+00:00:00``) are not + guaranteed to continue failing in future versions if they encode + a valid date. + + .. 
versionadded:: 2.7.0 + """ + components, pos = self._parse_isodate(dt_str) + + if len(dt_str) > pos: + if self._sep is None or dt_str[pos:pos + 1] == self._sep: + components += self._parse_isotime(dt_str[pos + 1:]) + else: + raise ValueError('String contains unknown ISO components') + + if len(components) > 3 and components[3] == 24: + components[3] = 0 + return datetime(*components) + timedelta(days=1) + + return datetime(*components) + + @_takes_ascii + def parse_isodate(self, datestr): + """ + Parse the date portion of an ISO string. + + :param datestr: + The string portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.date` object + """ + components, pos = self._parse_isodate(datestr) + if pos < len(datestr): + raise ValueError('String contains unknown ISO ' + + 'components: {!r}'.format(datestr.decode('ascii'))) + return date(*components) + + @_takes_ascii + def parse_isotime(self, timestr): + """ + Parse the time portion of an ISO string. + + :param timestr: + The time portion of an ISO string, without a separator + + :return: + Returns a :class:`datetime.time` object + """ + components = self._parse_isotime(timestr) + if components[0] == 24: + components[0] = 0 + return time(*components) + + @_takes_ascii + def parse_tzstr(self, tzstr, zero_as_utc=True): + """ + Parse a valid ISO time zone string. + + See :func:`isoparser.isoparse` for details on supported formats. + + :param tzstr: + A string representing an ISO time zone offset + + :param zero_as_utc: + Whether to return :class:`dateutil.tz.tzutc` for zero-offset zones + + :return: + Returns :class:`dateutil.tz.tzoffset` for offsets and + :class:`dateutil.tz.tzutc` for ``Z`` and (if ``zero_as_utc`` is + specified) offsets equivalent to UTC. + """ + return self._parse_tzstr(tzstr, zero_as_utc=zero_as_utc) + + # Constants + _DATE_SEP = b'-' + _TIME_SEP = b':' + _FRACTION_REGEX = re.compile(b'[\\.,]([0-9]+)') + + def _parse_isodate(self, dt_str): + try: + return self._parse_isodate_common(dt_str) + except ValueError: + return self._parse_isodate_uncommon(dt_str) + + def _parse_isodate_common(self, dt_str): + len_str = len(dt_str) + components = [1, 1, 1] + + if len_str < 4: + raise ValueError('ISO string too short') + + # Year + components[0] = int(dt_str[0:4]) + pos = 4 + if pos >= len_str: + return components, pos + + has_sep = dt_str[pos:pos + 1] == self._DATE_SEP + if has_sep: + pos += 1 + + # Month + if len_str - pos < 2: + raise ValueError('Invalid common month') + + components[1] = int(dt_str[pos:pos + 2]) + pos += 2 + + if pos >= len_str: + if has_sep: + return components, pos + else: + raise ValueError('Invalid ISO format') + + if has_sep: + if dt_str[pos:pos + 1] != self._DATE_SEP: + raise ValueError('Invalid separator in ISO string') + pos += 1 + + # Day + if len_str - pos < 2: + raise ValueError('Invalid common day') + components[2] = int(dt_str[pos:pos + 2]) + return components, pos + 2 + + def _parse_isodate_uncommon(self, dt_str): + if len(dt_str) < 4: + raise ValueError('ISO string too short') + + # All ISO formats start with the year + year = int(dt_str[0:4]) + + has_sep = dt_str[4:5] == self._DATE_SEP + + pos = 4 + has_sep # Skip '-' if it's there + if dt_str[pos:pos + 1] == b'W': + # YYYY-?Www-?D? 
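+            # Illustrative note (not upstream code): ISO week dates name a day
+            # by year, week number and weekday, e.g. b'2011-W01-1' resolves to
+            # the Monday of ISO week 1 of 2011, i.e. date(2011, 1, 3).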
+ pos += 1 + weekno = int(dt_str[pos:pos + 2]) + pos += 2 + + dayno = 1 + if len(dt_str) > pos: + if (dt_str[pos:pos + 1] == self._DATE_SEP) != has_sep: + raise ValueError('Inconsistent use of dash separator') + + pos += has_sep + + dayno = int(dt_str[pos:pos + 1]) + pos += 1 + + base_date = self._calculate_weekdate(year, weekno, dayno) + else: + # YYYYDDD or YYYY-DDD + if len(dt_str) - pos < 3: + raise ValueError('Invalid ordinal day') + + ordinal_day = int(dt_str[pos:pos + 3]) + pos += 3 + + if ordinal_day < 1 or ordinal_day > (365 + calendar.isleap(year)): + raise ValueError('Invalid ordinal day' + + ' {} for year {}'.format(ordinal_day, year)) + + base_date = date(year, 1, 1) + timedelta(days=ordinal_day - 1) + + components = [base_date.year, base_date.month, base_date.day] + return components, pos + + def _calculate_weekdate(self, year, week, day): + """ + Calculate the day of corresponding to the ISO year-week-day calendar. + + This function is effectively the inverse of + :func:`datetime.date.isocalendar`. + + :param year: + The year in the ISO calendar + + :param week: + The week in the ISO calendar - range is [1, 53] + + :param day: + The day in the ISO calendar - range is [1 (MON), 7 (SUN)] + + :return: + Returns a :class:`datetime.date` + """ + if not 0 < week < 54: + raise ValueError('Invalid week: {}'.format(week)) + + if not 0 < day < 8: # Range is 1-7 + raise ValueError('Invalid weekday: {}'.format(day)) + + # Get week 1 for the specific year: + jan_4 = date(year, 1, 4) # Week 1 always has January 4th in it + week_1 = jan_4 - timedelta(days=jan_4.isocalendar()[2] - 1) + + # Now add the specific number of weeks and days to get what we want + week_offset = (week - 1) * 7 + (day - 1) + return week_1 + timedelta(days=week_offset) + + def _parse_isotime(self, timestr): + len_str = len(timestr) + components = [0, 0, 0, 0, None] + pos = 0 + comp = -1 + + if len_str < 2: + raise ValueError('ISO time too short') + + has_sep = False + + while pos < len_str and comp < 5: + comp += 1 + + if timestr[pos:pos + 1] in b'-+Zz': + # Detect time zone boundary + components[-1] = self._parse_tzstr(timestr[pos:]) + pos = len_str + break + + if comp == 1 and timestr[pos:pos+1] == self._TIME_SEP: + has_sep = True + pos += 1 + elif comp == 2 and has_sep: + if timestr[pos:pos+1] != self._TIME_SEP: + raise ValueError('Inconsistent use of colon separator') + pos += 1 + + if comp < 3: + # Hour, minute, second + components[comp] = int(timestr[pos:pos + 2]) + pos += 2 + + if comp == 3: + # Fraction of a second + frac = self._FRACTION_REGEX.match(timestr[pos:]) + if not frac: + continue + + us_str = frac.group(1)[:6] # Truncate to microseconds + components[comp] = int(us_str) * 10**(6 - len(us_str)) + pos += len(frac.group()) + + if pos < len_str: + raise ValueError('Unused components in ISO string') + + if components[0] == 24: + # Standard supports 00:00 and 24:00 as representations of midnight + if any(component != 0 for component in components[1:4]): + raise ValueError('Hour may only be 24 at 24:00:00.000') + + return components + + def _parse_tzstr(self, tzstr, zero_as_utc=True): + if tzstr == b'Z' or tzstr == b'z': + return tz.UTC + + if len(tzstr) not in {3, 5, 6}: + raise ValueError('Time zone offset must be 1, 3, 5 or 6 characters') + + if tzstr[0:1] == b'-': + mult = -1 + elif tzstr[0:1] == b'+': + mult = 1 + else: + raise ValueError('Time zone offset requires sign') + + hours = int(tzstr[1:3]) + if len(tzstr) == 3: + minutes = 0 + else: + minutes = int(tzstr[(4 if tzstr[3:4] == self._TIME_SEP 
else 3):]) + + if zero_as_utc and hours == 0 and minutes == 0: + return tz.UTC + else: + if minutes > 59: + raise ValueError('Invalid minutes in time zone offset') + + if hours > 23: + raise ValueError('Invalid hours in time zone offset') + + return tz.tzoffset(None, mult * (hours * 60 + minutes) * 60) + + +DEFAULT_ISOPARSER = isoparser() +isoparse = DEFAULT_ISOPARSER.isoparse diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/relativedelta.py b/dbt-env/lib/python3.8/site-packages/dateutil/relativedelta.py new file mode 100644 index 0000000..a9e85f7 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/relativedelta.py @@ -0,0 +1,599 @@ +# -*- coding: utf-8 -*- +import datetime +import calendar + +import operator +from math import copysign + +from six import integer_types +from warnings import warn + +from ._common import weekday + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + +__all__ = ["relativedelta", "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + + +class relativedelta(object): + """ + The relativedelta type is designed to be applied to an existing datetime and + can replace specific components of that datetime, or represents an interval + of time. + + It is based on the specification of the excellent work done by M.-A. Lemburg + in his + `mx.DateTime `_ extension. + However, notice that this type does *NOT* implement the same algorithm as + his work. Do *NOT* expect it to behave like mx.DateTime's counterpart. + + There are two different ways to build a relativedelta instance. The + first one is passing it two date/datetime classes:: + + relativedelta(datetime1, datetime2) + + The second one is passing it any number of the following keyword arguments:: + + relativedelta(arg1=x,arg2=y,arg3=z...) + + year, month, day, hour, minute, second, microsecond: + Absolute information (argument is singular); adding or subtracting a + relativedelta with absolute information does not perform an arithmetic + operation, but rather REPLACES the corresponding value in the + original datetime with the value(s) in relativedelta. + + years, months, weeks, days, hours, minutes, seconds, microseconds: + Relative information, may be negative (argument is plural); adding + or subtracting a relativedelta with relative information performs + the corresponding arithmetic operation on the original datetime value + with the information in the relativedelta. + + weekday: + One of the weekday instances (MO, TU, etc) available in the + relativedelta module. These instances may receive a parameter N, + specifying the Nth weekday, which could be positive or negative + (like MO(+1) or MO(-2)). Not specifying it is the same as specifying + +1. You can also use an integer, where 0=MO. This argument is always + relative e.g. if the calculated date is already Monday, using MO(1) + or MO(-1) won't change the day. To effectively make it absolute, use + it in combination with the day argument (e.g. day=1, MO(1) for first + Monday of the month). + + leapdays: + Will add given days to the date found, if year is a leap + year, and the date found is post 28 of february. + + yearday, nlyearday: + Set the yearday or the non-leap year day (jump leap days). + These are converted to day/month/leapdays information. + + There are relative and absolute forms of the keyword + arguments. The plural is relative, and the singular is + absolute. 
For each argument in the order below, the absolute form + is applied first (by setting each attribute to that value) and + then the relative form (by adding the value to the attribute). + + The order of attributes considered when this relativedelta is + added to a datetime is: + + 1. Year + 2. Month + 3. Day + 4. Hours + 5. Minutes + 6. Seconds + 7. Microseconds + + Finally, weekday is applied, using the rule described above. + + For example + + >>> from datetime import datetime + >>> from dateutil.relativedelta import relativedelta, MO + >>> dt = datetime(2018, 4, 9, 13, 37, 0) + >>> delta = relativedelta(hours=25, day=1, weekday=MO(1)) + >>> dt + delta + datetime.datetime(2018, 4, 2, 14, 37) + + First, the day is set to 1 (the first of the month), then 25 hours + are added, to get to the 2nd day and 14th hour, finally the + weekday is applied, but since the 2nd is already a Monday there is + no effect. + + """ + + def __init__(self, dt1=None, dt2=None, + years=0, months=0, days=0, leapdays=0, weeks=0, + hours=0, minutes=0, seconds=0, microseconds=0, + year=None, month=None, day=None, weekday=None, + yearday=None, nlyearday=None, + hour=None, minute=None, second=None, microsecond=None): + + if dt1 and dt2: + # datetime is a subclass of date. So both must be date + if not (isinstance(dt1, datetime.date) and + isinstance(dt2, datetime.date)): + raise TypeError("relativedelta only diffs datetime/date") + + # We allow two dates, or two datetimes, so we coerce them to be + # of the same type + if (isinstance(dt1, datetime.datetime) != + isinstance(dt2, datetime.datetime)): + if not isinstance(dt1, datetime.datetime): + dt1 = datetime.datetime.fromordinal(dt1.toordinal()) + elif not isinstance(dt2, datetime.datetime): + dt2 = datetime.datetime.fromordinal(dt2.toordinal()) + + self.years = 0 + self.months = 0 + self.days = 0 + self.leapdays = 0 + self.hours = 0 + self.minutes = 0 + self.seconds = 0 + self.microseconds = 0 + self.year = None + self.month = None + self.day = None + self.weekday = None + self.hour = None + self.minute = None + self.second = None + self.microsecond = None + self._has_time = 0 + + # Get year / month delta between the two + months = (dt1.year - dt2.year) * 12 + (dt1.month - dt2.month) + self._set_months(months) + + # Remove the year/month delta so the timedelta is just well-defined + # time units (seconds, days and microseconds) + dtm = self.__radd__(dt2) + + # If we've overshot our target, make an adjustment + if dt1 < dt2: + compare = operator.gt + increment = 1 + else: + compare = operator.lt + increment = -1 + + while compare(dt1, dtm): + months += increment + self._set_months(months) + dtm = self.__radd__(dt2) + + # Get the timedelta between the "months-adjusted" date and dt1 + delta = dt1 - dtm + self.seconds = delta.seconds + delta.days * 86400 + self.microseconds = delta.microseconds + else: + # Check for non-integer values in integer-only quantities + if any(x is not None and x != int(x) for x in (years, months)): + raise ValueError("Non-integer years and months are " + "ambiguous and not currently supported.") + + # Relative information + self.years = int(years) + self.months = int(months) + self.days = days + weeks * 7 + self.leapdays = leapdays + self.hours = hours + self.minutes = minutes + self.seconds = seconds + self.microseconds = microseconds + + # Absolute information + self.year = year + self.month = month + self.day = day + self.hour = hour + self.minute = minute + self.second = second + self.microsecond = microsecond + + if any(x is not None and 
int(x) != x + for x in (year, month, day, hour, + minute, second, microsecond)): + # For now we'll deprecate floats - later it'll be an error. + warn("Non-integer value passed as absolute information. " + + "This is not a well-defined condition and will raise " + + "errors in future versions.", DeprecationWarning) + + if isinstance(weekday, integer_types): + self.weekday = weekdays[weekday] + else: + self.weekday = weekday + + yday = 0 + if nlyearday: + yday = nlyearday + elif yearday: + yday = yearday + if yearday > 59: + self.leapdays = -1 + if yday: + ydayidx = [31, 59, 90, 120, 151, 181, 212, + 243, 273, 304, 334, 366] + for idx, ydays in enumerate(ydayidx): + if yday <= ydays: + self.month = idx+1 + if idx == 0: + self.day = yday + else: + self.day = yday-ydayidx[idx-1] + break + else: + raise ValueError("invalid year day (%d)" % yday) + + self._fix() + + def _fix(self): + if abs(self.microseconds) > 999999: + s = _sign(self.microseconds) + div, mod = divmod(self.microseconds * s, 1000000) + self.microseconds = mod * s + self.seconds += div * s + if abs(self.seconds) > 59: + s = _sign(self.seconds) + div, mod = divmod(self.seconds * s, 60) + self.seconds = mod * s + self.minutes += div * s + if abs(self.minutes) > 59: + s = _sign(self.minutes) + div, mod = divmod(self.minutes * s, 60) + self.minutes = mod * s + self.hours += div * s + if abs(self.hours) > 23: + s = _sign(self.hours) + div, mod = divmod(self.hours * s, 24) + self.hours = mod * s + self.days += div * s + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years += div * s + if (self.hours or self.minutes or self.seconds or self.microseconds + or self.hour is not None or self.minute is not None or + self.second is not None or self.microsecond is not None): + self._has_time = 1 + else: + self._has_time = 0 + + @property + def weeks(self): + return int(self.days / 7.0) + + @weeks.setter + def weeks(self, value): + self.days = self.days - (self.weeks * 7) + value * 7 + + def _set_months(self, months): + self.months = months + if abs(self.months) > 11: + s = _sign(self.months) + div, mod = divmod(self.months * s, 12) + self.months = mod * s + self.years = div * s + else: + self.years = 0 + + def normalized(self): + """ + Return a version of this object represented entirely using integer + values for the relative attributes. + + >>> relativedelta(days=1.5, hours=2).normalized() + relativedelta(days=+1, hours=+14) + + :return: + Returns a :class:`dateutil.relativedelta.relativedelta` object. 
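+        Fractional values cascade one unit at a time, so, as a further
+        illustrative example:
+
+        >>> relativedelta(hours=1.5).normalized()
+        relativedelta(hours=+1, minutes=+30)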
+ """ + # Cascade remainders down (rounding each to roughly nearest microsecond) + days = int(self.days) + + hours_f = round(self.hours + 24 * (self.days - days), 11) + hours = int(hours_f) + + minutes_f = round(self.minutes + 60 * (hours_f - hours), 10) + minutes = int(minutes_f) + + seconds_f = round(self.seconds + 60 * (minutes_f - minutes), 8) + seconds = int(seconds_f) + + microseconds = round(self.microseconds + 1e6 * (seconds_f - seconds)) + + # Constructor carries overflow back up with call to _fix() + return self.__class__(years=self.years, months=self.months, + days=days, hours=hours, minutes=minutes, + seconds=seconds, microseconds=microseconds, + leapdays=self.leapdays, year=self.year, + month=self.month, day=self.day, + weekday=self.weekday, hour=self.hour, + minute=self.minute, second=self.second, + microsecond=self.microsecond) + + def __add__(self, other): + if isinstance(other, relativedelta): + return self.__class__(years=other.years + self.years, + months=other.months + self.months, + days=other.days + self.days, + hours=other.hours + self.hours, + minutes=other.minutes + self.minutes, + seconds=other.seconds + self.seconds, + microseconds=(other.microseconds + + self.microseconds), + leapdays=other.leapdays or self.leapdays, + year=(other.year if other.year is not None + else self.year), + month=(other.month if other.month is not None + else self.month), + day=(other.day if other.day is not None + else self.day), + weekday=(other.weekday if other.weekday is not None + else self.weekday), + hour=(other.hour if other.hour is not None + else self.hour), + minute=(other.minute if other.minute is not None + else self.minute), + second=(other.second if other.second is not None + else self.second), + microsecond=(other.microsecond if other.microsecond + is not None else + self.microsecond)) + if isinstance(other, datetime.timedelta): + return self.__class__(years=self.years, + months=self.months, + days=self.days + other.days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds + other.seconds, + microseconds=self.microseconds + other.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + if not isinstance(other, datetime.date): + return NotImplemented + elif self._has_time and not isinstance(other, datetime.datetime): + other = datetime.datetime.fromordinal(other.toordinal()) + year = (self.year or other.year)+self.years + month = self.month or other.month + if self.months: + assert 1 <= abs(self.months) <= 12 + month += self.months + if month > 12: + year += 1 + month -= 12 + elif month < 1: + year -= 1 + month += 12 + day = min(calendar.monthrange(year, month)[1], + self.day or other.day) + repl = {"year": year, "month": month, "day": day} + for attr in ["hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + repl[attr] = value + days = self.days + if self.leapdays and month > 2 and calendar.isleap(year): + days += self.leapdays + ret = (other.replace(**repl) + + datetime.timedelta(days=days, + hours=self.hours, + minutes=self.minutes, + seconds=self.seconds, + microseconds=self.microseconds)) + if self.weekday: + weekday, nth = self.weekday.weekday, self.weekday.n or 1 + jumpdays = (abs(nth) - 1) * 7 + if nth > 0: + jumpdays += (7 - ret.weekday() + weekday) % 7 + else: + jumpdays += (ret.weekday() - weekday) % 7 + jumpdays *= -1 + ret += 
datetime.timedelta(days=jumpdays) + return ret + + def __radd__(self, other): + return self.__add__(other) + + def __rsub__(self, other): + return self.__neg__().__radd__(other) + + def __sub__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented # In case the other object defines __rsub__ + return self.__class__(years=self.years - other.years, + months=self.months - other.months, + days=self.days - other.days, + hours=self.hours - other.hours, + minutes=self.minutes - other.minutes, + seconds=self.seconds - other.seconds, + microseconds=self.microseconds - other.microseconds, + leapdays=self.leapdays or other.leapdays, + year=(self.year if self.year is not None + else other.year), + month=(self.month if self.month is not None else + other.month), + day=(self.day if self.day is not None else + other.day), + weekday=(self.weekday if self.weekday is not None else + other.weekday), + hour=(self.hour if self.hour is not None else + other.hour), + minute=(self.minute if self.minute is not None else + other.minute), + second=(self.second if self.second is not None else + other.second), + microsecond=(self.microsecond if self.microsecond + is not None else + other.microsecond)) + + def __abs__(self): + return self.__class__(years=abs(self.years), + months=abs(self.months), + days=abs(self.days), + hours=abs(self.hours), + minutes=abs(self.minutes), + seconds=abs(self.seconds), + microseconds=abs(self.microseconds), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __neg__(self): + return self.__class__(years=-self.years, + months=-self.months, + days=-self.days, + hours=-self.hours, + minutes=-self.minutes, + seconds=-self.seconds, + microseconds=-self.microseconds, + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + def __bool__(self): + return not (not self.years and + not self.months and + not self.days and + not self.hours and + not self.minutes and + not self.seconds and + not self.microseconds and + not self.leapdays and + self.year is None and + self.month is None and + self.day is None and + self.weekday is None and + self.hour is None and + self.minute is None and + self.second is None and + self.microsecond is None) + # Compatibility with Python 2.x + __nonzero__ = __bool__ + + def __mul__(self, other): + try: + f = float(other) + except TypeError: + return NotImplemented + + return self.__class__(years=int(self.years * f), + months=int(self.months * f), + days=int(self.days * f), + hours=int(self.hours * f), + minutes=int(self.minutes * f), + seconds=int(self.seconds * f), + microseconds=int(self.microseconds * f), + leapdays=self.leapdays, + year=self.year, + month=self.month, + day=self.day, + weekday=self.weekday, + hour=self.hour, + minute=self.minute, + second=self.second, + microsecond=self.microsecond) + + __rmul__ = __mul__ + + def __eq__(self, other): + if not isinstance(other, relativedelta): + return NotImplemented + if self.weekday or other.weekday: + if not self.weekday or not other.weekday: + return False + if self.weekday.weekday != other.weekday.weekday: + return False + n1, n2 = self.weekday.n, other.weekday.n + if n1 != n2 and not ((not n1 or n1 == 1) and (not n2 or n2 == 1)): + return False + return (self.years == other.years and + 
self.months == other.months and + self.days == other.days and + self.hours == other.hours and + self.minutes == other.minutes and + self.seconds == other.seconds and + self.microseconds == other.microseconds and + self.leapdays == other.leapdays and + self.year == other.year and + self.month == other.month and + self.day == other.day and + self.hour == other.hour and + self.minute == other.minute and + self.second == other.second and + self.microsecond == other.microsecond) + + def __hash__(self): + return hash(( + self.weekday, + self.years, + self.months, + self.days, + self.hours, + self.minutes, + self.seconds, + self.microseconds, + self.leapdays, + self.year, + self.month, + self.day, + self.hour, + self.minute, + self.second, + self.microsecond, + )) + + def __ne__(self, other): + return not self.__eq__(other) + + def __div__(self, other): + try: + reciprocal = 1 / float(other) + except TypeError: + return NotImplemented + + return self.__mul__(reciprocal) + + __truediv__ = __div__ + + def __repr__(self): + l = [] + for attr in ["years", "months", "days", "leapdays", + "hours", "minutes", "seconds", "microseconds"]: + value = getattr(self, attr) + if value: + l.append("{attr}={value:+g}".format(attr=attr, value=value)) + for attr in ["year", "month", "day", "weekday", + "hour", "minute", "second", "microsecond"]: + value = getattr(self, attr) + if value is not None: + l.append("{attr}={value}".format(attr=attr, value=repr(value))) + return "{classname}({attrs})".format(classname=self.__class__.__name__, + attrs=", ".join(l)) + + +def _sign(x): + return int(copysign(1, x)) + +# vim:ts=4:sw=4:et diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/rrule.py b/dbt-env/lib/python3.8/site-packages/dateutil/rrule.py new file mode 100644 index 0000000..b320339 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/rrule.py @@ -0,0 +1,1737 @@ +# -*- coding: utf-8 -*- +""" +The rrule module offers a small, complete, and very fast, implementation of +the recurrence rules documented in the +`iCalendar RFC `_, +including support for caching of results. +""" +import calendar +import datetime +import heapq +import itertools +import re +import sys +from functools import wraps +# For warning about deprecation of until and count +from warnings import warn + +from six import advance_iterator, integer_types + +from six.moves import _thread, range + +from ._common import weekday as weekdaybase + +try: + from math import gcd +except ImportError: + from fractions import gcd + +__all__ = ["rrule", "rruleset", "rrulestr", + "YEARLY", "MONTHLY", "WEEKLY", "DAILY", + "HOURLY", "MINUTELY", "SECONDLY", + "MO", "TU", "WE", "TH", "FR", "SA", "SU"] + +# Every mask is 7 days longer to handle cross-year weekly periods. 
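+# Editorial gloss (not part of upstream dateutil): the M*MASK tuples map a
+# 0-based day-of-year index to its month number for leap and non-leap years
+# (e.g. M366MASK[59] == 2, since day 60 of a leap year is Feb 29), the
+# MDAY*/NMDAY* masks give the positive and negative day-of-month for the same
+# index, and WDAYMASK is simply the 0..6 weekday cycle repeated.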
+M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 + + [7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7) +M365MASK = list(M366MASK) +M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32)) +MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +MDAY365MASK = list(MDAY366MASK) +M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0)) +NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7]) +NMDAY365MASK = list(NMDAY366MASK) +M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366) +M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365) +WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55 +del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31] +MDAY365MASK = tuple(MDAY365MASK) +M365MASK = tuple(M365MASK) + +FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY'] + +(YEARLY, + MONTHLY, + WEEKLY, + DAILY, + HOURLY, + MINUTELY, + SECONDLY) = list(range(7)) + +# Imported on demand. +easter = None +parser = None + + +class weekday(weekdaybase): + """ + This version of weekday does not allow n = 0. + """ + def __init__(self, wkday, n=None): + if n == 0: + raise ValueError("Can't create weekday with n==0") + + super(weekday, self).__init__(wkday, n) + + +MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7)) + + +def _invalidates_cache(f): + """ + Decorator for rruleset methods which may invalidate the + cached length. + """ + @wraps(f) + def inner_func(self, *args, **kwargs): + rv = f(self, *args, **kwargs) + self._invalidate_cache() + return rv + + return inner_func + + +class rrulebase(object): + def __init__(self, cache=False): + if cache: + self._cache = [] + self._cache_lock = _thread.allocate_lock() + self._invalidate_cache() + else: + self._cache = None + self._cache_complete = False + self._len = None + + def __iter__(self): + if self._cache_complete: + return iter(self._cache) + elif self._cache is None: + return self._iter() + else: + return self._iter_cached() + + def _invalidate_cache(self): + if self._cache is not None: + self._cache = [] + self._cache_complete = False + self._cache_gen = self._iter() + + if self._cache_lock.locked(): + self._cache_lock.release() + + self._len = None + + def _iter_cached(self): + i = 0 + gen = self._cache_gen + cache = self._cache + acquire = self._cache_lock.acquire + release = self._cache_lock.release + while gen: + if i == len(cache): + acquire() + if self._cache_complete: + break + try: + for j in range(10): + cache.append(advance_iterator(gen)) + except StopIteration: + self._cache_gen = gen = None + self._cache_complete = True + break + release() + yield cache[i] + i += 1 + while i < self._len: + yield cache[i] + i += 1 + + def __getitem__(self, item): + if self._cache_complete: + return self._cache[item] + elif isinstance(item, slice): + if item.step and item.step < 0: + return list(iter(self))[item] + else: + return list(itertools.islice(self, + item.start or 0, + item.stop or sys.maxsize, + item.step or 1)) + elif item >= 0: + gen = iter(self) + try: + for i in range(item+1): + res = advance_iterator(gen) + except StopIteration: + raise IndexError + return res + else: + return list(iter(self))[item] + + def __contains__(self, item): + if self._cache_complete: + return item in self._cache + else: + for i in self: + if i == item: + return True + elif i > item: + return False + return False + + # __len__() introduces a large performance penalty. 
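+    # Reviewer's note (not upstream code): the length is therefore exposed via
+    # the explicit count() method below, which may have to exhaust the whole
+    # recurrence once.  A rough usage sketch:
+    #
+    #     rrule(DAILY, dtstart=datetime.datetime(2020, 1, 1), count=10).count()  # -> 10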
+ def count(self): + """ Returns the number of recurrences in this set. It will have go + trough the whole recurrence, if this hasn't been done before. """ + if self._len is None: + for x in self: + pass + return self._len + + def before(self, dt, inc=False): + """ Returns the last recurrence before the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + last = None + if inc: + for i in gen: + if i > dt: + break + last = i + else: + for i in gen: + if i >= dt: + break + last = i + return last + + def after(self, dt, inc=False): + """ Returns the first recurrence after the given datetime instance. The + inc keyword defines what happens if dt is an occurrence. With + inc=True, if dt itself is an occurrence, it will be returned. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + if inc: + for i in gen: + if i >= dt: + return i + else: + for i in gen: + if i > dt: + return i + return None + + def xafter(self, dt, count=None, inc=False): + """ + Generator which yields up to `count` recurrences after the given + datetime instance, equivalent to `after`. + + :param dt: + The datetime at which to start generating recurrences. + + :param count: + The maximum number of recurrences to generate. If `None` (default), + dates are generated until the recurrence rule is exhausted. + + :param inc: + If `dt` is an instance of the rule and `inc` is `True`, it is + included in the output. + + :yields: Yields a sequence of `datetime` objects. + """ + + if self._cache_complete: + gen = self._cache + else: + gen = self + + # Select the comparison function + if inc: + comp = lambda dc, dtc: dc >= dtc + else: + comp = lambda dc, dtc: dc > dtc + + # Generate dates + n = 0 + for d in gen: + if comp(d, dt): + if count is not None: + n += 1 + if n > count: + break + + yield d + + def between(self, after, before, inc=False, count=1): + """ Returns all the occurrences of the rrule between after and before. + The inc keyword defines what happens if after and/or before are + themselves occurrences. With inc=True, they will be included in the + list, if they are found in the recurrence set. """ + if self._cache_complete: + gen = self._cache + else: + gen = self + started = False + l = [] + if inc: + for i in gen: + if i > before: + break + elif not started: + if i >= after: + started = True + l.append(i) + else: + l.append(i) + else: + for i in gen: + if i >= before: + break + elif not started: + if i > after: + started = True + l.append(i) + else: + l.append(i) + return l + + +class rrule(rrulebase): + """ + That's the base of the rrule operation. It accepts all the keywords + defined in the RFC as its constructor parameters (except byday, + which was renamed to byweekday) and more. The constructor prototype is:: + + rrule(freq) + + Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY, + or SECONDLY. + + .. note:: + Per RFC section 3.3.10, recurrence instances falling on invalid dates + and times are ignored rather than coerced: + + Recurrence rules may generate recurrence instances with an invalid + date (e.g., February 30) or nonexistent local time (e.g., 1:30 AM + on a day where the local time is moved forward by an hour at 1:00 + AM). Such recurrence instances MUST be ignored and MUST NOT be + counted as part of the recurrence set. 
+ + This can lead to possibly surprising behavior when, for example, the + start date occurs at the end of the month: + + >>> from dateutil.rrule import rrule, MONTHLY + >>> from datetime import datetime + >>> start_date = datetime(2014, 12, 31) + >>> list(rrule(freq=MONTHLY, count=4, dtstart=start_date)) + ... # doctest: +NORMALIZE_WHITESPACE + [datetime.datetime(2014, 12, 31, 0, 0), + datetime.datetime(2015, 1, 31, 0, 0), + datetime.datetime(2015, 3, 31, 0, 0), + datetime.datetime(2015, 5, 31, 0, 0)] + + Additionally, it supports the following keyword arguments: + + :param dtstart: + The recurrence start. Besides being the base for the recurrence, + missing parameters in the final recurrence instances will also be + extracted from this date. If not given, datetime.now() will be used + instead. + :param interval: + The interval between each freq iteration. For example, when using + YEARLY, an interval of 2 means once every two years, but with HOURLY, + it means once every two hours. The default interval is 1. + :param wkst: + The week start day. Must be one of the MO, TU, WE constants, or an + integer, specifying the first day of the week. This will affect + recurrences based on weekly periods. The default week start is got + from calendar.firstweekday(), and may be modified by + calendar.setfirstweekday(). + :param count: + If given, this determines how many occurrences will be generated. + + .. note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param until: + If given, this must be a datetime instance specifying the upper-bound + limit of the recurrence. The last recurrence in the rule is the greatest + datetime that is less than or equal to the value specified in the + ``until`` parameter. + + .. note:: + As of version 2.5.0, the use of the keyword ``until`` in conjunction + with ``count`` is deprecated, to make sure ``dateutil`` is fully + compliant with `RFC-5545 Sec. 3.3.10 `_. Therefore, ``until`` and ``count`` + **must not** occur in the same call to ``rrule``. + :param bysetpos: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each given integer will specify an occurrence + number, corresponding to the nth occurrence of the rule inside the + frequency period. For example, a bysetpos of -1 if combined with a + MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will + result in the last work day of every month. + :param bymonth: + If given, it must be either an integer, or a sequence of integers, + meaning the months to apply the recurrence to. + :param bymonthday: + If given, it must be either an integer, or a sequence of integers, + meaning the month days to apply the recurrence to. + :param byyearday: + If given, it must be either an integer, or a sequence of integers, + meaning the year days to apply the recurrence to. + :param byeaster: + If given, it must be either an integer, or a sequence of integers, + positive or negative. Each integer will define an offset from the + Easter Sunday. Passing the offset 0 to byeaster will yield the Easter + Sunday itself. This is an extension to the RFC specification. + :param byweekno: + If given, it must be either an integer, or a sequence of integers, + meaning the week numbers to apply the recurrence to. 
Week numbers + have the meaning described in ISO8601, that is, the first week of + the year is that containing at least four days of the new year. + :param byweekday: + If given, it must be either an integer (0 == MO), a sequence of + integers, one of the weekday constants (MO, TU, etc), or a sequence + of these constants. When given, these variables will define the + weekdays where the recurrence will be applied. It's also possible to + use an argument n for the weekday instances, which will mean the nth + occurrence of this weekday in the period. For example, with MONTHLY, + or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the + first friday of the month where the recurrence happens. Notice that in + the RFC documentation, this is specified as BYDAY, but was renamed to + avoid the ambiguity of that keyword. + :param byhour: + If given, it must be either an integer, or a sequence of integers, + meaning the hours to apply the recurrence to. + :param byminute: + If given, it must be either an integer, or a sequence of integers, + meaning the minutes to apply the recurrence to. + :param bysecond: + If given, it must be either an integer, or a sequence of integers, + meaning the seconds to apply the recurrence to. + :param cache: + If given, it must be a boolean value specifying to enable or disable + caching of results. If you will use the same rrule instance multiple + times, enabling caching will improve the performance considerably. + """ + def __init__(self, freq, dtstart=None, + interval=1, wkst=None, count=None, until=None, bysetpos=None, + bymonth=None, bymonthday=None, byyearday=None, byeaster=None, + byweekno=None, byweekday=None, + byhour=None, byminute=None, bysecond=None, + cache=False): + super(rrule, self).__init__(cache) + global easter + if not dtstart: + if until and until.tzinfo: + dtstart = datetime.datetime.now(tz=until.tzinfo).replace(microsecond=0) + else: + dtstart = datetime.datetime.now().replace(microsecond=0) + elif not isinstance(dtstart, datetime.datetime): + dtstart = datetime.datetime.fromordinal(dtstart.toordinal()) + else: + dtstart = dtstart.replace(microsecond=0) + self._dtstart = dtstart + self._tzinfo = dtstart.tzinfo + self._freq = freq + self._interval = interval + self._count = count + + # Cache the original byxxx rules, if they are provided, as the _byxxx + # attributes do not necessarily map to the inputs, and this can be + # a problem in generating the strings. Only store things if they've + # been supplied (the string retrieval will just use .get()) + self._original_rule = {} + + if until and not isinstance(until, datetime.datetime): + until = datetime.datetime.fromordinal(until.toordinal()) + self._until = until + + if self._dtstart and self._until: + if (self._dtstart.tzinfo is not None) != (self._until.tzinfo is not None): + # According to RFC5545 Section 3.3.10: + # https://tools.ietf.org/html/rfc5545#section-3.3.10 + # + # > If the "DTSTART" property is specified as a date with UTC + # > time or a date with local time and time zone reference, + # > then the UNTIL rule part MUST be specified as a date with + # > UTC time. + raise ValueError( + 'RRULE UNTIL values must be specified in UTC when DTSTART ' + 'is timezone-aware' + ) + + if count is not None and until: + warn("Using both 'count' and 'until' is inconsistent with RFC 5545" + " and has been deprecated in dateutil. 
Future versions will " + "raise an error.", DeprecationWarning) + + if wkst is None: + self._wkst = calendar.firstweekday() + elif isinstance(wkst, integer_types): + self._wkst = wkst + else: + self._wkst = wkst.weekday + + if bysetpos is None: + self._bysetpos = None + elif isinstance(bysetpos, integer_types): + if bysetpos == 0 or not (-366 <= bysetpos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + self._bysetpos = (bysetpos,) + else: + self._bysetpos = tuple(bysetpos) + for pos in self._bysetpos: + if pos == 0 or not (-366 <= pos <= 366): + raise ValueError("bysetpos must be between 1 and 366, " + "or between -366 and -1") + + if self._bysetpos: + self._original_rule['bysetpos'] = self._bysetpos + + if (byweekno is None and byyearday is None and bymonthday is None and + byweekday is None and byeaster is None): + if freq == YEARLY: + if bymonth is None: + bymonth = dtstart.month + self._original_rule['bymonth'] = None + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == MONTHLY: + bymonthday = dtstart.day + self._original_rule['bymonthday'] = None + elif freq == WEEKLY: + byweekday = dtstart.weekday() + self._original_rule['byweekday'] = None + + # bymonth + if bymonth is None: + self._bymonth = None + else: + if isinstance(bymonth, integer_types): + bymonth = (bymonth,) + + self._bymonth = tuple(sorted(set(bymonth))) + + if 'bymonth' not in self._original_rule: + self._original_rule['bymonth'] = self._bymonth + + # byyearday + if byyearday is None: + self._byyearday = None + else: + if isinstance(byyearday, integer_types): + byyearday = (byyearday,) + + self._byyearday = tuple(sorted(set(byyearday))) + self._original_rule['byyearday'] = self._byyearday + + # byeaster + if byeaster is not None: + if not easter: + from dateutil import easter + if isinstance(byeaster, integer_types): + self._byeaster = (byeaster,) + else: + self._byeaster = tuple(sorted(byeaster)) + + self._original_rule['byeaster'] = self._byeaster + else: + self._byeaster = None + + # bymonthday + if bymonthday is None: + self._bymonthday = () + self._bynmonthday = () + else: + if isinstance(bymonthday, integer_types): + bymonthday = (bymonthday,) + + bymonthday = set(bymonthday) # Ensure it's unique + + self._bymonthday = tuple(sorted(x for x in bymonthday if x > 0)) + self._bynmonthday = tuple(sorted(x for x in bymonthday if x < 0)) + + # Storing positive numbers first, then negative numbers + if 'bymonthday' not in self._original_rule: + self._original_rule['bymonthday'] = tuple( + itertools.chain(self._bymonthday, self._bynmonthday)) + + # byweekno + if byweekno is None: + self._byweekno = None + else: + if isinstance(byweekno, integer_types): + byweekno = (byweekno,) + + self._byweekno = tuple(sorted(set(byweekno))) + + self._original_rule['byweekno'] = self._byweekno + + # byweekday / bynweekday + if byweekday is None: + self._byweekday = None + self._bynweekday = None + else: + # If it's one of the valid non-sequence types, convert to a + # single-element sequence before the iterator that builds the + # byweekday set. 
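+            # Plain integers and weekday constants without an ``n`` are
+            # collected into self._byweekday; MO(+1)-style instances keep
+            # their ordinal and go into self._bynweekday as (weekday, n)
+            # pairs, except at frequencies finer than MONTHLY, where the
+            # ordinal is ignored.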
+ if isinstance(byweekday, integer_types) or hasattr(byweekday, "n"): + byweekday = (byweekday,) + + self._byweekday = set() + self._bynweekday = set() + for wday in byweekday: + if isinstance(wday, integer_types): + self._byweekday.add(wday) + elif not wday.n or freq > MONTHLY: + self._byweekday.add(wday.weekday) + else: + self._bynweekday.add((wday.weekday, wday.n)) + + if not self._byweekday: + self._byweekday = None + elif not self._bynweekday: + self._bynweekday = None + + if self._byweekday is not None: + self._byweekday = tuple(sorted(self._byweekday)) + orig_byweekday = [weekday(x) for x in self._byweekday] + else: + orig_byweekday = () + + if self._bynweekday is not None: + self._bynweekday = tuple(sorted(self._bynweekday)) + orig_bynweekday = [weekday(*x) for x in self._bynweekday] + else: + orig_bynweekday = () + + if 'byweekday' not in self._original_rule: + self._original_rule['byweekday'] = tuple(itertools.chain( + orig_byweekday, orig_bynweekday)) + + # byhour + if byhour is None: + if freq < HOURLY: + self._byhour = {dtstart.hour} + else: + self._byhour = None + else: + if isinstance(byhour, integer_types): + byhour = (byhour,) + + if freq == HOURLY: + self._byhour = self.__construct_byset(start=dtstart.hour, + byxxx=byhour, + base=24) + else: + self._byhour = set(byhour) + + self._byhour = tuple(sorted(self._byhour)) + self._original_rule['byhour'] = self._byhour + + # byminute + if byminute is None: + if freq < MINUTELY: + self._byminute = {dtstart.minute} + else: + self._byminute = None + else: + if isinstance(byminute, integer_types): + byminute = (byminute,) + + if freq == MINUTELY: + self._byminute = self.__construct_byset(start=dtstart.minute, + byxxx=byminute, + base=60) + else: + self._byminute = set(byminute) + + self._byminute = tuple(sorted(self._byminute)) + self._original_rule['byminute'] = self._byminute + + # bysecond + if bysecond is None: + if freq < SECONDLY: + self._bysecond = ((dtstart.second,)) + else: + self._bysecond = None + else: + if isinstance(bysecond, integer_types): + bysecond = (bysecond,) + + self._bysecond = set(bysecond) + + if freq == SECONDLY: + self._bysecond = self.__construct_byset(start=dtstart.second, + byxxx=bysecond, + base=60) + else: + self._bysecond = set(bysecond) + + self._bysecond = tuple(sorted(self._bysecond)) + self._original_rule['bysecond'] = self._bysecond + + if self._freq >= HOURLY: + self._timeset = None + else: + self._timeset = [] + for hour in self._byhour: + for minute in self._byminute: + for second in self._bysecond: + self._timeset.append( + datetime.time(hour, minute, second, + tzinfo=self._tzinfo)) + self._timeset.sort() + self._timeset = tuple(self._timeset) + + def __str__(self): + """ + Output a string that would generate this RRULE if passed to rrulestr. + This is mostly compatible with RFC5545, except for the + dateutil-specific extension BYEASTER. 
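+
+        For example (illustrative; assumes the default week start)::
+
+            >>> from dateutil.rrule import rrule, DAILY
+            >>> from datetime import datetime
+            >>> print(rrule(DAILY, count=3, dtstart=datetime(2020, 1, 1)))
+            DTSTART:20200101T000000
+            RRULE:FREQ=DAILY;COUNT=3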
+ """ + + output = [] + h, m, s = [None] * 3 + if self._dtstart: + output.append(self._dtstart.strftime('DTSTART:%Y%m%dT%H%M%S')) + h, m, s = self._dtstart.timetuple()[3:6] + + parts = ['FREQ=' + FREQNAMES[self._freq]] + if self._interval != 1: + parts.append('INTERVAL=' + str(self._interval)) + + if self._wkst: + parts.append('WKST=' + repr(weekday(self._wkst))[0:2]) + + if self._count is not None: + parts.append('COUNT=' + str(self._count)) + + if self._until: + parts.append(self._until.strftime('UNTIL=%Y%m%dT%H%M%S')) + + if self._original_rule.get('byweekday') is not None: + # The str() method on weekday objects doesn't generate + # RFC5545-compliant strings, so we should modify that. + original_rule = dict(self._original_rule) + wday_strings = [] + for wday in original_rule['byweekday']: + if wday.n: + wday_strings.append('{n:+d}{wday}'.format( + n=wday.n, + wday=repr(wday)[0:2])) + else: + wday_strings.append(repr(wday)) + + original_rule['byweekday'] = wday_strings + else: + original_rule = self._original_rule + + partfmt = '{name}={vals}' + for name, key in [('BYSETPOS', 'bysetpos'), + ('BYMONTH', 'bymonth'), + ('BYMONTHDAY', 'bymonthday'), + ('BYYEARDAY', 'byyearday'), + ('BYWEEKNO', 'byweekno'), + ('BYDAY', 'byweekday'), + ('BYHOUR', 'byhour'), + ('BYMINUTE', 'byminute'), + ('BYSECOND', 'bysecond'), + ('BYEASTER', 'byeaster')]: + value = original_rule.get(key) + if value: + parts.append(partfmt.format(name=name, vals=(','.join(str(v) + for v in value)))) + + output.append('RRULE:' + ';'.join(parts)) + return '\n'.join(output) + + def replace(self, **kwargs): + """Return new rrule with same attributes except for those attributes given new + values by whichever keyword arguments are specified.""" + new_kwargs = {"interval": self._interval, + "count": self._count, + "dtstart": self._dtstart, + "freq": self._freq, + "until": self._until, + "wkst": self._wkst, + "cache": False if self._cache is None else True } + new_kwargs.update(self._original_rule) + new_kwargs.update(kwargs) + return rrule(**new_kwargs) + + def _iter(self): + year, month, day, hour, minute, second, weekday, yearday, _ = \ + self._dtstart.timetuple() + + # Some local variables to speed things up a bit + freq = self._freq + interval = self._interval + wkst = self._wkst + until = self._until + bymonth = self._bymonth + byweekno = self._byweekno + byyearday = self._byyearday + byweekday = self._byweekday + byeaster = self._byeaster + bymonthday = self._bymonthday + bynmonthday = self._bynmonthday + bysetpos = self._bysetpos + byhour = self._byhour + byminute = self._byminute + bysecond = self._bysecond + + ii = _iterinfo(self) + ii.rebuild(year, month) + + getdayset = {YEARLY: ii.ydayset, + MONTHLY: ii.mdayset, + WEEKLY: ii.wdayset, + DAILY: ii.ddayset, + HOURLY: ii.ddayset, + MINUTELY: ii.ddayset, + SECONDLY: ii.ddayset}[freq] + + if freq < HOURLY: + timeset = self._timeset + else: + gettimeset = {HOURLY: ii.htimeset, + MINUTELY: ii.mtimeset, + SECONDLY: ii.stimeset}[freq] + if ((freq >= HOURLY and + self._byhour and hour not in self._byhour) or + (freq >= MINUTELY and + self._byminute and minute not in self._byminute) or + (freq >= SECONDLY and + self._bysecond and second not in self._bysecond)): + timeset = () + else: + timeset = gettimeset(hour, minute, second) + + total = 0 + count = self._count + while True: + # Get dayset with the right frequency + dayset, start, end = getdayset(year, month, day) + + # Do the "hard" work ;-) + filtered = False + for i in dayset[start:end]: + if ((bymonth and ii.mmask[i] not in 
bymonth) or + (byweekno and not ii.wnomask[i]) or + (byweekday and ii.wdaymask[i] not in byweekday) or + (ii.nwdaymask and not ii.nwdaymask[i]) or + (byeaster and not ii.eastermask[i]) or + ((bymonthday or bynmonthday) and + ii.mdaymask[i] not in bymonthday and + ii.nmdaymask[i] not in bynmonthday) or + (byyearday and + ((i < ii.yearlen and i+1 not in byyearday and + -ii.yearlen+i not in byyearday) or + (i >= ii.yearlen and i+1-ii.yearlen not in byyearday and + -ii.nextyearlen+i-ii.yearlen not in byyearday)))): + dayset[i] = None + filtered = True + + # Output results + if bysetpos and timeset: + poslist = [] + for pos in bysetpos: + if pos < 0: + daypos, timepos = divmod(pos, len(timeset)) + else: + daypos, timepos = divmod(pos-1, len(timeset)) + try: + i = [x for x in dayset[start:end] + if x is not None][daypos] + time = timeset[timepos] + except IndexError: + pass + else: + date = datetime.date.fromordinal(ii.yearordinal+i) + res = datetime.datetime.combine(date, time) + if res not in poslist: + poslist.append(res) + poslist.sort() + for res in poslist: + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + total += 1 + yield res + else: + for i in dayset[start:end]: + if i is not None: + date = datetime.date.fromordinal(ii.yearordinal + i) + for time in timeset: + res = datetime.datetime.combine(date, time) + if until and res > until: + self._len = total + return + elif res >= self._dtstart: + if count is not None: + count -= 1 + if count < 0: + self._len = total + return + + total += 1 + yield res + + # Handle frequency and interval + fixday = False + if freq == YEARLY: + year += interval + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == MONTHLY: + month += interval + if month > 12: + div, mod = divmod(month, 12) + month = mod + year += div + if month == 0: + month = 12 + year -= 1 + if year > datetime.MAXYEAR: + self._len = total + return + ii.rebuild(year, month) + elif freq == WEEKLY: + if wkst > weekday: + day += -(weekday+1+(6-wkst))+self._interval*7 + else: + day += -(weekday-wkst)+self._interval*7 + weekday = wkst + fixday = True + elif freq == DAILY: + day += interval + fixday = True + elif freq == HOURLY: + if filtered: + # Jump to one iteration before next day + hour += ((23-hour)//interval)*interval + + if byhour: + ndays, hour = self.__mod_distance(value=hour, + byxxx=self._byhour, + base=24) + else: + ndays, hour = divmod(hour+interval, 24) + + if ndays: + day += ndays + fixday = True + + timeset = gettimeset(hour, minute, second) + elif freq == MINUTELY: + if filtered: + # Jump to one iteration before next day + minute += ((1439-(hour*60+minute))//interval)*interval + + valid = False + rep_rate = (24*60) + for j in range(rep_rate // gcd(interval, rep_rate)): + if byminute: + nhours, minute = \ + self.__mod_distance(value=minute, + byxxx=self._byminute, + base=60) + else: + nhours, minute = divmod(minute+interval, 60) + + div, hour = divmod(hour+nhours, 24) + if div: + day += div + fixday = True + filtered = False + + if not byhour or hour in byhour: + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval and ' + + 'byhour resulting in empty rule.') + + timeset = gettimeset(hour, minute, second) + elif freq == SECONDLY: + if filtered: + # Jump to one iteration before next day + second += (((86399 - (hour * 3600 + minute * 60 + second)) + // interval) * interval) + + 
rep_rate = (24 * 3600) + valid = False + for j in range(0, rep_rate // gcd(interval, rep_rate)): + if bysecond: + nminutes, second = \ + self.__mod_distance(value=second, + byxxx=self._bysecond, + base=60) + else: + nminutes, second = divmod(second+interval, 60) + + div, minute = divmod(minute+nminutes, 60) + if div: + hour += div + div, hour = divmod(hour, 24) + if div: + day += div + fixday = True + + if ((not byhour or hour in byhour) and + (not byminute or minute in byminute) and + (not bysecond or second in bysecond)): + valid = True + break + + if not valid: + raise ValueError('Invalid combination of interval, ' + + 'byhour and byminute resulting in empty' + + ' rule.') + + timeset = gettimeset(hour, minute, second) + + if fixday and day > 28: + daysinmonth = calendar.monthrange(year, month)[1] + if day > daysinmonth: + while day > daysinmonth: + day -= daysinmonth + month += 1 + if month == 13: + month = 1 + year += 1 + if year > datetime.MAXYEAR: + self._len = total + return + daysinmonth = calendar.monthrange(year, month)[1] + ii.rebuild(year, month) + + def __construct_byset(self, start, byxxx, base): + """ + If a `BYXXX` sequence is passed to the constructor at the same level as + `FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some + specifications which cannot be reached given some starting conditions. + + This occurs whenever the interval is not coprime with the base of a + given unit and the difference between the starting position and the + ending position is not coprime with the greatest common denominator + between the interval and the base. For example, with a FREQ of hourly + starting at 17:00 and an interval of 4, the only valid values for + BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not + coprime. + + :param start: + Specifies the starting position. + :param byxxx: + An iterable containing the list of allowed values. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + This does not preserve the type of the iterable, returning a set, since + the values should be unique and the order is irrelevant, this will + speed up later lookups. + + In the event of an empty set, raises a :exception:`ValueError`, as this + results in an empty rrule. + """ + + cset = set() + + # Support a single byxxx value. + if isinstance(byxxx, integer_types): + byxxx = (byxxx, ) + + for num in byxxx: + i_gcd = gcd(self._interval, base) + # Use divmod rather than % because we need to wrap negative nums. + if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0: + cset.add(num) + + if len(cset) == 0: + raise ValueError("Invalid rrule byxxx generates an empty set.") + + return cset + + def __mod_distance(self, value, byxxx, base): + """ + Calculates the next value in a sequence where the `FREQ` parameter is + specified along with a `BYXXX` parameter at the same "level" + (e.g. `HOURLY` specified with `BYHOUR`). + + :param value: + The old value of the component. + :param byxxx: + The `BYXXX` set, which should have been generated by + `rrule._construct_byset`, or something else which checks that a + valid rule is present. + :param base: + The largest allowable value for the specified frequency (e.g. + 24 hours, 60 minutes). + + If a valid value is not found after `base` iterations (the maximum + number before the sequence would start to repeat), this raises a + :exception:`ValueError`, as no valid values were found. 
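+
+        As an illustrative sketch: with ``self._interval = 2``, ``base=60``
+        and ``byxxx={3, 7}``, a starting ``value`` of 57 advances through
+        59 -> 1 -> 3, wrapping past ``base`` once along the way, so the
+        method returns ``(1, 3)``.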
+ + This returns a tuple of `divmod(n*interval, base)`, where `n` is the + smallest number of `interval` repetitions until the next specified + value in `byxxx` is found. + """ + accumulator = 0 + for ii in range(1, base + 1): + # Using divmod() over % to account for negative intervals + div, value = divmod(value + self._interval, base) + accumulator += div + if value in byxxx: + return (accumulator, value) + + +class _iterinfo(object): + __slots__ = ["rrule", "lastyear", "lastmonth", + "yearlen", "nextyearlen", "yearordinal", "yearweekday", + "mmask", "mrange", "mdaymask", "nmdaymask", + "wdaymask", "wnomask", "nwdaymask", "eastermask"] + + def __init__(self, rrule): + for attr in self.__slots__: + setattr(self, attr, None) + self.rrule = rrule + + def rebuild(self, year, month): + # Every mask is 7 days longer to handle cross-year weekly periods. + rr = self.rrule + if year != self.lastyear: + self.yearlen = 365 + calendar.isleap(year) + self.nextyearlen = 365 + calendar.isleap(year + 1) + firstyday = datetime.date(year, 1, 1) + self.yearordinal = firstyday.toordinal() + self.yearweekday = firstyday.weekday() + + wday = datetime.date(year, 1, 1).weekday() + if self.yearlen == 365: + self.mmask = M365MASK + self.mdaymask = MDAY365MASK + self.nmdaymask = NMDAY365MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M365RANGE + else: + self.mmask = M366MASK + self.mdaymask = MDAY366MASK + self.nmdaymask = NMDAY366MASK + self.wdaymask = WDAYMASK[wday:] + self.mrange = M366RANGE + + if not rr._byweekno: + self.wnomask = None + else: + self.wnomask = [0]*(self.yearlen+7) + # no1wkst = firstwkst = self.wdaymask.index(rr._wkst) + no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7 + if no1wkst >= 4: + no1wkst = 0 + # Number of days in the year, plus the days we got + # from last year. + wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7 + else: + # Number of days in the year, minus the days we + # left in last year. + wyearlen = self.yearlen-no1wkst + div, mod = divmod(wyearlen, 7) + numweeks = div+mod//4 + for n in rr._byweekno: + if n < 0: + n += numweeks+1 + if not (0 < n <= numweeks): + continue + if n > 1: + i = no1wkst+(n-1)*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + else: + i = no1wkst + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if 1 in rr._byweekno: + # Check week number 1 of next year as well + # TODO: Check -numweeks for next year. + i = no1wkst+numweeks*7 + if no1wkst != firstwkst: + i -= 7-firstwkst + if i < self.yearlen: + # If week starts in next year, we + # don't care about it. + for j in range(7): + self.wnomask[i] = 1 + i += 1 + if self.wdaymask[i] == rr._wkst: + break + if no1wkst: + # Check last week number of last year as + # well. If no1wkst is 0, either the year + # started on week start, or week number 1 + # got days from last year, so there are no + # days from last year's last week number in + # this year. 
+ if -1 not in rr._byweekno: + lyearweekday = datetime.date(year-1, 1, 1).weekday() + lno1wkst = (7-lyearweekday+rr._wkst) % 7 + lyearlen = 365+calendar.isleap(year-1) + if lno1wkst >= 4: + lno1wkst = 0 + lnumweeks = 52+(lyearlen + + (lyearweekday-rr._wkst) % 7) % 7//4 + else: + lnumweeks = 52+(self.yearlen-no1wkst) % 7//4 + else: + lnumweeks = -1 + if lnumweeks in rr._byweekno: + for i in range(no1wkst): + self.wnomask[i] = 1 + + if (rr._bynweekday and (month != self.lastmonth or + year != self.lastyear)): + ranges = [] + if rr._freq == YEARLY: + if rr._bymonth: + for month in rr._bymonth: + ranges.append(self.mrange[month-1:month+1]) + else: + ranges = [(0, self.yearlen)] + elif rr._freq == MONTHLY: + ranges = [self.mrange[month-1:month+1]] + if ranges: + # Weekly frequency won't get here, so we may not + # care about cross-year weekly periods. + self.nwdaymask = [0]*self.yearlen + for first, last in ranges: + last -= 1 + for wday, n in rr._bynweekday: + if n < 0: + i = last+(n+1)*7 + i -= (self.wdaymask[i]-wday) % 7 + else: + i = first+(n-1)*7 + i += (7-self.wdaymask[i]+wday) % 7 + if first <= i <= last: + self.nwdaymask[i] = 1 + + if rr._byeaster: + self.eastermask = [0]*(self.yearlen+7) + eyday = easter.easter(year).toordinal()-self.yearordinal + for offset in rr._byeaster: + self.eastermask[eyday+offset] = 1 + + self.lastyear = year + self.lastmonth = month + + def ydayset(self, year, month, day): + return list(range(self.yearlen)), 0, self.yearlen + + def mdayset(self, year, month, day): + dset = [None]*self.yearlen + start, end = self.mrange[month-1:month+1] + for i in range(start, end): + dset[i] = i + return dset, start, end + + def wdayset(self, year, month, day): + # We need to handle cross-year weeks here. + dset = [None]*(self.yearlen+7) + i = datetime.date(year, month, day).toordinal()-self.yearordinal + start = i + for j in range(7): + dset[i] = i + i += 1 + # if (not (0 <= i < self.yearlen) or + # self.wdaymask[i] == self.rrule._wkst): + # This will cross the year boundary, if necessary. + if self.wdaymask[i] == self.rrule._wkst: + break + return dset, start, i + + def ddayset(self, year, month, day): + dset = [None] * self.yearlen + i = datetime.date(year, month, day).toordinal() - self.yearordinal + dset[i] = i + return dset, i, i + 1 + + def htimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for minute in rr._byminute: + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, + tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def mtimeset(self, hour, minute, second): + tset = [] + rr = self.rrule + for second in rr._bysecond: + tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo)) + tset.sort() + return tset + + def stimeset(self, hour, minute, second): + return (datetime.time(hour, minute, second, + tzinfo=self.rrule._tzinfo),) + + +class rruleset(rrulebase): + """ The rruleset type allows more complex recurrence setups, mixing + multiple rules, dates, exclusion rules, and exclusion dates. The type + constructor takes the following keyword arguments: + + :param cache: If True, caching of results will be enabled, improving + performance of multiple queries considerably. 
""" + + class _genitem(object): + def __init__(self, genlist, gen): + try: + self.dt = advance_iterator(gen) + genlist.append(self) + except StopIteration: + pass + self.genlist = genlist + self.gen = gen + + def __next__(self): + try: + self.dt = advance_iterator(self.gen) + except StopIteration: + if self.genlist[0] is self: + heapq.heappop(self.genlist) + else: + self.genlist.remove(self) + heapq.heapify(self.genlist) + + next = __next__ + + def __lt__(self, other): + return self.dt < other.dt + + def __gt__(self, other): + return self.dt > other.dt + + def __eq__(self, other): + return self.dt == other.dt + + def __ne__(self, other): + return self.dt != other.dt + + def __init__(self, cache=False): + super(rruleset, self).__init__(cache) + self._rrule = [] + self._rdate = [] + self._exrule = [] + self._exdate = [] + + @_invalidates_cache + def rrule(self, rrule): + """ Include the given :py:class:`rrule` instance in the recurrence set + generation. """ + self._rrule.append(rrule) + + @_invalidates_cache + def rdate(self, rdate): + """ Include the given :py:class:`datetime` instance in the recurrence + set generation. """ + self._rdate.append(rdate) + + @_invalidates_cache + def exrule(self, exrule): + """ Include the given rrule instance in the recurrence set exclusion + list. Dates which are part of the given recurrence rules will not + be generated, even if some inclusive rrule or rdate matches them. + """ + self._exrule.append(exrule) + + @_invalidates_cache + def exdate(self, exdate): + """ Include the given datetime instance in the recurrence set + exclusion list. Dates included that way will not be generated, + even if some inclusive rrule or rdate matches them. """ + self._exdate.append(exdate) + + def _iter(self): + rlist = [] + self._rdate.sort() + self._genitem(rlist, iter(self._rdate)) + for gen in [iter(x) for x in self._rrule]: + self._genitem(rlist, gen) + exlist = [] + self._exdate.sort() + self._genitem(exlist, iter(self._exdate)) + for gen in [iter(x) for x in self._exrule]: + self._genitem(exlist, gen) + lastdt = None + total = 0 + heapq.heapify(rlist) + heapq.heapify(exlist) + while rlist: + ritem = rlist[0] + if not lastdt or lastdt != ritem.dt: + while exlist and exlist[0] < ritem: + exitem = exlist[0] + advance_iterator(exitem) + if exlist and exlist[0] is exitem: + heapq.heapreplace(exlist, exitem) + if not exlist or ritem != exlist[0]: + total += 1 + yield ritem.dt + lastdt = ritem.dt + advance_iterator(ritem) + if rlist and rlist[0] is ritem: + heapq.heapreplace(rlist, ritem) + self._len = total + + + + +class _rrulestr(object): + """ Parses a string representation of a recurrence rule or set of + recurrence rules. + + :param s: + Required, a string defining one or more recurrence rules. + + :param dtstart: + If given, used as the default recurrence start if not specified in the + rule string. + + :param cache: + If set ``True`` caching of results will be enabled, improving + performance of multiple queries considerably. + + :param unfold: + If set ``True`` indicates that a rule string is split over more + than one line and should be joined before processing. + + :param forceset: + If set ``True`` forces a :class:`dateutil.rrule.rruleset` to + be returned. + + :param compatible: + If set ``True`` forces ``unfold`` and ``forceset`` to be ``True``. + + :param ignoretz: + If set ``True``, time zones in parsed strings are ignored and a naive + :class:`datetime.datetime` object is returned. 
+ + :param tzids: + If given, a callable or mapping used to retrieve a + :class:`datetime.tzinfo` from a string representation. + Defaults to :func:`dateutil.tz.gettz`. + + :param tzinfos: + Additional time zone names / aliases which may be present in a string + representation. See :func:`dateutil.parser.parse` for more + information. + + :return: + Returns a :class:`dateutil.rrule.rruleset` or + :class:`dateutil.rrule.rrule` + """ + + _freq_map = {"YEARLY": YEARLY, + "MONTHLY": MONTHLY, + "WEEKLY": WEEKLY, + "DAILY": DAILY, + "HOURLY": HOURLY, + "MINUTELY": MINUTELY, + "SECONDLY": SECONDLY} + + _weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3, + "FR": 4, "SA": 5, "SU": 6} + + def _handle_int(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = int(value) + + def _handle_int_list(self, rrkwargs, name, value, **kwargs): + rrkwargs[name.lower()] = [int(x) for x in value.split(',')] + + _handle_INTERVAL = _handle_int + _handle_COUNT = _handle_int + _handle_BYSETPOS = _handle_int_list + _handle_BYMONTH = _handle_int_list + _handle_BYMONTHDAY = _handle_int_list + _handle_BYYEARDAY = _handle_int_list + _handle_BYEASTER = _handle_int_list + _handle_BYWEEKNO = _handle_int_list + _handle_BYHOUR = _handle_int_list + _handle_BYMINUTE = _handle_int_list + _handle_BYSECOND = _handle_int_list + + def _handle_FREQ(self, rrkwargs, name, value, **kwargs): + rrkwargs["freq"] = self._freq_map[value] + + def _handle_UNTIL(self, rrkwargs, name, value, **kwargs): + global parser + if not parser: + from dateutil import parser + try: + rrkwargs["until"] = parser.parse(value, + ignoretz=kwargs.get("ignoretz"), + tzinfos=kwargs.get("tzinfos")) + except ValueError: + raise ValueError("invalid until date") + + def _handle_WKST(self, rrkwargs, name, value, **kwargs): + rrkwargs["wkst"] = self._weekday_map[value] + + def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs): + """ + Two ways to specify this: +1MO or MO(+1) + """ + l = [] + for wday in value.split(','): + if '(' in wday: + # If it's of the form TH(+1), etc. + splt = wday.split('(') + w = splt[0] + n = int(splt[1][:-1]) + elif len(wday): + # If it's of the form +1MO + for i in range(len(wday)): + if wday[i] not in '+-0123456789': + break + n = wday[:i] or None + w = wday[i:] + if n: + n = int(n) + else: + raise ValueError("Invalid (empty) BYDAY specification.") + + l.append(weekdays[self._weekday_map[w]](n)) + rrkwargs["byweekday"] = l + + _handle_BYDAY = _handle_BYWEEKDAY + + def _parse_rfc_rrule(self, line, + dtstart=None, + cache=False, + ignoretz=False, + tzinfos=None): + if line.find(':') != -1: + name, value = line.split(':') + if name != "RRULE": + raise ValueError("unknown parameter name") + else: + value = line + rrkwargs = {} + for pair in value.split(';'): + name, value = pair.split('=') + name = name.upper() + value = value.upper() + try: + getattr(self, "_handle_"+name)(rrkwargs, name, value, + ignoretz=ignoretz, + tzinfos=tzinfos) + except AttributeError: + raise ValueError("unknown parameter '%s'" % name) + except (KeyError, ValueError): + raise ValueError("invalid '%s': %s" % (name, value)) + return rrule(dtstart=dtstart, cache=cache, **rrkwargs) + + def _parse_date_value(self, date_value, parms, rule_tzids, + ignoretz, tzids, tzinfos): + global parser + if not parser: + from dateutil import parser + + datevals = [] + value_found = False + TZID = None + + for parm in parms: + if parm.startswith("TZID="): + try: + tzkey = rule_tzids[parm.split('TZID=')[-1]] + except KeyError: + continue + if tzids is None: + from . 
import tz + tzlookup = tz.gettz + elif callable(tzids): + tzlookup = tzids + else: + tzlookup = getattr(tzids, 'get', None) + if tzlookup is None: + msg = ('tzids must be a callable, mapping, or None, ' + 'not %s' % tzids) + raise ValueError(msg) + + TZID = tzlookup(tzkey) + continue + + # RFC 5445 3.8.2.4: The VALUE parameter is optional, but may be found + # only once. + if parm not in {"VALUE=DATE-TIME", "VALUE=DATE"}: + raise ValueError("unsupported parm: " + parm) + else: + if value_found: + msg = ("Duplicate value parameter found in: " + parm) + raise ValueError(msg) + value_found = True + + for datestr in date_value.split(','): + date = parser.parse(datestr, ignoretz=ignoretz, tzinfos=tzinfos) + if TZID is not None: + if date.tzinfo is None: + date = date.replace(tzinfo=TZID) + else: + raise ValueError('DTSTART/EXDATE specifies multiple timezone') + datevals.append(date) + + return datevals + + def _parse_rfc(self, s, + dtstart=None, + cache=False, + unfold=False, + forceset=False, + compatible=False, + ignoretz=False, + tzids=None, + tzinfos=None): + global parser + if compatible: + forceset = True + unfold = True + + TZID_NAMES = dict(map( + lambda x: (x.upper(), x), + re.findall('TZID=(?P[^:]+):', s) + )) + s = s.upper() + if not s.strip(): + raise ValueError("empty string") + if unfold: + lines = s.splitlines() + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + else: + lines = s.split() + if (not forceset and len(lines) == 1 and (s.find(':') == -1 or + s.startswith('RRULE:'))): + return self._parse_rfc_rrule(lines[0], cache=cache, + dtstart=dtstart, ignoretz=ignoretz, + tzinfos=tzinfos) + else: + rrulevals = [] + rdatevals = [] + exrulevals = [] + exdatevals = [] + for line in lines: + if not line: + continue + if line.find(':') == -1: + name = "RRULE" + value = line + else: + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0] + parms = parms[1:] + if name == "RRULE": + for parm in parms: + raise ValueError("unsupported RRULE parm: "+parm) + rrulevals.append(value) + elif name == "RDATE": + for parm in parms: + if parm != "VALUE=DATE-TIME": + raise ValueError("unsupported RDATE parm: "+parm) + rdatevals.append(value) + elif name == "EXRULE": + for parm in parms: + raise ValueError("unsupported EXRULE parm: "+parm) + exrulevals.append(value) + elif name == "EXDATE": + exdatevals.extend( + self._parse_date_value(value, parms, + TZID_NAMES, ignoretz, + tzids, tzinfos) + ) + elif name == "DTSTART": + dtvals = self._parse_date_value(value, parms, TZID_NAMES, + ignoretz, tzids, tzinfos) + if len(dtvals) != 1: + raise ValueError("Multiple DTSTART values specified:" + + value) + dtstart = dtvals[0] + else: + raise ValueError("unsupported property: "+name) + if (forceset or len(rrulevals) > 1 or rdatevals + or exrulevals or exdatevals): + if not parser and (rdatevals or exdatevals): + from dateutil import parser + rset = rruleset(cache=cache) + for value in rrulevals: + rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in rdatevals: + for datestr in value.split(','): + rset.rdate(parser.parse(datestr, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exrulevals: + rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart, + ignoretz=ignoretz, + tzinfos=tzinfos)) + for value in exdatevals: + rset.exdate(value) + if 
compatible and dtstart: + rset.rdate(dtstart) + return rset + else: + return self._parse_rfc_rrule(rrulevals[0], + dtstart=dtstart, + cache=cache, + ignoretz=ignoretz, + tzinfos=tzinfos) + + def __call__(self, s, **kwargs): + return self._parse_rfc(s, **kwargs) + + +rrulestr = _rrulestr() + +# vim:ts=4:sw=4:et diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/tz/__init__.py b/dbt-env/lib/python3.8/site-packages/dateutil/tz/__init__.py new file mode 100644 index 0000000..af1352c --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/tz/__init__.py @@ -0,0 +1,12 @@ +# -*- coding: utf-8 -*- +from .tz import * +from .tz import __doc__ + +__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange", + "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz", + "enfold", "datetime_ambiguous", "datetime_exists", + "resolve_imaginary", "UTC", "DeprecatedTzFormatWarning"] + + +class DeprecatedTzFormatWarning(Warning): + """Warning raised when time zones are parsed from deprecated formats.""" diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..d8e710e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/_common.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/_common.cpython-38.pyc new file mode 100644 index 0000000..535af8c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/_common.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/_factories.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/_factories.cpython-38.pyc new file mode 100644 index 0000000..ab5d0c3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/_factories.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/tz.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/tz.cpython-38.pyc new file mode 100644 index 0000000..0c8071b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/tz.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/win.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/win.cpython-38.pyc new file mode 100644 index 0000000..087c588 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/tz/__pycache__/win.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/tz/_common.py b/dbt-env/lib/python3.8/site-packages/dateutil/tz/_common.py new file mode 100644 index 0000000..e6ac118 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/tz/_common.py @@ -0,0 +1,419 @@ +from six import PY2 + +from functools import wraps + +from datetime import datetime, timedelta, tzinfo + + +ZERO = timedelta(0) + +__all__ = ['tzname_in_python2', 'enfold'] + + +def tzname_in_python2(namefunc): + """Change unicode output into bytestrings in Python 2 + + tzname() API changed in Python 3. 
It used to return bytes, but was changed + to unicode strings + """ + if PY2: + @wraps(namefunc) + def adjust_encoding(*args, **kwargs): + name = namefunc(*args, **kwargs) + if name is not None: + name = name.encode() + + return name + + return adjust_encoding + else: + return namefunc + + +# The following is adapted from Alexander Belopolsky's tz library +# https://github.com/abalkin/tz +if hasattr(datetime, 'fold'): + # This is the pre-python 3.6 fold situation + def enfold(dt, fold=1): + """ + Provides a unified interface for assigning the ``fold`` attribute to + datetimes both before and after the implementation of PEP-495. + + :param fold: + The value for the ``fold`` attribute in the returned datetime. This + should be either 0 or 1. + + :return: + Returns an object for which ``getattr(dt, 'fold', 0)`` returns + ``fold`` for all versions of Python. In versions prior to + Python 3.6, this is a ``_DatetimeWithFold`` object, which is a + subclass of :py:class:`datetime.datetime` with the ``fold`` + attribute added, if ``fold`` is 1. + + .. versionadded:: 2.6.0 + """ + return dt.replace(fold=fold) + +else: + class _DatetimeWithFold(datetime): + """ + This is a class designed to provide a PEP 495-compliant interface for + Python versions before 3.6. It is used only for dates in a fold, so + the ``fold`` attribute is fixed at ``1``. + + .. versionadded:: 2.6.0 + """ + __slots__ = () + + def replace(self, *args, **kwargs): + """ + Return a datetime with the same attributes, except for those + attributes given new values by whichever keyword arguments are + specified. Note that tzinfo=None can be specified to create a naive + datetime from an aware datetime with no conversion of date and time + data. + + This is reimplemented in ``_DatetimeWithFold`` because pypy3 will + return a ``datetime.datetime`` even if ``fold`` is unchanged. + """ + argnames = ( + 'year', 'month', 'day', 'hour', 'minute', 'second', + 'microsecond', 'tzinfo' + ) + + for arg, argname in zip(args, argnames): + if argname in kwargs: + raise TypeError('Duplicate argument: {}'.format(argname)) + + kwargs[argname] = arg + + for argname in argnames: + if argname not in kwargs: + kwargs[argname] = getattr(self, argname) + + dt_class = self.__class__ if kwargs.get('fold', 1) else datetime + + return dt_class(**kwargs) + + @property + def fold(self): + return 1 + + def enfold(dt, fold=1): + """ + Provides a unified interface for assigning the ``fold`` attribute to + datetimes both before and after the implementation of PEP-495. + + :param fold: + The value for the ``fold`` attribute in the returned datetime. This + should be either 0 or 1. + + :return: + Returns an object for which ``getattr(dt, 'fold', 0)`` returns + ``fold`` for all versions of Python. In versions prior to + Python 3.6, this is a ``_DatetimeWithFold`` object, which is a + subclass of :py:class:`datetime.datetime` with the ``fold`` + attribute added, if ``fold`` is 1. + + .. versionadded:: 2.6.0 + """ + if getattr(dt, 'fold', 0) == fold: + return dt + + args = dt.timetuple()[:6] + args += (dt.microsecond, dt.tzinfo) + + if fold: + return _DatetimeWithFold(*args) + else: + return datetime(*args) + + +def _validate_fromutc_inputs(f): + """ + The CPython version of ``fromutc`` checks that the input is a ``datetime`` + object and that ``self`` is attached as its ``tzinfo``. 
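+    It raises :exc:`TypeError` when ``dt`` is not a ``datetime`` and
+    :exc:`ValueError` when ``dt.tzinfo`` is not the instance the wrapped
+    method was called on.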
+ """ + @wraps(f) + def fromutc(self, dt): + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + return f(self, dt) + + return fromutc + + +class _tzinfo(tzinfo): + """ + Base class for all ``dateutil`` ``tzinfo`` objects. + """ + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + + dt = dt.replace(tzinfo=self) + + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dt = wall_0.replace(tzinfo=None) == wall_1.replace(tzinfo=None) + + return same_dt and not same_offset + + def _fold_status(self, dt_utc, dt_wall): + """ + Determine the fold status of a "wall" datetime, given a representation + of the same datetime as a (naive) UTC datetime. This is calculated based + on the assumption that ``dt.utcoffset() - dt.dst()`` is constant for all + datetimes, and that this offset is the actual number of hours separating + ``dt_utc`` and ``dt_wall``. + + :param dt_utc: + Representation of the datetime as UTC + + :param dt_wall: + Representation of the datetime as "wall time". This parameter must + either have a `fold` attribute or have a fold-naive + :class:`datetime.tzinfo` attached, otherwise the calculation may + fail. + """ + if self.is_ambiguous(dt_wall): + delta_wall = dt_wall - dt_utc + _fold = int(delta_wall == (dt_utc.utcoffset() - dt_utc.dst())) + else: + _fold = 0 + + return _fold + + def _fold(self, dt): + return getattr(dt, 'fold', 0) + + def _fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurrence, chronologically, of the ambiguous datetime). + + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + + # Re-implement the algorithm from Python's datetime.py + dtoff = dt.utcoffset() + if dtoff is None: + raise ValueError("fromutc() requires a non-None utcoffset() " + "result") + + # The original datetime.py code assumes that `dst()` defaults to + # zero during ambiguous times. PEP 495 inverts this presumption, so + # for pre-PEP 495 versions of python, we need to tweak the algorithm. + dtdst = dt.dst() + if dtdst is None: + raise ValueError("fromutc() requires a non-None dst() result") + delta = dtoff - dtdst + + dt += delta + # Set fold=1 so we can default to being in the fold for + # ambiguous dates. + dtdst = enfold(dt, fold=1).dst() + if dtdst is None: + raise ValueError("fromutc(): dt.dst gave inconsistent " + "results; cannot convert") + return dt + dtdst + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Given a timezone-aware datetime in a given timezone, calculates a + timezone-aware datetime in a new timezone. + + Since this is the one time that we *know* we have an unambiguous + datetime object, we take this opportunity to determine whether the + datetime is ambiguous and in a "fold" state (e.g. if it's the first + occurrence, chronologically, of the ambiguous datetime). 
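+
+        In practice this is the hook that :meth:`datetime.datetime.astimezone`
+        invokes when converting into this zone, so the fold value computed
+        here decides which side of an ambiguous wall time is returned.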
+ + :param dt: + A timezone-aware :class:`datetime.datetime` object. + """ + dt_wall = self._fromutc(dt) + + # Calculate the fold status given the two datetimes. + _fold = self._fold_status(dt, dt_wall) + + # Set the default fold value for ambiguous dates + return enfold(dt_wall, fold=_fold) + + +class tzrangebase(_tzinfo): + """ + This is an abstract base class for time zones represented by an annual + transition into and out of DST. Child classes should implement the following + methods: + + * ``__init__(self, *args, **kwargs)`` + * ``transitions(self, year)`` - this is expected to return a tuple of + datetimes representing the DST on and off transitions in standard + time. + + A fully initialized ``tzrangebase`` subclass should also provide the + following attributes: + * ``hasdst``: Boolean whether or not the zone uses DST. + * ``_dst_offset`` / ``_std_offset``: :class:`datetime.timedelta` objects + representing the respective UTC offsets. + * ``_dst_abbr`` / ``_std_abbr``: Strings representing the timezone short + abbreviations in DST and STD, respectively. + * ``_hasdst``: Whether or not the zone has DST. + + .. versionadded:: 2.6.0 + """ + def __init__(self): + raise NotImplementedError('tzrangebase is an abstract base class') + + def utcoffset(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + isdst = self._isdst(dt) + + if isdst is None: + return None + elif isdst: + return self._dst_base_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + if self._isdst(dt): + return self._dst_abbr + else: + return self._std_abbr + + def fromutc(self, dt): + """ Given a datetime in UTC, return local time """ + if not isinstance(dt, datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # Get transitions - if there are none, fixed offset + transitions = self.transitions(dt.year) + if transitions is None: + return dt + self.utcoffset(dt) + + # Get the transition times in UTC + dston, dstoff = transitions + + dston -= self._std_offset + dstoff -= self._std_offset + + utc_transitions = (dston, dstoff) + dt_utc = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt_utc, utc_transitions) + + if isdst: + dt_wall = dt + self._dst_offset + else: + dt_wall = dt + self._std_offset + + _fold = int(not isdst and self.is_ambiguous(dt_wall)) + + return enfold(dt_wall, fold=_fold) + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. 
versionadded:: 2.6.0 + """ + if not self.hasdst: + return False + + start, end = self.transitions(dt.year) + + dt = dt.replace(tzinfo=None) + return (end <= dt < end + self._dst_base_offset) + + def _isdst(self, dt): + if not self.hasdst: + return False + elif dt is None: + return None + + transitions = self.transitions(dt.year) + + if transitions is None: + return False + + dt = dt.replace(tzinfo=None) + + isdst = self._naive_isdst(dt, transitions) + + # Handle ambiguous dates + if not isdst and self.is_ambiguous(dt): + return not self._fold(dt) + else: + return isdst + + def _naive_isdst(self, dt, transitions): + dston, dstoff = transitions + + dt = dt.replace(tzinfo=None) + + if dston < dstoff: + isdst = dston <= dt < dstoff + else: + isdst = not dstoff <= dt < dston + + return isdst + + @property + def _dst_base_offset(self): + return self._dst_offset - self._std_offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(...)" % self.__class__.__name__ + + __reduce__ = object.__reduce__ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/tz/_factories.py b/dbt-env/lib/python3.8/site-packages/dateutil/tz/_factories.py new file mode 100644 index 0000000..f8a6589 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/tz/_factories.py @@ -0,0 +1,80 @@ +from datetime import timedelta +import weakref +from collections import OrderedDict + +from six.moves import _thread + + +class _TzSingleton(type): + def __init__(cls, *args, **kwargs): + cls.__instance = None + super(_TzSingleton, cls).__init__(*args, **kwargs) + + def __call__(cls): + if cls.__instance is None: + cls.__instance = super(_TzSingleton, cls).__call__() + return cls.__instance + + +class _TzFactory(type): + def instance(cls, *args, **kwargs): + """Alternate constructor that returns a fresh instance""" + return type.__call__(cls, *args, **kwargs) + + +class _TzOffsetFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + cls._cache_lock = _thread.allocate_lock() + + def __call__(cls, name, offset): + if isinstance(offset, timedelta): + key = (name, offset.total_seconds()) + else: + key = (name, offset) + + instance = cls.__instances.get(key, None) + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(name, offset)) + + # This lock may not be necessary in Python 3. See GH issue #901 + with cls._cache_lock: + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + # Remove an item if the strong cache is overpopulated + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + + +class _TzStrFactory(_TzFactory): + def __init__(cls, *args, **kwargs): + cls.__instances = weakref.WeakValueDictionary() + cls.__strong_cache = OrderedDict() + cls.__strong_cache_size = 8 + + cls.__cache_lock = _thread.allocate_lock() + + def __call__(cls, s, posix_offset=False): + key = (s, posix_offset) + instance = cls.__instances.get(key, None) + + if instance is None: + instance = cls.__instances.setdefault(key, + cls.instance(s, posix_offset)) + + # This lock may not be necessary in Python 3. 
See GH issue #901 + with cls.__cache_lock: + cls.__strong_cache[key] = cls.__strong_cache.pop(key, instance) + + # Remove an item if the strong cache is overpopulated + if len(cls.__strong_cache) > cls.__strong_cache_size: + cls.__strong_cache.popitem(last=False) + + return instance + diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/tz/tz.py b/dbt-env/lib/python3.8/site-packages/dateutil/tz/tz.py new file mode 100644 index 0000000..c67f56d --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/tz/tz.py @@ -0,0 +1,1849 @@ +# -*- coding: utf-8 -*- +""" +This module offers timezone implementations subclassing the abstract +:py:class:`datetime.tzinfo` type. There are classes to handle tzfile format +files (usually are in :file:`/etc/localtime`, :file:`/usr/share/zoneinfo`, +etc), TZ environment string (in all known formats), given ranges (with help +from relative deltas), local machine timezone, fixed offset timezone, and UTC +timezone. +""" +import datetime +import struct +import time +import sys +import os +import bisect +import weakref +from collections import OrderedDict + +import six +from six import string_types +from six.moves import _thread +from ._common import tzname_in_python2, _tzinfo +from ._common import tzrangebase, enfold +from ._common import _validate_fromutc_inputs + +from ._factories import _TzSingleton, _TzOffsetFactory +from ._factories import _TzStrFactory +try: + from .win import tzwin, tzwinlocal +except ImportError: + tzwin = tzwinlocal = None + +# For warning about rounding tzinfo +from warnings import warn + +ZERO = datetime.timedelta(0) +EPOCH = datetime.datetime.utcfromtimestamp(0) +EPOCHORDINAL = EPOCH.toordinal() + + +@six.add_metaclass(_TzSingleton) +class tzutc(datetime.tzinfo): + """ + This is a tzinfo object that represents the UTC time zone. + + **Examples:** + + .. doctest:: + + >>> from datetime import * + >>> from dateutil.tz import * + + >>> datetime.now() + datetime.datetime(2003, 9, 27, 9, 40, 1, 521290) + + >>> datetime.now(tzutc()) + datetime.datetime(2003, 9, 27, 12, 40, 12, 156379, tzinfo=tzutc()) + + >>> datetime.now(tzutc()).tzname() + 'UTC' + + .. versionchanged:: 2.7.0 + ``tzutc()`` is now a singleton, so the result of ``tzutc()`` will + always return the same object. + + .. doctest:: + + >>> from dateutil.tz import tzutc, UTC + >>> tzutc() is tzutc() + True + >>> tzutc() is UTC + True + """ + def utcoffset(self, dt): + return ZERO + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return "UTC" + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + @_validate_fromutc_inputs + def fromutc(self, dt): + """ + Fast track version of fromutc() returns the original ``dt`` object for + any valid :py:class:`datetime.datetime` object. + """ + return dt + + def __eq__(self, other): + if not isinstance(other, (tzutc, tzoffset)): + return NotImplemented + + return (isinstance(other, tzutc) or + (isinstance(other, tzoffset) and other._offset == ZERO)) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +#: Convenience constant providing a :class:`tzutc()` instance +#: +#: .. 
versionadded:: 2.7.0 +UTC = tzutc() + + +@six.add_metaclass(_TzOffsetFactory) +class tzoffset(datetime.tzinfo): + """ + A simple class for representing a fixed offset from UTC. + + :param name: + The timezone name, to be returned when ``tzname()`` is called. + :param offset: + The time zone offset in seconds, or (since version 2.6.0, represented + as a :py:class:`datetime.timedelta` object). + """ + def __init__(self, name, offset): + self._name = name + + try: + # Allow a timedelta + offset = offset.total_seconds() + except (TypeError, AttributeError): + pass + + self._offset = datetime.timedelta(seconds=_get_supported_offset(offset)) + + def utcoffset(self, dt): + return self._offset + + def dst(self, dt): + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._name + + @_validate_fromutc_inputs + def fromutc(self, dt): + return dt + self._offset + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + return False + + def __eq__(self, other): + if not isinstance(other, tzoffset): + return NotImplemented + + return self._offset == other._offset + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s, %s)" % (self.__class__.__name__, + repr(self._name), + int(self._offset.total_seconds())) + + __reduce__ = object.__reduce__ + + +class tzlocal(_tzinfo): + """ + A :class:`tzinfo` subclass built around the ``time`` timezone functions. + """ + def __init__(self): + super(tzlocal, self).__init__() + + self._std_offset = datetime.timedelta(seconds=-time.timezone) + if time.daylight: + self._dst_offset = datetime.timedelta(seconds=-time.altzone) + else: + self._dst_offset = self._std_offset + + self._dst_saved = self._dst_offset - self._std_offset + self._hasdst = bool(self._dst_saved) + self._tznames = tuple(time.tzname) + + def utcoffset(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset + else: + return self._std_offset + + def dst(self, dt): + if dt is None and self._hasdst: + return None + + if self._isdst(dt): + return self._dst_offset - self._std_offset + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._tznames[self._isdst(dt)] + + def is_ambiguous(self, dt): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + naive_dst = self._naive_is_dst(dt) + return (not naive_dst and + (naive_dst != self._naive_is_dst(dt - self._dst_saved))) + + def _naive_is_dst(self, dt): + timestamp = _datetime_to_timestamp(dt) + return time.localtime(timestamp + time.timezone).tm_isdst + + def _isdst(self, dt, fold_naive=True): + # We can't use mktime here. It is unstable when deciding if + # the hour near to a change is DST or not. 
+ # + # timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour, + # dt.minute, dt.second, dt.weekday(), 0, -1)) + # return time.localtime(timestamp).tm_isdst + # + # The code above yields the following result: + # + # >>> import tz, datetime + # >>> t = tz.tzlocal() + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRST' + # >>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname() + # 'BRDT' + # >>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname() + # 'BRDT' + # + # Here is a more stable implementation: + # + if not self._hasdst: + return False + + # Check for ambiguous times: + dstval = self._naive_is_dst(dt) + fold = getattr(dt, 'fold', None) + + if self.is_ambiguous(dt): + if fold is not None: + return not self._fold(dt) + else: + return True + + return dstval + + def __eq__(self, other): + if isinstance(other, tzlocal): + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset) + elif isinstance(other, tzutc): + return (not self._hasdst and + self._tznames[0] in {'UTC', 'GMT'} and + self._std_offset == ZERO) + elif isinstance(other, tzoffset): + return (not self._hasdst and + self._tznames[0] == other._name and + self._std_offset == other._offset) + else: + return NotImplemented + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s()" % self.__class__.__name__ + + __reduce__ = object.__reduce__ + + +class _ttinfo(object): + __slots__ = ["offset", "delta", "isdst", "abbr", + "isstd", "isgmt", "dstoffset"] + + def __init__(self): + for attr in self.__slots__: + setattr(self, attr, None) + + def __repr__(self): + l = [] + for attr in self.__slots__: + value = getattr(self, attr) + if value is not None: + l.append("%s=%s" % (attr, repr(value))) + return "%s(%s)" % (self.__class__.__name__, ", ".join(l)) + + def __eq__(self, other): + if not isinstance(other, _ttinfo): + return NotImplemented + + return (self.offset == other.offset and + self.delta == other.delta and + self.isdst == other.isdst and + self.abbr == other.abbr and + self.isstd == other.isstd and + self.isgmt == other.isgmt and + self.dstoffset == other.dstoffset) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __getstate__(self): + state = {} + for name in self.__slots__: + state[name] = getattr(self, name, None) + return state + + def __setstate__(self, state): + for name in self.__slots__: + if name in state: + setattr(self, name, state[name]) + + +class _tzfile(object): + """ + Lightweight class for holding the relevant transition and time zone + information read from binary tzfiles. + """ + attrs = ['trans_list', 'trans_list_utc', 'trans_idx', 'ttinfo_list', + 'ttinfo_std', 'ttinfo_dst', 'ttinfo_before', 'ttinfo_first'] + + def __init__(self, **kwargs): + for attr in self.attrs: + setattr(self, attr, kwargs.get(attr, None)) + + +class tzfile(_tzinfo): + """ + This is a ``tzinfo`` subclass that allows one to use the ``tzfile(5)`` + format timezone files to extract current and historical zone information. + + :param fileobj: + This can be an opened file stream or a file name that the time zone + information can be read from. + + :param filename: + This is an optional parameter specifying the source of the time zone + information in the event that ``fileobj`` is a file object. 
If omitted + and ``fileobj`` is a file stream, this parameter will be set either to + ``fileobj``'s ``name`` attribute or to ``repr(fileobj)``. + + See `Sources for Time Zone and Daylight Saving Time Data + `_ for more information. + Time zone files can be compiled from the `IANA Time Zone database files + `_ with the `zic time zone compiler + `_ + + .. note:: + + Only construct a ``tzfile`` directly if you have a specific timezone + file on disk that you want to read into a Python ``tzinfo`` object. + If you want to get a ``tzfile`` representing a specific IANA zone, + (e.g. ``'America/New_York'``), you should call + :func:`dateutil.tz.gettz` with the zone identifier. + + + **Examples:** + + Using the US Eastern time zone as an example, we can see that a ``tzfile`` + provides time zone information for the standard Daylight Saving offsets: + + .. testsetup:: tzfile + + from dateutil.tz import gettz + from datetime import datetime + + .. doctest:: tzfile + + >>> NYC = gettz('America/New_York') + >>> NYC + tzfile('/usr/share/zoneinfo/America/New_York') + + >>> print(datetime(2016, 1, 3, tzinfo=NYC)) # EST + 2016-01-03 00:00:00-05:00 + + >>> print(datetime(2016, 7, 7, tzinfo=NYC)) # EDT + 2016-07-07 00:00:00-04:00 + + + The ``tzfile`` structure contains a fully history of the time zone, + so historical dates will also have the right offsets. For example, before + the adoption of the UTC standards, New York used local solar mean time: + + .. doctest:: tzfile + + >>> print(datetime(1901, 4, 12, tzinfo=NYC)) # LMT + 1901-04-12 00:00:00-04:56 + + And during World War II, New York was on "Eastern War Time", which was a + state of permanent daylight saving time: + + .. doctest:: tzfile + + >>> print(datetime(1944, 2, 7, tzinfo=NYC)) # EWT + 1944-02-07 00:00:00-04:00 + + """ + + def __init__(self, fileobj, filename=None): + super(tzfile, self).__init__() + + file_opened_here = False + if isinstance(fileobj, string_types): + self._filename = fileobj + fileobj = open(fileobj, 'rb') + file_opened_here = True + elif filename is not None: + self._filename = filename + elif hasattr(fileobj, "name"): + self._filename = fileobj.name + else: + self._filename = repr(fileobj) + + if fileobj is not None: + if not file_opened_here: + fileobj = _nullcontext(fileobj) + + with fileobj as file_stream: + tzobj = self._read_tzfile(file_stream) + + self._set_tzdata(tzobj) + + def _set_tzdata(self, tzobj): + """ Set the time zone data of this object from a _tzfile object """ + # Copy the relevant attributes over as private attributes + for attr in _tzfile.attrs: + setattr(self, '_' + attr, getattr(tzobj, attr)) + + def _read_tzfile(self, fileobj): + out = _tzfile() + + # From tzfile(5): + # + # The time zone information files used by tzset(3) + # begin with the magic characters "TZif" to identify + # them as time zone information files, followed by + # sixteen bytes reserved for future use, followed by + # six four-byte values of type long, written in a + # ``standard'' byte order (the high-order byte + # of the value is written first). + if fileobj.read(4).decode() != "TZif": + raise ValueError("magic not found") + + fileobj.read(16) + + ( + # The number of UTC/local indicators stored in the file. + ttisgmtcnt, + + # The number of standard/wall indicators stored in the file. + ttisstdcnt, + + # The number of leap seconds for which data is + # stored in the file. + leapcnt, + + # The number of "transition times" for which data + # is stored in the file. 
+ timecnt, + + # The number of "local time types" for which data + # is stored in the file (must not be zero). + typecnt, + + # The number of characters of "time zone + # abbreviation strings" stored in the file. + charcnt, + + ) = struct.unpack(">6l", fileobj.read(24)) + + # The above header is followed by tzh_timecnt four-byte + # values of type long, sorted in ascending order. + # These values are written in ``standard'' byte order. + # Each is used as a transition time (as returned by + # time(2)) at which the rules for computing local time + # change. + + if timecnt: + out.trans_list_utc = list(struct.unpack(">%dl" % timecnt, + fileobj.read(timecnt*4))) + else: + out.trans_list_utc = [] + + # Next come tzh_timecnt one-byte values of type unsigned + # char; each one tells which of the different types of + # ``local time'' types described in the file is associated + # with the same-indexed transition time. These values + # serve as indices into an array of ttinfo structures that + # appears next in the file. + + if timecnt: + out.trans_idx = struct.unpack(">%dB" % timecnt, + fileobj.read(timecnt)) + else: + out.trans_idx = [] + + # Each ttinfo structure is written as a four-byte value + # for tt_gmtoff of type long, in a standard byte + # order, followed by a one-byte value for tt_isdst + # and a one-byte value for tt_abbrind. In each + # structure, tt_gmtoff gives the number of + # seconds to be added to UTC, tt_isdst tells whether + # tm_isdst should be set by localtime(3), and + # tt_abbrind serves as an index into the array of + # time zone abbreviation characters that follow the + # ttinfo structure(s) in the file. + + ttinfo = [] + + for i in range(typecnt): + ttinfo.append(struct.unpack(">lbb", fileobj.read(6))) + + abbr = fileobj.read(charcnt).decode() + + # Then there are tzh_leapcnt pairs of four-byte + # values, written in standard byte order; the + # first value of each pair gives the time (as + # returned by time(2)) at which a leap second + # occurs; the second gives the total number of + # leap seconds to be applied after the given time. + # The pairs of values are sorted in ascending order + # by time. + + # Not used, for now (but seek for correct file position) + if leapcnt: + fileobj.seek(leapcnt * 8, os.SEEK_CUR) + + # Then there are tzh_ttisstdcnt standard/wall + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as standard + # time or wall clock time, and are used when + # a time zone file is used in handling POSIX-style + # time zone environment variables. + + if ttisstdcnt: + isstd = struct.unpack(">%db" % ttisstdcnt, + fileobj.read(ttisstdcnt)) + + # Finally, there are tzh_ttisgmtcnt UTC/local + # indicators, each stored as a one-byte value; + # they tell whether the transition times associated + # with local time types were specified as UTC or + # local time, and are used when a time zone file + # is used in handling POSIX-style time zone envi- + # ronment variables. 
+ + if ttisgmtcnt: + isgmt = struct.unpack(">%db" % ttisgmtcnt, + fileobj.read(ttisgmtcnt)) + + # Build ttinfo list + out.ttinfo_list = [] + for i in range(typecnt): + gmtoff, isdst, abbrind = ttinfo[i] + gmtoff = _get_supported_offset(gmtoff) + tti = _ttinfo() + tti.offset = gmtoff + tti.dstoffset = datetime.timedelta(0) + tti.delta = datetime.timedelta(seconds=gmtoff) + tti.isdst = isdst + tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)] + tti.isstd = (ttisstdcnt > i and isstd[i] != 0) + tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0) + out.ttinfo_list.append(tti) + + # Replace ttinfo indexes for ttinfo objects. + out.trans_idx = [out.ttinfo_list[idx] for idx in out.trans_idx] + + # Set standard, dst, and before ttinfos. before will be + # used when a given time is before any transitions, + # and will be set to the first non-dst ttinfo, or to + # the first dst, if all of them are dst. + out.ttinfo_std = None + out.ttinfo_dst = None + out.ttinfo_before = None + if out.ttinfo_list: + if not out.trans_list_utc: + out.ttinfo_std = out.ttinfo_first = out.ttinfo_list[0] + else: + for i in range(timecnt-1, -1, -1): + tti = out.trans_idx[i] + if not out.ttinfo_std and not tti.isdst: + out.ttinfo_std = tti + elif not out.ttinfo_dst and tti.isdst: + out.ttinfo_dst = tti + + if out.ttinfo_std and out.ttinfo_dst: + break + else: + if out.ttinfo_dst and not out.ttinfo_std: + out.ttinfo_std = out.ttinfo_dst + + for tti in out.ttinfo_list: + if not tti.isdst: + out.ttinfo_before = tti + break + else: + out.ttinfo_before = out.ttinfo_list[0] + + # Now fix transition times to become relative to wall time. + # + # I'm not sure about this. In my tests, the tz source file + # is setup to wall time, and in the binary file isstd and + # isgmt are off, so it should be in wall time. OTOH, it's + # always in gmt time. Let me know if you have comments + # about this. + lastdst = None + lastoffset = None + lastdstoffset = None + lastbaseoffset = None + out.trans_list = [] + + for i, tti in enumerate(out.trans_idx): + offset = tti.offset + dstoffset = 0 + + if lastdst is not None: + if tti.isdst: + if not lastdst: + dstoffset = offset - lastoffset + + if not dstoffset and lastdstoffset: + dstoffset = lastdstoffset + + tti.dstoffset = datetime.timedelta(seconds=dstoffset) + lastdstoffset = dstoffset + + # If a time zone changes its base offset during a DST transition, + # then you need to adjust by the previous base offset to get the + # transition time in local time. Otherwise you use the current + # base offset. Ideally, I would have some mathematical proof of + # why this is true, but I haven't really thought about it enough. + baseoffset = offset - dstoffset + adjustment = baseoffset + if (lastbaseoffset is not None and baseoffset != lastbaseoffset + and tti.isdst != lastdst): + # The base DST has changed + adjustment = lastbaseoffset + + lastdst = tti.isdst + lastoffset = offset + lastbaseoffset = baseoffset + + out.trans_list.append(out.trans_list_utc[i] + adjustment) + + out.trans_idx = tuple(out.trans_idx) + out.trans_list = tuple(out.trans_list) + out.trans_list_utc = tuple(out.trans_list_utc) + + return out + + def _find_last_transition(self, dt, in_utc=False): + # If there's no list, there are no transitions to find + if not self._trans_list: + return None + + timestamp = _datetime_to_timestamp(dt) + + # Find where the timestamp fits in the transition list - if the + # timestamp is a transition time, it's part of the "after" period. 
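+        # [Editor's note: illustrative aside, not part of the dateutil
+        # source.] bisect_right on the sorted transition timestamps returns
+        # the index of the first transition strictly after ``timestamp``
+        # (ties go to the right, hence "part of the 'after' period"):
+        #
+        #     trans_list == (t0, t1, t2, ...)     # sorted epoch seconds
+        #     bisect_right(trans_list, ts) == i   # t[i-1] <= ts < t[i]
+        #     governing transition == i - 1       # -1 means "before t0"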
+ trans_list = self._trans_list_utc if in_utc else self._trans_list + idx = bisect.bisect_right(trans_list, timestamp) + + # We want to know when the previous transition was, so subtract off 1 + return idx - 1 + + def _get_ttinfo(self, idx): + # For no list or after the last transition, default to _ttinfo_std + if idx is None or (idx + 1) >= len(self._trans_list): + return self._ttinfo_std + + # If there is a list and the time is before it, return _ttinfo_before + if idx < 0: + return self._ttinfo_before + + return self._trans_idx[idx] + + def _find_ttinfo(self, dt): + idx = self._resolve_ambiguous_time(dt) + + return self._get_ttinfo(idx) + + def fromutc(self, dt): + """ + The ``tzfile`` implementation of :py:func:`datetime.tzinfo.fromutc`. + + :param dt: + A :py:class:`datetime.datetime` object. + + :raises TypeError: + Raised if ``dt`` is not a :py:class:`datetime.datetime` object. + + :raises ValueError: + Raised if this is called with a ``dt`` which does not have this + ``tzinfo`` attached. + + :return: + Returns a :py:class:`datetime.datetime` object representing the + wall time in ``self``'s time zone. + """ + # These isinstance checks are in datetime.tzinfo, so we'll preserve + # them, even if we don't care about duck typing. + if not isinstance(dt, datetime.datetime): + raise TypeError("fromutc() requires a datetime argument") + + if dt.tzinfo is not self: + raise ValueError("dt.tzinfo is not self") + + # First treat UTC as wall time and get the transition we're in. + idx = self._find_last_transition(dt, in_utc=True) + tti = self._get_ttinfo(idx) + + dt_out = dt + datetime.timedelta(seconds=tti.offset) + + fold = self.is_ambiguous(dt_out, idx=idx) + + return enfold(dt_out, fold=int(fold)) + + def is_ambiguous(self, dt, idx=None): + """ + Whether or not the "wall time" of a given datetime is ambiguous in this + zone. + + :param dt: + A :py:class:`datetime.datetime`, naive or time zone aware. + + + :return: + Returns ``True`` if ambiguous, ``False`` otherwise. + + .. versionadded:: 2.6.0 + """ + if idx is None: + idx = self._find_last_transition(dt) + + # Calculate the difference in offsets from current to previous + timestamp = _datetime_to_timestamp(dt) + tti = self._get_ttinfo(idx) + + if idx is None or idx <= 0: + return False + + od = self._get_ttinfo(idx - 1).offset - tti.offset + tt = self._trans_list[idx] # Transition time + + return timestamp < tt + od + + def _resolve_ambiguous_time(self, dt): + idx = self._find_last_transition(dt) + + # If we have no transitions, return the index + _fold = self._fold(dt) + if idx is None or idx == 0: + return idx + + # If it's ambiguous and we're in a fold, shift to a different index. + idx_offset = int(not _fold and self.is_ambiguous(dt, idx)) + + return idx - idx_offset + + def utcoffset(self, dt): + if dt is None: + return None + + if not self._ttinfo_std: + return ZERO + + return self._find_ttinfo(dt).delta + + def dst(self, dt): + if dt is None: + return None + + if not self._ttinfo_dst: + return ZERO + + tti = self._find_ttinfo(dt) + + if not tti.isdst: + return ZERO + + # The documentation says that utcoffset()-dst() must + # be constant for every dt. 
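+        # [Editor's note: illustrative sketch, not part of the dateutil
+        # source.] ``dstoffset`` holds only the DST component, so that
+        # utcoffset(dt) - dst(dt) stays constant for the zone, e.g.:
+        #
+        #     >>> NYC = gettz('America/New_York')
+        #     >>> d = datetime(2016, 7, 7, tzinfo=NYC)   # EDT
+        #     >>> d.utcoffset(), d.dst()                 # (-4:00, +1:00)
+        #     >>> d.utcoffset() - d.dst()                # -5:00, as in EST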
+ return tti.dstoffset + + @tzname_in_python2 + def tzname(self, dt): + if not self._ttinfo_std or dt is None: + return None + return self._find_ttinfo(dt).abbr + + def __eq__(self, other): + if not isinstance(other, tzfile): + return NotImplemented + return (self._trans_list == other._trans_list and + self._trans_idx == other._trans_idx and + self._ttinfo_list == other._ttinfo_list) + + __hash__ = None + + def __ne__(self, other): + return not (self == other) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._filename)) + + def __reduce__(self): + return self.__reduce_ex__(None) + + def __reduce_ex__(self, protocol): + return (self.__class__, (None, self._filename), self.__dict__) + + +class tzrange(tzrangebase): + """ + The ``tzrange`` object is a time zone specified by a set of offsets and + abbreviations, equivalent to the way the ``TZ`` variable can be specified + in POSIX-like systems, but using Python delta objects to specify DST + start, end and offsets. + + :param stdabbr: + The abbreviation for standard time (e.g. ``'EST'``). + + :param stdoffset: + An integer or :class:`datetime.timedelta` object or equivalent + specifying the base offset from UTC. + + If unspecified, +00:00 is used. + + :param dstabbr: + The abbreviation for DST / "Summer" time (e.g. ``'EDT'``). + + If specified, with no other DST information, DST is assumed to occur + and the default behavior or ``dstoffset``, ``start`` and ``end`` is + used. If unspecified and no other DST information is specified, it + is assumed that this zone has no DST. + + If this is unspecified and other DST information is *is* specified, + DST occurs in the zone but the time zone abbreviation is left + unchanged. + + :param dstoffset: + A an integer or :class:`datetime.timedelta` object or equivalent + specifying the UTC offset during DST. If unspecified and any other DST + information is specified, it is assumed to be the STD offset +1 hour. + + :param start: + A :class:`relativedelta.relativedelta` object or equivalent specifying + the time and time of year that daylight savings time starts. To + specify, for example, that DST starts at 2AM on the 2nd Sunday in + March, pass: + + ``relativedelta(hours=2, month=3, day=1, weekday=SU(+2))`` + + If unspecified and any other DST information is specified, the default + value is 2 AM on the first Sunday in April. + + :param end: + A :class:`relativedelta.relativedelta` object or equivalent + representing the time and time of year that daylight savings time + ends, with the same specification method as in ``start``. One note is + that this should point to the first time in the *standard* zone, so if + a transition occurs at 2AM in the DST zone and the clocks are set back + 1 hour to 1AM, set the ``hours`` parameter to +1. + + + **Examples:** + + .. testsetup:: tzrange + + from dateutil.tz import tzrange, tzstr + + .. doctest:: tzrange + + >>> tzstr('EST5EDT') == tzrange("EST", -18000, "EDT") + True + + >>> from dateutil.relativedelta import * + >>> range1 = tzrange("EST", -18000, "EDT") + >>> range2 = tzrange("EST", -18000, "EDT", -14400, + ... relativedelta(hours=+2, month=4, day=1, + ... weekday=SU(+1)), + ... relativedelta(hours=+1, month=10, day=31, + ... 
weekday=SU(-1))) + >>> tzstr('EST5EDT') == range1 == range2 + True + + """ + def __init__(self, stdabbr, stdoffset=None, + dstabbr=None, dstoffset=None, + start=None, end=None): + + global relativedelta + from dateutil import relativedelta + + self._std_abbr = stdabbr + self._dst_abbr = dstabbr + + try: + stdoffset = stdoffset.total_seconds() + except (TypeError, AttributeError): + pass + + try: + dstoffset = dstoffset.total_seconds() + except (TypeError, AttributeError): + pass + + if stdoffset is not None: + self._std_offset = datetime.timedelta(seconds=stdoffset) + else: + self._std_offset = ZERO + + if dstoffset is not None: + self._dst_offset = datetime.timedelta(seconds=dstoffset) + elif dstabbr and stdoffset is not None: + self._dst_offset = self._std_offset + datetime.timedelta(hours=+1) + else: + self._dst_offset = ZERO + + if dstabbr and start is None: + self._start_delta = relativedelta.relativedelta( + hours=+2, month=4, day=1, weekday=relativedelta.SU(+1)) + else: + self._start_delta = start + + if dstabbr and end is None: + self._end_delta = relativedelta.relativedelta( + hours=+1, month=10, day=31, weekday=relativedelta.SU(-1)) + else: + self._end_delta = end + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = bool(self._start_delta) + + def transitions(self, year): + """ + For a given year, get the DST on and off transition times, expressed + always on the standard time side. For zones with no transitions, this + function returns ``None``. + + :param year: + The year whose transitions you would like to query. + + :return: + Returns a :class:`tuple` of :class:`datetime.datetime` objects, + ``(dston, dstoff)`` for zones with an annual DST transition, or + ``None`` for fixed offset zones. + """ + if not self.hasdst: + return None + + base_year = datetime.datetime(year, 1, 1) + + start = base_year + self._start_delta + end = base_year + self._end_delta + + return (start, end) + + def __eq__(self, other): + if not isinstance(other, tzrange): + return NotImplemented + + return (self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr and + self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._start_delta == other._start_delta and + self._end_delta == other._end_delta) + + @property + def _dst_base_offset(self): + return self._dst_base_offset_ + + +@six.add_metaclass(_TzStrFactory) +class tzstr(tzrange): + """ + ``tzstr`` objects are time zone objects specified by a time-zone string as + it would be passed to a ``TZ`` variable on POSIX-style systems (see + the `GNU C Library: TZ Variable`_ for more details). + + There is one notable exception, which is that POSIX-style time zones use an + inverted offset format, so normally ``GMT+3`` would be parsed as an offset + 3 hours *behind* GMT. The ``tzstr`` time zone object will parse this as an + offset 3 hours *ahead* of GMT. If you would like to maintain the POSIX + behavior, pass a ``True`` value to ``posix_offset``. + + The :class:`tzrange` object provides the same functionality, but is + specified using :class:`relativedelta.relativedelta` objects. rather than + strings. + + :param s: + A time zone string in ``TZ`` variable format. This can be a + :class:`bytes` (2.x: :class:`str`), :class:`str` (2.x: + :class:`unicode`) or a stream emitting unicode characters + (e.g. :class:`StringIO`). + + :param posix_offset: + Optional. 
If set to ``True``, interpret strings such as ``GMT+3`` or + ``UTC+3`` as being 3 hours *behind* UTC rather than ahead, per the + POSIX standard. + + .. caution:: + + Prior to version 2.7.0, this function also supported time zones + in the format: + + * ``EST5EDT,4,0,6,7200,10,0,26,7200,3600`` + * ``EST5EDT,4,1,0,7200,10,-1,0,7200,3600`` + + This format is non-standard and has been deprecated; this function + will raise a :class:`DeprecatedTZFormatWarning` until + support is removed in a future version. + + .. _`GNU C Library: TZ Variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + """ + def __init__(self, s, posix_offset=False): + global parser + from dateutil.parser import _parser as parser + + self._s = s + + res = parser._parsetz(s) + if res is None or res.any_unused_tokens: + raise ValueError("unknown string format") + + # Here we break the compatibility with the TZ variable handling. + # GMT-3 actually *means* the timezone -3. + if res.stdabbr in ("GMT", "UTC") and not posix_offset: + res.stdoffset *= -1 + + # We must initialize it first, since _delta() needs + # _std_offset and _dst_offset set. Use False in start/end + # to avoid building it two times. + tzrange.__init__(self, res.stdabbr, res.stdoffset, + res.dstabbr, res.dstoffset, + start=False, end=False) + + if not res.dstabbr: + self._start_delta = None + self._end_delta = None + else: + self._start_delta = self._delta(res.start) + if self._start_delta: + self._end_delta = self._delta(res.end, isend=1) + + self.hasdst = bool(self._start_delta) + + def _delta(self, x, isend=0): + from dateutil import relativedelta + kwargs = {} + if x.month is not None: + kwargs["month"] = x.month + if x.weekday is not None: + kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week) + if x.week > 0: + kwargs["day"] = 1 + else: + kwargs["day"] = 31 + elif x.day: + kwargs["day"] = x.day + elif x.yday is not None: + kwargs["yearday"] = x.yday + elif x.jyday is not None: + kwargs["nlyearday"] = x.jyday + if not kwargs: + # Default is to start on first sunday of april, and end + # on last sunday of october. + if not isend: + kwargs["month"] = 4 + kwargs["day"] = 1 + kwargs["weekday"] = relativedelta.SU(+1) + else: + kwargs["month"] = 10 + kwargs["day"] = 31 + kwargs["weekday"] = relativedelta.SU(-1) + if x.time is not None: + kwargs["seconds"] = x.time + else: + # Default is 2AM. + kwargs["seconds"] = 7200 + if isend: + # Convert to standard time, to follow the documented way + # of working with the extra hour. See the documentation + # of the tzinfo class. 
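+            # --------------------------------------------------------------
+            # [Editor's note: illustrative sketch, not part of the dateutil
+            # source.] tzstr objects are normally built from POSIX-style TZ
+            # strings, e.g.:
+            #
+            #     >>> tz.tzstr('EST5EDT')
+            #     >>> tz.tzstr('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3')
+            #
+            # When the string carries no explicit rule, _delta() falls back
+            # to the US defaults encoded above (first Sunday of April /
+            # last Sunday of October at 2 AM).
+            # --------------------------------------------------------------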
+ delta = self._dst_offset - self._std_offset + kwargs["seconds"] -= delta.seconds + delta.days * 86400 + return relativedelta.relativedelta(**kwargs) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +class _tzicalvtzcomp(object): + def __init__(self, tzoffsetfrom, tzoffsetto, isdst, + tzname=None, rrule=None): + self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom) + self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto) + self.tzoffsetdiff = self.tzoffsetto - self.tzoffsetfrom + self.isdst = isdst + self.tzname = tzname + self.rrule = rrule + + +class _tzicalvtz(_tzinfo): + def __init__(self, tzid, comps=[]): + super(_tzicalvtz, self).__init__() + + self._tzid = tzid + self._comps = comps + self._cachedate = [] + self._cachecomp = [] + self._cache_lock = _thread.allocate_lock() + + def _find_comp(self, dt): + if len(self._comps) == 1: + return self._comps[0] + + dt = dt.replace(tzinfo=None) + + try: + with self._cache_lock: + return self._cachecomp[self._cachedate.index( + (dt, self._fold(dt)))] + except ValueError: + pass + + lastcompdt = None + lastcomp = None + + for comp in self._comps: + compdt = self._find_compdt(comp, dt) + + if compdt and (not lastcompdt or lastcompdt < compdt): + lastcompdt = compdt + lastcomp = comp + + if not lastcomp: + # RFC says nothing about what to do when a given + # time is before the first onset date. We'll look for the + # first standard component, or the first component, if + # none is found. + for comp in self._comps: + if not comp.isdst: + lastcomp = comp + break + else: + lastcomp = comp[0] + + with self._cache_lock: + self._cachedate.insert(0, (dt, self._fold(dt))) + self._cachecomp.insert(0, lastcomp) + + if len(self._cachedate) > 10: + self._cachedate.pop() + self._cachecomp.pop() + + return lastcomp + + def _find_compdt(self, comp, dt): + if comp.tzoffsetdiff < ZERO and self._fold(dt): + dt -= comp.tzoffsetdiff + + compdt = comp.rrule.before(dt, inc=True) + + return compdt + + def utcoffset(self, dt): + if dt is None: + return None + + return self._find_comp(dt).tzoffsetto + + def dst(self, dt): + comp = self._find_comp(dt) + if comp.isdst: + return comp.tzoffsetdiff + else: + return ZERO + + @tzname_in_python2 + def tzname(self, dt): + return self._find_comp(dt).tzname + + def __repr__(self): + return "" % repr(self._tzid) + + __reduce__ = object.__reduce__ + + +class tzical(object): + """ + This object is designed to parse an iCalendar-style ``VTIMEZONE`` structure + as set out in `RFC 5545`_ Section 4.6.5 into one or more `tzinfo` objects. + + :param `fileobj`: + A file or stream in iCalendar format, which should be UTF-8 encoded + with CRLF endings. + + .. _`RFC 5545`: https://tools.ietf.org/html/rfc5545 + """ + def __init__(self, fileobj): + global rrule + from dateutil import rrule + + if isinstance(fileobj, string_types): + self._s = fileobj + # ical should be encoded in UTF-8 with CRLF + fileobj = open(fileobj, 'r') + else: + self._s = getattr(fileobj, 'name', repr(fileobj)) + fileobj = _nullcontext(fileobj) + + self._vtz = {} + + with fileobj as fobj: + self._parse_rfc(fobj.read()) + + def keys(self): + """ + Retrieves the available time zones as a list. + """ + return list(self._vtz.keys()) + + def get(self, tzid=None): + """ + Retrieve a :py:class:`datetime.tzinfo` object by its ``tzid``. + + :param tzid: + If there is exactly one time zone available, omitting ``tzid`` + or passing :py:const:`None` value returns it. 
Otherwise a valid + key (which can be retrieved from :func:`keys`) is required. + + :raises ValueError: + Raised if ``tzid`` is not specified but there are either more + or fewer than 1 zone defined. + + :returns: + Returns either a :py:class:`datetime.tzinfo` object representing + the relevant time zone or :py:const:`None` if the ``tzid`` was + not found. + """ + if tzid is None: + if len(self._vtz) == 0: + raise ValueError("no timezones defined") + elif len(self._vtz) > 1: + raise ValueError("more than one timezone available") + tzid = next(iter(self._vtz)) + + return self._vtz.get(tzid) + + def _parse_offset(self, s): + s = s.strip() + if not s: + raise ValueError("empty offset") + if s[0] in ('+', '-'): + signal = (-1, +1)[s[0] == '+'] + s = s[1:] + else: + signal = +1 + if len(s) == 4: + return (int(s[:2]) * 3600 + int(s[2:]) * 60) * signal + elif len(s) == 6: + return (int(s[:2]) * 3600 + int(s[2:4]) * 60 + int(s[4:])) * signal + else: + raise ValueError("invalid offset: " + s) + + def _parse_rfc(self, s): + lines = s.splitlines() + if not lines: + raise ValueError("empty string") + + # Unfold + i = 0 + while i < len(lines): + line = lines[i].rstrip() + if not line: + del lines[i] + elif i > 0 and line[0] == " ": + lines[i-1] += line[1:] + del lines[i] + else: + i += 1 + + tzid = None + comps = [] + invtz = False + comptype = None + for line in lines: + if not line: + continue + name, value = line.split(':', 1) + parms = name.split(';') + if not parms: + raise ValueError("empty property name") + name = parms[0].upper() + parms = parms[1:] + if invtz: + if name == "BEGIN": + if value in ("STANDARD", "DAYLIGHT"): + # Process component + pass + else: + raise ValueError("unknown component: "+value) + comptype = value + founddtstart = False + tzoffsetfrom = None + tzoffsetto = None + rrulelines = [] + tzname = None + elif name == "END": + if value == "VTIMEZONE": + if comptype: + raise ValueError("component not closed: "+comptype) + if not tzid: + raise ValueError("mandatory TZID not found") + if not comps: + raise ValueError( + "at least one component is needed") + # Process vtimezone + self._vtz[tzid] = _tzicalvtz(tzid, comps) + invtz = False + elif value == comptype: + if not founddtstart: + raise ValueError("mandatory DTSTART not found") + if tzoffsetfrom is None: + raise ValueError( + "mandatory TZOFFSETFROM not found") + if tzoffsetto is None: + raise ValueError( + "mandatory TZOFFSETFROM not found") + # Process component + rr = None + if rrulelines: + rr = rrule.rrulestr("\n".join(rrulelines), + compatible=True, + ignoretz=True, + cache=True) + comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto, + (comptype == "DAYLIGHT"), + tzname, rr) + comps.append(comp) + comptype = None + else: + raise ValueError("invalid component end: "+value) + elif comptype: + if name == "DTSTART": + # DTSTART in VTIMEZONE takes a subset of valid RRULE + # values under RFC 5545. 
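+                    # [Editor's note: illustrative sketch, not part of the
+                    # dateutil source.] Typical use of the enclosing tzical
+                    # class; the file name and TZID here are hypothetical:
+                    #
+                    #     >>> ical = tzical('zones.ics')   # VTIMEZONE data
+                    #     >>> ical.keys()                  # available TZIDs
+                    #     >>> ical.get('Europe/Paris')     # -> tzinfo object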
+ for parm in parms: + if parm != 'VALUE=DATE-TIME': + msg = ('Unsupported DTSTART param in ' + + 'VTIMEZONE: ' + parm) + raise ValueError(msg) + rrulelines.append(line) + founddtstart = True + elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"): + rrulelines.append(line) + elif name == "TZOFFSETFROM": + if parms: + raise ValueError( + "unsupported %s parm: %s " % (name, parms[0])) + tzoffsetfrom = self._parse_offset(value) + elif name == "TZOFFSETTO": + if parms: + raise ValueError( + "unsupported TZOFFSETTO parm: "+parms[0]) + tzoffsetto = self._parse_offset(value) + elif name == "TZNAME": + if parms: + raise ValueError( + "unsupported TZNAME parm: "+parms[0]) + tzname = value + elif name == "COMMENT": + pass + else: + raise ValueError("unsupported property: "+name) + else: + if name == "TZID": + if parms: + raise ValueError( + "unsupported TZID parm: "+parms[0]) + tzid = value + elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"): + pass + else: + raise ValueError("unsupported property: "+name) + elif name == "BEGIN" and value == "VTIMEZONE": + tzid = None + comps = [] + invtz = True + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, repr(self._s)) + + +if sys.platform != "win32": + TZFILES = ["/etc/localtime", "localtime"] + TZPATHS = ["/usr/share/zoneinfo", + "/usr/lib/zoneinfo", + "/usr/share/lib/zoneinfo", + "/etc/zoneinfo"] +else: + TZFILES = [] + TZPATHS = [] + + +def __get_gettz(): + tzlocal_classes = (tzlocal,) + if tzwinlocal is not None: + tzlocal_classes += (tzwinlocal,) + + class GettzFunc(object): + """ + Retrieve a time zone object from a string representation + + This function is intended to retrieve the :py:class:`tzinfo` subclass + that best represents the time zone that would be used if a POSIX + `TZ variable`_ were set to the same value. + + If no argument or an empty string is passed to ``gettz``, local time + is returned: + + .. code-block:: python3 + + >>> gettz() + tzfile('/etc/localtime') + + This function is also the preferred way to map IANA tz database keys + to :class:`tzfile` objects: + + .. code-block:: python3 + + >>> gettz('Pacific/Kiritimati') + tzfile('/usr/share/zoneinfo/Pacific/Kiritimati') + + On Windows, the standard is extended to include the Windows-specific + zone names provided by the operating system: + + .. code-block:: python3 + + >>> gettz('Egypt Standard Time') + tzwin('Egypt Standard Time') + + Passing a GNU ``TZ`` style string time zone specification returns a + :class:`tzstr` object: + + .. code-block:: python3 + + >>> gettz('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + tzstr('AEST-10AEDT-11,M10.1.0/2,M4.1.0/3') + + :param name: + A time zone name (IANA, or, on Windows, Windows keys), location of + a ``tzfile(5)`` zoneinfo file or ``TZ`` variable style time zone + specifier. An empty string, no argument or ``None`` is interpreted + as local time. + + :return: + Returns an instance of one of ``dateutil``'s :py:class:`tzinfo` + subclasses. + + .. versionchanged:: 2.7.0 + + After version 2.7.0, any two calls to ``gettz`` using the same + input strings will return the same object: + + .. code-block:: python3 + + >>> tz.gettz('America/Chicago') is tz.gettz('America/Chicago') + True + + In addition to improving performance, this ensures that + `"same zone" semantics`_ are used for datetimes in the same zone. + + + .. _`TZ variable`: + https://www.gnu.org/software/libc/manual/html_node/TZ-Variable.html + + .. 
_`"same zone" semantics`: + https://blog.ganssle.io/articles/2018/02/aware-datetime-arithmetic.html + """ + def __init__(self): + + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache_size = 8 + self.__strong_cache = OrderedDict() + self._cache_lock = _thread.allocate_lock() + + def __call__(self, name=None): + with self._cache_lock: + rv = self.__instances.get(name, None) + + if rv is None: + rv = self.nocache(name=name) + if not (name is None + or isinstance(rv, tzlocal_classes) + or rv is None): + # tzlocal is slightly more complicated than the other + # time zone providers because it depends on environment + # at construction time, so don't cache that. + # + # We also cannot store weak references to None, so we + # will also not store that. + self.__instances[name] = rv + else: + # No need for strong caching, return immediately + return rv + + self.__strong_cache[name] = self.__strong_cache.pop(name, rv) + + if len(self.__strong_cache) > self.__strong_cache_size: + self.__strong_cache.popitem(last=False) + + return rv + + def set_cache_size(self, size): + with self._cache_lock: + self.__strong_cache_size = size + while len(self.__strong_cache) > size: + self.__strong_cache.popitem(last=False) + + def cache_clear(self): + with self._cache_lock: + self.__instances = weakref.WeakValueDictionary() + self.__strong_cache.clear() + + @staticmethod + def nocache(name=None): + """A non-cached version of gettz""" + tz = None + if not name: + try: + name = os.environ["TZ"] + except KeyError: + pass + if name is None or name in ("", ":"): + for filepath in TZFILES: + if not os.path.isabs(filepath): + filename = filepath + for path in TZPATHS: + filepath = os.path.join(path, filename) + if os.path.isfile(filepath): + break + else: + continue + if os.path.isfile(filepath): + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = tzlocal() + else: + try: + if name.startswith(":"): + name = name[1:] + except TypeError as e: + if isinstance(name, bytes): + new_msg = "gettz argument should be str, not bytes" + six.raise_from(TypeError(new_msg), e) + else: + raise + if os.path.isabs(name): + if os.path.isfile(name): + tz = tzfile(name) + else: + tz = None + else: + for path in TZPATHS: + filepath = os.path.join(path, name) + if not os.path.isfile(filepath): + filepath = filepath.replace(' ', '_') + if not os.path.isfile(filepath): + continue + try: + tz = tzfile(filepath) + break + except (IOError, OSError, ValueError): + pass + else: + tz = None + if tzwin is not None: + try: + tz = tzwin(name) + except (WindowsError, UnicodeEncodeError): + # UnicodeEncodeError is for Python 2.7 compat + tz = None + + if not tz: + from dateutil.zoneinfo import get_zonefile_instance + tz = get_zonefile_instance().get(name) + + if not tz: + for c in name: + # name is not a tzstr unless it has at least + # one offset. For short values of "name", an + # explicit for loop seems to be the fastest way + # To determine if a string contains a digit + if c in "0123456789": + try: + tz = tzstr(name) + except ValueError: + pass + break + else: + if name in ("GMT", "UTC"): + tz = UTC + elif name in time.tzname: + tz = tzlocal() + return tz + + return GettzFunc() + + +gettz = __get_gettz() +del __get_gettz + + +def datetime_exists(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + would fall in a gap. + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) 
+ + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. + + :return: + Returns a boolean value whether or not the "wall time" exists in + ``tz``. + + .. versionadded:: 2.7.0 + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + tz = dt.tzinfo + + dt = dt.replace(tzinfo=None) + + # This is essentially a test of whether or not the datetime can survive + # a round trip to UTC. + dt_rt = dt.replace(tzinfo=tz).astimezone(UTC).astimezone(tz) + dt_rt = dt_rt.replace(tzinfo=None) + + return dt == dt_rt + + +def datetime_ambiguous(dt, tz=None): + """ + Given a datetime and a time zone, determine whether or not a given datetime + is ambiguous (i.e if there are two times differentiated only by their DST + status). + + :param dt: + A :class:`datetime.datetime` (whose time zone will be ignored if ``tz`` + is provided.) + + :param tz: + A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If + ``None`` or not provided, the datetime's own time zone will be used. + + :return: + Returns a boolean value whether or not the "wall time" is ambiguous in + ``tz``. + + .. versionadded:: 2.6.0 + """ + if tz is None: + if dt.tzinfo is None: + raise ValueError('Datetime is naive and no time zone provided.') + + tz = dt.tzinfo + + # If a time zone defines its own "is_ambiguous" function, we'll use that. + is_ambiguous_fn = getattr(tz, 'is_ambiguous', None) + if is_ambiguous_fn is not None: + try: + return tz.is_ambiguous(dt) + except Exception: + pass + + # If it doesn't come out and tell us it's ambiguous, we'll just check if + # the fold attribute has any effect on this particular date and time. + dt = dt.replace(tzinfo=tz) + wall_0 = enfold(dt, fold=0) + wall_1 = enfold(dt, fold=1) + + same_offset = wall_0.utcoffset() == wall_1.utcoffset() + same_dst = wall_0.dst() == wall_1.dst() + + return not (same_offset and same_dst) + + +def resolve_imaginary(dt): + """ + Given a datetime that may be imaginary, return an existing datetime. + + This function assumes that an imaginary datetime represents what the + wall time would be in a zone had the offset transition not occurred, so + it will always fall forward by the transition's change in offset. + + .. doctest:: + + >>> from dateutil import tz + >>> from datetime import datetime + >>> NYC = tz.gettz('America/New_York') + >>> print(tz.resolve_imaginary(datetime(2017, 3, 12, 2, 30, tzinfo=NYC))) + 2017-03-12 03:30:00-04:00 + + >>> KIR = tz.gettz('Pacific/Kiritimati') + >>> print(tz.resolve_imaginary(datetime(1995, 1, 1, 12, 30, tzinfo=KIR))) + 1995-01-02 12:30:00+14:00 + + As a note, :func:`datetime.astimezone` is guaranteed to produce a valid, + existing datetime, so a round-trip to and from UTC is sufficient to get + an extant datetime, however, this generally "falls back" to an earlier time + rather than falling forward to the STD side (though no guarantees are made + about this behavior). + + :param dt: + A :class:`datetime.datetime` which may or may not exist. + + :return: + Returns an existing :class:`datetime.datetime`. If ``dt`` was not + imaginary, the datetime returned is guaranteed to be the same object + passed to the function. + + .. 
versionadded:: 2.7.0 + """ + if dt.tzinfo is not None and not datetime_exists(dt): + + curr_offset = (dt + datetime.timedelta(hours=24)).utcoffset() + old_offset = (dt - datetime.timedelta(hours=24)).utcoffset() + + dt += curr_offset - old_offset + + return dt + + +def _datetime_to_timestamp(dt): + """ + Convert a :class:`datetime.datetime` object to an epoch timestamp in + seconds since January 1, 1970, ignoring the time zone. + """ + return (dt.replace(tzinfo=None) - EPOCH).total_seconds() + + +if sys.version_info >= (3, 6): + def _get_supported_offset(second_offset): + return second_offset +else: + def _get_supported_offset(second_offset): + # For python pre-3.6, round to full-minutes if that's not the case. + # Python's datetime doesn't accept sub-minute timezones. Check + # http://python.org/sf/1447945 or https://bugs.python.org/issue5288 + # for some information. + old_offset = second_offset + calculated_offset = 60 * ((second_offset + 30) // 60) + return calculated_offset + + +try: + # Python 3.7 feature + from contextlib import nullcontext as _nullcontext +except ImportError: + class _nullcontext(object): + """ + Class for wrapping contexts so that they are passed through in a + with statement. + """ + def __init__(self, context): + self.context = context + + def __enter__(self): + return self.context + + def __exit__(*args, **kwargs): + pass + +# vim:ts=4:sw=4:et diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/tz/win.py b/dbt-env/lib/python3.8/site-packages/dateutil/tz/win.py new file mode 100644 index 0000000..cde07ba --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/tz/win.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +""" +This module provides an interface to the native time zone data on Windows, +including :py:class:`datetime.tzinfo` implementations. + +Attempting to import this module on a non-Windows platform will raise an +:py:obj:`ImportError`. +""" +# This code was originally contributed by Jeffrey Harris. +import datetime +import struct + +from six.moves import winreg +from six import text_type + +try: + import ctypes + from ctypes import wintypes +except ValueError: + # ValueError is raised on non-Windows systems for some horrible reason. + raise ImportError("Running tzwin on non-Windows system") + +from ._common import tzrangebase + +__all__ = ["tzwin", "tzwinlocal", "tzres"] + +ONEWEEK = datetime.timedelta(7) + +TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" +TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones" +TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" + + +def _settzkeyname(): + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + try: + winreg.OpenKey(handle, TZKEYNAMENT).Close() + TZKEYNAME = TZKEYNAMENT + except WindowsError: + TZKEYNAME = TZKEYNAME9X + handle.Close() + return TZKEYNAME + + +TZKEYNAME = _settzkeyname() + + +class tzres(object): + """ + Class for accessing ``tzres.dll``, which contains timezone name related + resources. + + .. 
versionadded:: 2.5.0 + """ + p_wchar = ctypes.POINTER(wintypes.WCHAR) # Pointer to a wide char + + def __init__(self, tzres_loc='tzres.dll'): + # Load the user32 DLL so we can load strings from tzres + user32 = ctypes.WinDLL('user32') + + # Specify the LoadStringW function + user32.LoadStringW.argtypes = (wintypes.HINSTANCE, + wintypes.UINT, + wintypes.LPWSTR, + ctypes.c_int) + + self.LoadStringW = user32.LoadStringW + self._tzres = ctypes.WinDLL(tzres_loc) + self.tzres_loc = tzres_loc + + def load_name(self, offset): + """ + Load a timezone name from a DLL offset (integer). + + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.load_name(112)) + 'Eastern Standard Time' + + :param offset: + A positive integer value referring to a string from the tzres dll. + + .. note:: + + Offsets found in the registry are generally of the form + ``@tzres.dll,-114``. The offset in this case is 114, not -114. + + """ + resource = self.p_wchar() + lpBuffer = ctypes.cast(ctypes.byref(resource), wintypes.LPWSTR) + nchar = self.LoadStringW(self._tzres._handle, offset, lpBuffer, 0) + return resource[:nchar] + + def name_from_string(self, tzname_str): + """ + Parse strings as returned from the Windows registry into the time zone + name as defined in the registry. + + >>> from dateutil.tzwin import tzres + >>> tzr = tzres() + >>> print(tzr.name_from_string('@tzres.dll,-251')) + 'Dateline Daylight Time' + >>> print(tzr.name_from_string('Eastern Standard Time')) + 'Eastern Standard Time' + + :param tzname_str: + A timezone name string as returned from a Windows registry key. + + :return: + Returns the localized timezone string from tzres.dll if the string + is of the form `@tzres.dll,-offset`, else returns the input string. + """ + if not tzname_str.startswith('@'): + return tzname_str + + name_splt = tzname_str.split(',-') + try: + offset = int(name_splt[1]) + except: + raise ValueError("Malformed timezone string.") + + return self.load_name(offset) + + +class tzwinbase(tzrangebase): + """tzinfo class based on win32's timezones available in the registry.""" + def __init__(self): + raise NotImplementedError('tzwinbase is an abstract base class') + + def __eq__(self, other): + # Compare on all relevant dimensions, including name. + if not isinstance(other, tzwinbase): + return NotImplemented + + return (self._std_offset == other._std_offset and + self._dst_offset == other._dst_offset and + self._stddayofweek == other._stddayofweek and + self._dstdayofweek == other._dstdayofweek and + self._stdweeknumber == other._stdweeknumber and + self._dstweeknumber == other._dstweeknumber and + self._stdhour == other._stdhour and + self._dsthour == other._dsthour and + self._stdminute == other._stdminute and + self._dstminute == other._dstminute and + self._std_abbr == other._std_abbr and + self._dst_abbr == other._dst_abbr) + + @staticmethod + def list(): + """Return a list of all time zones known to the system.""" + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZKEYNAME) as tzkey: + result = [winreg.EnumKey(tzkey, i) + for i in range(winreg.QueryInfoKey(tzkey)[0])] + return result + + def display(self): + """ + Return the display name of the time zone. + """ + return self._display + + def transitions(self, year): + """ + For a given year, get the DST on and off transition times, expressed + always on the standard time side. For zones with no transitions, this + function returns ``None``. 
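+
+        (Editor's illustrative note, not part of the upstream docstring;
+        assumes a Windows host.)
+
+        .. code-block:: python3
+
+            >>> from dateutil.tz import tzwin
+            >>> eastern = tzwin('Eastern Standard Time')
+            >>> dston, dstoff = eastern.transitions(2022)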
+ + :param year: + The year whose transitions you would like to query. + + :return: + Returns a :class:`tuple` of :class:`datetime.datetime` objects, + ``(dston, dstoff)`` for zones with an annual DST transition, or + ``None`` for fixed offset zones. + """ + + if not self.hasdst: + return None + + dston = picknthweekday(year, self._dstmonth, self._dstdayofweek, + self._dsthour, self._dstminute, + self._dstweeknumber) + + dstoff = picknthweekday(year, self._stdmonth, self._stddayofweek, + self._stdhour, self._stdminute, + self._stdweeknumber) + + # Ambiguous dates default to the STD side + dstoff -= self._dst_base_offset + + return dston, dstoff + + def _get_hasdst(self): + return self._dstmonth != 0 + + @property + def _dst_base_offset(self): + return self._dst_base_offset_ + + +class tzwin(tzwinbase): + """ + Time zone object created from the zone info in the Windows registry + + These are similar to :py:class:`dateutil.tz.tzrange` objects in that + the time zone data is provided in the format of a single offset rule + for either 0 or 2 time zone transitions per year. + + :param: name + The name of a Windows time zone key, e.g. "Eastern Standard Time". + The full list of keys can be retrieved with :func:`tzwin.list`. + """ + + def __init__(self, name): + self._name = name + + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + tzkeyname = text_type("{kn}\\{name}").format(kn=TZKEYNAME, name=name) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + keydict = valuestodict(tzkey) + + self._std_abbr = keydict["Std"] + self._dst_abbr = keydict["Dlt"] + + self._display = keydict["Display"] + + # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm + tup = struct.unpack("=3l16h", keydict["TZI"]) + stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1 + dstoffset = stdoffset-tup[2] # + DaylightBias * -1 + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # for the meaning see the win32 TIME_ZONE_INFORMATION structure docs + # http://msdn.microsoft.com/en-us/library/windows/desktop/ms725481(v=vs.85).aspx + (self._stdmonth, + self._stddayofweek, # Sunday = 0 + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[4:9] + + (self._dstmonth, + self._dstdayofweek, # Sunday = 0 + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[12:17] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwin(%s)" % repr(self._name) + + def __reduce__(self): + return (self.__class__, (self._name,)) + + +class tzwinlocal(tzwinbase): + """ + Class representing the local time zone information in the Windows registry + + While :class:`dateutil.tz.tzlocal` makes system calls (via the :mod:`time` + module) to retrieve time zone information, ``tzwinlocal`` retrieves the + rules directly from the Windows registry and creates an object like + :class:`dateutil.tz.tzwin`. + + Because Windows does not have an equivalent of :func:`time.tzset`, on + Windows, :class:`dateutil.tz.tzlocal` instances will always reflect the + time zone settings *at the time that the process was started*, meaning + changes to the machine's time zone settings during the run of a program + on Windows will **not** be reflected by :class:`dateutil.tz.tzlocal`. + Because ``tzwinlocal`` reads the registry directly, it is unaffected by + this issue. 
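+
+    (Editor's illustrative note, not part of the upstream docstring; assumes
+    a Windows host.)
+
+    .. code-block:: python3
+
+        >>> from datetime import datetime
+        >>> from dateutil.tz import tzwinlocal
+        >>> local = tzwinlocal()
+        >>> datetime(2022, 6, 1, 12, 0, tzinfo=local).tzname()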
+ """ + def __init__(self): + with winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) as handle: + with winreg.OpenKey(handle, TZLOCALKEYNAME) as tzlocalkey: + keydict = valuestodict(tzlocalkey) + + self._std_abbr = keydict["StandardName"] + self._dst_abbr = keydict["DaylightName"] + + try: + tzkeyname = text_type('{kn}\\{sn}').format(kn=TZKEYNAME, + sn=self._std_abbr) + with winreg.OpenKey(handle, tzkeyname) as tzkey: + _keydict = valuestodict(tzkey) + self._display = _keydict["Display"] + except OSError: + self._display = None + + stdoffset = -keydict["Bias"]-keydict["StandardBias"] + dstoffset = stdoffset-keydict["DaylightBias"] + + self._std_offset = datetime.timedelta(minutes=stdoffset) + self._dst_offset = datetime.timedelta(minutes=dstoffset) + + # For reasons unclear, in this particular key, the day of week has been + # moved to the END of the SYSTEMTIME structure. + tup = struct.unpack("=8h", keydict["StandardStart"]) + + (self._stdmonth, + self._stdweeknumber, # Last = 5 + self._stdhour, + self._stdminute) = tup[1:5] + + self._stddayofweek = tup[7] + + tup = struct.unpack("=8h", keydict["DaylightStart"]) + + (self._dstmonth, + self._dstweeknumber, # Last = 5 + self._dsthour, + self._dstminute) = tup[1:5] + + self._dstdayofweek = tup[7] + + self._dst_base_offset_ = self._dst_offset - self._std_offset + self.hasdst = self._get_hasdst() + + def __repr__(self): + return "tzwinlocal()" + + def __str__(self): + # str will return the standard name, not the daylight name. + return "tzwinlocal(%s)" % repr(self._std_abbr) + + def __reduce__(self): + return (self.__class__, ()) + + +def picknthweekday(year, month, dayofweek, hour, minute, whichweek): + """ dayofweek == 0 means Sunday, whichweek 5 means last instance """ + first = datetime.datetime(year, month, 1, hour, minute) + + # This will work if dayofweek is ISO weekday (1-7) or Microsoft-style (0-6), + # Because 7 % 7 = 0 + weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1) + wd = weekdayone + ((whichweek - 1) * ONEWEEK) + if (wd.month != month): + wd -= ONEWEEK + + return wd + + +def valuestodict(key): + """Convert a registry key's values to a dictionary.""" + dout = {} + size = winreg.QueryInfoKey(key)[1] + tz_res = None + + for i in range(size): + key_name, value, dtype = winreg.EnumValue(key, i) + if dtype == winreg.REG_DWORD or dtype == winreg.REG_DWORD_LITTLE_ENDIAN: + # If it's a DWORD (32-bit integer), it's stored as unsigned - convert + # that to a proper signed integer + if value & (1 << 31): + value = value - (1 << 32) + elif dtype == winreg.REG_SZ: + # If it's a reference to the tzres DLL, load the actual string + if value.startswith('@tzres'): + tz_res = tz_res or tzres() + value = tz_res.name_from_string(value) + + value = value.rstrip('\x00') # Remove trailing nulls + + dout[key_name] = value + + return dout diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/tzwin.py b/dbt-env/lib/python3.8/site-packages/dateutil/tzwin.py new file mode 100644 index 0000000..cebc673 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/tzwin.py @@ -0,0 +1,2 @@ +# tzwin has moved to dateutil.tz.win +from .tz.win import * diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/utils.py b/dbt-env/lib/python3.8/site-packages/dateutil/utils.py new file mode 100644 index 0000000..dd2d245 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/utils.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +""" +This module offers general convenience and utility functions for dealing with +datetimes. 
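+
+(Editor's illustrative note, not part of the upstream docstring.)  For
+example, with ``dt1`` and ``dt2`` standing in for any two datetimes::
+
+    >>> from datetime import timedelta
+    >>> from dateutil import tz, utils
+    >>> utils.today(tz.UTC)                                 # today at midnight, UTC-aware
+    >>> utils.within_delta(dt1, dt2, timedelta(seconds=5))  # |dt1 - dt2| <= 5s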
+ +.. versionadded:: 2.7.0 +""" +from __future__ import unicode_literals + +from datetime import datetime, time + + +def today(tzinfo=None): + """ + Returns a :py:class:`datetime` representing the current day at midnight + + :param tzinfo: + The time zone to attach (also used to determine the current day). + + :return: + A :py:class:`datetime.datetime` object representing the current day + at midnight. + """ + + dt = datetime.now(tzinfo) + return datetime.combine(dt.date(), time(0, tzinfo=tzinfo)) + + +def default_tzinfo(dt, tzinfo): + """ + Sets the ``tzinfo`` parameter on naive datetimes only + + This is useful for example when you are provided a datetime that may have + either an implicit or explicit time zone, such as when parsing a time zone + string. + + .. doctest:: + + >>> from dateutil.tz import tzoffset + >>> from dateutil.parser import parse + >>> from dateutil.utils import default_tzinfo + >>> dflt_tz = tzoffset("EST", -18000) + >>> print(default_tzinfo(parse('2014-01-01 12:30 UTC'), dflt_tz)) + 2014-01-01 12:30:00+00:00 + >>> print(default_tzinfo(parse('2014-01-01 12:30'), dflt_tz)) + 2014-01-01 12:30:00-05:00 + + :param dt: + The datetime on which to replace the time zone + + :param tzinfo: + The :py:class:`datetime.tzinfo` subclass instance to assign to + ``dt`` if (and only if) it is naive. + + :return: + Returns an aware :py:class:`datetime.datetime`. + """ + if dt.tzinfo is not None: + return dt + else: + return dt.replace(tzinfo=tzinfo) + + +def within_delta(dt1, dt2, delta): + """ + Useful for comparing two datetimes that may have a negligible difference + to be considered equal. + """ + delta = abs(delta) + difference = dt1 - dt2 + return -delta <= difference <= delta diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/__init__.py b/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/__init__.py new file mode 100644 index 0000000..34f11ad --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/__init__.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +import warnings +import json + +from tarfile import TarFile +from pkgutil import get_data +from io import BytesIO + +from dateutil.tz import tzfile as _tzfile + +__all__ = ["get_zonefile_instance", "gettz", "gettz_db_metadata"] + +ZONEFILENAME = "dateutil-zoneinfo.tar.gz" +METADATA_FN = 'METADATA' + + +class tzfile(_tzfile): + def __reduce__(self): + return (gettz, (self._filename,)) + + +def getzoneinfofile_stream(): + try: + return BytesIO(get_data(__name__, ZONEFILENAME)) + except IOError as e: # TODO switch to FileNotFoundError? + warnings.warn("I/O error({0}): {1}".format(e.errno, e.strerror)) + return None + + +class ZoneInfoFile(object): + def __init__(self, zonefile_stream=None): + if zonefile_stream is not None: + with TarFile.open(fileobj=zonefile_stream) as tf: + self.zones = {zf.name: tzfile(tf.extractfile(zf), filename=zf.name) + for zf in tf.getmembers() + if zf.isfile() and zf.name != METADATA_FN} + # deal with links: They'll point to their parent object. Less + # waste of memory + links = {zl.name: self.zones[zl.linkname] + for zl in tf.getmembers() if + zl.islnk() or zl.issym()} + self.zones.update(links) + try: + metadata_json = tf.extractfile(tf.getmember(METADATA_FN)) + metadata_str = metadata_json.read().decode('UTF-8') + self.metadata = json.loads(metadata_str) + except KeyError: + # no metadata in tar file + self.metadata = None + else: + self.zones = {} + self.metadata = None + + def get(self, name, default=None): + """ + Wrapper for :func:`ZoneInfoFile.zones.get`. 
This is a convenience method + for retrieving zones from the zone dictionary. + + :param name: + The name of the zone to retrieve. (Generally IANA zone names) + + :param default: + The value to return in the event of a missing key. + + .. versionadded:: 2.6.0 + + """ + return self.zones.get(name, default) + + +# The current API has gettz as a module function, although in fact it taps into +# a stateful class. So as a workaround for now, without changing the API, we +# will create a new "global" class instance the first time a user requests a +# timezone. Ugly, but adheres to the api. +# +# TODO: Remove after deprecation period. +_CLASS_ZONE_INSTANCE = [] + + +def get_zonefile_instance(new_instance=False): + """ + This is a convenience function which provides a :class:`ZoneInfoFile` + instance using the data provided by the ``dateutil`` package. By default, it + caches a single instance of the ZoneInfoFile object and returns that. + + :param new_instance: + If ``True``, a new instance of :class:`ZoneInfoFile` is instantiated and + used as the cached instance for the next call. Otherwise, new instances + are created only as necessary. + + :return: + Returns a :class:`ZoneInfoFile` object. + + .. versionadded:: 2.6 + """ + if new_instance: + zif = None + else: + zif = getattr(get_zonefile_instance, '_cached_instance', None) + + if zif is None: + zif = ZoneInfoFile(getzoneinfofile_stream()) + + get_zonefile_instance._cached_instance = zif + + return zif + + +def gettz(name): + """ + This retrieves a time zone from the local zoneinfo tarball that is packaged + with dateutil. + + :param name: + An IANA-style time zone name, as found in the zoneinfo file. + + :return: + Returns a :class:`dateutil.tz.tzfile` time zone object. + + .. warning:: + It is generally inadvisable to use this function, and it is only + provided for API compatibility with earlier versions. This is *not* + equivalent to ``dateutil.tz.gettz()``, which selects an appropriate + time zone based on the inputs, favoring system zoneinfo. This is ONLY + for accessing the dateutil-specific zoneinfo (which may be out of + date compared to the system zoneinfo). + + .. deprecated:: 2.6 + If you need to use a specific zoneinfofile over the system zoneinfo, + instantiate a :class:`dateutil.zoneinfo.ZoneInfoFile` object and call + :func:`dateutil.zoneinfo.ZoneInfoFile.get(name)` instead. + + Use :func:`get_zonefile_instance` to retrieve an instance of the + dateutil-provided zoneinfo. + """ + warnings.warn("zoneinfo.gettz() will be removed in future versions, " + "to use the dateutil-provided zoneinfo files, instantiate a " + "ZoneInfoFile object and use ZoneInfoFile.zones.get() " + "instead. See the documentation for details.", + DeprecationWarning) + + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].zones.get(name) + + +def gettz_db_metadata(): + """ Get the zonefile metadata + + See `zonefile_metadata`_ + + :returns: + A dictionary with the database metadata + + .. deprecated:: 2.6 + See deprecation warning in :func:`zoneinfo.gettz`. To get metadata, + query the attribute ``zoneinfo.ZoneInfoFile.metadata``. + """ + warnings.warn("zoneinfo.gettz_db_metadata() will be removed in future " + "versions, to use the dateutil-provided zoneinfo files, " + "ZoneInfoFile object and query the 'metadata' attribute " + "instead. 
See the documentation for details.", + DeprecationWarning) + + if len(_CLASS_ZONE_INSTANCE) == 0: + _CLASS_ZONE_INSTANCE.append(ZoneInfoFile(getzoneinfofile_stream())) + return _CLASS_ZONE_INSTANCE[0].metadata diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..d06040d Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-38.pyc new file mode 100644 index 0000000..25b32d9 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/__pycache__/rebuild.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz b/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz new file mode 100644 index 0000000..524c48e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz differ diff --git a/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/rebuild.py b/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/rebuild.py new file mode 100644 index 0000000..684c658 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dateutil/zoneinfo/rebuild.py @@ -0,0 +1,75 @@ +import logging +import os +import tempfile +import shutil +import json +from subprocess import check_call, check_output +from tarfile import TarFile + +from dateutil.zoneinfo import METADATA_FN, ZONEFILENAME + + +def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None): + """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar* + + filename is the timezone tarball from ``ftp.iana.org/tz``. + + """ + tmpdir = tempfile.mkdtemp() + zonedir = os.path.join(tmpdir, "zoneinfo") + moduledir = os.path.dirname(__file__) + try: + with TarFile.open(filename) as tf: + for name in zonegroups: + tf.extract(name, tmpdir) + filepaths = [os.path.join(tmpdir, n) for n in zonegroups] + + _run_zic(zonedir, filepaths) + + # write metadata file + with open(os.path.join(zonedir, METADATA_FN), 'w') as f: + json.dump(metadata, f, indent=4, sort_keys=True) + target = os.path.join(moduledir, ZONEFILENAME) + with TarFile.open(target, "w:%s" % format) as tf: + for entry in os.listdir(zonedir): + entrypath = os.path.join(zonedir, entry) + tf.add(entrypath, entry) + finally: + shutil.rmtree(tmpdir) + + +def _run_zic(zonedir, filepaths): + """Calls the ``zic`` compiler in a compatible way to get a "fat" binary. + + Recent versions of ``zic`` default to ``-b slim``, while older versions + don't even have the ``-b`` option (but default to "fat" binaries). The + current version of dateutil does not support Version 2+ TZif files, which + causes problems when used in conjunction with "slim" binaries, so this + function is used to ensure that we always get a "fat" binary. 
+ """ + + try: + help_text = check_output(["zic", "--help"]) + except OSError as e: + _print_on_nosuchfile(e) + raise + + if b"-b " in help_text: + bloat_args = ["-b", "fat"] + else: + bloat_args = [] + + check_call(["zic"] + bloat_args + ["-d", zonedir] + filepaths) + + +def _print_on_nosuchfile(e): + """Print helpful troubleshooting message + + e is an exception raised by subprocess.check_call() + + """ + if e.errno == 2: + logging.error( + "Could not find zic. Perhaps you need to install " + "libc-bin or some other package that provides it, " + "or it's not in your PATH?") diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/compilation.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/compilation.cpython-38.pyc new file mode 100644 index 0000000..941ba35 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/compilation.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/dataclass_schema.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/dataclass_schema.cpython-38.pyc new file mode 100644 index 0000000..35069c7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/dataclass_schema.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/deprecations.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/deprecations.cpython-38.pyc new file mode 100644 index 0000000..8b6f020 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/deprecations.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/exceptions.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000..68e31e7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/exceptions.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/flags.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/flags.cpython-38.pyc new file mode 100644 index 0000000..74bcc98 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/flags.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/helper_types.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/helper_types.cpython-38.pyc new file mode 100644 index 0000000..428d14e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/helper_types.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/hooks.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/hooks.cpython-38.pyc new file mode 100644 index 0000000..82e1025 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/hooks.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/links.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/links.cpython-38.pyc new file mode 100644 index 0000000..068783a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/links.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/logger.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/logger.cpython-38.pyc new file mode 100644 index 0000000..d671840 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/logger.cpython-38.pyc differ diff --git 
a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/main.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/main.cpython-38.pyc new file mode 100644 index 0000000..e90ae45 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/main.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/node_types.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/node_types.cpython-38.pyc new file mode 100644 index 0000000..3b73799 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/node_types.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/profiler.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/profiler.cpython-38.pyc new file mode 100644 index 0000000..7a73fd3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/profiler.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/semver.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/semver.cpython-38.pyc new file mode 100644 index 0000000..553db54 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/semver.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/tracking.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/tracking.cpython-38.pyc new file mode 100644 index 0000000..92d28dd Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/tracking.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/ui.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/ui.cpython-38.pyc new file mode 100644 index 0000000..ebdaba0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/ui.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/utils.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000..06b2fb5 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/utils.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/version.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/version.cpython-38.pyc new file mode 100644 index 0000000..29253a3 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/__pycache__/version.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/__pycache__/cache.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/__pycache__/cache.cpython-38.pyc new file mode 100644 index 0000000..294160a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/__pycache__/cache.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/__pycache__/factory.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/__pycache__/factory.cpython-38.pyc new file mode 100644 index 0000000..7975a8f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/__pycache__/factory.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/__pycache__/protocol.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/__pycache__/protocol.cpython-38.pyc new file mode 100644 index 0000000..698fe33 Binary files /dev/null and 
b/dbt-env/lib/python3.8/site-packages/dbt/adapters/__pycache__/protocol.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__init__.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__init__.py new file mode 100644 index 0000000..c0a8f01 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__init__.py @@ -0,0 +1,14 @@ +# these are all just exports, #noqa them so flake8 will be happy + +# TODO: Should we still include this in the `adapters` namespace? +from dbt.contracts.connection import Credentials # noqa +from dbt.adapters.base.meta import available # noqa +from dbt.adapters.base.connections import BaseConnectionManager # noqa +from dbt.adapters.base.relation import ( # noqa + BaseRelation, + RelationType, + SchemaSearchMap, +) +from dbt.adapters.base.column import Column # noqa +from dbt.adapters.base.impl import AdapterConfig, BaseAdapter # noqa +from dbt.adapters.base.plugin import AdapterPlugin # noqa diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..d31662e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/column.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/column.cpython-38.pyc new file mode 100644 index 0000000..4d5cd5b Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/column.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/connections.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/connections.cpython-38.pyc new file mode 100644 index 0000000..77271c8 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/connections.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/impl.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/impl.cpython-38.pyc new file mode 100644 index 0000000..36791f7 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/impl.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/meta.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/meta.cpython-38.pyc new file mode 100644 index 0000000..a11a230 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/meta.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/plugin.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/plugin.cpython-38.pyc new file mode 100644 index 0000000..1794b6d Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/plugin.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/query_headers.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/query_headers.cpython-38.pyc new file mode 100644 index 0000000..989a93f Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/query_headers.cpython-38.pyc differ diff --git 
a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/relation.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/relation.cpython-38.pyc new file mode 100644 index 0000000..8a6acc2 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/__pycache__/relation.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/column.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/column.py new file mode 100644 index 0000000..df0319c --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/column.py @@ -0,0 +1,153 @@ +from dataclasses import dataclass +import re +from typing import Dict, ClassVar, Any, Optional + +from dbt.exceptions import RuntimeException + + +@dataclass +class Column: + TYPE_LABELS: ClassVar[Dict[str, str]] = { + 'STRING': 'TEXT', + 'TIMESTAMP': 'TIMESTAMP', + 'FLOAT': 'FLOAT', + 'INTEGER': 'INT' + } + column: str + dtype: str + char_size: Optional[int] = None + numeric_precision: Optional[Any] = None + numeric_scale: Optional[Any] = None + + @classmethod + def translate_type(cls, dtype: str) -> str: + return cls.TYPE_LABELS.get(dtype.upper(), dtype) + + @classmethod + def create(cls, name, label_or_dtype: str) -> 'Column': + column_type = cls.translate_type(label_or_dtype) + return cls(name, column_type) + + @property + def name(self) -> str: + return self.column + + @property + def quoted(self) -> str: + return '"{}"'.format(self.column) + + @property + def data_type(self) -> str: + if self.is_string(): + return Column.string_type(self.string_size()) + elif self.is_numeric(): + return Column.numeric_type(self.dtype, self.numeric_precision, + self.numeric_scale) + else: + return self.dtype + + def is_string(self) -> bool: + return self.dtype.lower() in ['text', 'character varying', 'character', + 'varchar'] + + def is_number(self): + return any([self.is_integer(), self.is_numeric(), self.is_float()]) + + def is_float(self): + return self.dtype.lower() in [ + # floats + 'real', 'float4', 'float', 'double precision', 'float8' + ] + + def is_integer(self) -> bool: + return self.dtype.lower() in [ + # real types + 'smallint', 'integer', 'bigint', + 'smallserial', 'serial', 'bigserial', + # aliases + 'int2', 'int4', 'int8', + 'serial2', 'serial4', 'serial8', + ] + + def is_numeric(self) -> bool: + return self.dtype.lower() in ['numeric', 'decimal'] + + def string_size(self) -> int: + if not self.is_string(): + raise RuntimeException("Called string_size() on non-string field!") + + if self.dtype == 'text' or self.char_size is None: + # char_size should never be None. Handle it reasonably just in case + return 256 + else: + return int(self.char_size) + + def can_expand_to(self, other_column: 'Column') -> bool: + """returns True if this column can be expanded to the size of the + other column""" + if not self.is_string() or not other_column.is_string(): + return False + + return other_column.string_size() > self.string_size() + + def literal(self, value: Any) -> str: + return "{}::{}".format(value, self.data_type) + + @classmethod + def string_type(cls, size: int) -> str: + return "character varying({})".format(size) + + @classmethod + def numeric_type(cls, dtype: str, precision: Any, scale: Any) -> str: + # This could be decimal(...), numeric(...), number(...) 
+ # Just use whatever was fed in here -- don't try to get too clever + if precision is None or scale is None: + return dtype + else: + return "{}({},{})".format(dtype, precision, scale) + + def __repr__(self) -> str: + return "<Column {} ({})>".format(self.name, self.data_type) + + @classmethod + def from_description(cls, name: str, raw_data_type: str) -> 'Column': + match = re.match(r'([^(]+)(\([^)]+\))?', raw_data_type) + if match is None: + raise RuntimeException( + f'Could not interpret data type "{raw_data_type}"' + ) + data_type, size_info = match.groups() + char_size = None + numeric_precision = None + numeric_scale = None + if size_info is not None: + # strip out the parentheses + size_info = size_info[1:-1] + parts = size_info.split(',') + if len(parts) == 1: + try: + char_size = int(parts[0]) + except ValueError: + raise RuntimeException( + f'Could not interpret data_type "{raw_data_type}": ' + f'could not convert "{parts[0]}" to an integer' + ) + elif len(parts) == 2: + try: + numeric_precision = int(parts[0]) + except ValueError: + raise RuntimeException( + f'Could not interpret data_type "{raw_data_type}": ' + f'could not convert "{parts[0]}" to an integer' + ) + try: + numeric_scale = int(parts[1]) + except ValueError: + raise RuntimeException( + f'Could not interpret data_type "{raw_data_type}": ' + f'could not convert "{parts[1]}" to an integer' + ) + + return cls( + name, data_type, char_size, numeric_precision, numeric_scale + ) diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/connections.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/connections.py new file mode 100644 index 0000000..90e2fc2 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/connections.py @@ -0,0 +1,306 @@ +import abc +import os +# multiprocessing.RLock is a function returning this type +from multiprocessing.synchronize import RLock +from threading import get_ident +from typing import ( + Dict, Tuple, Hashable, Optional, ContextManager, List, Union +) + +import agate + +import dbt.exceptions +from dbt.contracts.connection import ( + Connection, Identifier, ConnectionState, + AdapterRequiredConfig, LazyHandle, AdapterResponse +) +from dbt.contracts.graph.manifest import Manifest +from dbt.adapters.base.query_headers import ( + MacroQueryStringSetter, +) +from dbt.logger import GLOBAL_LOGGER as logger +from dbt import flags + + +class BaseConnectionManager(metaclass=abc.ABCMeta): + """Methods to implement: + - exception_handler + - cancel_open + - open + - begin + - commit + - clear_transaction + - execute + + You must also set the 'TYPE' class attribute with a class-unique constant + string.
+ """ + TYPE: str = NotImplemented + + def __init__(self, profile: AdapterRequiredConfig): + self.profile = profile + self.thread_connections: Dict[Hashable, Connection] = {} + self.lock: RLock = flags.MP_CONTEXT.RLock() + self.query_header: Optional[MacroQueryStringSetter] = None + + def set_query_header(self, manifest: Manifest) -> None: + self.query_header = MacroQueryStringSetter(self.profile, manifest) + + @staticmethod + def get_thread_identifier() -> Hashable: + # note that get_ident() may be re-used, but we should never experience + # that within a single process + return (os.getpid(), get_ident()) + + def get_thread_connection(self) -> Connection: + key = self.get_thread_identifier() + with self.lock: + if key not in self.thread_connections: + raise dbt.exceptions.InvalidConnectionException( + key, list(self.thread_connections) + ) + return self.thread_connections[key] + + def set_thread_connection(self, conn: Connection) -> None: + key = self.get_thread_identifier() + if key in self.thread_connections: + raise dbt.exceptions.InternalException( + 'In set_thread_connection, existing connection exists for {}' + ) + self.thread_connections[key] = conn + + def get_if_exists(self) -> Optional[Connection]: + key = self.get_thread_identifier() + with self.lock: + return self.thread_connections.get(key) + + def clear_thread_connection(self) -> None: + key = self.get_thread_identifier() + with self.lock: + if key in self.thread_connections: + del self.thread_connections[key] + + def clear_transaction(self) -> None: + """Clear any existing transactions.""" + conn = self.get_thread_connection() + if conn is not None: + if conn.transaction_open: + self._rollback(conn) + self.begin() + self.commit() + + def rollback_if_open(self) -> None: + conn = self.get_if_exists() + if conn is not None and conn.handle and conn.transaction_open: + self._rollback(conn) + + @abc.abstractmethod + def exception_handler(self, sql: str) -> ContextManager: + """Create a context manager that handles exceptions caused by database + interactions. + + :param str sql: The SQL string that the block inside the context + manager is executing. + :return: A context manager that handles exceptions raised by the + underlying database. + """ + raise dbt.exceptions.NotImplementedException( + '`exception_handler` is not implemented for this adapter!') + + def set_connection_name(self, name: Optional[str] = None) -> Connection: + conn_name: str + if name is None: + # if a name isn't specified, we'll re-use a single handle + # named 'master' + conn_name = 'master' + else: + if not isinstance(name, str): + raise dbt.exceptions.CompilerException( + f'For connection name, got {name} - not a string!' + ) + assert isinstance(name, str) + conn_name = name + + conn = self.get_if_exists() + if conn is None: + conn = Connection( + type=Identifier(self.TYPE), + name=None, + state=ConnectionState.INIT, + transaction_open=False, + handle=None, + credentials=self.profile.credentials + ) + self.set_thread_connection(conn) + + if conn.name == conn_name and conn.state == 'open': + return conn + + logger.debug( + 'Acquiring new {} connection "{}".'.format(self.TYPE, conn_name)) + + if conn.state == 'open': + logger.debug( + 'Re-using an available connection from the pool (formerly {}).' + .format(conn.name) + ) + else: + conn.handle = LazyHandle(self.open) + + conn.name = conn_name + return conn + + @abc.abstractmethod + def cancel_open(self) -> Optional[List[str]]: + """Cancel all open connections on the adapter. 
(passable)""" + raise dbt.exceptions.NotImplementedException( + '`cancel_open` is not implemented for this adapter!' + ) + + @abc.abstractclassmethod + def open(cls, connection: Connection) -> Connection: + """Open the given connection on the adapter and return it. + + This may mutate the given connection (in particular, its state and its + handle). + + This should be thread-safe, or hold the lock if necessary. The given + connection should not be in either in_use or available. + """ + raise dbt.exceptions.NotImplementedException( + '`open` is not implemented for this adapter!' + ) + + def release(self) -> None: + with self.lock: + conn = self.get_if_exists() + if conn is None: + return + + try: + # always close the connection. close() calls _rollback() if there + # is an open transaction + self.close(conn) + except Exception: + # if rollback or close failed, remove our busted connection + self.clear_thread_connection() + raise + + def cleanup_all(self) -> None: + with self.lock: + for connection in self.thread_connections.values(): + if connection.state not in {'closed', 'init'}: + logger.debug("Connection '{}' was left open." + .format(connection.name)) + else: + logger.debug("Connection '{}' was properly closed." + .format(connection.name)) + self.close(connection) + + # garbage collect these connections + self.thread_connections.clear() + + @abc.abstractmethod + def begin(self) -> None: + """Begin a transaction. (passable)""" + raise dbt.exceptions.NotImplementedException( + '`begin` is not implemented for this adapter!' + ) + + @abc.abstractmethod + def commit(self) -> None: + """Commit a transaction. (passable)""" + raise dbt.exceptions.NotImplementedException( + '`commit` is not implemented for this adapter!' + ) + + @classmethod + def _rollback_handle(cls, connection: Connection) -> None: + """Perform the actual rollback operation.""" + try: + connection.handle.rollback() + except Exception: + logger.debug( + 'Failed to rollback {}'.format(connection.name), + exc_info=True + ) + + @classmethod + def _close_handle(cls, connection: Connection) -> None: + """Perform the actual close operation.""" + # On windows, sometimes connection handles don't have a close() attr. + if hasattr(connection.handle, 'close'): + logger.debug(f'On {connection.name}: Close') + connection.handle.close() + else: + logger.debug(f'On {connection.name}: No close available on handle') + + @classmethod + def _rollback(cls, connection: Connection) -> None: + """Roll back the given connection.""" + if flags.STRICT_MODE: + if not isinstance(connection, Connection): + raise dbt.exceptions.CompilerException( + f'In _rollback, got {connection} - not a Connection!' + ) + + if connection.transaction_open is False: + raise dbt.exceptions.InternalException( + f'Tried to rollback transaction on connection ' + f'"{connection.name}", but it does not have one open!' + ) + + logger.debug(f'On {connection.name}: ROLLBACK') + cls._rollback_handle(connection) + + connection.transaction_open = False + + @classmethod + def close(cls, connection: Connection) -> Connection: + if flags.STRICT_MODE: + if not isinstance(connection, Connection): + raise dbt.exceptions.CompilerException( + f'In close, got {connection} - not a Connection!' 
+ ) + + # if the connection is in closed or init, there's nothing to do + if connection.state in {ConnectionState.CLOSED, ConnectionState.INIT}: + return connection + + if connection.transaction_open and connection.handle: + logger.debug('On {}: ROLLBACK'.format(connection.name)) + cls._rollback_handle(connection) + connection.transaction_open = False + + cls._close_handle(connection) + connection.state = ConnectionState.CLOSED + + return connection + + def commit_if_has_connection(self) -> None: + """If the named connection exists, commit the current transaction.""" + connection = self.get_if_exists() + if connection: + self.commit() + + def _add_query_comment(self, sql: str) -> str: + if self.query_header is None: + return sql + return self.query_header.add(sql) + + @abc.abstractmethod + def execute( + self, sql: str, auto_begin: bool = False, fetch: bool = False + ) -> Tuple[Union[str, AdapterResponse], agate.Table]: + """Execute the given SQL. + + :param str sql: The sql to execute. + :param bool auto_begin: If set, and dbt is not currently inside a + transaction, automatically begin one. + :param bool fetch: If set, fetch results. + :return: A tuple of the status and the results (empty if fetch=False). + :rtype: Tuple[Union[str, AdapterResponse], agate.Table] + """ + raise dbt.exceptions.NotImplementedException( + '`execute` is not implemented for this adapter!' + ) diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/impl.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/impl.py new file mode 100644 index 0000000..1d90184 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/impl.py @@ -0,0 +1,1259 @@ +import abc +from concurrent.futures import as_completed, Future +from contextlib import contextmanager +from datetime import datetime +from itertools import chain +from typing import ( + Optional, Tuple, Callable, Iterable, Type, Dict, Any, List, Mapping, + Iterator, Union, Set +) + +import agate +import pytz + +from dbt.exceptions import ( + raise_database_error, raise_compiler_error, invalid_type_error, + get_relation_returned_multiple_results, + InternalException, NotImplementedException, RuntimeException, +) +from dbt import flags + +from dbt import deprecations +from dbt.adapters.protocol import ( + AdapterConfig, + ConnectionManagerProtocol, +) +from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows +from dbt.clients.jinja import MacroGenerator +from dbt.contracts.graph.compiled import ( + CompileResultNode, CompiledSeedNode +) +from dbt.contracts.graph.manifest import Manifest, MacroManifest +from dbt.contracts.graph.parsed import ParsedSeedNode +from dbt.exceptions import warn_or_error +from dbt.logger import GLOBAL_LOGGER as logger +from dbt.utils import filter_null_values, executor + +from dbt.adapters.base.connections import Connection, AdapterResponse +from dbt.adapters.base.meta import AdapterMeta, available +from dbt.adapters.base.relation import ( + ComponentName, BaseRelation, InformationSchema, SchemaSearchMap +) +from dbt.adapters.base import Column as BaseColumn +from dbt.adapters.cache import RelationsCache + + +SeedModel = Union[ParsedSeedNode, CompiledSeedNode] + + +GET_CATALOG_MACRO_NAME = 'get_catalog' +FRESHNESS_MACRO_NAME = 'collect_freshness' + + +def _expect_row_value(key: str, row: agate.Row): + if key not in row.keys(): + raise InternalException( + 'Got a row without "{}" column, columns: {}' + .format(key, row.keys()) + ) + return row[key] + + +def _catalog_filter_schemas(manifest: 
Manifest) -> Callable[[agate.Row], bool]: + """Return a function that takes a row and decides if the row should be + included in the catalog output. + """ + schemas = frozenset((d.lower(), s.lower()) + for d, s in manifest.get_used_schemas()) + + def test(row: agate.Row) -> bool: + table_database = _expect_row_value('table_database', row) + table_schema = _expect_row_value('table_schema', row) + # the schema may be present but None, which is not an error and should + # be filtered out + if table_schema is None: + return False + return (table_database.lower(), table_schema.lower()) in schemas + return test + + +def _utc( + dt: Optional[datetime], source: BaseRelation, field_name: str +) -> datetime: + """If dt has a timezone, return a new datetime that's in UTC. Otherwise, + assume the datetime is already for UTC and add the timezone. + """ + if dt is None: + raise raise_database_error( + "Expected a non-null value when querying field '{}' of table " + " {} but received value 'null' instead".format( + field_name, + source)) + + elif not hasattr(dt, 'tzinfo'): + raise raise_database_error( + "Expected a timestamp value when querying field '{}' of table " + "{} but received value of type '{}' instead".format( + field_name, + source, + type(dt).__name__)) + + elif dt.tzinfo: + return dt.astimezone(pytz.UTC) + else: + return dt.replace(tzinfo=pytz.UTC) + + +def _relation_name(rel: Optional[BaseRelation]) -> str: + if rel is None: + return 'null relation' + else: + return str(rel) + + +class BaseAdapter(metaclass=AdapterMeta): + """The BaseAdapter provides an abstract base class for adapters. + + Adapters must implement the following methods and macros. Some of the + methods can be safely overridden as a noop, where it makes sense + (transactions on databases that don't support them, for instance). Those + methods are marked with a (passable) in their docstrings. Check docstrings + for type information, etc. + + To implement a macro, implement "${adapter_type}__${macro_name}". in the + adapter's internal project. 
+ + Methods: + - exception_handler + - date_function + - list_schemas + - drop_relation + - truncate_relation + - rename_relation + - get_columns_in_relation + - expand_column_types + - list_relations_without_caching + - is_cancelable + - create_schema + - drop_schema + - quote + - convert_text_type + - convert_number_type + - convert_boolean_type + - convert_datetime_type + - convert_date_type + - convert_time_type + + Macros: + - get_catalog + """ + Relation: Type[BaseRelation] = BaseRelation + Column: Type[BaseColumn] = BaseColumn + ConnectionManager: Type[ConnectionManagerProtocol] + + # A set of clobber config fields accepted by this adapter + # for use in materializations + AdapterSpecificConfigs: Type[AdapterConfig] = AdapterConfig + + def __init__(self, config): + self.config = config + self.cache = RelationsCache() + self.connections = self.ConnectionManager(config) + self._macro_manifest_lazy: Optional[MacroManifest] = None + + ### + # Methods that pass through to the connection manager + ### + def acquire_connection(self, name=None) -> Connection: + return self.connections.set_connection_name(name) + + def release_connection(self) -> None: + self.connections.release() + + def cleanup_connections(self) -> None: + self.connections.cleanup_all() + + def clear_transaction(self) -> None: + self.connections.clear_transaction() + + def commit_if_has_connection(self) -> None: + self.connections.commit_if_has_connection() + + def debug_query(self) -> None: + self.execute('select 1 as id') + + def nice_connection_name(self) -> str: + conn = self.connections.get_if_exists() + if conn is None or conn.name is None: + return '' + return conn.name + + @contextmanager + def connection_named( + self, name: str, node: Optional[CompileResultNode] = None + ) -> Iterator[None]: + try: + if self.connections.query_header is not None: + self.connections.query_header.set(name, node) + self.acquire_connection(name) + yield + finally: + self.release_connection() + if self.connections.query_header is not None: + self.connections.query_header.reset() + + @contextmanager + def connection_for( + self, node: CompileResultNode + ) -> Iterator[None]: + with self.connection_named(node.unique_id, node): + yield + + @available.parse(lambda *a, **k: ('', empty_table())) + def execute( + self, sql: str, auto_begin: bool = False, fetch: bool = False + ) -> Tuple[Union[str, AdapterResponse], agate.Table]: + """Execute the given SQL. This is a thin wrapper around + ConnectionManager.execute. + + :param str sql: The sql to execute. + :param bool auto_begin: If set, and dbt is not currently inside a + transaction, automatically begin one. + :param bool fetch: If set, fetch results. + :return: A tuple of the status and the results (empty if fetch=False). + :rtype: Tuple[Union[str, AdapterResponse], agate.Table] + """ + return self.connections.execute( + sql=sql, + auto_begin=auto_begin, + fetch=fetch + ) + + @available.parse(lambda *a, **k: ('', empty_table())) + def get_partitions_metadata( + self, table: str + ) -> Tuple[agate.Table]: + """Obtain partitions metadata for a BigQuery partitioned table. + + :param str table_id: a partitioned table id, in standard SQL format. + :return: a partition metadata tuple, as described in + https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables. 
+ :rtype: agate.Table + """ + return self.connections.get_partitions_metadata( + table=table + ) + + ### + # Methods that should never be overridden + ### + @classmethod + def type(cls) -> str: + """Get the type of this adapter. Types must be class-unique and + consistent. + + :return: The type name + :rtype: str + """ + return cls.ConnectionManager.TYPE + + @property + def _macro_manifest(self) -> MacroManifest: + if self._macro_manifest_lazy is None: + return self.load_macro_manifest() + return self._macro_manifest_lazy + + def check_macro_manifest(self) -> Optional[MacroManifest]: + """Return the internal manifest (used for executing macros) if it's + been initialized, otherwise return None. + """ + return self._macro_manifest_lazy + + def load_macro_manifest(self) -> MacroManifest: + if self._macro_manifest_lazy is None: + # avoid a circular import + from dbt.parser.manifest import ManifestLoader + manifest = ManifestLoader.load_macros( + self.config, self.connections.set_query_header + ) + self._macro_manifest_lazy = manifest + return self._macro_manifest_lazy + + def clear_macro_manifest(self): + if self._macro_manifest_lazy is not None: + self._macro_manifest_lazy = None + + ### + # Caching methods + ### + def _schema_is_cached(self, database: Optional[str], schema: str) -> bool: + """Check if the schema is cached, and by default logs if it is not.""" + + if flags.USE_CACHE is False: + return False + elif (database, schema) not in self.cache: + logger.debug( + 'On "{}": cache miss for schema "{}.{}", this is inefficient' + .format(self.nice_connection_name(), database, schema) + ) + return False + else: + return True + + def _get_cache_schemas(self, manifest: Manifest) -> Set[BaseRelation]: + """Get the set of schema relations that the cache logic needs to + populate. This means only executable nodes are included. + """ + # the cache only cares about executable nodes + return { + self.Relation.create_from(self.config, node).without_identifier() + for node in manifest.nodes.values() + if ( + node.is_relational and not node.is_ephemeral_model + ) + } + + def _get_catalog_schemas(self, manifest: Manifest) -> SchemaSearchMap: + """Get a mapping of each node's "information_schema" relations to a + set of all schemas expected in that information_schema. + + There may be keys that are technically duplicates on the database side, + for example all of '"foo", 'foo', '"FOO"' and 'FOO' could coexist as + databases, and values could overlap as appropriate. All values are + lowercase strings. + """ + info_schema_name_map = SchemaSearchMap() + nodes: Iterator[CompileResultNode] = chain( + manifest.nodes.values(), + manifest.sources.values(), + ) + for node in nodes: + relation = self.Relation.create_from(self.config, node) + info_schema_name_map.add(relation) + # result is a map whose keys are information_schema Relations without + # identifiers that have appropriate database prefixes, and whose values + # are sets of lowercase schema names that are valid members of those + # databases + return info_schema_name_map + + def _relations_cache_for_schemas(self, manifest: Manifest) -> None: + """Populate the relations cache for the given schemas. Returns an + iterable of the schemas populated, as strings. 
+ """ + if not flags.USE_CACHE: + return + + cache_schemas = self._get_cache_schemas(manifest) + with executor(self.config) as tpe: + futures: List[Future[List[BaseRelation]]] = [] + for cache_schema in cache_schemas: + fut = tpe.submit_connected( + self, + f'list_{cache_schema.database}_{cache_schema.schema}', + self.list_relations_without_caching, + cache_schema + ) + futures.append(fut) + + for future in as_completed(futures): + # if we can't read the relations we need to just raise anyway, + # so just call future.result() and let that raise on failure + for relation in future.result(): + self.cache.add(relation) + + # it's possible that there were no relations in some schemas. We want + # to insert the schemas we query into the cache's `.schemas` attribute + # so we can check it later + cache_update: Set[Tuple[Optional[str], Optional[str]]] = set() + for relation in cache_schemas: + cache_update.add((relation.database, relation.schema)) + self.cache.update_schemas(cache_update) + + def set_relations_cache( + self, manifest: Manifest, clear: bool = False + ) -> None: + """Run a query that gets a populated cache of the relations in the + database and set the cache on this adapter. + """ + if not flags.USE_CACHE: + return + + with self.cache.lock: + if clear: + self.cache.clear() + self._relations_cache_for_schemas(manifest) + + @available + def cache_added(self, relation: Optional[BaseRelation]) -> str: + """Cache a new relation in dbt. It will show up in `list relations`.""" + if relation is None: + name = self.nice_connection_name() + raise_compiler_error( + 'Attempted to cache a null relation for {}'.format(name) + ) + if flags.USE_CACHE: + self.cache.add(relation) + # so jinja doesn't render things + return '' + + @available + def cache_dropped(self, relation: Optional[BaseRelation]) -> str: + """Drop a relation in dbt. It will no longer show up in + `list relations`, and any bound views will be dropped from the cache + """ + if relation is None: + name = self.nice_connection_name() + raise_compiler_error( + 'Attempted to drop a null relation for {}'.format(name) + ) + if flags.USE_CACHE: + self.cache.drop(relation) + return '' + + @available + def cache_renamed( + self, + from_relation: Optional[BaseRelation], + to_relation: Optional[BaseRelation], + ) -> str: + """Rename a relation in dbt. It will show up with a new name in + `list_relations`, but bound views will remain bound. + """ + if from_relation is None or to_relation is None: + name = self.nice_connection_name() + src_name = _relation_name(from_relation) + dst_name = _relation_name(to_relation) + raise_compiler_error( + 'Attempted to rename {} to {} for {}' + .format(src_name, dst_name, name) + ) + + if flags.USE_CACHE: + self.cache.rename(from_relation, to_relation) + return '' + + ### + # Abstract methods for database-specific values, attributes, and types + ### + @abc.abstractclassmethod + def date_function(cls) -> str: + """Get the date function used by this adapter's database.""" + raise NotImplementedException( + '`date_function` is not implemented for this adapter!') + + @abc.abstractclassmethod + def is_cancelable(cls) -> bool: + raise NotImplementedException( + '`is_cancelable` is not implemented for this adapter!' + ) + + ### + # Abstract methods about schemas + ### + @abc.abstractmethod + def list_schemas(self, database: str) -> List[str]: + """Get a list of existing schemas in database""" + raise NotImplementedException( + '`list_schemas` is not implemented for this adapter!' 
+ ) + + @available.parse(lambda *a, **k: False) + def check_schema_exists(self, database: str, schema: str) -> bool: + """Check if a schema exists. + + The default implementation of this is potentially unnecessarily slow, + and adapters should implement it if there is an optimized path (and + there probably is) + """ + search = ( + s.lower() for s in + self.list_schemas(database=database) + ) + return schema.lower() in search + + ### + # Abstract methods about relations + ### + @abc.abstractmethod + @available.parse_none + def drop_relation(self, relation: BaseRelation) -> None: + """Drop the given relation. + + *Implementors must call self.cache.drop() to preserve cache state!* + """ + raise NotImplementedException( + '`drop_relation` is not implemented for this adapter!' + ) + + @abc.abstractmethod + @available.parse_none + def truncate_relation(self, relation: BaseRelation) -> None: + """Truncate the given relation.""" + raise NotImplementedException( + '`truncate_relation` is not implemented for this adapter!' + ) + + @abc.abstractmethod + @available.parse_none + def rename_relation( + self, from_relation: BaseRelation, to_relation: BaseRelation + ) -> None: + """Rename the relation from from_relation to to_relation. + + Implementors must call self.cache.rename() to preserve cache state. + """ + raise NotImplementedException( + '`rename_relation` is not implemented for this adapter!' + ) + + @abc.abstractmethod + @available.parse_list + def get_columns_in_relation( + self, relation: BaseRelation + ) -> List[BaseColumn]: + """Get a list of the columns in the given Relation. """ + raise NotImplementedException( + '`get_columns_in_relation` is not implemented for this adapter!' + ) + + @available.deprecated('get_columns_in_relation', lambda *a, **k: []) + def get_columns_in_table( + self, schema: str, identifier: str + ) -> List[BaseColumn]: + """DEPRECATED: Get a list of the columns in the given table.""" + relation = self.Relation.create( + database=self.config.credentials.database, + schema=schema, + identifier=identifier, + quote_policy=self.config.quoting + ) + return self.get_columns_in_relation(relation) + + @abc.abstractmethod + def expand_column_types( + self, goal: BaseRelation, current: BaseRelation + ) -> None: + """Expand the current table's types to match the goal table. (passable) + + :param self.Relation goal: A relation that currently exists in the + database with columns of the desired types. + :param self.Relation current: A relation that currently exists in the + database with columns of unspecified types. + """ + raise NotImplementedException( + '`expand_target_column_types` is not implemented for this adapter!' + ) + + @abc.abstractmethod + def list_relations_without_caching( + self, schema_relation: BaseRelation + ) -> List[BaseRelation]: + """List relations in the given schema, bypassing the cache. + + This is used as the underlying behavior to fill the cache. + + :param schema_relation: A relation containing the database and schema + as appropraite for the underlying data warehouse + :return: The relations in schema + :rtype: List[self.Relation] + """ + raise NotImplementedException( + '`list_relations_without_caching` is not implemented for this ' + 'adapter!' + ) + + ### + # Provided methods about relations + ### + @available.parse_list + def get_missing_columns( + self, from_relation: BaseRelation, to_relation: BaseRelation + ) -> List[BaseColumn]: + """Returns a list of Columns in from_relation that are missing from + to_relation. 
+ """ + if not isinstance(from_relation, self.Relation): + invalid_type_error( + method_name='get_missing_columns', + arg_name='from_relation', + got_value=from_relation, + expected_type=self.Relation) + + if not isinstance(to_relation, self.Relation): + invalid_type_error( + method_name='get_missing_columns', + arg_name='to_relation', + got_value=to_relation, + expected_type=self.Relation) + + from_columns = { + col.name: col for col in + self.get_columns_in_relation(from_relation) + } + + to_columns = { + col.name: col for col in + self.get_columns_in_relation(to_relation) + } + + missing_columns = set(from_columns.keys()) - set(to_columns.keys()) + + return [ + col for (col_name, col) in from_columns.items() + if col_name in missing_columns + ] + + @available.parse_none + def valid_snapshot_target(self, relation: BaseRelation) -> None: + """Ensure that the target relation is valid, by making sure it has the + expected columns. + + :param Relation relation: The relation to check + :raises CompilationException: If the columns are + incorrect. + """ + if not isinstance(relation, self.Relation): + invalid_type_error( + method_name='valid_snapshot_target', + arg_name='relation', + got_value=relation, + expected_type=self.Relation) + + columns = self.get_columns_in_relation(relation) + names = set(c.name.lower() for c in columns) + expanded_keys = ('scd_id', 'valid_from', 'valid_to') + extra = [] + missing = [] + for legacy in expanded_keys: + desired = 'dbt_' + legacy + if desired not in names: + missing.append(desired) + if legacy in names: + extra.append(legacy) + + if missing: + if extra: + msg = ( + 'Snapshot target has ("{}") but not ("{}") - is it an ' + 'unmigrated previous version archive?' + .format('", "'.join(extra), '", "'.join(missing)) + ) + else: + msg = ( + 'Snapshot target is not a snapshot table (missing "{}")' + .format('", "'.join(missing)) + ) + raise_compiler_error(msg) + + @available.parse_none + def expand_target_column_types( + self, from_relation: BaseRelation, to_relation: BaseRelation + ) -> None: + if not isinstance(from_relation, self.Relation): + invalid_type_error( + method_name='expand_target_column_types', + arg_name='from_relation', + got_value=from_relation, + expected_type=self.Relation) + + if not isinstance(to_relation, self.Relation): + invalid_type_error( + method_name='expand_target_column_types', + arg_name='to_relation', + got_value=to_relation, + expected_type=self.Relation) + + self.expand_column_types(from_relation, to_relation) + + def list_relations( + self, database: Optional[str], schema: str + ) -> List[BaseRelation]: + if self._schema_is_cached(database, schema): + return self.cache.get_relations(database, schema) + + schema_relation = self.Relation.create( + database=database, + schema=schema, + identifier='', + quote_policy=self.config.quoting + ).without_identifier() + + # we can't build the relations cache because we don't have a + # manifest so we can't run any operations. 
+ relations = self.list_relations_without_caching( + schema_relation + ) + + logger.debug('with database={}, schema={}, relations={}' + .format(database, schema, relations)) + return relations + + def _make_match_kwargs( + self, database: str, schema: str, identifier: str + ) -> Dict[str, str]: + quoting = self.config.quoting + if identifier is not None and quoting['identifier'] is False: + identifier = identifier.lower() + + if schema is not None and quoting['schema'] is False: + schema = schema.lower() + + if database is not None and quoting['database'] is False: + database = database.lower() + + return filter_null_values({ + 'database': database, + 'identifier': identifier, + 'schema': schema, + }) + + def _make_match( + self, + relations_list: List[BaseRelation], + database: str, + schema: str, + identifier: str, + ) -> List[BaseRelation]: + + matches = [] + + search = self._make_match_kwargs(database, schema, identifier) + + for relation in relations_list: + if relation.matches(**search): + matches.append(relation) + + return matches + + @available.parse_none + def get_relation( + self, database: str, schema: str, identifier: str + ) -> Optional[BaseRelation]: + relations_list = self.list_relations(database, schema) + + matches = self._make_match(relations_list, database, schema, + identifier) + + if len(matches) > 1: + kwargs = { + 'identifier': identifier, + 'schema': schema, + 'database': database, + } + get_relation_returned_multiple_results( + kwargs, matches + ) + + elif matches: + return matches[0] + + return None + + @available.deprecated('get_relation', lambda *a, **k: False) + def already_exists(self, schema: str, name: str) -> bool: + """DEPRECATED: Return if a model already exists in the database""" + database = self.config.credentials.database + relation = self.get_relation(database, schema, name) + return relation is not None + + ### + # ODBC FUNCTIONS -- these should not need to change for every adapter, + # although some adapters may override them + ### + @abc.abstractmethod + @available.parse_none + def create_schema(self, relation: BaseRelation): + """Create the given schema if it does not exist.""" + raise NotImplementedException( + '`create_schema` is not implemented for this adapter!' + ) + + @abc.abstractmethod + @available.parse_none + def drop_schema(self, relation: BaseRelation): + """Drop the given schema (and everything in it) if it exists.""" + raise NotImplementedException( + '`drop_schema` is not implemented for this adapter!' + ) + + @available + @abc.abstractclassmethod + def quote(cls, identifier: str) -> str: + """Quote the given identifier, as appropriate for the database.""" + raise NotImplementedException( + '`quote` is not implemented for this adapter!' + ) + + @available + def quote_as_configured(self, identifier: str, quote_key: str) -> str: + """Quote or do not quote the given identifer as configured in the + project config for the quote key. + + The quote key should be one of 'database' (on bigquery, 'profile'), + 'identifier', or 'schema', or it will be treated as if you set `True`. 
+ """ + try: + key = ComponentName(quote_key) + except ValueError: + return identifier + + default = self.Relation.get_default_quote_policy().get_part(key) + if self.config.quoting.get(key, default): + return self.quote(identifier) + else: + return identifier + + @available + def quote_seed_column( + self, column: str, quote_config: Optional[bool] + ) -> str: + # this is the default for now + quote_columns: bool = False + if isinstance(quote_config, bool): + quote_columns = quote_config + elif quote_config is None: + deprecations.warn('column-quoting-unset') + else: + raise_compiler_error( + f'The seed configuration value of "quote_columns" has an ' + f'invalid type {type(quote_config)}' + ) + + if quote_columns: + return self.quote(column) + else: + return column + + ### + # Conversions: These must be implemented by concrete implementations, for + # converting agate types into their sql equivalents. + ### + @abc.abstractclassmethod + def convert_text_type( + cls, agate_table: agate.Table, col_idx: int + ) -> str: + """Return the type in the database that best maps to the agate.Text + type for the given agate table and column index. + + :param agate_table: The table + :param col_idx: The index into the agate table for the column. + :return: The name of the type in the database + """ + raise NotImplementedException( + '`convert_text_type` is not implemented for this adapter!') + + @abc.abstractclassmethod + def convert_number_type( + cls, agate_table: agate.Table, col_idx: int + ) -> str: + """Return the type in the database that best maps to the agate.Number + type for the given agate table and column index. + + :param agate_table: The table + :param col_idx: The index into the agate table for the column. + :return: The name of the type in the database + """ + raise NotImplementedException( + '`convert_number_type` is not implemented for this adapter!') + + @abc.abstractclassmethod + def convert_boolean_type( + cls, agate_table: agate.Table, col_idx: int + ) -> str: + """Return the type in the database that best maps to the agate.Boolean + type for the given agate table and column index. + + :param agate_table: The table + :param col_idx: The index into the agate table for the column. + :return: The name of the type in the database + """ + raise NotImplementedException( + '`convert_boolean_type` is not implemented for this adapter!') + + @abc.abstractclassmethod + def convert_datetime_type( + cls, agate_table: agate.Table, col_idx: int + ) -> str: + """Return the type in the database that best maps to the agate.DateTime + type for the given agate table and column index. + + :param agate_table: The table + :param col_idx: The index into the agate table for the column. + :return: The name of the type in the database + """ + raise NotImplementedException( + '`convert_datetime_type` is not implemented for this adapter!') + + @abc.abstractclassmethod + def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str: + """Return the type in the database that best maps to the agate.Date + type for the given agate table and column index. + + :param agate_table: The table + :param col_idx: The index into the agate table for the column. 
+ :return: The name of the type in the database + """ + raise NotImplementedException( + '`convert_date_type` is not implemented for this adapter!') + + @abc.abstractclassmethod + def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str: + """Return the type in the database that best maps to the + agate.TimeDelta type for the given agate table and column index. + + :param agate_table: The table + :param col_idx: The index into the agate table for the column. + :return: The name of the type in the database + """ + raise NotImplementedException( + '`convert_time_type` is not implemented for this adapter!') + + @available + @classmethod + def convert_type( + cls, agate_table: agate.Table, col_idx: int + ) -> Optional[str]: + return cls.convert_agate_type(agate_table, col_idx) + + @classmethod + def convert_agate_type( + cls, agate_table: agate.Table, col_idx: int + ) -> Optional[str]: + agate_type: Type = agate_table.column_types[col_idx] + conversions: List[Tuple[Type, Callable[..., str]]] = [ + (agate.Text, cls.convert_text_type), + (agate.Number, cls.convert_number_type), + (agate.Boolean, cls.convert_boolean_type), + (agate.DateTime, cls.convert_datetime_type), + (agate.Date, cls.convert_date_type), + (agate.TimeDelta, cls.convert_time_type), + ] + for agate_cls, func in conversions: + if isinstance(agate_type, agate_cls): + return func(agate_table, col_idx) + + return None + + ### + # Operations involving the manifest + ### + def execute_macro( + self, + macro_name: str, + manifest: Optional[Manifest] = None, + project: Optional[str] = None, + context_override: Optional[Dict[str, Any]] = None, + kwargs: Dict[str, Any] = None, + release: bool = False, + text_only_columns: Optional[Iterable[str]] = None, + ) -> agate.Table: + """Look macro_name up in the manifest and execute its results. + + :param macro_name: The name of the macro to execute. + :param manifest: The manifest to use for generating the base macro + execution context. If none is provided, use the internal manifest. + :param project: The name of the project to search in, or None for the + first match. + :param context_override: An optional dict to update() the macro + execution context. + :param kwargs: An optional dict of keyword args used to pass to the + macro. + :param release: Ignored. + """ + if release is not False: + deprecations.warn('execute-macro-release') + if kwargs is None: + kwargs = {} + if context_override is None: + context_override = {} + + if manifest is None: + manifest = self._macro_manifest + + macro = manifest.find_macro_by_name( + macro_name, self.config.project_name, project + ) + if macro is None: + if project is None: + package_name = 'any package' + else: + package_name = 'the "{}" package'.format(project) + + raise RuntimeException( + 'dbt could not find a macro with the name "{}" in {}' + .format(macro_name, package_name) + ) + # This causes a reference cycle, as generate_runtime_macro() + # ends up calling get_adapter, so the import has to be here. 
+ from dbt.context.providers import generate_runtime_macro + macro_context = generate_runtime_macro( + macro=macro, + config=self.config, + manifest=manifest, + package_name=project + ) + macro_context.update(context_override) + + macro_function = MacroGenerator(macro, macro_context) + + with self.connections.exception_handler(f'macro {macro_name}'): + result = macro_function(**kwargs) + return result + + @classmethod + def _catalog_filter_table( + cls, table: agate.Table, manifest: Manifest + ) -> agate.Table: + """Filter the table as appropriate for catalog entries. Subclasses can + override this to change filtering rules on a per-adapter basis. + """ + # force database + schema to be strings + table = table_from_rows( + table.rows, + table.column_names, + text_only_columns=['table_database', 'table_schema', 'table_name'] + ) + return table.where(_catalog_filter_schemas(manifest)) + + def _get_one_catalog( + self, + information_schema: InformationSchema, + schemas: Set[str], + manifest: Manifest, + ) -> agate.Table: + + kwargs = { + 'information_schema': information_schema, + 'schemas': schemas + } + table = self.execute_macro( + GET_CATALOG_MACRO_NAME, + kwargs=kwargs, + # pass in the full manifest so we get any local project + # overrides + manifest=manifest, + ) + + results = self._catalog_filter_table(table, manifest) + return results + + def get_catalog( + self, manifest: Manifest + ) -> Tuple[agate.Table, List[Exception]]: + schema_map = self._get_catalog_schemas(manifest) + + with executor(self.config) as tpe: + futures: List[Future[agate.Table]] = [] + for info, schemas in schema_map.items(): + if len(schemas) == 0: + continue + name = '.'.join([ + str(info.database), + 'information_schema' + ]) + + fut = tpe.submit_connected( + self, name, + self._get_one_catalog, info, schemas, manifest + ) + futures.append(fut) + + catalogs, exceptions = catch_as_completed(futures) + + return catalogs, exceptions + + def cancel_open_connections(self): + """Cancel all open connections.""" + return self.connections.cancel_open() + + def calculate_freshness( + self, + source: BaseRelation, + loaded_at_field: str, + filter: Optional[str], + manifest: Optional[Manifest] = None + ) -> Dict[str, Any]: + """Calculate the freshness of sources in dbt, and return it""" + kwargs: Dict[str, Any] = { + 'source': source, + 'loaded_at_field': loaded_at_field, + 'filter': filter, + } + + # run the macro + table = self.execute_macro( + FRESHNESS_MACRO_NAME, + kwargs=kwargs, + manifest=manifest + ) + # now we have a 1-row table of the maximum `loaded_at_field` value and + # the current time according to the db. + if len(table) != 1 or len(table[0]) != 2: + raise_compiler_error( + 'Got an invalid result from "{}" macro: {}'.format( + FRESHNESS_MACRO_NAME, [tuple(r) for r in table] + ) + ) + if table[0][0] is None: + # no records in the table, so really the max_loaded_at was + # infinitely long ago. Just call it 0:00 January 1 year UTC + max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC) + else: + max_loaded_at = _utc(table[0][0], source, loaded_at_field) + + snapshotted_at = _utc(table[0][1], source, loaded_at_field) + age = (snapshotted_at - max_loaded_at).total_seconds() + return { + 'max_loaded_at': max_loaded_at, + 'snapshotted_at': snapshotted_at, + 'age': age, + } + + def pre_model_hook(self, config: Mapping[str, Any]) -> Any: + """A hook for running some operation before the model materialization + runs. The hook can assume it has a connection available. 
+
+        The only parameter is a configuration dictionary (the same one
+        available in the materialization context). It should be considered
+        read-only.
+
+        The pre-model hook may return anything as a context, which will be
+        passed to the post-model hook.
+        """
+        pass
+
+    def post_model_hook(self, config: Mapping[str, Any], context: Any) -> None:
+        """A hook for running some operation after the model materialization
+        runs. The hook can assume it has a connection available.
+
+        The first parameter is a configuration dictionary (the same one
+        available in the materialization context). It should be considered
+        read-only.
+
+        The second parameter is the value returned by pre_model_hook.
+        """
+        pass
+
+    def get_compiler(self):
+        from dbt.compilation import Compiler
+        return Compiler(self.config)
+
+    # Methods used in adapter tests
+    def update_column_sql(
+        self,
+        dst_name: str,
+        dst_column: str,
+        clause: str,
+        where_clause: Optional[str] = None,
+    ) -> str:
+        clause = f'update {dst_name} set {dst_column} = {clause}'
+        if where_clause is not None:
+            clause += f' where {where_clause}'
+        return clause
+
+    def timestamp_add_sql(
+        self, add_to: str, number: int = 1, interval: str = 'hour'
+    ) -> str:
+        # for backwards compatibility, we're compelled to set some sort of
+        # default. A lot of searching has led me to believe that the
+        # '+ interval' syntax used in postgres/redshift is relatively common
+        # and might even be the SQL standard's intention.
+        return f"{add_to} + interval '{number} {interval}'"
+
+    def string_add_sql(
+        self, add_to: str, value: str, location='append',
+    ) -> str:
+        if location == 'append':
+            return f"{add_to} || '{value}'"
+        elif location == 'prepend':
+            return f"'{value}' || {add_to}"
+        else:
+            raise RuntimeException(
+                f'Got an unexpected location value of "{location}"'
+            )
+
+    def get_rows_different_sql(
+        self,
+        relation_a: BaseRelation,
+        relation_b: BaseRelation,
+        column_names: Optional[List[str]] = None,
+        except_operator: str = 'EXCEPT',
+    ) -> str:
+        """Generate SQL for a query that returns a single row with two
+        columns: the number of rows that are different between the two
+        relations and the number of mismatched rows.
+        """
+        # This method only really exists for test reasons.
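+        # Illustrative output of the generated query when run against two
+        # identical relations (hypothetical values):
+        #
+        #     row_count_difference | num_mismatched
+        #     ---------------------+----------------
+        #                        0 |              0
+        #
+        # Non-zero values mean the relations differ in row count and/or
+        # content; see COLUMNS_EQUAL_SQL below for the query template.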
+ names: List[str] + if column_names is None: + columns = self.get_columns_in_relation(relation_a) + names = sorted((self.quote(c.name) for c in columns)) + else: + names = sorted((self.quote(n) for n in column_names)) + columns_csv = ', '.join(names) + + sql = COLUMNS_EQUAL_SQL.format( + columns=columns_csv, + relation_a=str(relation_a), + relation_b=str(relation_b), + except_op=except_operator, + ) + + return sql + + +COLUMNS_EQUAL_SQL = ''' +with diff_count as ( + SELECT + 1 as id, + COUNT(*) as num_missing FROM ( + (SELECT {columns} FROM {relation_a} {except_op} + SELECT {columns} FROM {relation_b}) + UNION ALL + (SELECT {columns} FROM {relation_b} {except_op} + SELECT {columns} FROM {relation_a}) + ) as a +), table_a as ( + SELECT COUNT(*) as num_rows FROM {relation_a} +), table_b as ( + SELECT COUNT(*) as num_rows FROM {relation_b} +), row_count_diff as ( + select + 1 as id, + table_a.num_rows - table_b.num_rows as difference + from table_a, table_b +) +select + row_count_diff.difference as row_count_difference, + diff_count.num_missing as num_mismatched +from row_count_diff +join diff_count using (id) +'''.strip() + + +def catch_as_completed( + futures # typing: List[Future[agate.Table]] +) -> Tuple[agate.Table, List[Exception]]: + + # catalogs: agate.Table = agate.Table(rows=[]) + tables: List[agate.Table] = [] + exceptions: List[Exception] = [] + + for future in as_completed(futures): + exc = future.exception() + # we want to re-raise on ctrl+c and BaseException + if exc is None: + catalog = future.result() + tables.append(catalog) + elif ( + isinstance(exc, KeyboardInterrupt) or + not isinstance(exc, Exception) + ): + raise exc + else: + warn_or_error( + f'Encountered an error while generating catalog: {str(exc)}' + ) + # exc is not None, derives from Exception, and isn't ctrl+c + exceptions.append(exc) + return merge_tables(tables), exceptions diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/meta.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/meta.py new file mode 100644 index 0000000..209240c --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/meta.py @@ -0,0 +1,126 @@ +import abc +from functools import wraps +from typing import Callable, Optional, Any, FrozenSet, Dict, Set + +from dbt.deprecations import warn, renamed_method + + +Decorator = Callable[[Any], Callable] + + +class _Available: + def __call__(self, func: Callable) -> Callable: + func._is_available_ = True # type: ignore + return func + + def parse(self, parse_replacement: Callable) -> Decorator: + """A decorator factory to indicate that a method on the adapter will be + exposed to the database wrapper, and will be stubbed out at parse time + with the given function. + + @available.parse() + def my_method(self, a, b): + if something: + return None + return big_expensive_db_query() + + @available.parse(lambda *args, **args: {}) + def my_other_method(self, a, b): + x = {} + x.update(big_expensive_db_query()) + return x + """ + def inner(func): + func._parse_replacement_ = parse_replacement + return self(func) + return inner + + def deprecated( + self, supported_name: str, parse_replacement: Optional[Callable] = None + ) -> Decorator: + """A decorator that marks a function as available, but also prints a + deprecation warning. 
Use like + + @available.deprecated('my_new_method') + def my_old_method(self, arg): + args = compatability_shim(arg) + return self.my_new_method(*args) + + @available.deprecated('my_new_slow_method', lambda *a, **k: (0, '')) + def my_old_slow_method(self, arg): + args = compatibility_shim(arg) + return self.my_new_slow_method(*args) + + To make `adapter.my_old_method` available but also print out a warning + on use directing users to `my_new_method`. + + The optional parse_replacement, if provided, will provide a parse-time + replacement for the actual method (see `available.parse`). + """ + def wrapper(func): + func_name = func.__name__ + renamed_method(func_name, supported_name) + + @wraps(func) + def inner(*args, **kwargs): + warn('adapter:{}'.format(func_name)) + return func(*args, **kwargs) + + if parse_replacement: + available_function = self.parse(parse_replacement) + else: + available_function = self + return available_function(inner) + return wrapper + + def parse_none(self, func: Callable) -> Callable: + wrapper = self.parse(lambda *a, **k: None) + return wrapper(func) + + def parse_list(self, func: Callable) -> Callable: + wrapper = self.parse(lambda *a, **k: []) + return wrapper(func) + + +available = _Available() + + +class AdapterMeta(abc.ABCMeta): + _available_: FrozenSet[str] + _parse_replacements_: Dict[str, Callable] + + def __new__(mcls, name, bases, namespace, **kwargs): + # mypy does not like the `**kwargs`. But `ABCMeta` itself takes + # `**kwargs` in its argspec here (and passes them to `type.__new__`. + # I'm not sure there is any benefit to it after poking around a bit, + # but having it doesn't hurt on the python side (and omitting it could + # hurt for obscure metaclass reasons, for all I know) + cls = abc.ABCMeta.__new__( # type: ignore + mcls, name, bases, namespace, **kwargs + ) + + # this is very much inspired by ABCMeta's own implementation + + # dict mapping the method name to whether the model name should be + # injected into the arguments. All methods in here are exposed to the + # context. + available: Set[str] = set() + replacements: Dict[str, Any] = {} + + # collect base class data first + for base in bases: + available.update(getattr(base, '_available_', set())) + replacements.update(getattr(base, '_parse_replacements_', set())) + + # override with local data if it exists + for name, value in namespace.items(): + if getattr(value, '_is_available_', False): + available.add(name) + parse_replacement = getattr(value, '_parse_replacement_', None) + if parse_replacement is not None: + replacements[name] = parse_replacement + + cls._available_ = frozenset(available) + # should this be a namedtuple so it will be immutable like _available_? + cls._parse_replacements_ = replacements + return cls diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/plugin.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/plugin.py new file mode 100644 index 0000000..c87b2a2 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/plugin.py @@ -0,0 +1,42 @@ +from typing import List, Optional, Type + +from dbt.adapters.base import Credentials +from dbt.exceptions import CompilationException +from dbt.adapters.protocol import AdapterProtocol + + +def project_name_from_path(include_path: str) -> str: + # avoid an import cycle + from dbt.config.project import Project + partial = Project.partial_load(include_path) + if partial.project_name is None: + raise CompilationException( + f'Invalid project at {include_path}: name not set!' 
+ ) + return partial.project_name + + +class AdapterPlugin: + """Defines the basic requirements for a dbt adapter plugin. + + :param include_path: The path to this adapter plugin's root + :param dependencies: A list of adapter names that this adapter depends + upon. + """ + def __init__( + self, + adapter: Type[AdapterProtocol], + credentials: Type[Credentials], + include_path: str, + dependencies: Optional[List[str]] = None + ): + + self.adapter: Type[AdapterProtocol] = adapter + self.credentials: Type[Credentials] = credentials + self.include_path: str = include_path + self.project_name: str = project_name_from_path(include_path) + self.dependencies: List[str] + if dependencies is None: + self.dependencies = [] + else: + self.dependencies = dependencies diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/query_headers.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/query_headers.py new file mode 100644 index 0000000..49c564f --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/query_headers.py @@ -0,0 +1,101 @@ +from threading import local +from typing import Optional, Callable, Dict, Any + +from dbt.clients.jinja import QueryStringGenerator + +from dbt.context.manifest import generate_query_header_context +from dbt.contracts.connection import AdapterRequiredConfig, QueryComment +from dbt.contracts.graph.compiled import CompileResultNode +from dbt.contracts.graph.manifest import Manifest +from dbt.exceptions import RuntimeException + + +class NodeWrapper: + def __init__(self, node): + self._inner_node = node + + def __getattr__(self, name): + return getattr(self._inner_node, name, '') + + +class _QueryComment(local): + """A thread-local class storing thread-specific state information for + connection management, namely: + - the current thread's query comment. 
+ - a source_name indicating what set the current thread's query comment + """ + def __init__(self, initial): + self.query_comment: Optional[str] = initial + self.append = False + + def add(self, sql: str) -> str: + if not self.query_comment: + return sql + + if self.append: + # replace last ';' with ';' + sql = sql.rstrip() + if sql[-1] == ';': + sql = sql[:-1] + return '{}\n/* {} */;'.format(sql, self.query_comment.strip()) + + return '{}\n/* {} */'.format(sql, self.query_comment.strip()) + + return '/* {} */\n{}'.format(self.query_comment.strip(), sql) + + def set(self, comment: Optional[str], append: bool): + if isinstance(comment, str) and '*/' in comment: + # tell the user "no" so they don't hurt themselves by writing + # garbage + raise RuntimeException( + f'query comment contains illegal value "*/": {comment}' + ) + self.query_comment = comment + self.append = append + + +QueryStringFunc = Callable[[str, Optional[NodeWrapper]], str] + + +class MacroQueryStringSetter: + def __init__(self, config: AdapterRequiredConfig, manifest: Manifest): + self.manifest = manifest + self.config = config + + comment_macro = self._get_comment_macro() + self.generator: QueryStringFunc = lambda name, model: '' + # if the comment value was None or the empty string, just skip it + if comment_macro: + assert isinstance(comment_macro, str) + macro = '\n'.join(( + '{%- macro query_comment_macro(connection_name, node) -%}', + comment_macro, + '{% endmacro %}' + )) + ctx = self._get_context() + self.generator = QueryStringGenerator(macro, ctx) + self.comment = _QueryComment(None) + self.reset() + + def _get_comment_macro(self) -> Optional[str]: + return self.config.query_comment.comment + + def _get_context(self) -> Dict[str, Any]: + return generate_query_header_context(self.config, self.manifest) + + def add(self, sql: str) -> str: + return self.comment.add(sql) + + def reset(self): + self.set('master', None) + + def set(self, name: str, node: Optional[CompileResultNode]): + wrapped: Optional[NodeWrapper] = None + if node is not None: + wrapped = NodeWrapper(node) + comment_str = self.generator(name, wrapped) + + append = False + if isinstance(self.config.query_comment, QueryComment): + append = self.config.query_comment.append + self.comment.set(comment_str, append) diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/relation.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/relation.py new file mode 100644 index 0000000..9f89d82 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/base/relation.py @@ -0,0 +1,456 @@ +from collections.abc import Hashable +from dataclasses import dataclass +from typing import ( + Optional, TypeVar, Any, Type, Dict, Union, Iterator, Tuple, Set +) + +from dbt.contracts.graph.compiled import CompiledNode +from dbt.contracts.graph.parsed import ParsedSourceDefinition, ParsedNode +from dbt.contracts.relation import ( + RelationType, ComponentName, HasQuoting, FakeAPIObject, Policy, Path +) +from dbt.exceptions import InternalException +from dbt.node_types import NodeType +from dbt.utils import filter_null_values, deep_merge, classproperty + +import dbt.exceptions + + +Self = TypeVar('Self', bound='BaseRelation') + + +@dataclass(frozen=True, eq=False, repr=False) +class BaseRelation(FakeAPIObject, Hashable): + path: Path + type: Optional[RelationType] = None + quote_character: str = '"' + include_policy: Policy = Policy() + quote_policy: Policy = Policy() + dbt_created: bool = False + + def _is_exactish_match(self, field: ComponentName, 
value: str) -> bool: + if self.dbt_created and self.quote_policy.get_part(field) is False: + return self.path.get_lowered_part(field) == value.lower() + else: + return self.path.get_part(field) == value + + @classmethod + def _get_field_named(cls, field_name): + for field, _ in cls._get_fields(): + if field.name == field_name: + return field + # this should be unreachable + raise ValueError(f'BaseRelation has no {field_name} field!') + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + return self.to_dict(omit_none=True) == other.to_dict(omit_none=True) + + @classmethod + def get_default_quote_policy(cls) -> Policy: + return cls._get_field_named('quote_policy').default + + @classmethod + def get_default_include_policy(cls) -> Policy: + return cls._get_field_named('include_policy').default + + def get(self, key, default=None): + """Override `.get` to return a metadata object so we don't break + dbt_utils. + """ + if key == 'metadata': + return { + 'type': self.__class__.__name__ + } + return super().get(key, default) + + def matches( + self, + database: Optional[str] = None, + schema: Optional[str] = None, + identifier: Optional[str] = None, + ) -> bool: + search = filter_null_values({ + ComponentName.Database: database, + ComponentName.Schema: schema, + ComponentName.Identifier: identifier + }) + + if not search: + # nothing was passed in + raise dbt.exceptions.RuntimeException( + "Tried to match relation, but no search path was passed!") + + exact_match = True + approximate_match = True + + for k, v in search.items(): + if not self._is_exactish_match(k, v): + exact_match = False + + if self.path.get_lowered_part(k) != v.lower(): + approximate_match = False + + if approximate_match and not exact_match: + target = self.create( + database=database, schema=schema, identifier=identifier + ) + dbt.exceptions.approximate_relation_match(target, self) + + return exact_match + + def replace_path(self, **kwargs): + return self.replace(path=self.path.replace(**kwargs)) + + def quote( + self: Self, + database: Optional[bool] = None, + schema: Optional[bool] = None, + identifier: Optional[bool] = None, + ) -> Self: + policy = filter_null_values({ + ComponentName.Database: database, + ComponentName.Schema: schema, + ComponentName.Identifier: identifier + }) + + new_quote_policy = self.quote_policy.replace_dict(policy) + return self.replace(quote_policy=new_quote_policy) + + def include( + self: Self, + database: Optional[bool] = None, + schema: Optional[bool] = None, + identifier: Optional[bool] = None, + ) -> Self: + policy = filter_null_values({ + ComponentName.Database: database, + ComponentName.Schema: schema, + ComponentName.Identifier: identifier + }) + + new_include_policy = self.include_policy.replace_dict(policy) + return self.replace(include_policy=new_include_policy) + + def information_schema(self, view_name=None) -> 'InformationSchema': + # some of our data comes from jinja, where things can be `Undefined`. + if not isinstance(view_name, str): + view_name = None + + # Kick the user-supplied schema out of the information schema relation + # Instead address this as .information_schema by default + info_schema = InformationSchema.from_relation(self, view_name) + return info_schema.incorporate(path={"schema": None}) + + def information_schema_only(self) -> 'InformationSchema': + return self.information_schema() + + def without_identifier(self) -> 'BaseRelation': + """Return a form of this relation that only has the database and schema + set to included. 
To get the appropriately-quoted form the schema out of + the result (for use as part of a query), use `.render()`. To get the + raw database or schema name, use `.database` or `.schema`. + + The hash of the returned object is the result of render(). + """ + return self.include(identifier=False).replace_path(identifier=None) + + def _render_iterator( + self + ) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]: + + for key in ComponentName: + path_part: Optional[str] = None + if self.include_policy.get_part(key): + path_part = self.path.get_part(key) + if path_part is not None and self.quote_policy.get_part(key): + path_part = self.quoted(path_part) + yield key, path_part + + def render(self) -> str: + # if there is nothing set, this will return the empty string. + return '.'.join( + part for _, part in self._render_iterator() + if part is not None + ) + + def quoted(self, identifier): + return '{quote_char}{identifier}{quote_char}'.format( + quote_char=self.quote_character, + identifier=identifier, + ) + + @classmethod + def create_from_source( + cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any + ) -> Self: + source_quoting = source.quoting.to_dict(omit_none=True) + source_quoting.pop('column', None) + quote_policy = deep_merge( + cls.get_default_quote_policy().to_dict(omit_none=True), + source_quoting, + kwargs.get('quote_policy', {}), + ) + + return cls.create( + database=source.database, + schema=source.schema, + identifier=source.identifier, + quote_policy=quote_policy, + **kwargs + ) + + @staticmethod + def add_ephemeral_prefix(name: str): + return f'__dbt__cte__{name}' + + @classmethod + def create_ephemeral_from_node( + cls: Type[Self], + config: HasQuoting, + node: Union[ParsedNode, CompiledNode], + ) -> Self: + # Note that ephemeral models are based on the name. 
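+        # For illustration (hypothetical model name): a node named
+        # 'stg_payments' gets the identifier '__dbt__cte__stg_payments', and
+        # .quote(identifier=False) below keeps that identifier unquoted when
+        # the relation is rendered.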
+ identifier = cls.add_ephemeral_prefix(node.name) + return cls.create( + type=cls.CTE, + identifier=identifier, + ).quote(identifier=False) + + @classmethod + def create_from_node( + cls: Type[Self], + config: HasQuoting, + node: Union[ParsedNode, CompiledNode], + quote_policy: Optional[Dict[str, bool]] = None, + **kwargs: Any, + ) -> Self: + if quote_policy is None: + quote_policy = {} + + quote_policy = dbt.utils.merge(config.quoting, quote_policy) + + return cls.create( + database=node.database, + schema=node.schema, + identifier=node.alias, + quote_policy=quote_policy, + **kwargs) + + @classmethod + def create_from( + cls: Type[Self], + config: HasQuoting, + node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition], + **kwargs: Any, + ) -> Self: + if node.resource_type == NodeType.Source: + if not isinstance(node, ParsedSourceDefinition): + raise InternalException( + 'type mismatch, expected ParsedSourceDefinition but got {}' + .format(type(node)) + ) + return cls.create_from_source(node, **kwargs) + else: + if not isinstance(node, (ParsedNode, CompiledNode)): + raise InternalException( + 'type mismatch, expected ParsedNode or CompiledNode but ' + 'got {}'.format(type(node)) + ) + return cls.create_from_node(config, node, **kwargs) + + @classmethod + def create( + cls: Type[Self], + database: Optional[str] = None, + schema: Optional[str] = None, + identifier: Optional[str] = None, + type: Optional[RelationType] = None, + **kwargs, + ) -> Self: + kwargs.update({ + 'path': { + 'database': database, + 'schema': schema, + 'identifier': identifier, + }, + 'type': type, + }) + return cls.from_dict(kwargs) + + def __repr__(self) -> str: + return "<{} {}>".format(self.__class__.__name__, self.render()) + + def __hash__(self) -> int: + return hash(self.render()) + + def __str__(self) -> str: + return self.render() + + @property + def database(self) -> Optional[str]: + return self.path.database + + @property + def schema(self) -> Optional[str]: + return self.path.schema + + @property + def identifier(self) -> Optional[str]: + return self.path.identifier + + @property + def table(self) -> Optional[str]: + return self.path.identifier + + # Here for compatibility with old Relation interface + @property + def name(self) -> Optional[str]: + return self.identifier + + @property + def is_table(self) -> bool: + return self.type == RelationType.Table + + @property + def is_cte(self) -> bool: + return self.type == RelationType.CTE + + @property + def is_view(self) -> bool: + return self.type == RelationType.View + + @classproperty + def Table(cls) -> str: + return str(RelationType.Table) + + @classproperty + def CTE(cls) -> str: + return str(RelationType.CTE) + + @classproperty + def View(cls) -> str: + return str(RelationType.View) + + @classproperty + def External(cls) -> str: + return str(RelationType.External) + + @classproperty + def get_relation_type(cls) -> Type[RelationType]: + return RelationType + + +Info = TypeVar('Info', bound='InformationSchema') + + +@dataclass(frozen=True, eq=False, repr=False) +class InformationSchema(BaseRelation): + information_schema_view: Optional[str] = None + + def __post_init__(self): + if not isinstance(self.information_schema_view, (type(None), str)): + raise dbt.exceptions.CompilationException( + 'Got an invalid name: {}'.format(self.information_schema_view) + ) + + @classmethod + def get_path( + cls, relation: BaseRelation, information_schema_view: Optional[str] + ) -> Path: + return Path( + database=relation.database, + schema=relation.schema, + 
identifier='INFORMATION_SCHEMA', + ) + + @classmethod + def get_include_policy( + cls, + relation, + information_schema_view: Optional[str], + ) -> Policy: + return relation.include_policy.replace( + database=relation.database is not None, + schema=False, + identifier=True, + ) + + @classmethod + def get_quote_policy( + cls, + relation, + information_schema_view: Optional[str], + ) -> Policy: + return relation.quote_policy.replace( + identifier=False, + ) + + @classmethod + def from_relation( + cls: Type[Info], + relation: BaseRelation, + information_schema_view: Optional[str], + ) -> Info: + include_policy = cls.get_include_policy( + relation, information_schema_view + ) + quote_policy = cls.get_quote_policy(relation, information_schema_view) + path = cls.get_path(relation, information_schema_view) + return cls( + type=RelationType.View, + path=path, + include_policy=include_policy, + quote_policy=quote_policy, + information_schema_view=information_schema_view, + ) + + def _render_iterator(self): + for k, v in super()._render_iterator(): + yield k, v + yield None, self.information_schema_view + + +class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]): + """A utility class to keep track of what information_schema tables to + search for what schemas. The schema values are all lowercased to avoid + duplication. + """ + def add(self, relation: BaseRelation): + key = relation.information_schema_only() + if key not in self: + self[key] = set() + schema: Optional[str] = None + if relation.schema is not None: + schema = relation.schema.lower() + self[key].add(schema) + + def search( + self + ) -> Iterator[Tuple[InformationSchema, Optional[str]]]: + for information_schema_name, schemas in self.items(): + for schema in schemas: + yield information_schema_name, schema + + def flatten(self, allow_multiple_databases: bool = False): + new = self.__class__() + + # make sure we don't have multiple databases if allow_multiple_databases is set to False + if not allow_multiple_databases: + seen = {r.database.lower() for r in self if r.database} + if len(seen) > 1: + dbt.exceptions.raise_compiler_error(str(seen)) + + for information_schema_name, schema in self.search(): + path = { + 'database': information_schema_name.database, + 'schema': schema + } + new.add(information_schema_name.incorporate( + path=path, + quote_policy={'database': False}, + include_policy={'database': False}, + )) + + return new diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/cache.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/cache.py new file mode 100644 index 0000000..8bbac5c --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/cache.py @@ -0,0 +1,523 @@ +from collections import namedtuple +from copy import deepcopy +from typing import List, Iterable, Optional, Dict, Set, Tuple, Any +import threading + +from dbt.logger import CACHE_LOGGER as logger +from dbt.utils import lowercase +import dbt.exceptions + +_ReferenceKey = namedtuple('_ReferenceKey', 'database schema identifier') + + +def _make_key(relation) -> _ReferenceKey: + """Make _ReferenceKeys with lowercase values for the cache so we don't have + to keep track of quoting + """ + # databases and schemas can both be None + return _ReferenceKey(lowercase(relation.database), + lowercase(relation.schema), + lowercase(relation.identifier)) + + +def dot_separated(key: _ReferenceKey) -> str: + """Return the key in dot-separated string form. + + :param _ReferenceKey key: The key to stringify. 
+ """ + return '.'.join(map(str, key)) + + +class _CachedRelation: + """Nothing about _CachedRelation is guaranteed to be thread-safe! + + :attr str schema: The schema of this relation. + :attr str identifier: The identifier of this relation. + :attr Dict[_ReferenceKey, _CachedRelation] referenced_by: The relations + that refer to this relation. + :attr BaseRelation inner: The underlying dbt relation. + """ + def __init__(self, inner): + self.referenced_by = {} + self.inner = inner + + def __str__(self) -> str: + return ( + '_CachedRelation(database={}, schema={}, identifier={}, inner={})' + ).format(self.database, self.schema, self.identifier, self.inner) + + @property + def database(self) -> Optional[str]: + return lowercase(self.inner.database) + + @property + def schema(self) -> Optional[str]: + return lowercase(self.inner.schema) + + @property + def identifier(self) -> Optional[str]: + return lowercase(self.inner.identifier) + + def __copy__(self): + new = self.__class__(self.inner) + new.__dict__.update(self.__dict__) + return new + + def __deepcopy__(self, memo): + new = self.__class__(self.inner.incorporate()) + new.__dict__.update(self.__dict__) + new.referenced_by = deepcopy(self.referenced_by, memo) + + def is_referenced_by(self, key): + return key in self.referenced_by + + def key(self): + """Get the _ReferenceKey that represents this relation + + :return _ReferenceKey: A key for this relation. + """ + return _make_key(self) + + def add_reference(self, referrer: '_CachedRelation'): + """Add a reference from referrer to self, indicating that if this node + were drop...cascaded, the referrer would be dropped as well. + + :param _CachedRelation referrer: The node that refers to this node. + """ + self.referenced_by[referrer.key()] = referrer + + def collect_consequences(self): + """Recursively collect a set of _ReferenceKeys that would + consequentially get dropped if this were dropped via + "drop ... cascade". + + :return Set[_ReferenceKey]: All the relations that would be dropped + """ + consequences = {self.key()} + for relation in self.referenced_by.values(): + consequences.update(relation.collect_consequences()) + return consequences + + def release_references(self, keys): + """Non-recursively indicate that an iterable of _ReferenceKey no longer + exist. Unknown keys are ignored. + + :param Iterable[_ReferenceKey] keys: The keys to drop. + """ + keys = set(self.referenced_by) & set(keys) + for key in keys: + self.referenced_by.pop(key) + + def rename(self, new_relation): + """Rename this cached relation to new_relation. + Note that this will change the output of key(), all refs must be + updated! + + :param _CachedRelation new_relation: The new name to apply to the + relation + """ + # Relations store this stuff inside their `path` dict. But they + # also store a table_name, and usually use it in their .render(), + # so we need to update that as well. It doesn't appear that + # table_name is ever anything but the identifier (via .create()) + self.inner = self.inner.incorporate( + path={ + 'database': new_relation.inner.database, + 'schema': new_relation.inner.schema, + 'identifier': new_relation.inner.identifier + }, + ) + + def rename_key(self, old_key, new_key): + """Rename a reference that may or may not exist. Only handles the + reference itself, so this is the other half of what `rename` does. + + If old_key is not in referenced_by, this is a no-op. + + :param _ReferenceKey old_key: The old key to be renamed. + :param _ReferenceKey new_key: The new key to rename to. 
+ :raises InternalError: If the new key already exists. + """ + if new_key in self.referenced_by: + dbt.exceptions.raise_cache_inconsistent( + 'in rename of "{}" -> "{}", new name is in the cache already' + .format(old_key, new_key) + ) + + if old_key not in self.referenced_by: + return + value = self.referenced_by.pop(old_key) + self.referenced_by[new_key] = value + + def dump_graph_entry(self): + """Return a key/value pair representing this key and its referents. + + return List[str]: The dot-separated form of all referent keys. + """ + return [dot_separated(r) for r in self.referenced_by] + + +def lazy_log(msg, func): + if logger.disabled: + return + logger.debug(msg.format(func())) + + +class RelationsCache: + """A cache of the relations known to dbt. Keeps track of relationships + declared between tables and handles renames/drops as a real database would. + + :attr Dict[_ReferenceKey, _CachedRelation] relations: The known relations. + :attr threading.RLock lock: The lock around relations, held during updates. + The adapters also hold this lock while filling the cache. + :attr Set[str] schemas: The set of known/cached schemas, all lowercased. + """ + def __init__(self) -> None: + self.relations: Dict[_ReferenceKey, _CachedRelation] = {} + self.lock = threading.RLock() + self.schemas: Set[Tuple[Optional[str], Optional[str]]] = set() + + def add_schema( + self, database: Optional[str], schema: Optional[str], + ) -> None: + """Add a schema to the set of known schemas (case-insensitive) + + :param database: The database name to add. + :param schema: The schema name to add. + """ + self.schemas.add((lowercase(database), lowercase(schema))) + + def drop_schema( + self, database: Optional[str], schema: Optional[str], + ) -> None: + """Drop the given schema and remove it from the set of known schemas. + + Then remove all its contents (and their dependents, etc) as well. + """ + key = (lowercase(database), lowercase(schema)) + if key not in self.schemas: + return + + # avoid iterating over self.relations while removing things by + # collecting the list first. + + with self.lock: + to_remove = self._list_relations_in_schema(database, schema) + self._remove_all(to_remove) + # handle a drop_schema race by using discard() over remove() + self.schemas.discard(key) + + def update_schemas(self, schemas: Iterable[Tuple[Optional[str], str]]): + """Add multiple schemas to the set of known schemas (case-insensitive) + + :param schemas: An iterable of the schema names to add. + """ + self.schemas.update((lowercase(d), s.lower()) for (d, s) in schemas) + + def __contains__(self, schema_id: Tuple[Optional[str], str]): + """A schema is 'in' the relations cache if it is in the set of cached + schemas. + + :param schema_id: The db name and schema name to look up. + """ + db, schema = schema_id + return (lowercase(db), schema.lower()) in self.schemas + + def dump_graph(self): + """Dump a key-only representation of the schema to a dictionary. Every + known relation is a key with a value of a list of keys it is referenced + by. + """ + # we have to hold the lock for the entire dump, if other threads modify + # self.relations or any cache entry's referenced_by during iteration + # it's a runtime error! + with self.lock: + return { + dot_separated(k): v.dump_graph_entry() + for k, v in self.relations.items() + } + + def _setdefault(self, relation: _CachedRelation): + """Add a relation to the cache, or return it if it already exists. + + :param _CachedRelation relation: The relation to set or get. 
+ :return _CachedRelation: The relation stored under the given relation's + key + """ + self.add_schema(relation.database, relation.schema) + key = relation.key() + return self.relations.setdefault(key, relation) + + def _add_link(self, referenced_key, dependent_key): + """Add a link between two relations to the database. Both the old and + new entries must alraedy exist in the database. + + :param _ReferenceKey referenced_key: The key identifying the referenced + model (the one that if dropped will drop the dependent model). + :param _ReferenceKey dependent_key: The key identifying the dependent + model. + :raises InternalError: If either entry does not exist. + """ + referenced = self.relations.get(referenced_key) + if referenced is None: + return + if referenced is None: + dbt.exceptions.raise_cache_inconsistent( + 'in add_link, referenced link key {} not in cache!' + .format(referenced_key) + ) + + dependent = self.relations.get(dependent_key) + if dependent is None: + dbt.exceptions.raise_cache_inconsistent( + 'in add_link, dependent link key {} not in cache!' + .format(dependent_key) + ) + + assert dependent is not None # we just raised! + + referenced.add_reference(dependent) + + def add_link(self, referenced, dependent): + """Add a link between two relations to the database. If either relation + does not exist, it will be added as an "external" relation. + + The dependent model refers _to_ the referenced model. So, given + arguments of (jake_test, bar, jake_test, foo): + both values are in the schema jake_test and foo is a view that refers + to bar, so "drop bar cascade" will drop foo and all of foo's + dependents. + + :param BaseRelation referenced: The referenced model. + :param BaseRelation dependent: The dependent model. + :raises InternalError: If either entry does not exist. + """ + ref_key = _make_key(referenced) + if (ref_key.database, ref_key.schema) not in self: + # if we have not cached the referenced schema at all, we must be + # referring to a table outside our control. There's no need to make + # a link - we will never drop the referenced relation during a run. + logger.debug( + '{dep!s} references {ref!s} but {ref.database}.{ref.schema} ' + 'is not in the cache, skipping assumed external relation' + .format(dep=dependent, ref=ref_key) + ) + return + if ref_key not in self.relations: + # Insert a dummy "external" relation. + referenced = referenced.replace( + type=referenced.External + ) + self.add(referenced) + + dep_key = _make_key(dependent) + if dep_key not in self.relations: + # Insert a dummy "external" relation. + dependent = dependent.replace( + type=referenced.External + ) + self.add(dependent) + logger.debug( + 'adding link, {!s} references {!s}'.format(dep_key, ref_key) + ) + with self.lock: + self._add_link(ref_key, dep_key) + + def add(self, relation): + """Add the relation inner to the cache, under the schema schema and + identifier identifier + + :param BaseRelation relation: The underlying relation. + """ + cached = _CachedRelation(relation) + logger.debug('Adding relation: {!s}'.format(cached)) + + lazy_log('before adding: {!s}', self.dump_graph) + + with self.lock: + self._setdefault(cached) + + lazy_log('after adding: {!s}', self.dump_graph) + + def _remove_refs(self, keys): + """Removes all references to all entries in keys. This does not + cascade! + + :param Iterable[_ReferenceKey] keys: The keys to remove. 
+ """ + # remove direct refs + for key in keys: + del self.relations[key] + # then remove all entries from each child + for cached in self.relations.values(): + cached.release_references(keys) + + def _drop_cascade_relation(self, dropped): + """Drop the given relation and cascade it appropriately to all + dependent relations. + + :param _CachedRelation dropped: An existing _CachedRelation to drop. + """ + if dropped not in self.relations: + logger.debug('dropped a nonexistent relationship: {!s}' + .format(dropped)) + return + consequences = self.relations[dropped].collect_consequences() + logger.debug( + 'drop {} is cascading to {}'.format(dropped, consequences) + ) + self._remove_refs(consequences) + + def drop(self, relation): + """Drop the named relation and cascade it appropriately to all + dependent relations. + + Because dbt proactively does many `drop relation if exist ... cascade` + that are noops, nonexistent relation drops cause a debug log and no + other actions. + + :param str schema: The schema of the relation to drop. + :param str identifier: The identifier of the relation to drop. + """ + dropped = _make_key(relation) + logger.debug('Dropping relation: {!s}'.format(dropped)) + with self.lock: + self._drop_cascade_relation(dropped) + + def _rename_relation(self, old_key, new_relation): + """Rename a relation named old_key to new_key, updating references. + Return whether or not there was a key to rename. + + :param _ReferenceKey old_key: The existing key, to rename from. + :param _CachedRelation new_key: The new relation, to rename to. + """ + # On the database level, a rename updates all values that were + # previously referenced by old_name to be referenced by new_name. + # basically, the name changes but some underlying ID moves. Kind of + # like an object reference! + relation = self.relations.pop(old_key) + new_key = new_relation.key() + + # relaton has to rename its innards, so it needs the _CachedRelation. + relation.rename(new_relation) + # update all the relations that refer to it + for cached in self.relations.values(): + if cached.is_referenced_by(old_key): + logger.debug( + 'updated reference from {0} -> {2} to {1} -> {2}' + .format(old_key, new_key, cached.key()) + ) + cached.rename_key(old_key, new_key) + + self.relations[new_key] = relation + # also fixup the schemas! + self.add_schema(new_key.database, new_key.schema) + + return True + + def _check_rename_constraints(self, old_key, new_key): + """Check the rename constraints, and return whether or not the rename + can proceed. + + If the new key is already present, that is an error. + If the old key is absent, we debug log and return False, assuming it's + a temp table being renamed. + + :param _ReferenceKey old_key: The existing key, to rename from. + :param _ReferenceKey new_key: The new key, to rename to. + :return bool: If the old relation exists for renaming. + :raises InternalError: If the new key is already present. + """ + if new_key in self.relations: + dbt.exceptions.raise_cache_inconsistent( + 'in rename, new key {} already in cache: {}' + .format(new_key, list(self.relations.keys())) + ) + + if old_key not in self.relations: + logger.debug( + 'old key {} not found in self.relations, assuming temporary' + .format(old_key) + ) + return False + return True + + def rename(self, old, new): + """Rename the old schema/identifier to the new schema/identifier and + update references. + + If the new schema/identifier is already present, that is an error. 
+ If the schema/identifier key is absent, we only debug log and return, + assuming it's a temp table being renamed. + + :param BaseRelation old: The existing relation name information. + :param BaseRelation new: The new relation name information. + :raises InternalError: If the new key is already present. + """ + old_key = _make_key(old) + new_key = _make_key(new) + logger.debug('Renaming relation {!s} to {!s}'.format( + old_key, new_key + )) + + lazy_log('before rename: {!s}', self.dump_graph) + + with self.lock: + if self._check_rename_constraints(old_key, new_key): + self._rename_relation(old_key, _CachedRelation(new)) + else: + self._setdefault(_CachedRelation(new)) + + lazy_log('after rename: {!s}', self.dump_graph) + + def get_relations( + self, database: Optional[str], schema: Optional[str] + ) -> List[Any]: + """Case-insensitively yield all relations matching the given schema. + + :param str schema: The case-insensitive schema name to list from. + :return List[BaseRelation]: The list of relations with the given + schema + """ + database = lowercase(database) + schema = lowercase(schema) + with self.lock: + results = [ + r.inner for r in self.relations.values() + if (lowercase(r.schema) == schema and + lowercase(r.database) == database) + ] + + if None in results: + dbt.exceptions.raise_cache_inconsistent( + 'in get_relations, a None relation was found in the cache!' + ) + return results + + def clear(self): + """Clear the cache""" + with self.lock: + self.relations.clear() + self.schemas.clear() + + def _list_relations_in_schema( + self, database: Optional[str], schema: Optional[str] + ) -> List[_CachedRelation]: + """Get the relations in a schema. Callers should hold the lock.""" + key = (lowercase(database), lowercase(schema)) + + to_remove: List[_CachedRelation] = [] + for cachekey, relation in self.relations.items(): + if (cachekey.database, cachekey.schema) == key: + to_remove.append(relation) + return to_remove + + def _remove_all(self, to_remove: List[_CachedRelation]): + """Remove all the listed relations. Ignore relations that have been + cascaded out. 
+ """ + for relation in to_remove: + # it may have been cascaded out already + drop_key = _make_key(relation) + if drop_key in self.relations: + self.drop(drop_key) diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/factory.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/factory.py new file mode 100644 index 0000000..0de3f25 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/factory.py @@ -0,0 +1,227 @@ +import threading +from pathlib import Path +from importlib import import_module +from typing import Type, Dict, Any, List, Optional, Set + +from dbt.exceptions import RuntimeException, InternalException +from dbt.include.global_project import ( + PACKAGE_PATH as GLOBAL_PROJECT_PATH, + PROJECT_NAME as GLOBAL_PROJECT_NAME, +) +from dbt.logger import GLOBAL_LOGGER as logger +from dbt.contracts.connection import Credentials, AdapterRequiredConfig + + +from dbt.adapters.protocol import ( + AdapterProtocol, + AdapterConfig, + RelationProtocol, +) +from dbt.adapters.base.plugin import AdapterPlugin + + +Adapter = AdapterProtocol + + +class AdapterContainer: + def __init__(self): + self.lock = threading.Lock() + self.adapters: Dict[str, Adapter] = {} + self.plugins: Dict[str, AdapterPlugin] = {} + # map package names to their include paths + self.packages: Dict[str, Path] = { + GLOBAL_PROJECT_NAME: Path(GLOBAL_PROJECT_PATH), + } + + def get_plugin_by_name(self, name: str) -> AdapterPlugin: + with self.lock: + if name in self.plugins: + return self.plugins[name] + names = ", ".join(self.plugins.keys()) + + message = f"Invalid adapter type {name}! Must be one of {names}" + raise RuntimeException(message) + + def get_adapter_class_by_name(self, name: str) -> Type[Adapter]: + plugin = self.get_plugin_by_name(name) + return plugin.adapter + + def get_relation_class_by_name(self, name: str) -> Type[RelationProtocol]: + adapter = self.get_adapter_class_by_name(name) + return adapter.Relation + + def get_config_class_by_name( + self, name: str + ) -> Type[AdapterConfig]: + adapter = self.get_adapter_class_by_name(name) + return adapter.AdapterSpecificConfigs + + def load_plugin(self, name: str) -> Type[Credentials]: + # this doesn't need a lock: in the worst case we'll overwrite packages + # and adapter_type entries with the same value, as they're all + # singletons + try: + # mypy doesn't think modules have any attributes. + mod: Any = import_module('.' + name, 'dbt.adapters') + except ModuleNotFoundError as exc: + # if we failed to import the target module in particular, inform + # the user about it via a runtime error + if exc.name == 'dbt.adapters.' + name: + raise RuntimeException(f'Could not find adapter type {name}!') + logger.info(f'Error importing adapter: {exc}') + # otherwise, the error had to have come from some underlying + # library. Log the stack trace. 
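+            # (For example, a database driver such as psycopg2 missing from
+            # the environment surfaces here as the underlying import error
+            # rather than as a missing-adapter error.)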
+ logger.debug('', exc_info=True) + raise + plugin: AdapterPlugin = mod.Plugin + plugin_type = plugin.adapter.type() + + if plugin_type != name: + raise RuntimeException( + f'Expected to find adapter with type named {name}, got ' + f'adapter with type {plugin_type}' + ) + + with self.lock: + # things do hold the lock to iterate over it so we need it to add + self.plugins[name] = plugin + + self.packages[plugin.project_name] = Path(plugin.include_path) + + for dep in plugin.dependencies: + self.load_plugin(dep) + + return plugin.credentials + + def register_adapter(self, config: AdapterRequiredConfig) -> None: + adapter_name = config.credentials.type + adapter_type = self.get_adapter_class_by_name(adapter_name) + + with self.lock: + if adapter_name in self.adapters: + # this shouldn't really happen... + return + + adapter: Adapter = adapter_type(config) # type: ignore + self.adapters[adapter_name] = adapter + + def lookup_adapter(self, adapter_name: str) -> Adapter: + return self.adapters[adapter_name] + + def reset_adapters(self): + """Clear the adapters. This is useful for tests, which change configs. + """ + with self.lock: + for adapter in self.adapters.values(): + adapter.cleanup_connections() + self.adapters.clear() + + def cleanup_connections(self): + """Only clean up the adapter connections list without resetting the + actual adapters. + """ + with self.lock: + for adapter in self.adapters.values(): + adapter.cleanup_connections() + + def get_adapter_plugins(self, name: Optional[str]) -> List[AdapterPlugin]: + """Iterate over the known adapter plugins. If a name is provided, + iterate in dependency order over the named plugin and its dependencies. + """ + if name is None: + return list(self.plugins.values()) + + plugins: List[AdapterPlugin] = [] + seen: Set[str] = set() + plugin_names: List[str] = [name] + while plugin_names: + plugin_name = plugin_names[0] + plugin_names = plugin_names[1:] + try: + plugin = self.plugins[plugin_name] + except KeyError: + raise InternalException( + f'No plugin found for {plugin_name}' + ) from None + plugins.append(plugin) + seen.add(plugin_name) + if plugin.dependencies is None: + continue + for dep in plugin.dependencies: + if dep not in seen: + plugin_names.append(dep) + return plugins + + def get_adapter_package_names(self, name: Optional[str]) -> List[str]: + package_names: List[str] = [ + p.project_name for p in self.get_adapter_plugins(name) + ] + package_names.append(GLOBAL_PROJECT_NAME) + return package_names + + def get_include_paths(self, name: Optional[str]) -> List[Path]: + paths = [] + for package_name in self.get_adapter_package_names(name): + try: + path = self.packages[package_name] + except KeyError: + raise InternalException( + f'No internal package listing found for {package_name}' + ) + paths.append(path) + return paths + + def get_adapter_type_names(self, name: Optional[str]) -> List[str]: + return [p.adapter.type() for p in self.get_adapter_plugins(name)] + + +FACTORY: AdapterContainer = AdapterContainer() + + +def register_adapter(config: AdapterRequiredConfig) -> None: + FACTORY.register_adapter(config) + + +def get_adapter(config: AdapterRequiredConfig): + return FACTORY.lookup_adapter(config.credentials.type) + + +def reset_adapters(): + """Clear the adapters. This is useful for tests, which change configs. + """ + FACTORY.reset_adapters() + + +def cleanup_connections(): + """Only clean up the adapter connections list without resetting the actual + adapters. 
+ """ + FACTORY.cleanup_connections() + + +def get_adapter_class_by_name(name: str) -> Type[AdapterProtocol]: + return FACTORY.get_adapter_class_by_name(name) + + +def get_config_class_by_name(name: str) -> Type[AdapterConfig]: + return FACTORY.get_config_class_by_name(name) + + +def get_relation_class_by_name(name: str) -> Type[RelationProtocol]: + return FACTORY.get_relation_class_by_name(name) + + +def load_plugin(name: str) -> Type[Credentials]: + return FACTORY.load_plugin(name) + + +def get_include_paths(name: Optional[str]) -> List[Path]: + return FACTORY.get_include_paths(name) + + +def get_adapter_package_names(name: Optional[str]) -> List[str]: + return FACTORY.get_adapter_package_names(name) + + +def get_adapter_type_names(name: Optional[str]) -> List[str]: + return FACTORY.get_adapter_type_names(name) diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__init__.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__init__.py new file mode 100644 index 0000000..04f0e8c --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__init__.py @@ -0,0 +1,14 @@ +# these are mostly just exports, #noqa them so flake8 will be happy +from dbt.adapters.postgres.connections import PostgresConnectionManager # noqa +from dbt.adapters.postgres.connections import PostgresCredentials +from dbt.adapters.postgres.relation import PostgresColumn # noqa +from dbt.adapters.postgres.relation import PostgresRelation # noqa: F401 +from dbt.adapters.postgres.impl import PostgresAdapter + +from dbt.adapters.base import AdapterPlugin +from dbt.include import postgres + +Plugin = AdapterPlugin( + adapter=PostgresAdapter, + credentials=PostgresCredentials, + include_path=postgres.PACKAGE_PATH) diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..86b4d70 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/__version__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/__version__.cpython-38.pyc new file mode 100644 index 0000000..f006ce0 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/__version__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/connections.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/connections.cpython-38.pyc new file mode 100644 index 0000000..7e3618a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/connections.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/impl.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/impl.cpython-38.pyc new file mode 100644 index 0000000..04cd436 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/impl.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/relation.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/relation.cpython-38.pyc new file mode 100644 index 0000000..ece6b1b Binary files /dev/null and 
b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__pycache__/relation.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__version__.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__version__.py new file mode 100644 index 0000000..0451b15 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/__version__.py @@ -0,0 +1 @@ +version = '0.21.1' diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/connections.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/connections.py new file mode 100644 index 0000000..7eaf4c1 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/connections.py @@ -0,0 +1,186 @@ +from contextlib import contextmanager + +import psycopg2 + +import dbt.exceptions +from dbt.adapters.base import Credentials +from dbt.adapters.sql import SQLConnectionManager +from dbt.contracts.connection import AdapterResponse +from dbt.logger import GLOBAL_LOGGER as logger + +from dbt.helper_types import Port +from dataclasses import dataclass +from typing import Optional + + +@dataclass +class PostgresCredentials(Credentials): + host: str + user: str + port: Port + password: str # on postgres the password is mandatory + connect_timeout: int = 10 + role: Optional[str] = None + search_path: Optional[str] = None + keepalives_idle: int = 0 # 0 means to use the default value + sslmode: Optional[str] = None + sslcert: Optional[str] = None + sslkey: Optional[str] = None + sslrootcert: Optional[str] = None + application_name: Optional[str] = 'dbt' + + _ALIASES = { + 'dbname': 'database', + 'pass': 'password' + } + + @property + def type(self): + return 'postgres' + + @property + def unique_field(self): + return self.host + + def _connection_keys(self): + return ('host', 'port', 'user', 'database', 'schema', 'search_path', + 'keepalives_idle', 'sslmode') + + +class PostgresConnectionManager(SQLConnectionManager): + TYPE = 'postgres' + + @contextmanager + def exception_handler(self, sql): + try: + yield + + except psycopg2.DatabaseError as e: + logger.debug('Postgres error: {}'.format(str(e))) + + try: + self.rollback_if_open() + except psycopg2.Error: + logger.debug("Failed to release connection!") + pass + + raise dbt.exceptions.DatabaseException(str(e).strip()) from e + + except Exception as e: + logger.debug("Error running SQL: {}", sql) + logger.debug("Rolling back transaction.") + self.rollback_if_open() + if isinstance(e, dbt.exceptions.RuntimeException): + # during a sql query, an internal to dbt exception was raised. + # this sounds a lot like a signal handler and probably has + # useful information, so raise it without modification. + raise + + raise dbt.exceptions.RuntimeException(e) from e + + @classmethod + def open(cls, connection): + if connection.state == 'open': + logger.debug('Connection is already open, skipping open.') + return connection + + credentials = cls.get_credentials(connection.credentials) + kwargs = {} + # we don't want to pass 0 along to connect() as postgres will try to + # call an invalid setsockopt() call (contrary to the docs). 
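+        # (keepalives_idle is declared above with a default of 0, meaning
+        # "use the libpq default", so simply omitting the keyword argument
+        # expresses that.)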
+ if credentials.keepalives_idle: + kwargs['keepalives_idle'] = credentials.keepalives_idle + + # psycopg2 doesn't support search_path officially, + # see https://github.com/psycopg/psycopg2/issues/465 + search_path = credentials.search_path + if search_path is not None and search_path != '': + # see https://postgresql.org/docs/9.5/libpq-connect.html + kwargs['options'] = '-c search_path={}'.format( + search_path.replace(' ', '\\ ')) + + if credentials.sslmode: + kwargs['sslmode'] = credentials.sslmode + + if credentials.sslcert is not None: + kwargs["sslcert"] = credentials.sslcert + + if credentials.sslkey is not None: + kwargs["sslkey"] = credentials.sslkey + + if credentials.sslrootcert is not None: + kwargs["sslrootcert"] = credentials.sslrootcert + + if credentials.application_name: + kwargs['application_name'] = credentials.application_name + + try: + handle = psycopg2.connect( + dbname=credentials.database, + user=credentials.user, + host=credentials.host, + password=credentials.password, + port=credentials.port, + connect_timeout=credentials.connect_timeout, + **kwargs) + + if credentials.role: + handle.cursor().execute('set role {}'.format(credentials.role)) + + connection.handle = handle + connection.state = 'open' + except psycopg2.Error as e: + logger.debug("Got an error when attempting to open a postgres " + "connection: '{}'" + .format(e)) + + connection.handle = None + connection.state = 'fail' + + raise dbt.exceptions.FailedToConnectException(str(e)) + + return connection + + def cancel(self, connection): + connection_name = connection.name + try: + pid = connection.handle.get_backend_pid() + except psycopg2.InterfaceError as exc: + # if the connection is already closed, not much to cancel! + if 'already closed' in str(exc): + logger.debug( + f'Connection {connection_name} was already closed' + ) + return + # probably bad, re-raise it + raise + + sql = "select pg_terminate_backend({})".format(pid) + + logger.debug("Cancelling query '{}' ({})".format(connection_name, pid)) + + _, cursor = self.add_query(sql) + res = cursor.fetchone() + + logger.debug("Cancel query '{}': {}".format(connection_name, res)) + + @classmethod + def get_credentials(cls, credentials): + return credentials + + @classmethod + def get_response(cls, cursor) -> AdapterResponse: + message = str(cursor.statusmessage) + rows = cursor.rowcount + status_message_parts = message.split() if message is not None else [] + status_messsage_strings = [ + part + for part in status_message_parts + if not part.isdigit() + ] + code = ' '.join(status_messsage_strings) + return AdapterResponse( + _message=message, + code=code, + rows_affected=rows + ) diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/impl.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/impl.py new file mode 100644 index 0000000..3dc134f --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/impl.py @@ -0,0 +1,142 @@ +from datetime import datetime +from dataclasses import dataclass +from typing import Optional, Set, List, Any +from dbt.adapters.base.meta import available +from dbt.adapters.base.impl import AdapterConfig +from dbt.adapters.sql import SQLAdapter +from dbt.adapters.postgres import PostgresConnectionManager +from dbt.adapters.postgres import PostgresColumn +from dbt.adapters.postgres import PostgresRelation +from dbt.dataclass_schema import dbtClassMixin, ValidationError +import dbt.exceptions +import dbt.utils + + +# note that this isn't an adapter macro, so just a single underscore 
+GET_RELATIONS_MACRO_NAME = 'postgres_get_relations' + + +@dataclass +class PostgresIndexConfig(dbtClassMixin): + columns: List[str] + unique: bool = False + type: Optional[str] = None + + def render(self, relation): + # We append the current timestamp to the index name because otherwise + # the index will only be created on every other run. See + # https://github.com/dbt-labs/dbt/issues/1945#issuecomment-576714925 + # for an explanation. + now = datetime.utcnow().isoformat() + inputs = (self.columns + + [relation.render(), str(self.unique), str(self.type), now]) + string = '_'.join(inputs) + return dbt.utils.md5(string) + + @classmethod + def parse(cls, raw_index) -> Optional['PostgresIndexConfig']: + if raw_index is None: + return None + try: + cls.validate(raw_index) + return cls.from_dict(raw_index) + except ValidationError as exc: + msg = dbt.exceptions.validator_error_message(exc) + dbt.exceptions.raise_compiler_error( + f'Could not parse index config: {msg}' + ) + except TypeError: + dbt.exceptions.raise_compiler_error( + f'Invalid index config:\n' + f' Got: {raw_index}\n' + f' Expected a dictionary with at minimum a "columns" key' + ) + + +@dataclass +class PostgresConfig(AdapterConfig): + unlogged: Optional[bool] = None + indexes: Optional[List[PostgresIndexConfig]] = None + + +class PostgresAdapter(SQLAdapter): + Relation = PostgresRelation + ConnectionManager = PostgresConnectionManager + Column = PostgresColumn + + AdapterSpecificConfigs = PostgresConfig + + @classmethod + def date_function(cls): + return 'now()' + + @available + def verify_database(self, database): + if database.startswith('"'): + database = database.strip('"') + expected = self.config.credentials.database + if database.lower() != expected.lower(): + raise dbt.exceptions.NotImplementedException( + 'Cross-db references not allowed in {} ({} vs {})' + .format(self.type(), database, expected) + ) + # return an empty string on success so macros can call this + return '' + + @available + def parse_index(self, raw_index: Any) -> Optional[PostgresIndexConfig]: + return PostgresIndexConfig.parse(raw_index) + + def _link_cached_database_relations(self, schemas: Set[str]): + """ + :param schemas: The set of schemas that should have links added. 
+ """ + database = self.config.credentials.database + table = self.execute_macro(GET_RELATIONS_MACRO_NAME) + + for (dep_schema, dep_name, refed_schema, refed_name) in table: + dependent = self.Relation.create( + database=database, + schema=dep_schema, + identifier=dep_name + ) + referenced = self.Relation.create( + database=database, + schema=refed_schema, + identifier=refed_name + ) + + # don't record in cache if this relation isn't in a relevant + # schema + if refed_schema.lower() in schemas: + self.cache.add_link(referenced, dependent) + + def _get_catalog_schemas(self, manifest): + # postgres only allow one database (the main one) + schemas = super()._get_catalog_schemas(manifest) + try: + return schemas.flatten() + except dbt.exceptions.RuntimeException as exc: + dbt.exceptions.raise_compiler_error( + 'Cross-db references not allowed in adapter {}: Got {}'.format( + self.type(), exc.msg + ) + ) + + def _link_cached_relations(self, manifest): + schemas: Set[str] = set() + relations_schemas = self._get_cache_schemas(manifest) + for relation in relations_schemas: + self.verify_database(relation.database) + schemas.add(relation.schema.lower()) + + self._link_cached_database_relations(schemas) + + def _relations_cache_for_schemas(self, manifest): + super()._relations_cache_for_schemas(manifest) + self._link_cached_relations(manifest) + + def timestamp_add_sql( + self, add_to: str, number: int = 1, interval: str = 'hour' + ) -> str: + return f"{add_to} + interval '{number} {interval}'" diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/relation.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/relation.py new file mode 100644 index 0000000..2040337 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/postgres/relation.py @@ -0,0 +1,29 @@ +from dbt.adapters.base import Column +from dataclasses import dataclass +from dbt.adapters.base.relation import BaseRelation +from dbt.exceptions import RuntimeException + + +@dataclass(frozen=True, eq=False, repr=False) +class PostgresRelation(BaseRelation): + def __post_init__(self): + # Check for length of Postgres table/view names. 
+ # Check self.type to exclude test relation identifiers + if (self.identifier is not None and self.type is not None and + len(self.identifier) > self.relation_max_name_length()): + raise RuntimeException( + f"Relation name '{self.identifier}' " + f"is longer than {self.relation_max_name_length()} characters" + ) + + def relation_max_name_length(self): + return 63 + + +class PostgresColumn(Column): + @property + def data_type(self): + # on postgres, do not convert 'text' to 'varchar()' + if self.dtype.lower() == 'text': + return self.dtype + return super().data_type diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/protocol.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/protocol.py new file mode 100644 index 0000000..e073148 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/protocol.py @@ -0,0 +1,163 @@ +from dataclasses import dataclass +from typing import ( + Type, Hashable, Optional, ContextManager, List, Generic, TypeVar, ClassVar, + Tuple, Union, Dict, Any +) +from typing_extensions import Protocol + +import agate + +from dbt.contracts.connection import ( + Connection, AdapterRequiredConfig, AdapterResponse +) +from dbt.contracts.graph.compiled import ( + CompiledNode, ManifestNode, NonSourceCompiledNode +) +from dbt.contracts.graph.parsed import ParsedNode, ParsedSourceDefinition +from dbt.contracts.graph.model_config import BaseConfig +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.relation import Policy, HasQuoting + +from dbt.graph import Graph + + +@dataclass +class AdapterConfig(BaseConfig): + pass + + +class ConnectionManagerProtocol(Protocol): + TYPE: str + + +class ColumnProtocol(Protocol): + pass + + +Self = TypeVar('Self', bound='RelationProtocol') + + +class RelationProtocol(Protocol): + @classmethod + def get_default_quote_policy(cls) -> Policy: + ... + + @classmethod + def create_from( + cls: Type[Self], + config: HasQuoting, + node: Union[CompiledNode, ParsedNode, ParsedSourceDefinition], + ) -> Self: + ... + + +class CompilerProtocol(Protocol): + def compile(self, manifest: Manifest, write=True) -> Graph: + ... + + def compile_node( + self, + node: ManifestNode, + manifest: Manifest, + extra_context: Optional[Dict[str, Any]] = None, + ) -> NonSourceCompiledNode: + ... + + +AdapterConfig_T = TypeVar( + 'AdapterConfig_T', bound=AdapterConfig +) +ConnectionManager_T = TypeVar( + 'ConnectionManager_T', bound=ConnectionManagerProtocol +) +Relation_T = TypeVar( + 'Relation_T', bound=RelationProtocol +) +Column_T = TypeVar( + 'Column_T', bound=ColumnProtocol +) +Compiler_T = TypeVar('Compiler_T', bound=CompilerProtocol) + + +class AdapterProtocol( + Protocol, + Generic[ + AdapterConfig_T, + ConnectionManager_T, + Relation_T, + Column_T, + Compiler_T, + ] +): + AdapterSpecificConfigs: ClassVar[Type[AdapterConfig_T]] + Column: ClassVar[Type[Column_T]] + Relation: ClassVar[Type[Relation_T]] + ConnectionManager: ClassVar[Type[ConnectionManager_T]] + connections: ConnectionManager_T + + def __init__(self, config: AdapterRequiredConfig): + ... + + @classmethod + def type(cls) -> str: + pass + + def set_query_header(self, manifest: Manifest) -> None: + ... + + @staticmethod + def get_thread_identifier() -> Hashable: + ... + + def get_thread_connection(self) -> Connection: + ... + + def set_thread_connection(self, conn: Connection) -> None: + ... + + def get_if_exists(self) -> Optional[Connection]: + ... + + def clear_thread_connection(self) -> None: + ... + + def clear_transaction(self) -> None: + ... 
+ + def exception_handler(self, sql: str) -> ContextManager: + ... + + def set_connection_name(self, name: Optional[str] = None) -> Connection: + ... + + def cancel_open(self) -> Optional[List[str]]: + ... + + def open(cls, connection: Connection) -> Connection: + ... + + def release(self) -> None: + ... + + def cleanup_all(self) -> None: + ... + + def begin(self) -> None: + ... + + def commit(self) -> None: + ... + + def close(cls, connection: Connection) -> Connection: + ... + + def commit_if_has_connection(self) -> None: + ... + + def execute( + self, sql: str, auto_begin: bool = False, fetch: bool = False + ) -> Tuple[Union[str, AdapterResponse], agate.Table]: + ... + + def get_compiler(self) -> Compiler_T: + ... diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/__init__.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/__init__.py new file mode 100644 index 0000000..3535806 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/__init__.py @@ -0,0 +1,3 @@ +# these are all just exports, #noqa them so flake8 will be happy +from dbt.adapters.sql.connections import SQLConnectionManager # noqa +from dbt.adapters.sql.impl import SQLAdapter # noqa diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..512a13e Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/__pycache__/connections.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/__pycache__/connections.cpython-38.pyc new file mode 100644 index 0000000..14052ee Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/__pycache__/connections.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/__pycache__/impl.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/__pycache__/impl.cpython-38.pyc new file mode 100644 index 0000000..04afeed Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/__pycache__/impl.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/connections.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/connections.py new file mode 100644 index 0000000..11d8dce --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/connections.py @@ -0,0 +1,182 @@ +import abc +import time +from typing import List, Optional, Tuple, Any, Iterable, Dict, Union + +import agate + +import dbt.clients.agate_helper +import dbt.exceptions +from dbt.adapters.base import BaseConnectionManager +from dbt.contracts.connection import ( + Connection, ConnectionState, AdapterResponse +) +from dbt.logger import GLOBAL_LOGGER as logger +from dbt import flags + + +class SQLConnectionManager(BaseConnectionManager): + """The default connection manager with some common SQL methods implemented. + + Methods to implement: + - exception_handler + - cancel + - get_response + - open + """ + @abc.abstractmethod + def cancel(self, connection: Connection): + """Cancel the given connection.""" + raise dbt.exceptions.NotImplementedException( + '`cancel` is not implemented for this adapter!' 
+ ) + + def cancel_open(self) -> List[str]: + names = [] + this_connection = self.get_if_exists() + with self.lock: + for connection in self.thread_connections.values(): + if connection is this_connection: + continue + + # if the connection failed, the handle will be None so we have + # nothing to cancel. + if ( + connection.handle is not None and + connection.state == ConnectionState.OPEN + ): + self.cancel(connection) + if connection.name is not None: + names.append(connection.name) + return names + + def add_query( + self, + sql: str, + auto_begin: bool = True, + bindings: Optional[Any] = None, + abridge_sql_log: bool = False + ) -> Tuple[Connection, Any]: + connection = self.get_thread_connection() + if auto_begin and connection.transaction_open is False: + self.begin() + + logger.debug('Using {} connection "{}".' + .format(self.TYPE, connection.name)) + + with self.exception_handler(sql): + if abridge_sql_log: + log_sql = '{}...'.format(sql[:512]) + else: + log_sql = sql + + logger.debug( + 'On {connection_name}: {sql}', + connection_name=connection.name, + sql=log_sql, + ) + pre = time.time() + + cursor = connection.handle.cursor() + cursor.execute(sql, bindings) + logger.debug( + "SQL status: {status} in {elapsed:0.2f} seconds", + status=self.get_response(cursor), + elapsed=(time.time() - pre) + ) + + return connection, cursor + + @abc.abstractclassmethod + def get_response(cls, cursor: Any) -> Union[AdapterResponse, str]: + """Get the status of the cursor.""" + raise dbt.exceptions.NotImplementedException( + '`get_response` is not implemented for this adapter!' + ) + + @classmethod + def process_results( + cls, + column_names: Iterable[str], + rows: Iterable[Any] + ) -> List[Dict[str, Any]]: + unique_col_names = dict() + for idx in range(len(column_names)): + col_name = column_names[idx] + if col_name in unique_col_names: + unique_col_names[col_name] += 1 + column_names[idx] = f'{col_name}_{unique_col_names[col_name]}' + else: + unique_col_names[column_names[idx]] = 1 + return [dict(zip(column_names, row)) for row in rows] + + @classmethod + def get_result_from_cursor(cls, cursor: Any) -> agate.Table: + data: List[Any] = [] + column_names: List[str] = [] + + if cursor.description is not None: + column_names = [col[0] for col in cursor.description] + rows = cursor.fetchall() + data = cls.process_results(column_names, rows) + + return dbt.clients.agate_helper.table_from_data_flat( + data, + column_names + ) + + def execute( + self, sql: str, auto_begin: bool = False, fetch: bool = False + ) -> Tuple[Union[AdapterResponse, str], agate.Table]: + sql = self._add_query_comment(sql) + _, cursor = self.add_query(sql, auto_begin) + response = self.get_response(cursor) + if fetch: + table = self.get_result_from_cursor(cursor) + else: + table = dbt.clients.agate_helper.empty_table() + return response, table + + def add_begin_query(self): + return self.add_query('BEGIN', auto_begin=False) + + def add_commit_query(self): + return self.add_query('COMMIT', auto_begin=False) + + def begin(self): + connection = self.get_thread_connection() + + if flags.STRICT_MODE: + if not isinstance(connection, Connection): + raise dbt.exceptions.CompilerException( + f'In begin, got {connection} - not a Connection!' 
+ ) + + if connection.transaction_open is True: + raise dbt.exceptions.InternalException( + 'Tried to begin a new transaction on connection "{}", but ' + 'it already had one open!'.format(connection.name)) + + self.add_begin_query() + + connection.transaction_open = True + return connection + + def commit(self): + connection = self.get_thread_connection() + if flags.STRICT_MODE: + if not isinstance(connection, Connection): + raise dbt.exceptions.CompilerException( + f'In commit, got {connection} - not a Connection!' + ) + + if connection.transaction_open is False: + raise dbt.exceptions.InternalException( + 'Tried to commit transaction on connection "{}", but ' + 'it does not have one open!'.format(connection.name)) + + logger.debug('On {}: COMMIT'.format(connection.name)) + self.add_commit_query() + + connection.transaction_open = False + + return connection diff --git a/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/impl.py b/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/impl.py new file mode 100644 index 0000000..3377453 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/adapters/sql/impl.py @@ -0,0 +1,250 @@ +import agate +from typing import Any, Optional, Tuple, Type, List + +import dbt.clients.agate_helper +from dbt.contracts.connection import Connection +import dbt.exceptions +from dbt.adapters.base import BaseAdapter, available +from dbt.adapters.sql import SQLConnectionManager +from dbt.logger import GLOBAL_LOGGER as logger + +from dbt.adapters.base.relation import BaseRelation + +LIST_RELATIONS_MACRO_NAME = 'list_relations_without_caching' +GET_COLUMNS_IN_RELATION_MACRO_NAME = 'get_columns_in_relation' +LIST_SCHEMAS_MACRO_NAME = 'list_schemas' +CHECK_SCHEMA_EXISTS_MACRO_NAME = 'check_schema_exists' +CREATE_SCHEMA_MACRO_NAME = 'create_schema' +DROP_SCHEMA_MACRO_NAME = 'drop_schema' +RENAME_RELATION_MACRO_NAME = 'rename_relation' +TRUNCATE_RELATION_MACRO_NAME = 'truncate_relation' +DROP_RELATION_MACRO_NAME = 'drop_relation' +ALTER_COLUMN_TYPE_MACRO_NAME = 'alter_column_type' + + +class SQLAdapter(BaseAdapter): + """The default adapter with the common agate conversions and some SQL + methods implemented. This adapter has a different much shorter list of + methods to implement, but some more macros that must be implemented. + + To implement a macro, implement "${adapter_type}__${macro_name}". in the + adapter's internal project. + + Methods to implement: + - date_function + + Macros to implement: + - get_catalog + - list_relations_without_caching + - get_columns_in_relation + """ + + ConnectionManager: Type[SQLConnectionManager] + connections: SQLConnectionManager + + @available.parse(lambda *a, **k: (None, None)) + def add_query( + self, + sql: str, + auto_begin: bool = True, + bindings: Optional[Any] = None, + abridge_sql_log: bool = False, + ) -> Tuple[Connection, Any]: + """Add a query to the current transaction. A thin wrapper around + ConnectionManager.add_query. + + :param sql: The SQL query to add + :param auto_begin: If set and there is no transaction in progress, + begin a new one. + :param bindings: An optional list of bindings for the query. 
+ :param abridge_sql_log: If set, limit the raw sql logged to 512 + characters + """ + return self.connections.add_query(sql, auto_begin, bindings, + abridge_sql_log) + + @classmethod + def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str: + return "text" + + @classmethod + def convert_number_type( + cls, agate_table: agate.Table, col_idx: int + ) -> str: + decimals = agate_table.aggregate(agate.MaxPrecision(col_idx)) + return "float8" if decimals else "integer" + + @classmethod + def convert_boolean_type( + cls, agate_table: agate.Table, col_idx: int + ) -> str: + return "boolean" + + @classmethod + def convert_datetime_type( + cls, agate_table: agate.Table, col_idx: int + ) -> str: + return "timestamp without time zone" + + @classmethod + def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str: + return "date" + + @classmethod + def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str: + return "time" + + @classmethod + def is_cancelable(cls) -> bool: + return True + + def expand_column_types(self, goal, current): + reference_columns = { + c.name: c for c in + self.get_columns_in_relation(goal) + } + + target_columns = { + c.name: c for c + in self.get_columns_in_relation(current) + } + + for column_name, reference_column in reference_columns.items(): + target_column = target_columns.get(column_name) + + if target_column is not None and \ + target_column.can_expand_to(reference_column): + col_string_size = reference_column.string_size() + new_type = self.Column.string_type(col_string_size) + logger.debug("Changing col type from {} to {} in table {}", + target_column.data_type, new_type, current) + + self.alter_column_type(current, column_name, new_type) + + def alter_column_type( + self, relation, column_name, new_column_type + ) -> None: + """ + 1. Create a new column (w/ temp name and correct type) + 2. Copy data over to it + 3. Drop the existing column (cascade!) + 4. Rename the new column to existing column + """ + kwargs = { + 'relation': relation, + 'column_name': column_name, + 'new_column_type': new_column_type, + } + self.execute_macro( + ALTER_COLUMN_TYPE_MACRO_NAME, + kwargs=kwargs + ) + + def drop_relation(self, relation): + if relation.type is None: + dbt.exceptions.raise_compiler_error( + 'Tried to drop relation {}, but its type is null.' 
+ .format(relation)) + + self.cache_dropped(relation) + self.execute_macro( + DROP_RELATION_MACRO_NAME, + kwargs={'relation': relation} + ) + + def truncate_relation(self, relation): + self.execute_macro( + TRUNCATE_RELATION_MACRO_NAME, + kwargs={'relation': relation} + ) + + def rename_relation(self, from_relation, to_relation): + self.cache_renamed(from_relation, to_relation) + + kwargs = {'from_relation': from_relation, 'to_relation': to_relation} + self.execute_macro( + RENAME_RELATION_MACRO_NAME, + kwargs=kwargs + ) + + def get_columns_in_relation(self, relation): + return self.execute_macro( + GET_COLUMNS_IN_RELATION_MACRO_NAME, + kwargs={'relation': relation} + ) + + def create_schema(self, relation: BaseRelation) -> None: + relation = relation.without_identifier() + logger.debug('Creating schema "{}"', relation) + kwargs = { + 'relation': relation, + } + self.execute_macro(CREATE_SCHEMA_MACRO_NAME, kwargs=kwargs) + self.commit_if_has_connection() + # we can't update the cache here, as if the schema already existed we + # don't want to (incorrectly) say that it's empty + + def drop_schema(self, relation: BaseRelation) -> None: + relation = relation.without_identifier() + logger.debug('Dropping schema "{}".', relation) + kwargs = { + 'relation': relation, + } + self.execute_macro(DROP_SCHEMA_MACRO_NAME, kwargs=kwargs) + # we can update the cache here + self.cache.drop_schema(relation.database, relation.schema) + + def list_relations_without_caching( + self, schema_relation: BaseRelation, + ) -> List[BaseRelation]: + kwargs = {'schema_relation': schema_relation} + results = self.execute_macro( + LIST_RELATIONS_MACRO_NAME, + kwargs=kwargs + ) + + relations = [] + quote_policy = { + 'database': True, + 'schema': True, + 'identifier': True + } + for _database, name, _schema, _type in results: + try: + _type = self.Relation.get_relation_type(_type) + except ValueError: + _type = self.Relation.External + relations.append(self.Relation.create( + database=_database, + schema=_schema, + identifier=name, + quote_policy=quote_policy, + type=_type + )) + return relations + + def quote(self, identifier): + return '"{}"'.format(identifier) + + def list_schemas(self, database: str) -> List[str]: + results = self.execute_macro( + LIST_SCHEMAS_MACRO_NAME, + kwargs={'database': database} + ) + + return [row[0] for row in results] + + def check_schema_exists(self, database: str, schema: str) -> bool: + information_schema = self.Relation.create( + database=database, + schema=schema, + identifier='INFORMATION_SCHEMA', + quote_policy=self.config.quoting + ).information_schema() + + kwargs = {'information_schema': information_schema, 'schema': schema} + results = self.execute_macro( + CHECK_SCHEMA_EXISTS_MACRO_NAME, + kwargs=kwargs + ) + return results[0][0] > 0 diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/__init__.py b/dbt-env/lib/python3.8/site-packages/dbt/clients/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/__init__.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000..018f236 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/__init__.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/_jinja_blocks.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/_jinja_blocks.cpython-38.pyc new file mode 100644 index 
0000000..0cc416a Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/_jinja_blocks.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/agate_helper.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/agate_helper.cpython-38.pyc new file mode 100644 index 0000000..f0d8d9c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/agate_helper.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/gcloud.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/gcloud.cpython-38.pyc new file mode 100644 index 0000000..352228c Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/gcloud.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/git.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/git.cpython-38.pyc new file mode 100644 index 0000000..c816a71 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/git.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/jinja.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/jinja.cpython-38.pyc new file mode 100644 index 0000000..50131ee Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/jinja.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/jinja_static.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/jinja_static.cpython-38.pyc new file mode 100644 index 0000000..92e8ac1 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/jinja_static.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/registry.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/registry.cpython-38.pyc new file mode 100644 index 0000000..ee93d59 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/registry.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/system.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/system.cpython-38.pyc new file mode 100644 index 0000000..4f94971 Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/system.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/yaml_helper.cpython-38.pyc b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/yaml_helper.cpython-38.pyc new file mode 100644 index 0000000..0f32ddb Binary files /dev/null and b/dbt-env/lib/python3.8/site-packages/dbt/clients/__pycache__/yaml_helper.cpython-38.pyc differ diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/_jinja_blocks.py b/dbt-env/lib/python3.8/site-packages/dbt/clients/_jinja_blocks.py new file mode 100644 index 0000000..8a5a1da --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/clients/_jinja_blocks.py @@ -0,0 +1,393 @@ +import re +from collections import namedtuple + +import dbt.exceptions + + +def regex(pat): + return re.compile(pat, re.DOTALL | re.MULTILINE) + + +class BlockData: + """raw plaintext data from the top level of the file.""" + def __init__(self, contents): + self.block_type_name = '__dbt__data' + self.contents = contents + self.full_block = 
contents + + +class BlockTag: + def __init__(self, block_type_name, block_name, contents=None, + full_block=None, **kw): + self.block_type_name = block_type_name + self.block_name = block_name + self.contents = contents + self.full_block = full_block + + def __str__(self): + return 'BlockTag({!r}, {!r})'.format(self.block_type_name, + self.block_name) + + def __repr__(self): + return str(self) + + @property + def end_block_type_name(self): + return 'end{}'.format(self.block_type_name) + + def end_pat(self): + # we don't want to use string formatting here because jinja uses most + # of the string formatting operators in its syntax... + pattern = ''.join(( + r'(?P<endblock>((?:\s*\{\%\-|\{\%)\s*', + self.end_block_type_name, + r'\s*(?:\-\%\}\s*|\%\})))', + )) + return regex(pattern) + + +Tag = namedtuple('Tag', 'block_type_name block_name start end') + + +_NAME_PATTERN = r'[A-Za-z_][A-Za-z_0-9]*' + +COMMENT_START_PATTERN = regex(r'(?:(?P<comment_start>(\s*\{\#)))') +COMMENT_END_PATTERN = regex(r'(.*?)(\s*\#\})') +RAW_START_PATTERN = regex( + r'(?:\s*\{\%\-|\{\%)\s*(?P<raw_start>(raw))\s*(?:\-\%\}\s*|\%\})' +) +EXPR_START_PATTERN = regex(r'(?P<expr_start>(\{\{\s*))') +EXPR_END_PATTERN = regex(r'(?P<expr_end>(\s*\}\}))') + +BLOCK_START_PATTERN = regex(''.join(( + r'(?:\s*\{\%\-|\{\%)\s*', + r'(?P<block_type_name>({}))'.format(_NAME_PATTERN), + # some blocks have a 'block name'. + r'(?:\s+(?P<block_name>({})))?'.format(_NAME_PATTERN), +))) + + +RAW_BLOCK_PATTERN = regex(''.join(( + r'(?:\s*\{\%\-|\{\%)\s*raw\s*(?:\-\%\}\s*|\%\})', + r'(?:.*?)', + r'(?:\s*\{\%\-|\{\%)\s*endraw\s*(?:\-\%\}\s*|\%\})', +))) + +TAG_CLOSE_PATTERN = regex(r'(?:(?P<tag_close>(\-\%\}\s*|\%\})))') + +# stolen from jinja's lexer. Note that we've consumed all prefix whitespace by +# the time we want to use this. +STRING_PATTERN = regex( + r"(?P<string>('([^'\\]*(?:\\.[^'\\]*)*)'|" + r'"([^"\\]*(?:\\.[^"\\]*)*)"))' +) +QUOTE_START_PATTERN = regex(r'''(?P<quote>(['"]))''') + + +class TagIterator: + def __init__(self, data): + self.data = data + self.blocks = [] + self._parenthesis_stack = [] + self.pos = 0 + + def linepos(self, end=None) -> str: + """Given an absolute position in the input data, return a pair of + line number + relative position to the start of the line. + """ + end_val: int = self.pos if end is None else end + data = self.data[:end_val] + # if not found, rfind returns -1, and -1+1=0, which is perfect! + last_line_start = data.rfind('\n') + 1 + # it's easy to forget this, but line numbers are 1-indexed + line_number = data.count('\n') + 1 + return f'{line_number}:{end_val - last_line_start}' + + def advance(self, new_position): + self.pos = new_position + + def rewind(self, amount=1): + self.pos -= amount + + def _search(self, pattern): + return pattern.search(self.data, self.pos) + + def _match(self, pattern): + return pattern.match(self.data, self.pos) + + def _first_match(self, *patterns, **kwargs): + matches = [] + for pattern in patterns: + # default to 'search', but sometimes we want to 'match'. + if kwargs.get('method', 'search') == 'search': + match = self._search(pattern) + else: + match = self._match(pattern) + if match: + matches.append(match) + if not matches: + return None + # if there are multiple matches, pick the least greedy match + # TODO: do I need to account for m.start(), or is this ok? 
+ return min(matches, key=lambda m: m.end()) + + def _expect_match(self, expected_name, *patterns, **kwargs): + match = self._first_match(*patterns, **kwargs) + if match is None: + msg = 'unexpected EOF, expected {}, got "{}"'.format( + expected_name, self.data[self.pos:] + ) + dbt.exceptions.raise_compiler_error(msg) + return match + + def handle_expr(self, match): + """Handle an expression. At this point we're at a string like: + {{ 1 + 2 }} + ^ right here + + And the match contains "{{ " + + We expect to find a `}}`, but we might find one in a string before + that. Imagine the case of `{{ 2 * "}}" }}`... + + You're not allowed to have blocks or comments inside an expr so it is + pretty straightforward, I hope: only strings can get in the way. + """ + self.advance(match.end()) + while True: + match = self._expect_match('}}', + EXPR_END_PATTERN, + QUOTE_START_PATTERN) + if match.groupdict().get('expr_end') is not None: + break + else: + # it's a quote. we haven't advanced for this match yet, so + # just slurp up the whole string, no need to rewind. + match = self._expect_match('string', STRING_PATTERN) + self.advance(match.end()) + + self.advance(match.end()) + + def handle_comment(self, match): + self.advance(match.end()) + match = self._expect_match('#}', COMMENT_END_PATTERN) + self.advance(match.end()) + + def _expect_block_close(self): + """Search for the tag close marker. + To the right of the type name, there are a few possibilities: + - a name (handled by the regex's 'block_name') + - any number of: `=`, `(`, `)`, strings, etc (arguments) + - nothing + + followed eventually by a %} + + So the only characters we actually have to worry about in this context + are quote and `%}` - nothing else can hide the %} and be valid jinja. + """ + while True: + end_match = self._expect_match( + 'tag close ("%}")', + QUOTE_START_PATTERN, + TAG_CLOSE_PATTERN + ) + self.advance(end_match.end()) + if end_match.groupdict().get('tag_close') is not None: + return + # must be a string. Rewind to its start and advance past it. + self.rewind() + string_match = self._expect_match('string', STRING_PATTERN) + self.advance(string_match.end()) + + def handle_raw(self): + # raw blocks are super special, they are a single complete regex + match = self._expect_match('{% raw %}...{% endraw %}', + RAW_BLOCK_PATTERN) + self.advance(match.end()) + return match.end() + + def handle_tag(self, match): + """The tag could be one of a few things: + + {% mytag %} + {% mytag x = y %} + {% mytag x = "y" %} + {% mytag x.y() %} + {% mytag foo("a", "b", c="d") %} + + But the key here is that it's always going to be `{% mytag`! 
+ """ + groups = match.groupdict() + # always a value + block_type_name = groups['block_type_name'] + # might be None + block_name = groups.get('block_name') + start_pos = self.pos + if block_type_name == 'raw': + match = self._expect_match('{% raw %}...{% endraw %}', + RAW_BLOCK_PATTERN) + self.advance(match.end()) + else: + self.advance(match.end()) + self._expect_block_close() + return Tag( + block_type_name=block_type_name, + block_name=block_name, + start=start_pos, + end=self.pos + ) + + def find_tags(self): + while True: + match = self._first_match( + BLOCK_START_PATTERN, + COMMENT_START_PATTERN, + EXPR_START_PATTERN + ) + if match is None: + break + + self.advance(match.start()) + # start = self.pos + + groups = match.groupdict() + comment_start = groups.get('comment_start') + expr_start = groups.get('expr_start') + block_type_name = groups.get('block_type_name') + + if comment_start is not None: + self.handle_comment(match) + elif expr_start is not None: + self.handle_expr(match) + elif block_type_name is not None: + yield self.handle_tag(match) + else: + raise dbt.exceptions.InternalException( + 'Invalid regex match in next_block, expected block start, ' + 'expr start, or comment start' + ) + + def __iter__(self): + return self.find_tags() + + +duplicate_tags = ( + 'Got nested tags: {outer.block_type_name} (started at {outer.start}) did ' + 'not have a matching {{% end{outer.block_type_name} %}} before a ' + 'subsequent {inner.block_type_name} was found (started at {inner.start})' +) + + +_CONTROL_FLOW_TAGS = { + 'if': 'endif', + 'for': 'endfor', +} + +_CONTROL_FLOW_END_TAGS = { + v: k + for k, v in _CONTROL_FLOW_TAGS.items() +} + + +class BlockIterator: + def __init__(self, data): + self.tag_parser = TagIterator(data) + self.current = None + self.stack = [] + self.last_position = 0 + + @property + def current_end(self): + if self.current is None: + return 0 + else: + return self.current.end + + @property + def data(self): + return self.tag_parser.data + + def is_current_end(self, tag): + return ( + tag.block_type_name.startswith('end') and + self.current is not None and + tag.block_type_name[3:] == self.current.block_type_name + ) + + def find_blocks(self, allowed_blocks=None, collect_raw_data=True): + """Find all top-level blocks in the data.""" + if allowed_blocks is None: + allowed_blocks = {'snapshot', 'macro', 'materialization', 'docs'} + + for tag in self.tag_parser.find_tags(): + if tag.block_type_name in _CONTROL_FLOW_TAGS: + self.stack.append(tag.block_type_name) + elif tag.block_type_name in _CONTROL_FLOW_END_TAGS: + found = None + if self.stack: + found = self.stack.pop() + else: + expected = _CONTROL_FLOW_END_TAGS[tag.block_type_name] + dbt.exceptions.raise_compiler_error(( + 'Got an unexpected control flow end tag, got {} but ' + 'never saw a preceeding {} (@ {})' + ).format( + tag.block_type_name, + expected, + self.tag_parser.linepos(tag.start) + )) + expected = _CONTROL_FLOW_TAGS[found] + if expected != tag.block_type_name: + dbt.exceptions.raise_compiler_error(( + 'Got an unexpected control flow end tag, got {} but ' + 'expected {} next (@ {})' + ).format( + tag.block_type_name, + expected, + self.tag_parser.linepos(tag.start) + )) + + if tag.block_type_name in allowed_blocks: + if self.stack: + dbt.exceptions.raise_compiler_error(( + 'Got a block definition inside control flow at {}. 
' + 'All dbt block definitions must be at the top level' + ).format(self.tag_parser.linepos(tag.start))) + if self.current is not None: + dbt.exceptions.raise_compiler_error( + duplicate_tags.format(outer=self.current, inner=tag) + ) + if collect_raw_data: + raw_data = self.data[self.last_position:tag.start] + self.last_position = tag.start + if raw_data: + yield BlockData(raw_data) + self.current = tag + + elif self.is_current_end(tag): + self.last_position = tag.end + assert self.current is not None + yield BlockTag( + block_type_name=self.current.block_type_name, + block_name=self.current.block_name, + contents=self.data[self.current.end:tag.start], + full_block=self.data[self.current.start:tag.end] + ) + self.current = None + + if self.current: + linecount = self.data[:self.current.end].count('\n') + 1 + dbt.exceptions.raise_compiler_error(( + 'Reached EOF without finding a close tag for ' + '{} (searched from line {})' + ).format(self.current.block_type_name, linecount)) + + if collect_raw_data: + raw_data = self.data[self.last_position:] + if raw_data: + yield BlockData(raw_data) + + def lex_for_blocks(self, allowed_blocks=None, collect_raw_data=True): + return list(self.find_blocks(allowed_blocks=allowed_blocks, + collect_raw_data=collect_raw_data)) diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/agate_helper.py b/dbt-env/lib/python3.8/site-packages/dbt/clients/agate_helper.py new file mode 100644 index 0000000..d2f9298 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/clients/agate_helper.py @@ -0,0 +1,228 @@ +from codecs import BOM_UTF8 + +import agate +import datetime +import isodate +import json +import dbt.utils +from typing import Iterable, List, Dict, Union, Optional, Any + +from dbt.exceptions import RuntimeException + + +BOM = BOM_UTF8.decode('utf-8') # '\ufeff' + + +class ISODateTime(agate.data_types.DateTime): + def cast(self, d): + # this is agate.data_types.DateTime.cast with the "clever" bits removed + # so we only handle ISO8601 stuff + if isinstance(d, datetime.datetime) or d is None: + return d + elif isinstance(d, datetime.date): + return datetime.datetime.combine(d, datetime.time(0, 0, 0)) + elif isinstance(d, str): + d = d.strip() + if d.lower() in self.null_values: + return None + try: + return isodate.parse_datetime(d) + except: # noqa + pass + + raise agate.exceptions.CastError( + 'Can not parse value "%s" as datetime.' 
% d + ) + + +def build_type_tester( + text_columns: Iterable[str], + string_null_values: Optional[Iterable[str]] = ('null', '') +) -> agate.TypeTester: + + types = [ + agate.data_types.Number(null_values=('null', '')), + agate.data_types.Date(null_values=('null', ''), + date_format='%Y-%m-%d'), + agate.data_types.DateTime(null_values=('null', ''), + datetime_format='%Y-%m-%d %H:%M:%S'), + ISODateTime(null_values=('null', '')), + agate.data_types.Boolean(true_values=('true',), + false_values=('false',), + null_values=('null', '')), + agate.data_types.Text(null_values=string_null_values) + ] + force = { + k: agate.data_types.Text(null_values=string_null_values) + for k in text_columns + } + return agate.TypeTester(force=force, types=types) + + +DEFAULT_TYPE_TESTER = build_type_tester(()) + + +def table_from_rows( + rows: List[Any], + column_names: Iterable[str], + text_only_columns: Optional[Iterable[str]] = None, +) -> agate.Table: + if text_only_columns is None: + column_types = DEFAULT_TYPE_TESTER + else: + # If text_only_columns are present, prevent coercing empty string or + # literal 'null' strings to a None representation. + column_types = build_type_tester( + text_only_columns, + string_null_values=() + ) + + return agate.Table(rows, column_names, column_types=column_types) + + +def table_from_data(data, column_names: Iterable[str]) -> agate.Table: + "Convert list of dictionaries into an Agate table" + + # The agate table is generated from a list of dicts, so the column order + # from `data` is not preserved. We can use `select` to reorder the columns + # + # If there is no data, create an empty table with the specified columns + + if len(data) == 0: + return agate.Table([], column_names=column_names) + else: + table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER) + return table.select(column_names) + + +def table_from_data_flat(data, column_names: Iterable[str]) -> agate.Table: + """ + Convert a list of dictionaries into an Agate table. This method does not + coerce string values into more specific types (eg. '005' will not be + coerced to '5'). Additionally, this method does not coerce values to + None (eg. '' or 'null' will retain their string literal representations). + """ + + rows = [] + text_only_columns = set() + for _row in data: + row = [] + for col_name in column_names: + value = _row[col_name] + if isinstance(value, (dict, list, tuple)): + # Represent container types as json strings + value = json.dumps(value, cls=dbt.utils.JSONEncoder) + text_only_columns.add(col_name) + elif isinstance(value, str): + text_only_columns.add(col_name) + row.append(value) + + rows.append(row) + + return table_from_rows( + rows=rows, + column_names=column_names, + text_only_columns=text_only_columns + ) + + +def empty_table(): + "Returns an empty Agate table. 
To be used in place of None" + + return agate.Table(rows=[]) + + +def as_matrix(table): + "Return an agate table as a matrix of data sans columns" + + return [r.values() for r in table.rows.values()] + + +def from_csv(abspath, text_columns): + type_tester = build_type_tester(text_columns=text_columns) + with open(abspath, encoding='utf-8') as fp: + if fp.read(1) != BOM: + fp.seek(0) + return agate.Table.from_csv(fp, column_types=type_tester) + + +class _NullMarker: + pass + + +NullableAgateType = Union[agate.data_types.DataType, _NullMarker] + + +class ColumnTypeBuilder(Dict[str, NullableAgateType]): + def __init__(self): + super().__init__() + + def __setitem__(self, key, value): + if key not in self: + super().__setitem__(key, value) + return + + existing_type = self[key] + if isinstance(existing_type, _NullMarker): + # overwrite + super().__setitem__(key, value) + elif isinstance(value, _NullMarker): + # use the existing value + return + elif not isinstance(value, type(existing_type)): + # actual type mismatch! + raise RuntimeException( + f'Tables contain columns with the same names ({key}), ' + f'but different types ({value} vs {existing_type})' + ) + + def finalize(self) -> Dict[str, agate.data_types.DataType]: + result: Dict[str, agate.data_types.DataType] = {} + for key, value in self.items(): + if isinstance(value, _NullMarker): + # this is what agate would do. + result[key] = agate.data_types.Number() + else: + result[key] = value + return result + + +def _merged_column_types( + tables: List[agate.Table] +) -> Dict[str, agate.data_types.DataType]: + # this is a lot like agate.Table.merge, but with handling for all-null + # rows being "any type". + new_columns: ColumnTypeBuilder = ColumnTypeBuilder() + for table in tables: + for i in range(len(table.columns)): + column_name: str = table.column_names[i] + column_type: NullableAgateType = table.column_types[i] + # avoid over-sensitive type inference + if all(x is None for x in table.columns[column_name]): + column_type = _NullMarker() + new_columns[column_name] = column_type + + return new_columns.finalize() + + +def merge_tables(tables: List[agate.Table]) -> agate.Table: + """This is similar to agate.Table.merge, but it handles rows of all 'null' + values more gracefully during merges. + """ + new_columns = _merged_column_types(tables) + column_names = tuple(new_columns.keys()) + column_types = tuple(new_columns.values()) + + rows: List[agate.Row] = [] + for table in tables: + if ( + table.column_names == column_names and + table.column_types == column_types + ): + rows.extend(table.rows) + else: + for row in table.rows: + data = [row.get(name, None) for name in column_names] + rows.append(agate.Row(data, column_names)) + # _is_fork to tell agate that we already made things into `Row`s. + return agate.Table(rows, column_names, column_types, _is_fork=True) diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/gcloud.py b/dbt-env/lib/python3.8/site-packages/dbt/clients/gcloud.py new file mode 100644 index 0000000..77ed74f --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/clients/gcloud.py @@ -0,0 +1,26 @@ +from dbt.logger import GLOBAL_LOGGER as logger +import dbt.exceptions +from dbt.clients.system import run_cmd + +NOT_INSTALLED_MSG = """ +dbt requires the gcloud SDK to be installed to authenticate with BigQuery. +Please download and install the SDK, or use a Service Account instead. 
+ +https://cloud.google.com/sdk/ +""" + + +def gcloud_installed(): + try: + run_cmd('.', ['gcloud', '--version']) + return True + except OSError as e: + logger.debug(e) + return False + + +def setup_default_credentials(): + if gcloud_installed(): + run_cmd('.', ["gcloud", "auth", "application-default", "login"]) + else: + raise dbt.exceptions.RuntimeException(NOT_INSTALLED_MSG) diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/git.py b/dbt-env/lib/python3.8/site-packages/dbt/clients/git.py new file mode 100644 index 0000000..ba3bf2e --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/clients/git.py @@ -0,0 +1,142 @@ +import re +import os.path + +from dbt.clients.system import run_cmd, rmdir +from dbt.logger import GLOBAL_LOGGER as logger +import dbt.exceptions +from packaging import version + + +def _is_commit(revision: str) -> bool: + # match SHA-1 git commit + return bool(re.match(r"\b[0-9a-f]{40}\b", revision)) + + +def clone(repo, cwd, dirname=None, remove_git_dir=False, revision=None, subdirectory=None): + has_revision = revision is not None + is_commit = _is_commit(revision or "") + + clone_cmd = ['git', 'clone', '--depth', '1'] + if subdirectory: + logger.debug(' Subdirectory specified: {}, using sparse checkout.'.format(subdirectory)) + out, _ = run_cmd(cwd, ['git', '--version'], env={'LC_ALL': 'C'}) + git_version = version.parse(re.search(r"\d+\.\d+\.\d+", out.decode("utf-8")).group(0)) + if not git_version >= version.parse("2.25.0"): + # 2.25.0 introduces --sparse + raise RuntimeError( + "Please update your git version to pull a dbt package " + "from a subdirectory: your version is {}, >= 2.25.0 needed".format(git_version) + ) + clone_cmd.extend(['--filter=blob:none', '--sparse']) + + if has_revision and not is_commit: + clone_cmd.extend(['--branch', revision]) + + clone_cmd.append(repo) + + if dirname is not None: + clone_cmd.append(dirname) + result = run_cmd(cwd, clone_cmd, env={'LC_ALL': 'C'}) + + if subdirectory: + run_cmd(os.path.join(cwd, dirname or ''), ['git', 'sparse-checkout', 'set', subdirectory]) + + if remove_git_dir: + rmdir(os.path.join(dirname, '.git')) + + return result + + +def list_tags(cwd): + out, err = run_cmd(cwd, ['git', 'tag', '--list'], env={'LC_ALL': 'C'}) + tags = out.decode('utf-8').strip().split("\n") + return tags + + +def _checkout(cwd, repo, revision): + logger.debug(' Checking out revision {}.'.format(revision)) + + fetch_cmd = ["git", "fetch", "origin", "--depth", "1"] + + if _is_commit(revision): + run_cmd(cwd, fetch_cmd + [revision]) + else: + run_cmd(cwd, ['git', 'remote', 'set-branches', 'origin', revision]) + run_cmd(cwd, fetch_cmd + ["--tags", revision]) + + if _is_commit(revision): + spec = revision + # Prefer tags to branches if one exists + elif revision in list_tags(cwd): + spec = 'tags/{}'.format(revision) + else: + spec = 'origin/{}'.format(revision) + + out, err = run_cmd(cwd, ['git', 'reset', '--hard', spec], + env={'LC_ALL': 'C'}) + return out, err + + +def checkout(cwd, repo, revision=None): + if revision is None: + revision = 'HEAD' + try: + return _checkout(cwd, repo, revision) + except dbt.exceptions.CommandResultError as exc: + stderr = exc.stderr.decode('utf-8').strip() + dbt.exceptions.bad_package_spec(repo, revision, stderr) + + +def get_current_sha(cwd): + out, err = run_cmd(cwd, ['git', 'rev-parse', 'HEAD'], env={'LC_ALL': 'C'}) + + return out.decode('utf-8') + + +def remove_remote(cwd): + return run_cmd(cwd, ['git', 'remote', 'rm', 'origin'], env={'LC_ALL': 'C'}) + + +def clone_and_checkout(repo, 
cwd, dirname=None, remove_git_dir=False, + revision=None, subdirectory=None): + exists = None + try: + _, err = clone( + repo, + cwd, + dirname=dirname, + remove_git_dir=remove_git_dir, + subdirectory=subdirectory, + ) + except dbt.exceptions.CommandResultError as exc: + err = exc.stderr.decode('utf-8') + exists = re.match("fatal: destination path '(.+)' already exists", err) + if not exists: # something else is wrong, raise it + raise + + directory = None + start_sha = None + if exists: + directory = exists.group(1) + logger.debug('Updating existing dependency {}.', directory) + else: + matches = re.match("Cloning into '(.+)'", err.decode('utf-8')) + if matches is None: + raise dbt.exceptions.RuntimeException( + f'Error cloning {repo} - never saw "Cloning into ..." from git' + ) + directory = matches.group(1) + logger.debug('Pulling new dependency {}.', directory) + full_path = os.path.join(cwd, directory) + start_sha = get_current_sha(full_path) + checkout(full_path, repo, revision) + end_sha = get_current_sha(full_path) + if exists: + if start_sha == end_sha: + logger.debug(' Already at {}, nothing to do.', start_sha[:7]) + else: + logger.debug(' Updated checkout from {} to {}.', + start_sha[:7], end_sha[:7]) + else: + logger.debug(' Checked out at {}.', end_sha[:7]) + return os.path.join(directory, subdirectory or '') diff --git a/dbt-env/lib/python3.8/site-packages/dbt/clients/jinja.py b/dbt-env/lib/python3.8/site-packages/dbt/clients/jinja.py new file mode 100644 index 0000000..270ae94 --- /dev/null +++ b/dbt-env/lib/python3.8/site-packages/dbt/clients/jinja.py @@ -0,0 +1,665 @@ +import codecs +import linecache +import os +import re +import tempfile +import threading +from ast import literal_eval +from contextlib import contextmanager +from itertools import chain, islice +from typing import ( + List, Union, Set, Optional, Dict, Any, Iterator, Type, NoReturn, Tuple, + Callable +) + +import jinja2 +import jinja2.ext +import jinja2.nativetypes # type: ignore +import jinja2.nodes +import jinja2.parser +import jinja2.sandbox + +from dbt.utils import ( + get_dbt_macro_name, get_docs_macro_name, get_materialization_macro_name, + get_test_macro_name, deep_map +) + +from dbt.clients._jinja_blocks import BlockIterator, BlockData, BlockTag +from dbt.contracts.graph.compiled import CompiledSchemaTestNode +from dbt.contracts.graph.parsed import ParsedSchemaTestNode +from dbt.exceptions import ( + InternalException, raise_compiler_error, CompilationException, + invalid_materialization_argument, MacroReturn, JinjaRenderingException, + UndefinedMacroException +) +from dbt import flags +from dbt.logger import GLOBAL_LOGGER as logger # noqa + + +def _linecache_inject(source, write): + if write: + # this is the only reliable way to accomplish this. Obviously, it's + # really darn noisy and will fill your temporary directory + tmp_file = tempfile.NamedTemporaryFile( + prefix='dbt-macro-compiled-', + suffix='.py', + delete=False, + mode='w+', + encoding='utf-8', + ) + tmp_file.write(source) + filename = tmp_file.name + else: + # `codecs.encode` actually takes a `bytes` as the first argument if + # the second argument is 'hex' - mypy does not know this. 
+ rnd = codecs.encode(os.urandom(12), 'hex') # type: ignore + filename = rnd.decode('ascii') + + # put ourselves in the cache + cache_entry = ( + len(source), + None, + [line + '\n' for line in source.splitlines()], + filename + ) + # linecache does in fact have an attribute `cache`, thanks + linecache.cache[filename] = cache_entry # type: ignore + return filename + + +class MacroFuzzParser(jinja2.parser.Parser): + def parse_macro(self): + node = jinja2.nodes.Macro(lineno=next(self.stream).lineno) + + # modified to fuzz macros defined in the same file. this way + # dbt can understand the stack of macros being called. + # - @cmcarthur + node.name = get_dbt_macro_name( + self.parse_assign_target(name_only=True).name) + + self.parse_signature(node) + node.body = self.parse_statements(('name:endmacro',), + drop_needle=True) + return node + + +class MacroFuzzEnvironment(jinja2.sandbox.SandboxedEnvironment): + def _parse(self, source, name, filename): + return MacroFuzzParser(self, source, name, filename).parse() + + def _compile(self, source, filename): + """Override jinja's compilation to stash the rendered source inside + the python linecache for debugging when the appropriate environment + variable is set. + + If the value is 'write', also write the files to disk. + WARNING: This can write a ton of data if you aren't careful. + """ + if filename == '