files a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/base.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/base.cpython-311.pyc index 38980a155..a5ae1648d 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/base.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/base.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/dragonfly.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/dragonfly.cpython-311.pyc index d196517b8..874452d91 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/dragonfly.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/dragonfly.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/freebsd.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/freebsd.cpython-311.pyc index 7cb927913..3cf02ae21 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/freebsd.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/freebsd.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/hpux.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/hpux.cpython-311.pyc index 1e386ce0d..6caedc6d7 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/hpux.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/hpux.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/linux.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/linux.cpython-311.pyc index 7ee5a4a81..edb3daa06 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/linux.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/linux.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/netbsd.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/netbsd.cpython-311.pyc index 192e69f85..994c504ef 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/netbsd.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/netbsd.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/openbsd.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/openbsd.cpython-311.pyc index 836f416de..23ba0d382 100644 Binary files 
a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/openbsd.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/openbsd.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/sunos.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/sunos.cpython-311.pyc index 340830c7b..4d3183b40 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/sunos.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/sunos.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/sysctl.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/sysctl.cpython-311.pyc index 071fd09fc..3173beaee 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/sysctl.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/module_utils/facts/virtual/__pycache__/sysctl.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/module_utils/parsing/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/module_utils/parsing/__pycache__/__init__.cpython-311.pyc index 9a0a52b07..70705888c 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/module_utils/parsing/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/module_utils/parsing/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/module_utils/parsing/__pycache__/convert_bool.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/module_utils/parsing/__pycache__/convert_bool.cpython-311.pyc index d63a3a24e..c60a846b0 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/module_utils/parsing/__pycache__/convert_bool.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/module_utils/parsing/__pycache__/convert_bool.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/module_utils/six/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/module_utils/six/__pycache__/__init__.cpython-311.pyc index cfddaa45b..cd65481e2 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/module_utils/six/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/module_utils/six/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/modules/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/modules/__pycache__/__init__.cpython-311.pyc index 335bf3f9f..25e997744 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/modules/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/modules/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/__init__.cpython-311.pyc index 0f0d2c1bb..e88c35b37 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/__init__.cpython-311.pyc and 
b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/ajson.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/ajson.cpython-311.pyc index b1875d2b9..12a1bd718 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/ajson.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/ajson.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/dataloader.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/dataloader.cpython-311.pyc index 4fb4e9847..074720a2c 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/dataloader.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/dataloader.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/mod_args.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/mod_args.cpython-311.pyc index bb776358a..467babf85 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/mod_args.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/mod_args.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/plugin_docs.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/plugin_docs.cpython-311.pyc index 04ec88342..56d99eb73 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/plugin_docs.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/plugin_docs.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/quoting.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/quoting.cpython-311.pyc index 821eae655..07b920a12 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/quoting.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/quoting.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/splitter.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/splitter.cpython-311.pyc index 03b10c2b4..eb0b98dd7 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/splitter.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/__pycache__/splitter.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/__init__.cpython-311.pyc index 3636e36eb..20a615d60 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/addresses.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/addresses.cpython-311.pyc index d79dc78c5..ea4a21477 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/addresses.cpython-311.pyc and 
b/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/addresses.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/jsonify.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/jsonify.cpython-311.pyc index 90fc7860d..1e92db7b6 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/jsonify.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/jsonify.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/yaml.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/yaml.cpython-311.pyc index a3b2f6d8d..633c8ffb5 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/yaml.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/utils/__pycache__/yaml.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/vault/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/vault/__pycache__/__init__.cpython-311.pyc index 3fb0c88d5..8862c79a7 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/vault/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/vault/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/__init__.cpython-311.pyc index 381d1f61f..c4de932f7 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/constructor.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/constructor.cpython-311.pyc index bf65eea6a..c1c86fa24 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/constructor.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/constructor.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/dumper.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/dumper.cpython-311.pyc index 254fe5891..0bae0284d 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/dumper.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/dumper.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/loader.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/loader.cpython-311.pyc index 9c4896d81..c4c08c2e5 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/loader.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/loader.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/objects.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/objects.cpython-311.pyc index 14318d1dd..701c3c57d 100644 Binary files 
a/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/objects.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/parsing/yaml/__pycache__/objects.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/__init__.cpython-311.pyc index e1c939115..9c54809ac 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/attribute.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/attribute.cpython-311.pyc index fbbceea90..e277e3db3 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/attribute.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/attribute.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/base.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/base.cpython-311.pyc index cf14c2297..807398f08 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/base.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/base.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/block.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/block.cpython-311.pyc index 20674ca90..0546f1e37 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/block.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/block.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/collectionsearch.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/collectionsearch.cpython-311.pyc index 7727046f5..0c880d253 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/collectionsearch.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/collectionsearch.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/conditional.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/conditional.cpython-311.pyc index a549d64a1..163b2aa57 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/conditional.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/conditional.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/delegatable.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/delegatable.cpython-311.pyc index f70684dae..da92dd849 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/delegatable.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/delegatable.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/handler.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/handler.cpython-311.pyc index d18bdddad..c8b2b4036 100644 
Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/handler.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/handler.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/handler_task_include.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/handler_task_include.cpython-311.pyc index 6a83a0cd8..983fba7bd 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/handler_task_include.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/handler_task_include.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/helpers.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/helpers.cpython-311.pyc index 90458a9fc..253c65c50 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/helpers.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/helpers.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/included_file.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/included_file.cpython-311.pyc index 45c350a4b..3317ebd5f 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/included_file.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/included_file.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/loop_control.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/loop_control.cpython-311.pyc index 0d177b542..a33b75253 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/loop_control.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/loop_control.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/notifiable.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/notifiable.cpython-311.pyc index 18df2d17e..fc0cc21af 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/notifiable.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/notifiable.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/play.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/play.cpython-311.pyc index 99c53bfc4..b40214300 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/play.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/play.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/play_context.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/play_context.cpython-311.pyc index b10335083..0f4a20b80 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/play_context.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/play_context.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/playbook_include.cpython-311.pyc 
b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/playbook_include.cpython-311.pyc index 10da2bef9..6c0ecdaab 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/playbook_include.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/playbook_include.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/role_include.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/role_include.cpython-311.pyc index 74573eee9..59a3da24d 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/role_include.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/role_include.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/taggable.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/taggable.cpython-311.pyc index e5b36f508..502132119 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/taggable.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/taggable.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/task.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/task.cpython-311.pyc index 8174e3bee..7327e88a2 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/task.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/task.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/task_include.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/task_include.cpython-311.pyc index 4e7921114..b2256a5b2 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/task_include.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/__pycache__/task_include.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/__init__.cpython-311.pyc index 44a9aad7d..36f08f449 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/definition.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/definition.cpython-311.pyc index f8fcca62e..05c81096c 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/definition.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/definition.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/include.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/include.cpython-311.pyc index dff77d336..c1284a7e6 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/include.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/include.cpython-311.pyc differ diff --git 
a/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/metadata.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/metadata.cpython-311.pyc index 4efa6e8d5..cef727ab6 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/metadata.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/metadata.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/requirement.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/requirement.cpython-311.pyc index 8cac5d097..497f1c829 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/requirement.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/playbook/role/__pycache__/requirement.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/__pycache__/__init__.cpython-311.pyc index cefbdd765..3a1e013b2 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/__pycache__/loader.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/__pycache__/loader.cpython-311.pyc index 5282678f3..a676e41fb 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/__pycache__/loader.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/__pycache__/loader.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/__init__.cpython-311.pyc index cb233d9d6..fce8dd1d4 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/command.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/command.cpython-311.pyc index 2e7558118..07e7e1257 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/command.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/command.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/copy.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/copy.cpython-311.pyc index 808b59a33..72d562935 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/copy.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/copy.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/debug.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/debug.cpython-311.pyc index 1a7bd4aa0..4aef0ab06 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/debug.cpython-311.pyc and 
b/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/debug.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/gather_facts.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/gather_facts.cpython-311.pyc index dbd22c6e9..282316a6a 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/gather_facts.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/gather_facts.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/normal.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/normal.cpython-311.pyc index 83d46f5f3..52b5d7012 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/normal.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/action/__pycache__/normal.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/__init__.cpython-311.pyc index f528cb36c..52dd35065 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/runas.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/runas.cpython-311.pyc index 48263d0ee..fb9479b58 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/runas.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/runas.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/su.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/su.cpython-311.pyc index bbe763a13..07072e354 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/su.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/su.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/sudo.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/sudo.cpython-311.pyc index 57dcabeef..b000e8209 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/sudo.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/become/__pycache__/sudo.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/cache/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/cache/__pycache__/__init__.cpython-311.pyc index c752ab9f5..52619420c 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/cache/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/cache/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/cache/__pycache__/memory.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/cache/__pycache__/memory.cpython-311.pyc index 5f8091aa4..12a7a4a4c 100644 Binary 
files a/ansible/lib/python3.11/site-packages/ansible/plugins/cache/__pycache__/memory.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/cache/__pycache__/memory.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/__init__.cpython-311.pyc index 147527983..44186542f 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/default.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/default.cpython-311.pyc index 6ac565b95..eaa9cf0d0 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/default.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/default.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/junit.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/junit.cpython-311.pyc index 169c47704..e711a8469 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/junit.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/junit.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/minimal.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/minimal.cpython-311.pyc index bd8244d96..187a9f58b 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/minimal.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/minimal.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/oneline.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/oneline.cpython-311.pyc index f1f651ae4..23f099540 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/oneline.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/oneline.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/tree.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/tree.cpython-311.pyc index 3c2abbf14..87ab161ff 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/tree.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/callback/__pycache__/tree.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/__init__.cpython-311.pyc index 5e35f6562..0111b450f 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/local.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/local.cpython-311.pyc index 04438e743..760c1cf79 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/local.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/local.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/paramiko_ssh.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/paramiko_ssh.cpython-311.pyc index c9d8b6133..f61f75ce0 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/paramiko_ssh.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/paramiko_ssh.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/psrp.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/psrp.cpython-311.pyc index d5e4354ea..18b3590e9 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/psrp.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/psrp.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/ssh.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/ssh.cpython-311.pyc index 0699d51db..4eee68863 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/ssh.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/ssh.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/winrm.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/winrm.cpython-311.pyc index 26421849b..fa1f8e51a 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/winrm.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/connection/__pycache__/winrm.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/__init__.cpython-311.pyc index 4c2309d4a..76da58d84 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/connection_pipelining.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/connection_pipelining.cpython-311.pyc index f3c82ca2f..b0e4e93b2 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/connection_pipelining.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/connection_pipelining.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/default_callback.cpython-311.pyc 
b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/default_callback.cpython-311.pyc index 91dcddda2..c29572501 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/default_callback.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/default_callback.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/result_format_callback.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/result_format_callback.cpython-311.pyc index d1c3f9062..eb3195693 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/result_format_callback.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/result_format_callback.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/shell_common.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/shell_common.cpython-311.pyc index f21da0da1..1130b0fd6 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/shell_common.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/shell_common.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/shell_windows.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/shell_windows.cpython-311.pyc index 22311d348..a9f1ead8d 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/shell_windows.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/shell_windows.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/vars_plugin_staging.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/vars_plugin_staging.cpython-311.pyc index de2c5f4a9..172a53c89 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/vars_plugin_staging.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/doc_fragments/__pycache__/vars_plugin_staging.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/__init__.cpython-311.pyc index 58370d860..b2ede7864 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/core.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/core.cpython-311.pyc index 7067abb30..68189f066 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/core.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/core.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/encryption.cpython-311.pyc 
b/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/encryption.cpython-311.pyc index a346ad47a..8a3b5df4c 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/encryption.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/encryption.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/mathstuff.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/mathstuff.cpython-311.pyc index df23c467d..05b20d683 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/mathstuff.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/mathstuff.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/urls.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/urls.cpython-311.pyc index f8e49d771..1afc7d9e7 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/urls.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/urls.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/urlsplit.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/urlsplit.cpython-311.pyc index 709a00963..3a332d18a 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/urlsplit.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/filter/__pycache__/urlsplit.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/__init__.cpython-311.pyc index 3ca283879..702cf5cba 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/auto.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/auto.cpython-311.pyc index 96d16d453..11ccb7b72 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/auto.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/auto.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/host_list.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/host_list.cpython-311.pyc index 46e79629b..c802cfba4 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/host_list.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/host_list.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/ini.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/ini.cpython-311.pyc index 8d3533af2..282a15594 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/ini.cpython-311.pyc and 
b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/ini.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/script.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/script.cpython-311.pyc index f4e39473c..c9168c4c8 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/script.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/script.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/toml.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/toml.cpython-311.pyc index 88b724be0..49c9cd113 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/toml.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/toml.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/yaml.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/yaml.cpython-311.pyc index 3bdc3a4d9..ec99a3b00 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/yaml.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/inventory/__pycache__/yaml.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/__init__.cpython-311.pyc index d81e96d0d..0856b895c 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/cmd.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/cmd.cpython-311.pyc index 2ec0f0580..7c78127d4 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/cmd.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/cmd.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/powershell.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/powershell.cpython-311.pyc index c5217b5ed..9f802ad10 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/powershell.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/powershell.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/sh.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/sh.cpython-311.pyc index 5f12dc98d..b716cbbd1 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/sh.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/shell/__pycache__/sh.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/strategy/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/strategy/__pycache__/__init__.cpython-311.pyc index fdb1bd874..3c590e1ae 100644 
Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/strategy/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/strategy/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/strategy/__pycache__/linear.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/strategy/__pycache__/linear.cpython-311.pyc index 70d771824..361d5395b 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/strategy/__pycache__/linear.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/strategy/__pycache__/linear.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/__init__.cpython-311.pyc index f5566a4ec..83df36986 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/core.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/core.cpython-311.pyc index 8afab5b34..cfd0906b2 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/core.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/core.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/files.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/files.cpython-311.pyc index 982cc683a..2312ce14d 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/files.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/files.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/mathstuff.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/mathstuff.cpython-311.pyc index 004ebc0e7..26b93855c 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/mathstuff.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/mathstuff.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/uri.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/uri.cpython-311.pyc index fa6185bef..3c3fe893e 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/uri.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/test/__pycache__/uri.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/vars/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/plugins/vars/__pycache__/__init__.cpython-311.pyc index df63995f8..2174c7057 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/vars/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/vars/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/plugins/vars/__pycache__/host_group_vars.cpython-311.pyc 
b/ansible/lib/python3.11/site-packages/ansible/plugins/vars/__pycache__/host_group_vars.cpython-311.pyc index 3d64d3f4f..b1555d793 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/plugins/vars/__pycache__/host_group_vars.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/plugins/vars/__pycache__/host_group_vars.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/__init__.cpython-311.pyc index 7f1603e7f..b434db2e9 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/native_helpers.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/native_helpers.cpython-311.pyc index 7ee6c8e3e..2fa89664a 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/native_helpers.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/native_helpers.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/template.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/template.cpython-311.pyc index f70a4f8af..c35fbb5d9 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/template.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/template.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/vars.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/vars.cpython-311.pyc index 7805558bc..a615353cb 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/vars.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/template/__pycache__/vars.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/__init__.cpython-311.pyc index 5971e8ba2..703cbc99d 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/_junit_xml.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/_junit_xml.cpython-311.pyc index 09cbc35f3..a96d2078d 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/_junit_xml.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/_junit_xml.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/color.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/color.cpython-311.pyc index 3ef09e9a7..64d8a3d3e 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/color.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/color.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/context_objects.cpython-311.pyc 
b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/context_objects.cpython-311.pyc index c6e9ab940..e6b3a341d 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/context_objects.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/context_objects.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/display.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/display.cpython-311.pyc index 217159535..3fd62947d 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/display.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/display.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/encrypt.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/encrypt.cpython-311.pyc index 587be4cc9..754c24950 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/encrypt.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/encrypt.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/fqcn.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/fqcn.cpython-311.pyc index c5284a865..fd49fb0ca 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/fqcn.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/fqcn.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/galaxy.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/galaxy.cpython-311.pyc index 9f8985124..6fc8d33e9 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/galaxy.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/galaxy.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/hashing.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/hashing.cpython-311.pyc index 79c081703..049dd8d7c 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/hashing.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/hashing.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/helpers.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/helpers.cpython-311.pyc index 7f40cb5b6..909e1d219 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/helpers.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/helpers.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/listify.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/listify.cpython-311.pyc index 5e956f59e..5572c708c 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/listify.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/listify.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/lock.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/lock.cpython-311.pyc index 72bc96c6e..d4b36ce4a 100644 Binary files 
a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/lock.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/lock.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/multiprocessing.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/multiprocessing.cpython-311.pyc index c6eb6e627..596b4829c 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/multiprocessing.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/multiprocessing.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/native_jinja.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/native_jinja.cpython-311.pyc index 25ee772f1..01aec7b25 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/native_jinja.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/native_jinja.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/path.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/path.cpython-311.pyc index 76bd47da4..1003733e6 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/path.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/path.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/plugin_docs.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/plugin_docs.cpython-311.pyc index 61f429c11..7c40cc6f0 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/plugin_docs.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/plugin_docs.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/sentinel.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/sentinel.cpython-311.pyc index a677fb7fa..515fd2e2f 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/sentinel.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/sentinel.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/shlex.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/shlex.cpython-311.pyc index 1dcbc4177..c210903bd 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/shlex.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/shlex.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/singleton.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/singleton.cpython-311.pyc index fc4962a57..ec7054956 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/singleton.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/singleton.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/ssh_functions.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/ssh_functions.cpython-311.pyc index c65e8e315..3bb40d09d 100644 Binary files 
a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/ssh_functions.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/ssh_functions.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/unicode.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/unicode.cpython-311.pyc index ac507952e..f006a86f0 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/unicode.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/unicode.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/unsafe_proxy.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/unsafe_proxy.cpython-311.pyc index 030c637a2..420e4bda8 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/unsafe_proxy.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/unsafe_proxy.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/vars.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/vars.cpython-311.pyc index e62935625..9a3a3fcc4 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/vars.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/vars.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/version.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/version.cpython-311.pyc index 48f58c5e6..e0dca215e 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/version.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/__pycache__/version.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/__init__.cpython-311.pyc index 9d56de197..9a114b27e 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/_collection_config.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/_collection_config.cpython-311.pyc index 6881a3fd3..91a2af199 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/_collection_config.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/_collection_config.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/_collection_finder.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/_collection_finder.cpython-311.pyc index 16710637b..79bdeb354 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/_collection_finder.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/_collection_finder.cpython-311.pyc differ diff --git 
a/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/_collection_meta.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/_collection_meta.cpython-311.pyc index 9dea1e058..514ffb2e1 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/_collection_meta.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/utils/collection_loader/__pycache__/_collection_meta.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/__init__.cpython-311.pyc index ae6e74bab..7efa21a24 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/clean.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/clean.cpython-311.pyc index 4d7e2a2d0..5246e4e0d 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/clean.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/clean.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/fact_cache.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/fact_cache.cpython-311.pyc index 3dfba389b..9067d03d6 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/fact_cache.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/fact_cache.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/hostvars.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/hostvars.cpython-311.pyc index 8ae83c277..55f8d7e90 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/hostvars.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/hostvars.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/manager.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/manager.cpython-311.pyc index 063cc236a..44486b3d1 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/manager.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/manager.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/plugins.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/plugins.cpython-311.pyc index da21a85a8..93bb19778 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/plugins.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/plugins.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/reserved.cpython-311.pyc b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/reserved.cpython-311.pyc index 1e7cd84cc..6a0644503 100644 Binary files a/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/reserved.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/ansible/vars/__pycache__/reserved.cpython-311.pyc differ diff --git 
a/ansible/lib/python3.11/site-packages/cryptography/__pycache__/__about__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/__pycache__/__about__.cpython-311.pyc index b4ffd6d13..50145ed02 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/__pycache__/__about__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/__pycache__/__about__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/__pycache__/__init__.cpython-311.pyc index 94ba98f79..59928477d 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/__pycache__/exceptions.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/__pycache__/exceptions.cpython-311.pyc index 83a6f3910..57c56884d 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/__pycache__/exceptions.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/__pycache__/exceptions.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/__pycache__/utils.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/__pycache__/utils.cpython-311.pyc index 210d16073..672b064cb 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/__pycache__/utils.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/__pycache__/utils.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-311.pyc index 2c52fa520..08e97675b 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-311.pyc index 45255a3a8..e0e2f2e7d 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-311.pyc index bf1cea14b..3da25f62f 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-311.pyc index 46f6fa9e6..60a2fa175 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-311.pyc and 
b/ansible/lib/python3.11/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-311.pyc index 4d69e0871..7875c4612 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-311.pyc index 3958ea007..f3b9cd2ea 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-311.pyc index 950b2dc7d..0f335a2cb 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-311.pyc index bf7dabc30..1a1cc96cd 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-311.pyc index a3487ef40..723cabe93 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-311.pyc index 64e666fd1..4b3f78f66 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-311.pyc index cfe755524..55ae49c6c 100644 Binary files 
a/ansible/lib/python3.11/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-311.pyc index 72ce03a38..310d65e33 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-311.pyc index 43f102850..aacbf999d 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-311.pyc index d3ddd9e8e..e6dd39a02 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-311.pyc index c5fc838bb..2da48980d 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-311.pyc index 5c88ad372..b37f80736 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-311.pyc index c75f00952..cd6a8e288 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-311.pyc 
b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-311.pyc index c1737feef..e02ae67ae 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-311.pyc index 91e649f74..74bdea93f 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-311.pyc index b1d1ac883..0fcf5d3d8 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-311.pyc index 91e2d3298..10ef351df 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-311.pyc index 35ac18917..93a93129b 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-311.pyc index 394442271..dd977e2b5 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-311.pyc index 2d816557c..994527886 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-311.pyc differ diff --git 
a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-311.pyc index c623f2fd6..5064d3ab3 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-311.pyc index 4a6789d51..3dca52308 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-311.pyc index 49e22d933..eae111492 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-311.pyc index eb1ab8eef..e9f189d37 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-311.pyc index dd673ba65..964ff937e 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-311.pyc index 13281da85..885867209 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-311.pyc b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-311.pyc index 41a049ca9..c51d831b5 100644 Binary files a/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-311.pyc and 
b/ansible/lib/python3.11/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc index e01182bde..aae3ac2ad 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc index 7e7d471ab..2fbc0cbb1 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/_identifier.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc index 6d352b942..9b060e482 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/async_utils.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc index 5b1e8bb3a..bc1763937 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/bccache.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc index bd7311659..cef9ab257 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/compiler.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc index 9f7ca5ed6..11c177f2b 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/defaults.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/environment.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/environment.cpython-311.pyc index cb861aa2d..530e1d9e6 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/environment.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/environment.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc index 359dacd05..c9f741f26 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/exceptions.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/filters.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/filters.cpython-311.pyc index 
69c00e28d..a9cdb3399 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/filters.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/filters.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc index 52c59a75b..85ebf8f54 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/idtracking.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc index 4d6495e32..2700bc3a8 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/lexer.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc index 9abcc09d4..3cffa1be9 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/loaders.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/nativetypes.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/nativetypes.cpython-311.pyc index 54cb192f8..8d6e63aa8 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/nativetypes.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/nativetypes.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc index 92246ed90..8d4a8364c 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/nodes.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc index 27ef18546..8ad272d3e 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/optimizer.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/parser.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/parser.cpython-311.pyc index 34ecc5758..eee2eb59c 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/parser.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/parser.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc index c0d4b7f26..c3a3f2628 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/runtime.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/tests.cpython-311.pyc 
b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/tests.cpython-311.pyc index 382fb3e08..c750437ce 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/tests.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/tests.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/utils.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/utils.cpython-311.pyc index e2b2dca86..9afcb0579 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/utils.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/utils.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc index 12fdf56eb..d8c81b6fb 100644 Binary files a/ansible/lib/python3.11/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/jinja2/__pycache__/visitor.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/markupsafe/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/markupsafe/__pycache__/__init__.cpython-311.pyc index e8da9202e..44b0f09ee 100644 Binary files a/ansible/lib/python3.11/site-packages/markupsafe/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/markupsafe/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/packaging/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/packaging/__pycache__/__init__.cpython-311.pyc index 75448a3c9..486aa44f9 100644 Binary files a/ansible/lib/python3.11/site-packages/packaging/__pycache__/__init__.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/packaging/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/packaging/__pycache__/_elffile.cpython-311.pyc b/ansible/lib/python3.11/site-packages/packaging/__pycache__/_elffile.cpython-311.pyc index 793309c56..c45909b86 100644 Binary files a/ansible/lib/python3.11/site-packages/packaging/__pycache__/_elffile.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/packaging/__pycache__/_elffile.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/packaging/__pycache__/_manylinux.cpython-311.pyc b/ansible/lib/python3.11/site-packages/packaging/__pycache__/_manylinux.cpython-311.pyc index 736812217..9ffc895e4 100644 Binary files a/ansible/lib/python3.11/site-packages/packaging/__pycache__/_manylinux.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/packaging/__pycache__/_manylinux.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/packaging/__pycache__/_musllinux.cpython-311.pyc b/ansible/lib/python3.11/site-packages/packaging/__pycache__/_musllinux.cpython-311.pyc index 05fb9ecd4..c1daa3596 100644 Binary files a/ansible/lib/python3.11/site-packages/packaging/__pycache__/_musllinux.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/packaging/__pycache__/_musllinux.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/packaging/__pycache__/_structures.cpython-311.pyc b/ansible/lib/python3.11/site-packages/packaging/__pycache__/_structures.cpython-311.pyc index 25af08378..f32bbf1af 100644 Binary files a/ansible/lib/python3.11/site-packages/packaging/__pycache__/_structures.cpython-311.pyc and 
b/ansible/lib/python3.11/site-packages/packaging/__pycache__/_structures.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/packaging/__pycache__/specifiers.cpython-311.pyc b/ansible/lib/python3.11/site-packages/packaging/__pycache__/specifiers.cpython-311.pyc index d013d57d8..3cd7b1c8f 100644 Binary files a/ansible/lib/python3.11/site-packages/packaging/__pycache__/specifiers.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/packaging/__pycache__/specifiers.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/packaging/__pycache__/tags.cpython-311.pyc b/ansible/lib/python3.11/site-packages/packaging/__pycache__/tags.cpython-311.pyc index 194605d63..6dba0134b 100644 Binary files a/ansible/lib/python3.11/site-packages/packaging/__pycache__/tags.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/packaging/__pycache__/tags.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/packaging/__pycache__/utils.cpython-311.pyc b/ansible/lib/python3.11/site-packages/packaging/__pycache__/utils.cpython-311.pyc index 1161992f2..26a7966d9 100644 Binary files a/ansible/lib/python3.11/site-packages/packaging/__pycache__/utils.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/packaging/__pycache__/utils.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/packaging/__pycache__/version.cpython-311.pyc b/ansible/lib/python3.11/site-packages/packaging/__pycache__/version.cpython-311.pyc index 87f227dbc..b482241b5 100644 Binary files a/ansible/lib/python3.11/site-packages/packaging/__pycache__/version.cpython-311.pyc and b/ansible/lib/python3.11/site-packages/packaging/__pycache__/version.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/INSTALLER b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/INSTALLER new file mode 100644 index 000000000..a1b589e38 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/LICENSE b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/LICENSE new file mode 100644 index 000000000..48c0f7208 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/LICENSE @@ -0,0 +1,116 @@ +.. -*- restructuredtext -*- + +===================== +Copyrights & Licenses +===================== + +Credits +======= +Passlib is primarily developed by Eli Collins. + +Special thanks to Darin Gordon for testing and +feedback on the :mod:`passlib.totp` module. + +License for Passlib +=================== +Passlib is (c) `Assurance Technologies `_, +and is released under the `BSD license `_:: + + Passlib + Copyright (c) 2008-2020 Assurance Technologies, LLC. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + * Neither the name of Assurance Technologies, nor the names of the + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Licenses for incorporated software +================================== +Passlib contains some code derived from the following sources: + +MD5-Crypt +--------- +The source file ``passlib/handlers/md5_crypt.py`` contains code derived from the original +`FreeBSD md5-crypt implementation `_, +which is available under the following license:: + + "THE BEER-WARE LICENSE" (Revision 42): + wrote this file. As long as you retain this notice you + can do whatever you want with this stuff. If we meet some day, and you think + this stuff is worth it, you can buy me a beer in return. Poul-Henning Kamp + + converted to python May 2008 + by Eli Collins + +DES +--- +The source file ``passlib/crypto/des.py`` contains code derived from +`UnixCrypt.java `_, +a pure-java implementation of the historic unix-crypt password hash algorithm. +It is available under the following license:: + + UnixCrypt.java 0.9 96/11/25 + Copyright (c) 1996 Aki Yoshida. All rights reserved. + Permission to use, copy, modify and distribute this software + for non-commercial or commercial purposes and without fee is + hereby granted provided that this copyright notice appears in + all copies. + + modified April 2001 + by Iris Van den Broeke, Daniel Deville + + modified Aug 2005 + by Greg Wilkins (gregw) + + converted to python Jun 2009 + by Eli Collins + +jBCrypt +------- +The source file ``passlib/crypto/_blowfish/base.py`` contains code derived +from `jBcrypt 0.2 `_, a Java +implementation of the BCrypt password hash algorithm. It is available under +a BSD/ISC license:: + + Copyright (c) 2006 Damien Miller + + Permission to use, copy, modify, and distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTUOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +Wordsets +-------- +The EFF wordsets in ``passlib/_data/wordsets`` are (c) 2016 the Electronic Freedom Foundation. +They were downloaded from ``_, +and are released under the `Creative Commons License `_. 
diff --git a/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/METADATA b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/METADATA new file mode 100644 index 000000000..0665d8af7 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/METADATA @@ -0,0 +1,40 @@ +Metadata-Version: 2.1 +Name: passlib +Version: 1.7.4 +Summary: comprehensive password hashing framework supporting over 30 schemes +Home-page: https://passlib.readthedocs.io +Author: Eli Collins +Author-email: elic@assurancetechnologies.com +License: BSD +Download-URL: https://pypi.python.org/packages/source/p/passlib/passlib-1.7.4.tar.gz +Keywords: password secret hash security +Provides-Extra: argon2 +Requires-Dist: argon2-cffi (>=18.2.0) ; extra == 'argon2' +Provides-Extra: bcrypt +Requires-Dist: bcrypt (>=3.1.0) ; extra == 'bcrypt' +Provides-Extra: build_docs +Requires-Dist: sphinx (>=1.6) ; extra == 'build_docs' +Requires-Dist: sphinxcontrib-fulltoc (>=1.2.0) ; extra == 'build_docs' +Requires-Dist: cloud-sptheme (>=1.10.1) ; extra == 'build_docs' +Provides-Extra: totp +Requires-Dist: cryptography ; extra == 'totp' + +Passlib is a password hashing library for Python 2 & 3, which provides +cross-platform implementations of over 30 password hashing algorithms, as well +as a framework for managing existing password hashes. It's designed to be useful +for a wide range of tasks, from verifying a hash found in /etc/shadow, to +providing full-strength password hashing for multi-user applications. + +* See the `documentation `_ + for details, installation instructions, and examples. + +* See the `homepage `_ + for the latest news and more information. + +* See the `changelog `_ + for a description of what's new in Passlib. + +All releases are signed with the gpg key +`4D8592DF4CE1ED31 `_. 
+ + diff --git a/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/RECORD b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/RECORD new file mode 100644 index 000000000..a3b5e3fcd --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/RECORD @@ -0,0 +1,202 @@ +passlib-1.7.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +passlib-1.7.4.dist-info/LICENSE,sha256=qVuo8a-I_41fDQwzUZ9DC3-diZK2nUvDaawEI6egWok,4954 +passlib-1.7.4.dist-info/METADATA,sha256=l-uRq14ie328RCoVsayT7AfMHaJqv34ICbpQtKG00jM,1688 +passlib-1.7.4.dist-info/RECORD,, +passlib-1.7.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +passlib-1.7.4.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110 +passlib-1.7.4.dist-info/top_level.txt,sha256=BA9xbJpLdaTxqvYbKigYnMQkzp8-UQr6S4m3lBTkxzw,8 +passlib-1.7.4.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +passlib/__init__.py,sha256=nSZrPEtlMQSKZqxERmYcWCDBD6pJ1P_DL5TdeSjIReU,87 +passlib/__pycache__/__init__.cpython-311.pyc,, +passlib/__pycache__/apache.cpython-311.pyc,, +passlib/__pycache__/apps.cpython-311.pyc,, +passlib/__pycache__/context.cpython-311.pyc,, +passlib/__pycache__/exc.cpython-311.pyc,, +passlib/__pycache__/hash.cpython-311.pyc,, +passlib/__pycache__/hosts.cpython-311.pyc,, +passlib/__pycache__/ifc.cpython-311.pyc,, +passlib/__pycache__/pwd.cpython-311.pyc,, +passlib/__pycache__/registry.cpython-311.pyc,, +passlib/__pycache__/totp.cpython-311.pyc,, +passlib/__pycache__/win32.cpython-311.pyc,, +passlib/_data/wordsets/bip39.txt,sha256=JM5Cwv1KlcG4a77pvOHhzyVb0AIuGbq2vVka_Wi379s,13117 +passlib/_data/wordsets/eff_long.txt,sha256=bVV_BpOVj7XmULaLW-5YXrgs9NoyllUFx4npJHQ7xSI,62144 +passlib/_data/wordsets/eff_prefixed.txt,sha256=eqV6TT7PZYFymZK62VdbrN6_fCg3ivKuxqUPEa7DJvU,10778 +passlib/_data/wordsets/eff_short.txt,sha256=NuzKSeT6IMqEsXbDLy6cgvmPRGWFGQ51-YealcCCR78,7180 +passlib/apache.py,sha256=TsHUCur5W8tK3Rsb9jYeeBCc7Ua_hP9e2tSxzoUVzwc,46661 +passlib/apps.py,sha256=AYqni3QIelR7HCiPj_hv2Mcr8bsfdcUkh07DwQqZxWs,8067 +passlib/context.py,sha256=aJeTjA-h7ke3KObvEM8aSJzKdN3wrOyu0hTt-MTbJt0,109195 +passlib/crypto/__init__.py,sha256=St6CGqhrfz3L5Da3aZvRK69le_FcLLE3gA2dEByOmC0,84 +passlib/crypto/__pycache__/__init__.cpython-311.pyc,, +passlib/crypto/__pycache__/_md4.cpython-311.pyc,, +passlib/crypto/__pycache__/des.cpython-311.pyc,, +passlib/crypto/__pycache__/digest.cpython-311.pyc,, +passlib/crypto/_blowfish/__init__.py,sha256=iZb7ft1vxBjCW7lpDtWwTxuMicgvi673M5F_1PKdVkg,6426 +passlib/crypto/_blowfish/__pycache__/__init__.cpython-311.pyc,, +passlib/crypto/_blowfish/__pycache__/_gen_files.cpython-311.pyc,, +passlib/crypto/_blowfish/__pycache__/base.cpython-311.pyc,, +passlib/crypto/_blowfish/__pycache__/unrolled.cpython-311.pyc,, +passlib/crypto/_blowfish/_gen_files.py,sha256=fUrNGWA5NX9CyvoJbNhJv7PJmptbp1uSR9iaWzKkb1I,6176 +passlib/crypto/_blowfish/base.py,sha256=_zF7x6XSbqCl2HH5Eya8KIhhJVbDYuYAWKfxbjOQZWg,20390 +passlib/crypto/_blowfish/unrolled.py,sha256=FOMhVo_jnGS3bMafXfjEffDPSP5vMogFvupnVKAa1lg,37153 +passlib/crypto/_md4.py,sha256=_5RXBX_gowtN0x05PnN0EF_csO4Q_NA5whm6e_vJx08,6905 +passlib/crypto/des.py,sha256=1EsvVd34Z82BYmGb8JIzfVWvTMN70fWhJGmIfmNrBAU,51878 +passlib/crypto/digest.py,sha256=WsfpcC8IM-gvZh56m6v8bjzG4nsNAsaoSv2LNY1_5go,36158 +passlib/crypto/scrypt/__init__.py,sha256=bXmeIerN6DKJSw8XsQEYcsUKCfRpXGb190e-gdHbbqU,9630 +passlib/crypto/scrypt/__pycache__/__init__.cpython-311.pyc,, 
+passlib/crypto/scrypt/__pycache__/_builtin.cpython-311.pyc,, +passlib/crypto/scrypt/__pycache__/_gen_files.cpython-311.pyc,, +passlib/crypto/scrypt/__pycache__/_salsa.cpython-311.pyc,, +passlib/crypto/scrypt/_builtin.py,sha256=82RZc_4LQv2JCL06bX70hCICBaK30Uy7PGzmZtiOjA0,8910 +passlib/crypto/scrypt/_gen_files.py,sha256=vRhjlIKqwvcILCo20sVf8dXr15tW636t5oojAZFssJE,4683 +passlib/crypto/scrypt/_salsa.py,sha256=b87_YEP3jJSmlU2BHSx-NKiJ4e_1eK-RlC4pWA4y71I,5719 +passlib/exc.py,sha256=MIjUTBLcOai52paDLM1nFh6lMTLBLPAn1PTdbCm-9Fo,14481 +passlib/ext/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +passlib/ext/__pycache__/__init__.cpython-311.pyc,, +passlib/ext/django/__init__.py,sha256=RvooHmuUwjLXuSuUJr-9URnY1CRVCzU2xdh-jW-mrN0,228 +passlib/ext/django/__pycache__/__init__.cpython-311.pyc,, +passlib/ext/django/__pycache__/models.cpython-311.pyc,, +passlib/ext/django/__pycache__/utils.cpython-311.pyc,, +passlib/ext/django/models.py,sha256=-XpQRLGG2kTuLWNoh-EhKOaeEV5aIfzavw8qTQ-p1fM,1314 +passlib/ext/django/utils.py,sha256=ObpILR1seOZyecYhuQ1G_R9_N6DMuS4kWZve_giRLiw,49409 +passlib/handlers/__init__.py,sha256=sIPjJgOGHpOIstAwDeHTfxKR8wLVqP4zSa4mvBhAZ_8,86 +passlib/handlers/__pycache__/__init__.cpython-311.pyc,, +passlib/handlers/__pycache__/argon2.cpython-311.pyc,, +passlib/handlers/__pycache__/bcrypt.cpython-311.pyc,, +passlib/handlers/__pycache__/cisco.cpython-311.pyc,, +passlib/handlers/__pycache__/des_crypt.cpython-311.pyc,, +passlib/handlers/__pycache__/digests.cpython-311.pyc,, +passlib/handlers/__pycache__/django.cpython-311.pyc,, +passlib/handlers/__pycache__/fshp.cpython-311.pyc,, +passlib/handlers/__pycache__/ldap_digests.cpython-311.pyc,, +passlib/handlers/__pycache__/md5_crypt.cpython-311.pyc,, +passlib/handlers/__pycache__/misc.cpython-311.pyc,, +passlib/handlers/__pycache__/mssql.cpython-311.pyc,, +passlib/handlers/__pycache__/mysql.cpython-311.pyc,, +passlib/handlers/__pycache__/oracle.cpython-311.pyc,, +passlib/handlers/__pycache__/pbkdf2.cpython-311.pyc,, +passlib/handlers/__pycache__/phpass.cpython-311.pyc,, +passlib/handlers/__pycache__/postgres.cpython-311.pyc,, +passlib/handlers/__pycache__/roundup.cpython-311.pyc,, +passlib/handlers/__pycache__/scram.cpython-311.pyc,, +passlib/handlers/__pycache__/scrypt.cpython-311.pyc,, +passlib/handlers/__pycache__/sha1_crypt.cpython-311.pyc,, +passlib/handlers/__pycache__/sha2_crypt.cpython-311.pyc,, +passlib/handlers/__pycache__/sun_md5_crypt.cpython-311.pyc,, +passlib/handlers/__pycache__/windows.cpython-311.pyc,, +passlib/handlers/argon2.py,sha256=XrMPknuG-16IAwrd7WUuTKdIkKOD-3UPlHHZOjXZe68,38934 +passlib/handlers/bcrypt.py,sha256=LF33HnoxOhjtr7aFtrKgU5SB4mtw3xGx7C4tqecosrk,53582 +passlib/handlers/cisco.py,sha256=Yz0KhmqVVAV_szNnuZq40WgYg6eomBRoAJBbRrSUkGg,16284 +passlib/handlers/des_crypt.py,sha256=W3srE5kIaRQdhfIObz237sm0vPgqR4p_9ZkSd-9UNPo,22367 +passlib/handlers/digests.py,sha256=AeuVSxas2793ILXX0s6xm1lA1u4RPpE9G8wZSaq0Bs4,6327 +passlib/handlers/django.py,sha256=MmoLua6kZWVItsjRrnDgfktzuEpqlvwlPaexvti6I9M,20185 +passlib/handlers/fshp.py,sha256=78sMdnAkW5YHCPC13bLptdElLFrWzZF7rm3bwUWHATo,7799 +passlib/handlers/ldap_digests.py,sha256=jgxtxcERep4xXgVVfKfSVe5JEE45b5tt87NKGvK9_Zk,13049 +passlib/handlers/md5_crypt.py,sha256=jLt3IP-l0HFfU1u2VEtGI1WBYVNjTqhjvwovfFREiwg,13740 +passlib/handlers/misc.py,sha256=o1tWKAdTp3EnCYJOERpdkQnRwrQfWWKeiJXSQurbVMo,10109 +passlib/handlers/mssql.py,sha256=BECU0VaVtc-RzhGx7E_LVu2moZpEI5GASChFJnzDVxA,8482 +passlib/handlers/mysql.py,sha256=8h83lpTHs5q8zflXP0TyMavrELgtlvgUbcLtFUHnbDY,4796 
+passlib/handlers/oracle.py,sha256=WDCqJEo2rDihcuUs4Ka48JBpSm4_JnNqXIVsCGUrkO8,6691 +passlib/handlers/pbkdf2.py,sha256=jVqdo1MSD3_7B5m-osqUTBTwTXnhedLan9lQaM-gysU,19010 +passlib/handlers/phpass.py,sha256=0c7maDUNGxIuyiG_O2hK6MJbxcTu7V2vx67xOq8d7ps,4785 +passlib/handlers/postgres.py,sha256=y9AzGpxjK-z1HLHElRQtLzCMqqtvBwd_xxJraHdGpN4,2274 +passlib/handlers/roundup.py,sha256=lvYArKerC702_MHZXMi3F-iZ9Y2jH10h2UXKDpgqoO8,1178 +passlib/handlers/scram.py,sha256=wBsoBg0qLW8HA5Nsgcnd1bM7ZDYEFbapAGoP0_44N58,22539 +passlib/handlers/scrypt.py,sha256=OYfF2Jjltydr5BswyZ-uFgl4yEjQZowGdIZpEyB7s5Q,14146 +passlib/handlers/sha1_crypt.py,sha256=DZOdKExzlucHCfpgszG1cFdareTqpGUGORNIEn4FJCs,5873 +passlib/handlers/sha2_crypt.py,sha256=kTZm-jmRVnKRhquetVBbiDWi9eY87NTJvUYkjGEm7MY,21800 +passlib/handlers/sun_md5_crypt.py,sha256=uWhoKxBITVwPlh9MIQ3WjVrYjlRMgLrBjLR1Ui2kmZw,13933 +passlib/handlers/windows.py,sha256=nviGebFjOiJO_cDJRo7RiccEhlN2UM7nAQL0pTso9MQ,12384 +passlib/hash.py,sha256=9lVasGFiXDGcL8VOWuEwAjzlATQbmEYF30wOIVotP-U,3750 +passlib/hosts.py,sha256=odRo2WnSfjMuktSIwfR50rzxbKGfzUwZ2CUkvcxvJoA,3302 +passlib/ifc.py,sha256=kL2svtkF99VQDOim_6TE6OGhmSf2EyHrzp0v_UQksqA,14196 +passlib/pwd.py,sha256=VeU_PVkZSvwXPI6AQA96cjqIKyuTvXtUoCK7eI5ab7w,28690 +passlib/registry.py,sha256=5qLDF72XHGSQVoEVqhvEngfZsO2fxVsBpWntX_D0YRs,20301 +passlib/tests/__init__.py,sha256=JIK29mBP8OKz3ChmaEbyr9vvml3weGe7YHMTHzBJcr0,20 +passlib/tests/__main__.py,sha256=iKv9ZuQe5jBzp4Gyp_G3wXhQBxSTJguMx1BCCVVZL6Y,82 +passlib/tests/__pycache__/__init__.cpython-311.pyc,, +passlib/tests/__pycache__/__main__.cpython-311.pyc,, +passlib/tests/__pycache__/_test_bad_register.cpython-311.pyc,, +passlib/tests/__pycache__/backports.cpython-311.pyc,, +passlib/tests/__pycache__/test_apache.cpython-311.pyc,, +passlib/tests/__pycache__/test_apps.cpython-311.pyc,, +passlib/tests/__pycache__/test_context.cpython-311.pyc,, +passlib/tests/__pycache__/test_context_deprecated.cpython-311.pyc,, +passlib/tests/__pycache__/test_crypto_builtin_md4.cpython-311.pyc,, +passlib/tests/__pycache__/test_crypto_des.cpython-311.pyc,, +passlib/tests/__pycache__/test_crypto_digest.cpython-311.pyc,, +passlib/tests/__pycache__/test_crypto_scrypt.cpython-311.pyc,, +passlib/tests/__pycache__/test_ext_django.cpython-311.pyc,, +passlib/tests/__pycache__/test_ext_django_source.cpython-311.pyc,, +passlib/tests/__pycache__/test_handlers.cpython-311.pyc,, +passlib/tests/__pycache__/test_handlers_argon2.cpython-311.pyc,, +passlib/tests/__pycache__/test_handlers_bcrypt.cpython-311.pyc,, +passlib/tests/__pycache__/test_handlers_cisco.cpython-311.pyc,, +passlib/tests/__pycache__/test_handlers_django.cpython-311.pyc,, +passlib/tests/__pycache__/test_handlers_pbkdf2.cpython-311.pyc,, +passlib/tests/__pycache__/test_handlers_scrypt.cpython-311.pyc,, +passlib/tests/__pycache__/test_hosts.cpython-311.pyc,, +passlib/tests/__pycache__/test_pwd.cpython-311.pyc,, +passlib/tests/__pycache__/test_registry.cpython-311.pyc,, +passlib/tests/__pycache__/test_totp.cpython-311.pyc,, +passlib/tests/__pycache__/test_utils.cpython-311.pyc,, +passlib/tests/__pycache__/test_utils_handlers.cpython-311.pyc,, +passlib/tests/__pycache__/test_utils_md4.cpython-311.pyc,, +passlib/tests/__pycache__/test_utils_pbkdf2.cpython-311.pyc,, +passlib/tests/__pycache__/test_win32.cpython-311.pyc,, +passlib/tests/__pycache__/tox_support.cpython-311.pyc,, +passlib/tests/__pycache__/utils.cpython-311.pyc,, +passlib/tests/_test_bad_register.py,sha256=yws8uO2HsUWg8GRQPlxKvE5HniP84QSQW6ncCPiZDpw,541 
+passlib/tests/backports.py,sha256=QTi9tD9DO_RlawkInpPDsFaol--5hsMI-cFvwLIE9B0,2593 +passlib/tests/sample1.cfg,sha256=lJsayArbi6FElINzcTQ1VbgTTGY5LKpMdbCJvK_6H8s,243 +passlib/tests/sample1b.cfg,sha256=2ZQnnpumQsEJpKFsTOHuv_ULhQY5PhQPnsa2rSZmTEU,252 +passlib/tests/sample1c.cfg,sha256=u-BGMklAN05efndzADJfFV9gP1Jbns1gDdwC__VfW-8,490 +passlib/tests/sample_config_1s.cfg,sha256=mMgYjX_UvxVVLFTfZ4m-vxVo31MbSNrZA0R7VY6DzTk,238 +passlib/tests/test_apache.py,sha256=_XhDKgV1nON4ddQQU3GdUfSXrwY_x2OoJQ6l7w2Gzbw,29432 +passlib/tests/test_apps.py,sha256=6MrGeFenjSACzbAtp6jf3PNHoITv_v5DbT_7nhrR-KA,5281 +passlib/tests/test_context.py,sha256=Vsl2hhouEi3yn4_J7J10E09OotLneRHzkAY_jS16F08,74546 +passlib/tests/test_context_deprecated.py,sha256=cVXqcPx_Xqlsh6QF2az34RY23wP3pv8SOBbJFQn65Jg,29282 +passlib/tests/test_crypto_builtin_md4.py,sha256=5PWKh1HoQKC4gI4BcgVDh89xw7lix0R1n9Jn0Y8t8mQ,5660 +passlib/tests/test_crypto_des.py,sha256=0xWgS74G6ygl7gIvF6uhjcoThVTt1TqIH4ZUeqXbVmA,8874 +passlib/tests/test_crypto_digest.py,sha256=b15XIFLDUsjsaxPEQUJkb-csM65IRz_9glwZz7qwN7U,20478 +passlib/tests/test_crypto_scrypt.py,sha256=xJDU3e4bt9N1X0fA9zBLBxESk3PsTR89qJeEWNX2Em4,26646 +passlib/tests/test_ext_django.py,sha256=QUKoa6rLn3hbCVNk7_0z9JW5aOFmyLbBwj0PiWhQJ7s,41364 +passlib/tests/test_ext_django_source.py,sha256=AW-PQRQeLz2cOpKGPeKPLSESC4o-ATbu3-Zd45Coi3k,11034 +passlib/tests/test_handlers.py,sha256=WxYhRTthTzDj-FIP2vS_mH0nlpjgrWOp2C-h3mN6DzE,68622 +passlib/tests/test_handlers_argon2.py,sha256=bSNARahGKPZTawLq-qhVdcuvprCDTNXGWPhSh8aRyaY,22837 +passlib/tests/test_handlers_bcrypt.py,sha256=izOVd0WthIi90YKkvskrW5DZPMMCvO2qtwRkefvgkdY,29549 +passlib/tests/test_handlers_cisco.py,sha256=TLvuGQZygEZbjA01t1hfGfBvx3THnv6ZwbNQCKUhsuI,20471 +passlib/tests/test_handlers_django.py,sha256=ADphUgbG9PwoXQPFbEAPeIDfqjK6DENl_wizP52wYSE,15538 +passlib/tests/test_handlers_pbkdf2.py,sha256=vDM9ipts9EYoauheNHtOOYq0Nl8-9ltTML4gnw2EB2g,18788 +passlib/tests/test_handlers_scrypt.py,sha256=wHsbgoV5xhY4SQtgWFCuit3lygkNvd0AQKZ0lmp72do,4188 +passlib/tests/test_hosts.py,sha256=n0gCywmbsw8q8p4WLp-AlQrQuPfe-29fYwUfWwXi4Co,3906 +passlib/tests/test_pwd.py,sha256=Si9qFDXwkbjTsJ9wQTYe-QhlprVoMQ2E79-eX11FPBk,7190 +passlib/tests/test_registry.py,sha256=9BgXvMhHKQQHBGdgV4WyDDZUboUh0tbHYdgPYr1upSo,9246 +passlib/tests/test_totp.py,sha256=T1o3B97SltvC1OKweXQpX1bBGf6KYQnMl8jcpBSg5DU,65746 +passlib/tests/test_utils.py,sha256=yMWrrnsMIg8b8guyzRK8lDJ243rul6ANhrIgImGlyVI,46118 +passlib/tests/test_utils_handlers.py,sha256=rVSuaNqRUb4Q520nVD4C5smzVs-LdFqQjFZMDRTz-zU,32134 +passlib/tests/test_utils_md4.py,sha256=CfQor3ZfV2JO_8x2RxY5Tl5ZsS0hDvIje46cLvLN5Ew,1474 +passlib/tests/test_utils_pbkdf2.py,sha256=gIhycQf4NUNd5yjUrtKfRm3eqqpklS9W2B7-8INp4Cg,12193 +passlib/tests/test_win32.py,sha256=BXVpHSm71ePXmmbBPTN4H38lUgGqG6-iZasbj_l1mVg,1920 +passlib/tests/tox_support.py,sha256=PDaO1ftDtOFzd299EXm0X5HWRzg37VsBiHsdiMOu5FA,2473 +passlib/tests/utils.py,sha256=mNbhjFNG16dmU13ChMyqOSY39OiR2d8LRUBi41dAMko,147541 +passlib/totp.py,sha256=Wryr57req8NFJnw1fI_eycCaTwmSY8WA7Z3OFjAwHOE,73033 +passlib/utils/__init__.py,sha256=VHkQHu7DcdVKyDjhPuyRG_2-25aI4Zwat3wr6K-rAlo,42925 +passlib/utils/__pycache__/__init__.cpython-311.pyc,, +passlib/utils/__pycache__/binary.cpython-311.pyc,, +passlib/utils/__pycache__/decor.cpython-311.pyc,, +passlib/utils/__pycache__/des.cpython-311.pyc,, +passlib/utils/__pycache__/handlers.cpython-311.pyc,, +passlib/utils/__pycache__/md4.cpython-311.pyc,, +passlib/utils/__pycache__/pbkdf2.cpython-311.pyc,, 
+passlib/utils/binary.py,sha256=dZe2ZjuGr0g6iQseO-ThkQ5XM6KnQFISGQr68vUOOhM,31422 +passlib/utils/compat/__init__.py,sha256=xuPP5PsmLJh_I5NrlaYa012zmWrdzfrYbL_oHqc4tCk,14235 +passlib/utils/compat/__pycache__/__init__.cpython-311.pyc,, +passlib/utils/compat/__pycache__/_ordered_dict.cpython-311.pyc,, +passlib/utils/compat/_ordered_dict.py,sha256=1nga6blaxokrrDdY3UrQgRXYdifZHCDgPYie1aCJkuI,8368 +passlib/utils/decor.py,sha256=svc2C-_DKfiCMmOBNhn_DK7IeS_WYNg26asjhx76LUA,7651 +passlib/utils/des.py,sha256=jFuvhUA3aaiR1xWX4NpXYm5XgcdewRT5Uas-7jLoSTE,2163 +passlib/utils/handlers.py,sha256=E3oRL908uudK_ZLZWeX5DoPxJL8uCfCGpmAkyfJoWQ8,105286 +passlib/utils/md4.py,sha256=pyxEpUe_t8E0u2ZDWOzYIJa0oXgTQBO7DQ8SMKGX8ag,1218 +passlib/utils/pbkdf2.py,sha256=foDGTAKeZywBAVlLZIRf4bX6fC3bzsoC1i_DtcdXr2I,6832 +passlib/win32.py,sha256=E6Ca-4Ki5ZlCSzd86N1CXjh-xQoJYjW-74-kJ6VsHUU,2591 diff --git a/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/REQUESTED b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/REQUESTED new file mode 100644 index 000000000..e69de29bb diff --git a/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/WHEEL b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/WHEEL new file mode 100644 index 000000000..6d38aa060 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/top_level.txt b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/top_level.txt new file mode 100644 index 000000000..419829dd6 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/top_level.txt @@ -0,0 +1 @@ +passlib diff --git a/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/zip-safe b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/zip-safe new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib-1.7.4.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/ansible/lib/python3.11/site-packages/passlib/__init__.py b/ansible/lib/python3.11/site-packages/passlib/__init__.py new file mode 100644 index 000000000..963bfcc9c --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/__init__.py @@ -0,0 +1,3 @@ +"""passlib - suite of password hashing & generation routines""" + +__version__ = '1.7.4' diff --git a/ansible/lib/python3.11/site-packages/passlib/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 000000000..21ab2c8e4 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/__pycache__/apache.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/__pycache__/apache.cpython-311.pyc new file mode 100644 index 000000000..5f118f916 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/__pycache__/apache.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/__pycache__/apps.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/__pycache__/apps.cpython-311.pyc new file mode 100644 index 000000000..1f00696bd Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/__pycache__/apps.cpython-311.pyc differ diff --git 
a/ansible/lib/python3.11/site-packages/passlib/__pycache__/context.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/__pycache__/context.cpython-311.pyc new file mode 100644 index 000000000..c793cd1d1 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/__pycache__/context.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/__pycache__/exc.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/__pycache__/exc.cpython-311.pyc new file mode 100644 index 000000000..f22292739 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/__pycache__/exc.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/__pycache__/hash.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/__pycache__/hash.cpython-311.pyc new file mode 100644 index 000000000..4f0e84529 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/__pycache__/hash.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/__pycache__/hosts.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/__pycache__/hosts.cpython-311.pyc new file mode 100644 index 000000000..ccbca2f99 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/__pycache__/hosts.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/__pycache__/ifc.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/__pycache__/ifc.cpython-311.pyc new file mode 100644 index 000000000..c388dc71e Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/__pycache__/ifc.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/__pycache__/pwd.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/__pycache__/pwd.cpython-311.pyc new file mode 100644 index 000000000..1b9f1991d Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/__pycache__/pwd.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/__pycache__/registry.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/__pycache__/registry.cpython-311.pyc new file mode 100644 index 000000000..7e7d9834a Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/__pycache__/registry.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/__pycache__/totp.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/__pycache__/totp.cpython-311.pyc new file mode 100644 index 000000000..a1ac77449 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/__pycache__/totp.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/__pycache__/win32.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/__pycache__/win32.cpython-311.pyc new file mode 100644 index 000000000..df3fce1cd Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/__pycache__/win32.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/_data/wordsets/bip39.txt b/ansible/lib/python3.11/site-packages/passlib/_data/wordsets/bip39.txt new file mode 100644 index 000000000..e29842e6c --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/_data/wordsets/bip39.txt @@ -0,0 +1,2049 @@ +abandon +ability +able +about +above +absent +absorb +abstract +absurd +abuse +access +accident +account +accuse +achieve +acid +acoustic +acquire +across +act +action +actor +actress +actual +adapt +add +addict +address +adjust +admit 
+adult +advance +advice +aerobic +affair +afford +afraid +again +age +agent +agree +ahead +aim +air +airport +aisle +alarm +album +alcohol +alert +alien +all +alley +allow +almost +alone +alpha +already +also +alter +always +amateur +amazing +among +amount +amused +analyst +anchor +ancient +anger +angle +angry +animal +ankle +announce +annual +another +answer +antenna +antique +anxiety +any +apart +apology +appear +apple +approve +april +arch +arctic +area +arena +argue +arm +armed +armor +army +around +arrange +arrest +arrive +arrow +art +artefact +artist +artwork +ask +aspect +assault +asset +assist +assume +asthma +athlete +atom +attack +attend +attitude +attract +auction +audit +august +aunt +author +auto +autumn +average +avocado +avoid +awake +aware +away +awesome +awful +awkward +axis +baby +bachelor +bacon +badge +bag +balance +balcony +ball +bamboo +banana +banner +bar +barely +bargain +barrel +base +basic +basket +battle +beach +bean +beauty +because +become +beef +before +begin +behave +behind +believe +below +belt +bench +benefit +best +betray +better +between +beyond +bicycle +bid +bike +bind +biology +bird +birth +bitter +black +blade +blame +blanket +blast +bleak +bless +blind +blood +blossom +blouse +blue +blur +blush +board +boat +body +boil +bomb +bone +bonus +book +boost +border +boring +borrow +boss +bottom +bounce +box +boy +bracket +brain +brand +brass +brave +bread +breeze +brick +bridge +brief +bright +bring +brisk +broccoli +broken +bronze +broom +brother +brown +brush +bubble +buddy +budget +buffalo +build +bulb +bulk +bullet +bundle +bunker +burden +burger +burst +bus +business +busy +butter +buyer +buzz +cabbage +cabin +cable +cactus +cage +cake +call +calm +camera +camp +can +canal +cancel +candy +cannon +canoe +canvas +canyon +capable +capital +captain +car +carbon +card +cargo +carpet +carry +cart +case +cash +casino +castle +casual +cat +catalog +catch +category +cattle +caught +cause +caution +cave +ceiling +celery +cement +census +century +cereal +certain +chair +chalk +champion +change +chaos +chapter +charge +chase +chat +cheap +check +cheese +chef +cherry +chest +chicken +chief +child +chimney +choice +choose +chronic +chuckle +chunk +churn +cigar +cinnamon +circle +citizen +city +civil +claim +clap +clarify +claw +clay +clean +clerk +clever +click +client +cliff +climb +clinic +clip +clock +clog +close +cloth +cloud +clown +club +clump +cluster +clutch +coach +coast +coconut +code +coffee +coil +coin +collect +color +column +combine +come +comfort +comic +common +company +concert +conduct +confirm +congress +connect +consider +control +convince +cook +cool +copper +copy +coral +core +corn +correct +cost +cotton +couch +country +couple +course +cousin +cover +coyote +crack +cradle +craft +cram +crane +crash +crater +crawl +crazy +cream +credit +creek +crew +cricket +crime +crisp +critic +crop +cross +crouch +crowd +crucial +cruel +cruise +crumble +crunch +crush +cry +crystal +cube +culture +cup +cupboard +curious +current +curtain +curve +cushion +custom +cute +cycle +dad +damage +damp +dance +danger +daring +dash +daughter +dawn +day +deal +debate +debris +decade +december +decide +decline +decorate +decrease +deer +defense +define +defy +degree +delay +deliver +demand +demise +denial +dentist +deny +depart +depend +deposit +depth +deputy +derive +describe +desert +design +desk +despair +destroy +detail +detect +develop +device +devote +diagram +dial +diamond +diary +dice +diesel +diet +differ +digital +dignity +dilemma +dinner +dinosaur +direct +dirt 
+disagree +discover +disease +dish +dismiss +disorder +display +distance +divert +divide +divorce +dizzy +doctor +document +dog +doll +dolphin +domain +donate +donkey +donor +door +dose +double +dove +draft +dragon +drama +drastic +draw +dream +dress +drift +drill +drink +drip +drive +drop +drum +dry +duck +dumb +dune +during +dust +dutch +duty +dwarf +dynamic +eager +eagle +early +earn +earth +easily +east +easy +echo +ecology +economy +edge +edit +educate +effort +egg +eight +either +elbow +elder +electric +elegant +element +elephant +elevator +elite +else +embark +embody +embrace +emerge +emotion +employ +empower +empty +enable +enact +end +endless +endorse +enemy +energy +enforce +engage +engine +enhance +enjoy +enlist +enough +enrich +enroll +ensure +enter +entire +entry +envelope +episode +equal +equip +era +erase +erode +erosion +error +erupt +escape +essay +essence +estate +eternal +ethics +evidence +evil +evoke +evolve +exact +example +excess +exchange +excite +exclude +excuse +execute +exercise +exhaust +exhibit +exile +exist +exit +exotic +expand +expect +expire +explain +expose +express +extend +extra +eye +eyebrow +fabric +face +faculty +fade +faint +faith +fall +false +fame +family +famous +fan +fancy +fantasy +farm +fashion +fat +fatal +father +fatigue +fault +favorite +feature +february +federal +fee +feed +feel +female +fence +festival +fetch +fever +few +fiber +fiction +field +figure +file +film +filter +final +find +fine +finger +finish +fire +firm +first +fiscal +fish +fit +fitness +fix +flag +flame +flash +flat +flavor +flee +flight +flip +float +flock +floor +flower +fluid +flush +fly +foam +focus +fog +foil +fold +follow +food +foot +force +forest +forget +fork +fortune +forum +forward +fossil +foster +found +fox +fragile +frame +frequent +fresh +friend +fringe +frog +front +frost +frown +frozen +fruit +fuel +fun +funny +furnace +fury +future +gadget +gain +galaxy +gallery +game +gap +garage +garbage +garden +garlic +garment +gas +gasp +gate +gather +gauge +gaze +general +genius +genre +gentle +genuine +gesture +ghost +giant +gift +giggle +ginger +giraffe +girl +give +glad +glance +glare +glass +glide +glimpse +globe +gloom +glory +glove +glow +glue +goat +goddess +gold +good +goose +gorilla +gospel +gossip +govern +gown +grab +grace +grain +grant +grape +grass +gravity +great +green +grid +grief +grit +grocery +group +grow +grunt +guard +guess +guide +guilt +guitar +gun +gym +habit +hair +half +hammer +hamster +hand +happy +harbor +hard +harsh +harvest +hat +have +hawk +hazard +head +health +heart +heavy +hedgehog +height +hello +helmet +help +hen +hero +hidden +high +hill +hint +hip +hire +history +hobby +hockey +hold +hole +holiday +hollow +home +honey +hood +hope +horn +horror +horse +hospital +host +hotel +hour +hover +hub +huge +human +humble +humor +hundred +hungry +hunt +hurdle +hurry +hurt +husband +hybrid +ice +icon +idea +identify +idle +ignore +ill +illegal +illness +image +imitate +immense +immune +impact +impose +improve +impulse +inch +include +income +increase +index +indicate +indoor +industry +infant +inflict +inform +inhale +inherit +initial +inject +injury +inmate +inner +innocent +input +inquiry +insane +insect +inside +inspire +install +intact +interest +into +invest +invite +involve +iron +island +isolate +issue +item +ivory +jacket +jaguar +jar +jazz +jealous +jeans +jelly +jewel +job +join +joke +journey +joy +judge +juice +jump +jungle +junior +junk +just +kangaroo +keen +keep +ketchup +key +kick +kid +kidney +kind +kingdom +kiss +kit 
+kitchen +kite +kitten +kiwi +knee +knife +knock +know +lab +label +labor +ladder +lady +lake +lamp +language +laptop +large +later +latin +laugh +laundry +lava +law +lawn +lawsuit +layer +lazy +leader +leaf +learn +leave +lecture +left +leg +legal +legend +leisure +lemon +lend +length +lens +leopard +lesson +letter +level +liar +liberty +library +license +life +lift +light +like +limb +limit +link +lion +liquid +list +little +live +lizard +load +loan +lobster +local +lock +logic +lonely +long +loop +lottery +loud +lounge +love +loyal +lucky +luggage +lumber +lunar +lunch +luxury +lyrics +machine +mad +magic +magnet +maid +mail +main +major +make +mammal +man +manage +mandate +mango +mansion +manual +maple +marble +march +margin +marine +market +marriage +mask +mass +master +match +material +math +matrix +matter +maximum +maze +meadow +mean +measure +meat +mechanic +medal +media +melody +melt +member +memory +mention +menu +mercy +merge +merit +merry +mesh +message +metal +method +middle +midnight +milk +million +mimic +mind +minimum +minor +minute +miracle +mirror +misery +miss +mistake +mix +mixed +mixture +mobile +model +modify +mom +moment +monitor +monkey +monster +month +moon +moral +more +morning +mosquito +mother +motion +motor +mountain +mouse +move +movie +much +muffin +mule +multiply +muscle +museum +mushroom +music +must +mutual +myself +mystery +myth +naive +name +napkin +narrow +nasty +nation +nature +near +neck +need +negative +neglect +neither +nephew +nerve +nest +net +network +neutral +never +news +next +nice +night +noble +noise +nominee +noodle +normal +north +nose +notable +note +nothing +notice +novel +now +nuclear +number +nurse +nut +oak +obey +object +oblige +obscure +observe +obtain +obvious +occur +ocean +october +odor +off +offer +office +often +oil +okay +old +olive +olympic +omit +once +one +onion +online +only +open +opera +opinion +oppose +option +orange +orbit +orchard +order +ordinary +organ +orient +original +orphan +ostrich +other +outdoor +outer +output +outside +oval +oven +over +own +owner +oxygen +oyster +ozone +pact +paddle +page +pair +palace +palm +panda +panel +panic +panther +paper +parade +parent +park +parrot +party +pass +patch +path +patient +patrol +pattern +pause +pave +payment +peace +peanut +pear +peasant +pelican +pen +penalty +pencil +people +pepper +perfect +permit +person +pet +phone +photo +phrase +physical +piano +picnic +picture +piece +pig +pigeon +pill +pilot +pink +pioneer +pipe +pistol +pitch +pizza +place +planet +plastic +plate +play +please +pledge +pluck +plug +plunge +poem +poet +point +polar +pole +police +pond +pony +pool +popular +portion +position +possible +post +potato +pottery +poverty +powder +power +practice +praise +predict +prefer +prepare +present +pretty +prevent +price +pride +primary +print +priority +prison +private +prize +problem +process +produce +profit +program +project +promote +proof +property +prosper +protect +proud +provide +public +pudding +pull +pulp +pulse +pumpkin +punch +pupil +puppy +purchase +purity +purpose +purse +push +put +puzzle +pyramid +quality +quantum +quarter +question +quick +quit +quiz +quote +rabbit +raccoon +race +rack +radar +radio +rail +rain +raise +rally +ramp +ranch +random +range +rapid +rare +rate +rather +raven +raw +razor +ready +real +reason +rebel +rebuild +recall +receive +recipe +record +recycle +reduce +reflect +reform +refuse +region +regret +regular +reject +relax +release +relief +rely +remain +remember +remind +remove +render +renew +rent +reopen +repair 
+repeat +replace +report +require +rescue +resemble +resist +resource +response +result +retire +retreat +return +reunion +reveal +review +reward +rhythm +rib +ribbon +rice +rich +ride +ridge +rifle +right +rigid +ring +riot +ripple +risk +ritual +rival +river +road +roast +robot +robust +rocket +romance +roof +rookie +room +rose +rotate +rough +round +route +royal +rubber +rude +rug +rule +run +runway +rural +sad +saddle +sadness +safe +sail +salad +salmon +salon +salt +salute +same +sample +sand +satisfy +satoshi +sauce +sausage +save +say +scale +scan +scare +scatter +scene +scheme +school +science +scissors +scorpion +scout +scrap +screen +script +scrub +sea +search +season +seat +second +secret +section +security +seed +seek +segment +select +sell +seminar +senior +sense +sentence +series +service +session +settle +setup +seven +shadow +shaft +shallow +share +shed +shell +sheriff +shield +shift +shine +ship +shiver +shock +shoe +shoot +shop +short +shoulder +shove +shrimp +shrug +shuffle +shy +sibling +sick +side +siege +sight +sign +silent +silk +silly +silver +similar +simple +since +sing +siren +sister +situate +six +size +skate +sketch +ski +skill +skin +skirt +skull +slab +slam +sleep +slender +slice +slide +slight +slim +slogan +slot +slow +slush +small +smart +smile +smoke +smooth +snack +snake +snap +sniff +snow +soap +soccer +social +sock +soda +soft +solar +soldier +solid +solution +solve +someone +song +soon +sorry +sort +soul +sound +soup +source +south +space +spare +spatial +spawn +speak +special +speed +spell +spend +sphere +spice +spider +spike +spin +spirit +split +spoil +sponsor +spoon +sport +spot +spray +spread +spring +spy +square +squeeze +squirrel +stable +stadium +staff +stage +stairs +stamp +stand +start +state +stay +steak +steel +stem +step +stereo +stick +still +sting +stock +stomach +stone +stool +story +stove +strategy +street +strike +strong +struggle +student +stuff +stumble +style +subject +submit +subway +success +such +sudden +suffer +sugar +suggest +suit +summer +sun +sunny +sunset +super +supply +supreme +sure +surface +surge +surprise +surround +survey +suspect +sustain +swallow +swamp +swap +swarm +swear +sweet +swift +swim +swing +switch +sword +symbol +symptom +syrup +system +table +tackle +tag +tail +talent +talk +tank +tape +target +task +taste +tattoo +taxi +teach +team +tell +ten +tenant +tennis +tent +term +test +text +thank +that +theme +then +theory +there +they +thing +this +thought +three +thrive +throw +thumb +thunder +ticket +tide +tiger +tilt +timber +time +tiny +tip +tired +tissue +title +toast +tobacco +today +toddler +toe +together +toilet +token +tomato +tomorrow +tone +tongue +tonight +tool +tooth +top +topic +topple +torch +tornado +tortoise +toss +total +tourist +toward +tower +town +toy +track +trade +traffic +tragic +train +transfer +trap +trash +travel +tray +treat +tree +trend +trial +tribe +trick +trigger +trim +trip +trophy +trouble +truck +true +truly +trumpet +trust +truth +try +tube +tuition +tumble +tuna +tunnel +turkey +turn +turtle +twelve +twenty +twice +twin +twist +two +type +typical +ugly +umbrella +unable +unaware +uncle +uncover +under +undo +unfair +unfold +unhappy +uniform +unique +unit +universe +unknown +unlock +until +unusual +unveil +update +upgrade +uphold +upon +upper +upset +urban +urge +usage +use +used +useful +useless +usual +utility +vacant +vacuum +vague +valid +valley +valve +van +vanish +vapor +various +vast +vault +vehicle +velvet +vendor +venture +venue +verb +verify +version +very +vessel 
+veteran +viable +vibrant +vicious +victory +video +view +village +vintage +violin +virtual +virus +visa +visit +visual +vital +vivid +vocal +voice +void +volcano +volume +vote +voyage +wage +wagon +wait +walk +wall +walnut +want +warfare +warm +warrior +wash +wasp +waste +water +wave +way +wealth +weapon +wear +weasel +weather +web +wedding +weekend +weird +welcome +west +wet +whale +what +wheat +wheel +when +where +whip +whisper +wide +width +wife +wild +will +win +window +wine +wing +wink +winner +winter +wire +wisdom +wise +wish +witness +wolf +woman +wonder +wood +wool +word +work +world +worry +worth +wrap +wreck +wrestle +wrist +write +wrong +yard +year +yellow +you +young +youth +zebra +zero +zone +zoo + diff --git a/ansible/lib/python3.11/site-packages/passlib/_data/wordsets/eff_long.txt b/ansible/lib/python3.11/site-packages/passlib/_data/wordsets/eff_long.txt new file mode 100644 index 000000000..caf71f526 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/_data/wordsets/eff_long.txt @@ -0,0 +1,7776 @@ +abacus +abdomen +abdominal +abide +abiding +ability +ablaze +able +abnormal +abrasion +abrasive +abreast +abridge +abroad +abruptly +absence +absentee +absently +absinthe +absolute +absolve +abstain +abstract +absurd +accent +acclaim +acclimate +accompany +account +accuracy +accurate +accustom +acetone +achiness +aching +acid +acorn +acquaint +acquire +acre +acrobat +acronym +acting +action +activate +activator +active +activism +activist +activity +actress +acts +acutely +acuteness +aeration +aerobics +aerosol +aerospace +afar +affair +affected +affecting +affection +affidavit +affiliate +affirm +affix +afflicted +affluent +afford +affront +aflame +afloat +aflutter +afoot +afraid +afterglow +afterlife +aftermath +aftermost +afternoon +aged +ageless +agency +agenda +agent +aggregate +aghast +agile +agility +aging +agnostic +agonize +agonizing +agony +agreeable +agreeably +agreed +agreeing +agreement +aground +ahead +ahoy +aide +aids +aim +ajar +alabaster +alarm +albatross +album +alfalfa +algebra +algorithm +alias +alibi +alienable +alienate +aliens +alike +alive +alkaline +alkalize +almanac +almighty +almost +aloe +aloft +aloha +alone +alongside +aloof +alphabet +alright +although +altitude +alto +aluminum +alumni +always +amaretto +amaze +amazingly +amber +ambiance +ambiguity +ambiguous +ambition +ambitious +ambulance +ambush +amendable +amendment +amends +amenity +amiable +amicably +amid +amigo +amino +amiss +ammonia +ammonium +amnesty +amniotic +among +amount +amperage +ample +amplifier +amplify +amply +amuck +amulet +amusable +amused +amusement +amuser +amusing +anaconda +anaerobic +anagram +anatomist +anatomy +anchor +anchovy +ancient +android +anemia +anemic +aneurism +anew +angelfish +angelic +anger +angled +angler +angles +angling +angrily +angriness +anguished +angular +animal +animate +animating +animation +animator +anime +animosity +ankle +annex +annotate +announcer +annoying +annually +annuity +anointer +another +answering +antacid +antarctic +anteater +antelope +antennae +anthem +anthill +anthology +antibody +antics +antidote +antihero +antiquely +antiques +antiquity +antirust +antitoxic +antitrust +antiviral +antivirus +antler +antonym +antsy +anvil +anybody +anyhow +anymore +anyone +anyplace +anything +anytime +anyway +anywhere +aorta +apache +apostle +appealing +appear +appease +appeasing +appendage +appendix +appetite +appetizer +applaud +applause +apple +appliance +applicant +applied +apply +appointee +appraisal +appraiser +apprehend +approach 
+approval +approve +apricot +april +apron +aptitude +aptly +aqua +aqueduct +arbitrary +arbitrate +ardently +area +arena +arguable +arguably +argue +arise +armadillo +armband +armchair +armed +armful +armhole +arming +armless +armoire +armored +armory +armrest +army +aroma +arose +around +arousal +arrange +array +arrest +arrival +arrive +arrogance +arrogant +arson +art +ascend +ascension +ascent +ascertain +ashamed +ashen +ashes +ashy +aside +askew +asleep +asparagus +aspect +aspirate +aspire +aspirin +astonish +astound +astride +astrology +astronaut +astronomy +astute +atlantic +atlas +atom +atonable +atop +atrium +atrocious +atrophy +attach +attain +attempt +attendant +attendee +attention +attentive +attest +attic +attire +attitude +attractor +attribute +atypical +auction +audacious +audacity +audible +audibly +audience +audio +audition +augmented +august +authentic +author +autism +autistic +autograph +automaker +automated +automatic +autopilot +available +avalanche +avatar +avenge +avenging +avenue +average +aversion +avert +aviation +aviator +avid +avoid +await +awaken +award +aware +awhile +awkward +awning +awoke +awry +axis +babble +babbling +babied +baboon +backache +backboard +backboned +backdrop +backed +backer +backfield +backfire +backhand +backing +backlands +backlash +backless +backlight +backlit +backlog +backpack +backpedal +backrest +backroom +backshift +backside +backslid +backspace +backspin +backstab +backstage +backtalk +backtrack +backup +backward +backwash +backwater +backyard +bacon +bacteria +bacterium +badass +badge +badland +badly +badness +baffle +baffling +bagel +bagful +baggage +bagged +baggie +bagginess +bagging +baggy +bagpipe +baguette +baked +bakery +bakeshop +baking +balance +balancing +balcony +balmy +balsamic +bamboo +banana +banish +banister +banjo +bankable +bankbook +banked +banker +banking +banknote +bankroll +banner +bannister +banshee +banter +barbecue +barbed +barbell +barber +barcode +barge +bargraph +barista +baritone +barley +barmaid +barman +barn +barometer +barrack +barracuda +barrel +barrette +barricade +barrier +barstool +bartender +barterer +bash +basically +basics +basil +basin +basis +basket +batboy +batch +bath +baton +bats +battalion +battered +battering +battery +batting +battle +bauble +bazooka +blabber +bladder +blade +blah +blame +blaming +blanching +blandness +blank +blaspheme +blasphemy +blast +blatancy +blatantly +blazer +blazing +bleach +bleak +bleep +blemish +blend +bless +blighted +blimp +bling +blinked +blinker +blinking +blinks +blip +blissful +blitz +blizzard +bloated +bloating +blob +blog +bloomers +blooming +blooper +blot +blouse +blubber +bluff +bluish +blunderer +blunt +blurb +blurred +blurry +blurt +blush +blustery +boaster +boastful +boasting +boat +bobbed +bobbing +bobble +bobcat +bobsled +bobtail +bodacious +body +bogged +boggle +bogus +boil +bok +bolster +bolt +bonanza +bonded +bonding +bondless +boned +bonehead +boneless +bonelike +boney +bonfire +bonnet +bonsai +bonus +bony +boogeyman +boogieman +book +boondocks +booted +booth +bootie +booting +bootlace +bootleg +boots +boozy +borax +boring +borough +borrower +borrowing +boss +botanical +botanist +botany +botch +both +bottle +bottling +bottom +bounce +bouncing +bouncy +bounding +boundless +bountiful +bovine +boxcar +boxer +boxing +boxlike +boxy +breach +breath +breeches +breeching +breeder +breeding +breeze +breezy +brethren +brewery +brewing +briar +bribe +brick +bride +bridged +brigade +bright +brilliant +brim +bring +brink +brisket +briskly +briskness 
+bristle +brittle +broadband +broadcast +broaden +broadly +broadness +broadside +broadways +broiler +broiling +broken +broker +bronchial +bronco +bronze +bronzing +brook +broom +brought +browbeat +brownnose +browse +browsing +bruising +brunch +brunette +brunt +brush +brussels +brute +brutishly +bubble +bubbling +bubbly +buccaneer +bucked +bucket +buckle +buckshot +buckskin +bucktooth +buckwheat +buddhism +buddhist +budding +buddy +budget +buffalo +buffed +buffer +buffing +buffoon +buggy +bulb +bulge +bulginess +bulgur +bulk +bulldog +bulldozer +bullfight +bullfrog +bullhorn +bullion +bullish +bullpen +bullring +bullseye +bullwhip +bully +bunch +bundle +bungee +bunion +bunkbed +bunkhouse +bunkmate +bunny +bunt +busboy +bush +busily +busload +bust +busybody +buzz +cabana +cabbage +cabbie +cabdriver +cable +caboose +cache +cackle +cacti +cactus +caddie +caddy +cadet +cadillac +cadmium +cage +cahoots +cake +calamari +calamity +calcium +calculate +calculus +caliber +calibrate +calm +caloric +calorie +calzone +camcorder +cameo +camera +camisole +camper +campfire +camping +campsite +campus +canal +canary +cancel +candied +candle +candy +cane +canine +canister +cannabis +canned +canning +cannon +cannot +canola +canon +canopener +canopy +canteen +canyon +capable +capably +capacity +cape +capillary +capital +capitol +capped +capricorn +capsize +capsule +caption +captivate +captive +captivity +capture +caramel +carat +caravan +carbon +cardboard +carded +cardiac +cardigan +cardinal +cardstock +carefully +caregiver +careless +caress +caretaker +cargo +caring +carless +carload +carmaker +carnage +carnation +carnival +carnivore +carol +carpenter +carpentry +carpool +carport +carried +carrot +carrousel +carry +cartel +cartload +carton +cartoon +cartridge +cartwheel +carve +carving +carwash +cascade +case +cash +casing +casino +casket +cassette +casually +casualty +catacomb +catalog +catalyst +catalyze +catapult +cataract +catatonic +catcall +catchable +catcher +catching +catchy +caterer +catering +catfight +catfish +cathedral +cathouse +catlike +catnap +catnip +catsup +cattail +cattishly +cattle +catty +catwalk +caucasian +caucus +causal +causation +cause +causing +cauterize +caution +cautious +cavalier +cavalry +caviar +cavity +cedar +celery +celestial +celibacy +celibate +celtic +cement +census +ceramics +ceremony +certainly +certainty +certified +certify +cesarean +cesspool +chafe +chaffing +chain +chair +chalice +challenge +chamber +chamomile +champion +chance +change +channel +chant +chaos +chaperone +chaplain +chapped +chaps +chapter +character +charbroil +charcoal +charger +charging +chariot +charity +charm +charred +charter +charting +chase +chasing +chaste +chastise +chastity +chatroom +chatter +chatting +chatty +cheating +cheddar +cheek +cheer +cheese +cheesy +chef +chemicals +chemist +chemo +cherisher +cherub +chess +chest +chevron +chevy +chewable +chewer +chewing +chewy +chief +chihuahua +childcare +childhood +childish +childless +childlike +chili +chill +chimp +chip +chirping +chirpy +chitchat +chivalry +chive +chloride +chlorine +choice +chokehold +choking +chomp +chooser +choosing +choosy +chop +chosen +chowder +chowtime +chrome +chubby +chuck +chug +chummy +chump +chunk +churn +chute +cider +cilantro +cinch +cinema +cinnamon +circle +circling +circular +circulate +circus +citable +citadel +citation +citizen +citric +citrus +city +civic +civil +clad +claim +clambake +clammy +clamor +clamp +clamshell +clang +clanking +clapped +clapper +clapping +clarify +clarinet +clarity +clash +clasp 
+class +clatter +clause +clavicle +claw +clay +clean +clear +cleat +cleaver +cleft +clench +clergyman +clerical +clerk +clever +clicker +client +climate +climatic +cling +clinic +clinking +clip +clique +cloak +clobber +clock +clone +cloning +closable +closure +clothes +clothing +cloud +clover +clubbed +clubbing +clubhouse +clump +clumsily +clumsy +clunky +clustered +clutch +clutter +coach +coagulant +coastal +coaster +coasting +coastland +coastline +coat +coauthor +cobalt +cobbler +cobweb +cocoa +coconut +cod +coeditor +coerce +coexist +coffee +cofounder +cognition +cognitive +cogwheel +coherence +coherent +cohesive +coil +coke +cola +cold +coleslaw +coliseum +collage +collapse +collar +collected +collector +collide +collie +collision +colonial +colonist +colonize +colony +colossal +colt +coma +come +comfort +comfy +comic +coming +comma +commence +commend +comment +commerce +commode +commodity +commodore +common +commotion +commute +commuting +compacted +compacter +compactly +compactor +companion +company +compare +compel +compile +comply +component +composed +composer +composite +compost +composure +compound +compress +comprised +computer +computing +comrade +concave +conceal +conceded +concept +concerned +concert +conch +concierge +concise +conclude +concrete +concur +condense +condiment +condition +condone +conducive +conductor +conduit +cone +confess +confetti +confidant +confident +confider +confiding +configure +confined +confining +confirm +conflict +conform +confound +confront +confused +confusing +confusion +congenial +congested +congrats +congress +conical +conjoined +conjure +conjuror +connected +connector +consensus +consent +console +consoling +consonant +constable +constant +constrain +constrict +construct +consult +consumer +consuming +contact +container +contempt +contend +contented +contently +contents +contest +context +contort +contour +contrite +control +contusion +convene +convent +copartner +cope +copied +copier +copilot +coping +copious +copper +copy +coral +cork +cornball +cornbread +corncob +cornea +corned +corner +cornfield +cornflake +cornhusk +cornmeal +cornstalk +corny +coronary +coroner +corporal +corporate +corral +correct +corridor +corrode +corroding +corrosive +corsage +corset +cortex +cosigner +cosmetics +cosmic +cosmos +cosponsor +cost +cottage +cotton +couch +cough +could +countable +countdown +counting +countless +country +county +courier +covenant +cover +coveted +coveting +coyness +cozily +coziness +cozy +crabbing +crabgrass +crablike +crabmeat +cradle +cradling +crafter +craftily +craftsman +craftwork +crafty +cramp +cranberry +crane +cranial +cranium +crank +crate +crave +craving +crawfish +crawlers +crawling +crayfish +crayon +crazed +crazily +craziness +crazy +creamed +creamer +creamlike +crease +creasing +creatable +create +creation +creative +creature +credible +credibly +credit +creed +creme +creole +crepe +crept +crescent +crested +cresting +crestless +crevice +crewless +crewman +crewmate +crib +cricket +cried +crier +crimp +crimson +cringe +cringing +crinkle +crinkly +crisped +crisping +crisply +crispness +crispy +criteria +critter +croak +crock +crook +croon +crop +cross +crouch +crouton +crowbar +crowd +crown +crucial +crudely +crudeness +cruelly +cruelness +cruelty +crumb +crummiest +crummy +crumpet +crumpled +cruncher +crunching +crunchy +crusader +crushable +crushed +crusher +crushing +crust +crux +crying +cryptic +crystal +cubbyhole +cube +cubical +cubicle +cucumber +cuddle +cuddly +cufflink +culinary +culminate +culpable +culprit 
+cultivate +cultural +culture +cupbearer +cupcake +cupid +cupped +cupping +curable +curator +curdle +cure +curfew +curing +curled +curler +curliness +curling +curly +curry +curse +cursive +cursor +curtain +curtly +curtsy +curvature +curve +curvy +cushy +cusp +cussed +custard +custodian +custody +customary +customer +customize +customs +cut +cycle +cyclic +cycling +cyclist +cylinder +cymbal +cytoplasm +cytoplast +dab +dad +daffodil +dagger +daily +daintily +dainty +dairy +daisy +dallying +dance +dancing +dandelion +dander +dandruff +dandy +danger +dangle +dangling +daredevil +dares +daringly +darkened +darkening +darkish +darkness +darkroom +darling +darn +dart +darwinism +dash +dastardly +data +datebook +dating +daughter +daunting +dawdler +dawn +daybed +daybreak +daycare +daydream +daylight +daylong +dayroom +daytime +dazzler +dazzling +deacon +deafening +deafness +dealer +dealing +dealmaker +dealt +dean +debatable +debate +debating +debit +debrief +debtless +debtor +debug +debunk +decade +decaf +decal +decathlon +decay +deceased +deceit +deceiver +deceiving +december +decency +decent +deception +deceptive +decibel +decidable +decimal +decimeter +decipher +deck +declared +decline +decode +decompose +decorated +decorator +decoy +decrease +decree +dedicate +dedicator +deduce +deduct +deed +deem +deepen +deeply +deepness +deface +defacing +defame +default +defeat +defection +defective +defendant +defender +defense +defensive +deferral +deferred +defiance +defiant +defile +defiling +define +definite +deflate +deflation +deflator +deflected +deflector +defog +deforest +defraud +defrost +deftly +defuse +defy +degraded +degrading +degrease +degree +dehydrate +deity +dejected +delay +delegate +delegator +delete +deletion +delicacy +delicate +delicious +delighted +delirious +delirium +deliverer +delivery +delouse +delta +deluge +delusion +deluxe +demanding +demeaning +demeanor +demise +democracy +democrat +demote +demotion +demystify +denatured +deniable +denial +denim +denote +dense +density +dental +dentist +denture +deny +deodorant +deodorize +departed +departure +depict +deplete +depletion +deplored +deploy +deport +depose +depraved +depravity +deprecate +depress +deprive +depth +deputize +deputy +derail +deranged +derby +derived +desecrate +deserve +deserving +designate +designed +designer +designing +deskbound +desktop +deskwork +desolate +despair +despise +despite +destiny +destitute +destruct +detached +detail +detection +detective +detector +detention +detergent +detest +detonate +detonator +detoxify +detract +deuce +devalue +deviancy +deviant +deviate +deviation +deviator +device +devious +devotedly +devotee +devotion +devourer +devouring +devoutly +dexterity +dexterous +diabetes +diabetic +diabolic +diagnoses +diagnosis +diagram +dial +diameter +diaper +diaphragm +diary +dice +dicing +dictate +dictation +dictator +difficult +diffused +diffuser +diffusion +diffusive +dig +dilation +diligence +diligent +dill +dilute +dime +diminish +dimly +dimmed +dimmer +dimness +dimple +diner +dingbat +dinghy +dinginess +dingo +dingy +dining +dinner +diocese +dioxide +diploma +dipped +dipper +dipping +directed +direction +directive +directly +directory +direness +dirtiness +disabled +disagree +disallow +disarm +disarray +disaster +disband +disbelief +disburse +discard +discern +discharge +disclose +discolor +discount +discourse +discover +discuss +disdain +disengage +disfigure +disgrace +dish +disinfect +disjoin +disk +dislike +disliking +dislocate +dislodge +disloyal +dismantle +dismay +dismiss 
+dismount +disobey +disorder +disown +disparate +disparity +dispatch +dispense +dispersal +dispersed +disperser +displace +display +displease +disposal +dispose +disprove +dispute +disregard +disrupt +dissuade +distance +distant +distaste +distill +distinct +distort +distract +distress +district +distrust +ditch +ditto +ditzy +dividable +divided +dividend +dividers +dividing +divinely +diving +divinity +divisible +divisibly +division +divisive +divorcee +dizziness +dizzy +doable +docile +dock +doctrine +document +dodge +dodgy +doily +doing +dole +dollar +dollhouse +dollop +dolly +dolphin +domain +domelike +domestic +dominion +dominoes +donated +donation +donator +donor +donut +doodle +doorbell +doorframe +doorknob +doorman +doormat +doornail +doorpost +doorstep +doorstop +doorway +doozy +dork +dormitory +dorsal +dosage +dose +dotted +doubling +douche +dove +down +dowry +doze +drab +dragging +dragonfly +dragonish +dragster +drainable +drainage +drained +drainer +drainpipe +dramatic +dramatize +drank +drapery +drastic +draw +dreaded +dreadful +dreadlock +dreamboat +dreamily +dreamland +dreamless +dreamlike +dreamt +dreamy +drearily +dreary +drench +dress +drew +dribble +dried +drier +drift +driller +drilling +drinkable +drinking +dripping +drippy +drivable +driven +driver +driveway +driving +drizzle +drizzly +drone +drool +droop +drop-down +dropbox +dropkick +droplet +dropout +dropper +drove +drown +drowsily +drudge +drum +dry +dubbed +dubiously +duchess +duckbill +ducking +duckling +ducktail +ducky +duct +dude +duffel +dugout +duh +duke +duller +dullness +duly +dumping +dumpling +dumpster +duo +dupe +duplex +duplicate +duplicity +durable +durably +duration +duress +during +dusk +dust +dutiful +duty +duvet +dwarf +dweeb +dwelled +dweller +dwelling +dwindle +dwindling +dynamic +dynamite +dynasty +dyslexia +dyslexic +each +eagle +earache +eardrum +earflap +earful +earlobe +early +earmark +earmuff +earphone +earpiece +earplugs +earring +earshot +earthen +earthlike +earthling +earthly +earthworm +earthy +earwig +easeful +easel +easiest +easily +easiness +easing +eastbound +eastcoast +easter +eastward +eatable +eaten +eatery +eating +eats +ebay +ebony +ebook +ecard +eccentric +echo +eclair +eclipse +ecologist +ecology +economic +economist +economy +ecosphere +ecosystem +edge +edginess +edging +edgy +edition +editor +educated +education +educator +eel +effective +effects +efficient +effort +eggbeater +egging +eggnog +eggplant +eggshell +egomaniac +egotism +egotistic +either +eject +elaborate +elastic +elated +elbow +eldercare +elderly +eldest +electable +election +elective +elephant +elevate +elevating +elevation +elevator +eleven +elf +eligible +eligibly +eliminate +elite +elitism +elixir +elk +ellipse +elliptic +elm +elongated +elope +eloquence +eloquent +elsewhere +elude +elusive +elves +email +embargo +embark +embassy +embattled +embellish +ember +embezzle +emblaze +emblem +embody +embolism +emboss +embroider +emcee +emerald +emergency +emission +emit +emote +emoticon +emotion +empathic +empathy +emperor +emphases +emphasis +emphasize +emphatic +empirical +employed +employee +employer +emporium +empower +emptier +emptiness +empty +emu +enable +enactment +enamel +enchanted +enchilada +encircle +enclose +enclosure +encode +encore +encounter +encourage +encroach +encrust +encrypt +endanger +endeared +endearing +ended +ending +endless +endnote +endocrine +endorphin +endorse +endowment +endpoint +endurable +endurance +enduring +energetic +energize +energy +enforced +enforcer +engaged +engaging 
+engine +engorge +engraved +engraver +engraving +engross +engulf +enhance +enigmatic +enjoyable +enjoyably +enjoyer +enjoying +enjoyment +enlarged +enlarging +enlighten +enlisted +enquirer +enrage +enrich +enroll +enslave +ensnare +ensure +entail +entangled +entering +entertain +enticing +entire +entitle +entity +entomb +entourage +entrap +entree +entrench +entrust +entryway +entwine +enunciate +envelope +enviable +enviably +envious +envision +envoy +envy +enzyme +epic +epidemic +epidermal +epidermis +epidural +epilepsy +epileptic +epilogue +epiphany +episode +equal +equate +equation +equator +equinox +equipment +equity +equivocal +eradicate +erasable +erased +eraser +erasure +ergonomic +errand +errant +erratic +error +erupt +escalate +escalator +escapable +escapade +escapist +escargot +eskimo +esophagus +espionage +espresso +esquire +essay +essence +essential +establish +estate +esteemed +estimate +estimator +estranged +estrogen +etching +eternal +eternity +ethanol +ether +ethically +ethics +euphemism +evacuate +evacuee +evade +evaluate +evaluator +evaporate +evasion +evasive +even +everglade +evergreen +everybody +everyday +everyone +evict +evidence +evident +evil +evoke +evolution +evolve +exact +exalted +example +excavate +excavator +exceeding +exception +excess +exchange +excitable +exciting +exclaim +exclude +excluding +exclusion +exclusive +excretion +excretory +excursion +excusable +excusably +excuse +exemplary +exemplify +exemption +exerciser +exert +exes +exfoliate +exhale +exhaust +exhume +exile +existing +exit +exodus +exonerate +exorcism +exorcist +expand +expanse +expansion +expansive +expectant +expedited +expediter +expel +expend +expenses +expensive +expert +expire +expiring +explain +expletive +explicit +explode +exploit +explore +exploring +exponent +exporter +exposable +expose +exposure +express +expulsion +exquisite +extended +extending +extent +extenuate +exterior +external +extinct +extortion +extradite +extras +extrovert +extrude +extruding +exuberant +fable +fabric +fabulous +facebook +facecloth +facedown +faceless +facelift +faceplate +faceted +facial +facility +facing +facsimile +faction +factoid +factor +factsheet +factual +faculty +fade +fading +failing +falcon +fall +false +falsify +fame +familiar +family +famine +famished +fanatic +fancied +fanciness +fancy +fanfare +fang +fanning +fantasize +fantastic +fantasy +fascism +fastball +faster +fasting +fastness +faucet +favorable +favorably +favored +favoring +favorite +fax +feast +federal +fedora +feeble +feed +feel +feisty +feline +felt-tip +feminine +feminism +feminist +feminize +femur +fence +fencing +fender +ferment +fernlike +ferocious +ferocity +ferret +ferris +ferry +fervor +fester +festival +festive +festivity +fetal +fetch +fever +fiber +fiction +fiddle +fiddling +fidelity +fidgeting +fidgety +fifteen +fifth +fiftieth +fifty +figment +figure +figurine +filing +filled +filler +filling +film +filter +filth +filtrate +finale +finalist +finalize +finally +finance +financial +finch +fineness +finer +finicky +finished +finisher +finishing +finite +finless +finlike +fiscally +fit +five +flaccid +flagman +flagpole +flagship +flagstick +flagstone +flail +flakily +flaky +flame +flammable +flanked +flanking +flannels +flap +flaring +flashback +flashbulb +flashcard +flashily +flashing +flashy +flask +flatbed +flatfoot +flatly +flatness +flatten +flattered +flatterer +flattery +flattop +flatware +flatworm +flavored +flavorful +flavoring +flaxseed +fled +fleshed +fleshy +flick +flier +flight +flinch +fling +flint 
+flip +flirt +float +flock +flogging +flop +floral +florist +floss +flounder +flyable +flyaway +flyer +flying +flyover +flypaper +foam +foe +fog +foil +folic +folk +follicle +follow +fondling +fondly +fondness +fondue +font +food +fool +footage +football +footbath +footboard +footer +footgear +foothill +foothold +footing +footless +footman +footnote +footpad +footpath +footprint +footrest +footsie +footsore +footwear +footwork +fossil +foster +founder +founding +fountain +fox +foyer +fraction +fracture +fragile +fragility +fragment +fragrance +fragrant +frail +frame +framing +frantic +fraternal +frayed +fraying +frays +freckled +freckles +freebase +freebee +freebie +freedom +freefall +freehand +freeing +freeload +freely +freemason +freeness +freestyle +freeware +freeway +freewill +freezable +freezing +freight +french +frenzied +frenzy +frequency +frequent +fresh +fretful +fretted +friction +friday +fridge +fried +friend +frighten +frightful +frigidity +frigidly +frill +fringe +frisbee +frisk +fritter +frivolous +frolic +from +front +frostbite +frosted +frostily +frosting +frostlike +frosty +froth +frown +frozen +fructose +frugality +frugally +fruit +frustrate +frying +gab +gaffe +gag +gainfully +gaining +gains +gala +gallantly +galleria +gallery +galley +gallon +gallows +gallstone +galore +galvanize +gambling +game +gaming +gamma +gander +gangly +gangrene +gangway +gap +garage +garbage +garden +gargle +garland +garlic +garment +garnet +garnish +garter +gas +gatherer +gathering +gating +gauging +gauntlet +gauze +gave +gawk +gazing +gear +gecko +geek +geiger +gem +gender +generic +generous +genetics +genre +gentile +gentleman +gently +gents +geography +geologic +geologist +geology +geometric +geometry +geranium +gerbil +geriatric +germicide +germinate +germless +germproof +gestate +gestation +gesture +getaway +getting +getup +giant +gibberish +giblet +giddily +giddiness +giddy +gift +gigabyte +gigahertz +gigantic +giggle +giggling +giggly +gigolo +gilled +gills +gimmick +girdle +giveaway +given +giver +giving +gizmo +gizzard +glacial +glacier +glade +gladiator +gladly +glamorous +glamour +glance +glancing +glandular +glare +glaring +glass +glaucoma +glazing +gleaming +gleeful +glider +gliding +glimmer +glimpse +glisten +glitch +glitter +glitzy +gloater +gloating +gloomily +gloomy +glorified +glorifier +glorify +glorious +glory +gloss +glove +glowing +glowworm +glucose +glue +gluten +glutinous +glutton +gnarly +gnat +goal +goatskin +goes +goggles +going +goldfish +goldmine +goldsmith +golf +goliath +gonad +gondola +gone +gong +good +gooey +goofball +goofiness +goofy +google +goon +gopher +gore +gorged +gorgeous +gory +gosling +gossip +gothic +gotten +gout +gown +grab +graceful +graceless +gracious +gradation +graded +grader +gradient +grading +gradually +graduate +graffiti +grafted +grafting +grain +granddad +grandkid +grandly +grandma +grandpa +grandson +granite +granny +granola +grant +granular +grape +graph +grapple +grappling +grasp +grass +gratified +gratify +grating +gratitude +gratuity +gravel +graveness +graves +graveyard +gravitate +gravity +gravy +gray +grazing +greasily +greedily +greedless +greedy +green +greeter +greeting +grew +greyhound +grid +grief +grievance +grieving +grievous +grill +grimace +grimacing +grime +griminess +grimy +grinch +grinning +grip +gristle +grit +groggily +groggy +groin +groom +groove +grooving +groovy +grope +ground +grouped +grout +grove +grower +growing +growl +grub +grudge +grudging +grueling +gruffly +grumble +grumbling +grumbly +grumpily +grunge 
+grunt +guacamole +guidable +guidance +guide +guiding +guileless +guise +gulf +gullible +gully +gulp +gumball +gumdrop +gumminess +gumming +gummy +gurgle +gurgling +guru +gush +gusto +gusty +gutless +guts +gutter +guy +guzzler +gyration +habitable +habitant +habitat +habitual +hacked +hacker +hacking +hacksaw +had +haggler +haiku +half +halogen +halt +halved +halves +hamburger +hamlet +hammock +hamper +hamster +hamstring +handbag +handball +handbook +handbrake +handcart +handclap +handclasp +handcraft +handcuff +handed +handful +handgrip +handgun +handheld +handiness +handiwork +handlebar +handled +handler +handling +handmade +handoff +handpick +handprint +handrail +handsaw +handset +handsfree +handshake +handstand +handwash +handwork +handwoven +handwrite +handyman +hangnail +hangout +hangover +hangup +hankering +hankie +hanky +haphazard +happening +happier +happiest +happily +happiness +happy +harbor +hardcopy +hardcore +hardcover +harddisk +hardened +hardener +hardening +hardhat +hardhead +hardiness +hardly +hardness +hardship +hardware +hardwired +hardwood +hardy +harmful +harmless +harmonica +harmonics +harmonize +harmony +harness +harpist +harsh +harvest +hash +hassle +haste +hastily +hastiness +hasty +hatbox +hatchback +hatchery +hatchet +hatching +hatchling +hate +hatless +hatred +haunt +haven +hazard +hazelnut +hazily +haziness +hazing +hazy +headache +headband +headboard +headcount +headdress +headed +header +headfirst +headgear +heading +headlamp +headless +headlock +headphone +headpiece +headrest +headroom +headscarf +headset +headsman +headstand +headstone +headway +headwear +heap +heat +heave +heavily +heaviness +heaving +hedge +hedging +heftiness +hefty +helium +helmet +helper +helpful +helping +helpless +helpline +hemlock +hemstitch +hence +henchman +henna +herald +herbal +herbicide +herbs +heritage +hermit +heroics +heroism +herring +herself +hertz +hesitancy +hesitant +hesitate +hexagon +hexagram +hubcap +huddle +huddling +huff +hug +hula +hulk +hull +human +humble +humbling +humbly +humid +humiliate +humility +humming +hummus +humongous +humorist +humorless +humorous +humpback +humped +humvee +hunchback +hundredth +hunger +hungrily +hungry +hunk +hunter +hunting +huntress +huntsman +hurdle +hurled +hurler +hurling +hurray +hurricane +hurried +hurry +hurt +husband +hush +husked +huskiness +hut +hybrid +hydrant +hydrated +hydration +hydrogen +hydroxide +hyperlink +hypertext +hyphen +hypnoses +hypnosis +hypnotic +hypnotism +hypnotist +hypnotize +hypocrisy +hypocrite +ibuprofen +ice +iciness +icing +icky +icon +icy +idealism +idealist +idealize +ideally +idealness +identical +identify +identity +ideology +idiocy +idiom +idly +igloo +ignition +ignore +iguana +illicitly +illusion +illusive +image +imaginary +imagines +imaging +imbecile +imitate +imitation +immature +immerse +immersion +imminent +immobile +immodest +immorally +immortal +immovable +immovably +immunity +immunize +impaired +impale +impart +impatient +impeach +impeding +impending +imperfect +imperial +impish +implant +implement +implicate +implicit +implode +implosion +implosive +imply +impolite +important +importer +impose +imposing +impotence +impotency +impotent +impound +imprecise +imprint +imprison +impromptu +improper +improve +improving +improvise +imprudent +impulse +impulsive +impure +impurity +iodine +iodize +ion +ipad +iphone +ipod +irate +irk +iron +irregular +irrigate +irritable +irritably +irritant +irritate +islamic +islamist +isolated +isolating +isolation +isotope +issue +issuing +italicize 
+italics +item +itinerary +itunes +ivory +ivy +jab +jackal +jacket +jackknife +jackpot +jailbird +jailbreak +jailer +jailhouse +jalapeno +jam +janitor +january +jargon +jarring +jasmine +jaundice +jaunt +java +jawed +jawless +jawline +jaws +jaybird +jaywalker +jazz +jeep +jeeringly +jellied +jelly +jersey +jester +jet +jiffy +jigsaw +jimmy +jingle +jingling +jinx +jitters +jittery +job +jockey +jockstrap +jogger +jogging +john +joining +jokester +jokingly +jolliness +jolly +jolt +jot +jovial +joyfully +joylessly +joyous +joyride +joystick +jubilance +jubilant +judge +judgingly +judicial +judiciary +judo +juggle +juggling +jugular +juice +juiciness +juicy +jujitsu +jukebox +july +jumble +jumbo +jump +junction +juncture +june +junior +juniper +junkie +junkman +junkyard +jurist +juror +jury +justice +justifier +justify +justly +justness +juvenile +kabob +kangaroo +karaoke +karate +karma +kebab +keenly +keenness +keep +keg +kelp +kennel +kept +kerchief +kerosene +kettle +kick +kiln +kilobyte +kilogram +kilometer +kilowatt +kilt +kimono +kindle +kindling +kindly +kindness +kindred +kinetic +kinfolk +king +kinship +kinsman +kinswoman +kissable +kisser +kissing +kitchen +kite +kitten +kitty +kiwi +kleenex +knapsack +knee +knelt +knickers +knoll +koala +kooky +kosher +krypton +kudos +kung +labored +laborer +laboring +laborious +labrador +ladder +ladies +ladle +ladybug +ladylike +lagged +lagging +lagoon +lair +lake +lance +landed +landfall +landfill +landing +landlady +landless +landline +landlord +landmark +landmass +landmine +landowner +landscape +landside +landslide +language +lankiness +lanky +lantern +lapdog +lapel +lapped +lapping +laptop +lard +large +lark +lash +lasso +last +latch +late +lather +latitude +latrine +latter +latticed +launch +launder +laundry +laurel +lavender +lavish +laxative +lazily +laziness +lazy +lecturer +left +legacy +legal +legend +legged +leggings +legible +legibly +legislate +lego +legroom +legume +legwarmer +legwork +lemon +lend +length +lens +lent +leotard +lesser +letdown +lethargic +lethargy +letter +lettuce +level +leverage +levers +levitate +levitator +liability +liable +liberty +librarian +library +licking +licorice +lid +life +lifter +lifting +liftoff +ligament +likely +likeness +likewise +liking +lilac +lilly +lily +limb +limeade +limelight +limes +limit +limping +limpness +line +lingo +linguini +linguist +lining +linked +linoleum +linseed +lint +lion +lip +liquefy +liqueur +liquid +lisp +list +litigate +litigator +litmus +litter +little +livable +lived +lively +liver +livestock +lividly +living +lizard +lubricant +lubricate +lucid +luckily +luckiness +luckless +lucrative +ludicrous +lugged +lukewarm +lullaby +lumber +luminance +luminous +lumpiness +lumping +lumpish +lunacy +lunar +lunchbox +luncheon +lunchroom +lunchtime +lung +lurch +lure +luridness +lurk +lushly +lushness +luster +lustfully +lustily +lustiness +lustrous +lusty +luxurious +luxury +lying +lyrically +lyricism +lyricist +lyrics +macarena +macaroni +macaw +mace +machine +machinist +magazine +magenta +maggot +magical +magician +magma +magnesium +magnetic +magnetism +magnetize +magnifier +magnify +magnitude +magnolia +mahogany +maimed +majestic +majesty +majorette +majority +makeover +maker +makeshift +making +malformed +malt +mama +mammal +mammary +mammogram +manager +managing +manatee +mandarin +mandate +mandatory +mandolin +manger +mangle +mango +mangy +manhandle +manhole +manhood +manhunt +manicotti +manicure +manifesto +manila +mankind +manlike +manliness +manly +manmade +manned +mannish 
+manor +manpower +mantis +mantra +manual +many +map +marathon +marauding +marbled +marbles +marbling +march +mardi +margarine +margarita +margin +marigold +marina +marine +marital +maritime +marlin +marmalade +maroon +married +marrow +marry +marshland +marshy +marsupial +marvelous +marxism +mascot +masculine +mashed +mashing +massager +masses +massive +mastiff +matador +matchbook +matchbox +matcher +matching +matchless +material +maternal +maternity +math +mating +matriarch +matrimony +matrix +matron +matted +matter +maturely +maturing +maturity +mauve +maverick +maximize +maximum +maybe +mayday +mayflower +moaner +moaning +mobile +mobility +mobilize +mobster +mocha +mocker +mockup +modified +modify +modular +modulator +module +moisten +moistness +moisture +molar +molasses +mold +molecular +molecule +molehill +mollusk +mom +monastery +monday +monetary +monetize +moneybags +moneyless +moneywise +mongoose +mongrel +monitor +monkhood +monogamy +monogram +monologue +monopoly +monorail +monotone +monotype +monoxide +monsieur +monsoon +monstrous +monthly +monument +moocher +moodiness +moody +mooing +moonbeam +mooned +moonlight +moonlike +moonlit +moonrise +moonscape +moonshine +moonstone +moonwalk +mop +morale +morality +morally +morbidity +morbidly +morphine +morphing +morse +mortality +mortally +mortician +mortified +mortify +mortuary +mosaic +mossy +most +mothball +mothproof +motion +motivate +motivator +motive +motocross +motor +motto +mountable +mountain +mounted +mounting +mourner +mournful +mouse +mousiness +moustache +mousy +mouth +movable +move +movie +moving +mower +mowing +much +muck +mud +mug +mulberry +mulch +mule +mulled +mullets +multiple +multiply +multitask +multitude +mumble +mumbling +mumbo +mummified +mummify +mummy +mumps +munchkin +mundane +municipal +muppet +mural +murkiness +murky +murmuring +muscular +museum +mushily +mushiness +mushroom +mushy +music +musket +muskiness +musky +mustang +mustard +muster +mustiness +musty +mutable +mutate +mutation +mute +mutilated +mutilator +mutiny +mutt +mutual +muzzle +myself +myspace +mystified +mystify +myth +nacho +nag +nail +name +naming +nanny +nanometer +nape +napkin +napped +napping +nappy +narrow +nastily +nastiness +national +native +nativity +natural +nature +naturist +nautical +navigate +navigator +navy +nearby +nearest +nearly +nearness +neatly +neatness +nebula +nebulizer +nectar +negate +negation +negative +neglector +negligee +negligent +negotiate +nemeses +nemesis +neon +nephew +nerd +nervous +nervy +nest +net +neurology +neuron +neurosis +neurotic +neuter +neutron +never +next +nibble +nickname +nicotine +niece +nifty +nimble +nimbly +nineteen +ninetieth +ninja +nintendo +ninth +nuclear +nuclei +nucleus +nugget +nullify +number +numbing +numbly +numbness +numeral +numerate +numerator +numeric +numerous +nuptials +nursery +nursing +nurture +nutcase +nutlike +nutmeg +nutrient +nutshell +nuttiness +nutty +nuzzle +nylon +oaf +oak +oasis +oat +obedience +obedient +obituary +object +obligate +obliged +oblivion +oblivious +oblong +obnoxious +oboe +obscure +obscurity +observant +observer +observing +obsessed +obsession +obsessive +obsolete +obstacle +obstinate +obstruct +obtain +obtrusive +obtuse +obvious +occultist +occupancy +occupant +occupier +occupy +ocean +ocelot +octagon +octane +october +octopus +ogle +oil +oink +ointment +okay +old +olive +olympics +omega +omen +ominous +omission +omit +omnivore +onboard +oncoming +ongoing +onion +online +onlooker +only +onscreen +onset +onshore +onslaught +onstage +onto +onward +onyx 
+oops +ooze +oozy +opacity +opal +open +operable +operate +operating +operation +operative +operator +opium +opossum +opponent +oppose +opposing +opposite +oppressed +oppressor +opt +opulently +osmosis +other +otter +ouch +ought +ounce +outage +outback +outbid +outboard +outbound +outbreak +outburst +outcast +outclass +outcome +outdated +outdoors +outer +outfield +outfit +outflank +outgoing +outgrow +outhouse +outing +outlast +outlet +outline +outlook +outlying +outmatch +outmost +outnumber +outplayed +outpost +outpour +output +outrage +outrank +outreach +outright +outscore +outsell +outshine +outshoot +outsider +outskirts +outsmart +outsource +outspoken +outtakes +outthink +outward +outweigh +outwit +oval +ovary +oven +overact +overall +overarch +overbid +overbill +overbite +overblown +overboard +overbook +overbuilt +overcast +overcoat +overcome +overcook +overcrowd +overdraft +overdrawn +overdress +overdrive +overdue +overeager +overeater +overexert +overfed +overfeed +overfill +overflow +overfull +overgrown +overhand +overhang +overhaul +overhead +overhear +overheat +overhung +overjoyed +overkill +overlabor +overlaid +overlap +overlay +overload +overlook +overlord +overlying +overnight +overpass +overpay +overplant +overplay +overpower +overprice +overrate +overreach +overreact +override +overripe +overrule +overrun +overshoot +overshot +oversight +oversized +oversleep +oversold +overspend +overstate +overstay +overstep +overstock +overstuff +oversweet +overtake +overthrow +overtime +overtly +overtone +overture +overturn +overuse +overvalue +overview +overwrite +owl +oxford +oxidant +oxidation +oxidize +oxidizing +oxygen +oxymoron +oyster +ozone +paced +pacemaker +pacific +pacifier +pacifism +pacifist +pacify +padded +padding +paddle +paddling +padlock +pagan +pager +paging +pajamas +palace +palatable +palm +palpable +palpitate +paltry +pampered +pamperer +pampers +pamphlet +panama +pancake +pancreas +panda +pandemic +pang +panhandle +panic +panning +panorama +panoramic +panther +pantomime +pantry +pants +pantyhose +paparazzi +papaya +paper +paprika +papyrus +parabola +parachute +parade +paradox +paragraph +parakeet +paralegal +paralyses +paralysis +paralyze +paramedic +parameter +paramount +parasail +parasite +parasitic +parcel +parched +parchment +pardon +parish +parka +parking +parkway +parlor +parmesan +parole +parrot +parsley +parsnip +partake +parted +parting +partition +partly +partner +partridge +party +passable +passably +passage +passcode +passenger +passerby +passing +passion +passive +passivism +passover +passport +password +pasta +pasted +pastel +pastime +pastor +pastrami +pasture +pasty +patchwork +patchy +paternal +paternity +path +patience +patient +patio +patriarch +patriot +patrol +patronage +patronize +pauper +pavement +paver +pavestone +pavilion +paving +pawing +payable +payback +paycheck +payday +payee +payer +paying +payment +payphone +payroll +pebble +pebbly +pecan +pectin +peculiar +peddling +pediatric +pedicure +pedigree +pedometer +pegboard +pelican +pellet +pelt +pelvis +penalize +penalty +pencil +pendant +pending +penholder +penknife +pennant +penniless +penny +penpal +pension +pentagon +pentagram +pep +perceive +percent +perch +percolate +perennial +perfected +perfectly +perfume +periscope +perish +perjurer +perjury +perkiness +perky +perm +peroxide +perpetual +perplexed +persecute +persevere +persuaded +persuader +pesky +peso +pessimism +pessimist +pester +pesticide +petal +petite +petition +petri +petroleum +petted +petticoat +pettiness +petty +petunia 
+phantom +phobia +phoenix +phonebook +phoney +phonics +phoniness +phony +phosphate +photo +phrase +phrasing +placard +placate +placidly +plank +planner +plant +plasma +plaster +plastic +plated +platform +plating +platinum +platonic +platter +platypus +plausible +plausibly +playable +playback +player +playful +playgroup +playhouse +playing +playlist +playmaker +playmate +playoff +playpen +playroom +playset +plaything +playtime +plaza +pleading +pleat +pledge +plentiful +plenty +plethora +plexiglas +pliable +plod +plop +plot +plow +ploy +pluck +plug +plunder +plunging +plural +plus +plutonium +plywood +poach +pod +poem +poet +pogo +pointed +pointer +pointing +pointless +pointy +poise +poison +poker +poking +polar +police +policy +polio +polish +politely +polka +polo +polyester +polygon +polygraph +polymer +poncho +pond +pony +popcorn +pope +poplar +popper +poppy +popsicle +populace +popular +populate +porcupine +pork +porous +porridge +portable +portal +portfolio +porthole +portion +portly +portside +poser +posh +posing +possible +possibly +possum +postage +postal +postbox +postcard +posted +poster +posting +postnasal +posture +postwar +pouch +pounce +pouncing +pound +pouring +pout +powdered +powdering +powdery +power +powwow +pox +praising +prance +prancing +pranker +prankish +prankster +prayer +praying +preacher +preaching +preachy +preamble +precinct +precise +precision +precook +precut +predator +predefine +predict +preface +prefix +preflight +preformed +pregame +pregnancy +pregnant +preheated +prelaunch +prelaw +prelude +premiere +premises +premium +prenatal +preoccupy +preorder +prepaid +prepay +preplan +preppy +preschool +prescribe +preseason +preset +preshow +president +presoak +press +presume +presuming +preteen +pretended +pretender +pretense +pretext +pretty +pretzel +prevail +prevalent +prevent +preview +previous +prewar +prewashed +prideful +pried +primal +primarily +primary +primate +primer +primp +princess +print +prior +prism +prison +prissy +pristine +privacy +private +privatize +prize +proactive +probable +probably +probation +probe +probing +probiotic +problem +procedure +process +proclaim +procreate +procurer +prodigal +prodigy +produce +product +profane +profanity +professed +professor +profile +profound +profusely +progeny +prognosis +program +progress +projector +prologue +prolonged +promenade +prominent +promoter +promotion +prompter +promptly +prone +prong +pronounce +pronto +proofing +proofread +proofs +propeller +properly +property +proponent +proposal +propose +props +prorate +protector +protegee +proton +prototype +protozoan +protract +protrude +proud +provable +proved +proven +provided +provider +providing +province +proving +provoke +provoking +provolone +prowess +prowler +prowling +proximity +proxy +prozac +prude +prudishly +prune +pruning +pry +psychic +public +publisher +pucker +pueblo +pug +pull +pulmonary +pulp +pulsate +pulse +pulverize +puma +pumice +pummel +punch +punctual +punctuate +punctured +pungent +punisher +punk +pupil +puppet +puppy +purchase +pureblood +purebred +purely +pureness +purgatory +purge +purging +purifier +purify +purist +puritan +purity +purple +purplish +purposely +purr +purse +pursuable +pursuant +pursuit +purveyor +pushcart +pushchair +pusher +pushiness +pushing +pushover +pushpin +pushup +pushy +putdown +putt +puzzle +puzzling +pyramid +pyromania +python +quack +quadrant +quail +quaintly +quake +quaking +qualified +qualifier +qualify +quality +qualm +quantum +quarrel +quarry +quartered +quarterly +quarters +quartet +quench 
+query +quicken +quickly +quickness +quicksand +quickstep +quiet +quill +quilt +quintet +quintuple +quirk +quit +quiver +quizzical +quotable +quotation +quote +rabid +race +racing +racism +rack +racoon +radar +radial +radiance +radiantly +radiated +radiation +radiator +radio +radish +raffle +raft +rage +ragged +raging +ragweed +raider +railcar +railing +railroad +railway +raisin +rake +raking +rally +ramble +rambling +ramp +ramrod +ranch +rancidity +random +ranged +ranger +ranging +ranked +ranking +ransack +ranting +rants +rare +rarity +rascal +rash +rasping +ravage +raven +ravine +raving +ravioli +ravishing +reabsorb +reach +reacquire +reaction +reactive +reactor +reaffirm +ream +reanalyze +reappear +reapply +reappoint +reapprove +rearrange +rearview +reason +reassign +reassure +reattach +reawake +rebalance +rebate +rebel +rebirth +reboot +reborn +rebound +rebuff +rebuild +rebuilt +reburial +rebuttal +recall +recant +recapture +recast +recede +recent +recess +recharger +recipient +recital +recite +reckless +reclaim +recliner +reclining +recluse +reclusive +recognize +recoil +recollect +recolor +reconcile +reconfirm +reconvene +recopy +record +recount +recoup +recovery +recreate +rectal +rectangle +rectified +rectify +recycled +recycler +recycling +reemerge +reenact +reenter +reentry +reexamine +referable +referee +reference +refill +refinance +refined +refinery +refining +refinish +reflected +reflector +reflex +reflux +refocus +refold +reforest +reformat +reformed +reformer +reformist +refract +refrain +refreeze +refresh +refried +refueling +refund +refurbish +refurnish +refusal +refuse +refusing +refutable +refute +regain +regalia +regally +reggae +regime +region +register +registrar +registry +regress +regretful +regroup +regular +regulate +regulator +rehab +reheat +rehire +rehydrate +reimburse +reissue +reiterate +rejoice +rejoicing +rejoin +rekindle +relapse +relapsing +relatable +related +relation +relative +relax +relay +relearn +release +relenting +reliable +reliably +reliance +reliant +relic +relieve +relieving +relight +relish +relive +reload +relocate +relock +reluctant +rely +remake +remark +remarry +rematch +remedial +remedy +remember +reminder +remindful +remission +remix +remnant +remodeler +remold +remorse +remote +removable +removal +removed +remover +removing +rename +renderer +rendering +rendition +renegade +renewable +renewably +renewal +renewed +renounce +renovate +renovator +rentable +rental +rented +renter +reoccupy +reoccur +reopen +reorder +repackage +repacking +repaint +repair +repave +repaying +repayment +repeal +repeated +repeater +repent +rephrase +replace +replay +replica +reply +reporter +repose +repossess +repost +repressed +reprimand +reprint +reprise +reproach +reprocess +reproduce +reprogram +reps +reptile +reptilian +repugnant +repulsion +repulsive +repurpose +reputable +reputably +request +require +requisite +reroute +rerun +resale +resample +rescuer +reseal +research +reselect +reseller +resemble +resend +resent +reset +reshape +reshoot +reshuffle +residence +residency +resident +residual +residue +resigned +resilient +resistant +resisting +resize +resolute +resolved +resonant +resonate +resort +resource +respect +resubmit +result +resume +resupply +resurface +resurrect +retail +retainer +retaining +retake +retaliate +retention +rethink +retinal +retired +retiree +retiring +retold +retool +retorted +retouch +retrace +retract +retrain +retread +retreat +retrial +retrieval +retriever +retry +return +retying +retype +reunion +reunite +reusable +reuse 
+reveal +reveler +revenge +revenue +reverb +revered +reverence +reverend +reversal +reverse +reversing +reversion +revert +revisable +revise +revision +revisit +revivable +revival +reviver +reviving +revocable +revoke +revolt +revolver +revolving +reward +rewash +rewind +rewire +reword +rework +rewrap +rewrite +rhyme +ribbon +ribcage +rice +riches +richly +richness +rickety +ricotta +riddance +ridden +ride +riding +rifling +rift +rigging +rigid +rigor +rimless +rimmed +rind +rink +rinse +rinsing +riot +ripcord +ripeness +ripening +ripping +ripple +rippling +riptide +rise +rising +risk +risotto +ritalin +ritzy +rival +riverbank +riverbed +riverboat +riverside +riveter +riveting +roamer +roaming +roast +robbing +robe +robin +robotics +robust +rockband +rocker +rocket +rockfish +rockiness +rocking +rocklike +rockslide +rockstar +rocky +rogue +roman +romp +rope +roping +roster +rosy +rotten +rotting +rotunda +roulette +rounding +roundish +roundness +roundup +roundworm +routine +routing +rover +roving +royal +rubbed +rubber +rubbing +rubble +rubdown +ruby +ruckus +rudder +rug +ruined +rule +rumble +rumbling +rummage +rumor +runaround +rundown +runner +running +runny +runt +runway +rupture +rural +ruse +rush +rust +rut +sabbath +sabotage +sacrament +sacred +sacrifice +sadden +saddlebag +saddled +saddling +sadly +sadness +safari +safeguard +safehouse +safely +safeness +saffron +saga +sage +sagging +saggy +said +saint +sake +salad +salami +salaried +salary +saline +salon +saloon +salsa +salt +salutary +salute +salvage +salvaging +salvation +same +sample +sampling +sanction +sanctity +sanctuary +sandal +sandbag +sandbank +sandbar +sandblast +sandbox +sanded +sandfish +sanding +sandlot +sandpaper +sandpit +sandstone +sandstorm +sandworm +sandy +sanitary +sanitizer +sank +santa +sapling +sappiness +sappy +sarcasm +sarcastic +sardine +sash +sasquatch +sassy +satchel +satiable +satin +satirical +satisfied +satisfy +saturate +saturday +sauciness +saucy +sauna +savage +savanna +saved +savings +savior +savor +saxophone +say +scabbed +scabby +scalded +scalding +scale +scaling +scallion +scallop +scalping +scam +scandal +scanner +scanning +scant +scapegoat +scarce +scarcity +scarecrow +scared +scarf +scarily +scariness +scarring +scary +scavenger +scenic +schedule +schematic +scheme +scheming +schilling +schnapps +scholar +science +scientist +scion +scoff +scolding +scone +scoop +scooter +scope +scorch +scorebook +scorecard +scored +scoreless +scorer +scoring +scorn +scorpion +scotch +scoundrel +scoured +scouring +scouting +scouts +scowling +scrabble +scraggly +scrambled +scrambler +scrap +scratch +scrawny +screen +scribble +scribe +scribing +scrimmage +script +scroll +scrooge +scrounger +scrubbed +scrubber +scruffy +scrunch +scrutiny +scuba +scuff +sculptor +sculpture +scurvy +scuttle +secluded +secluding +seclusion +second +secrecy +secret +sectional +sector +secular +securely +security +sedan +sedate +sedation +sedative +sediment +seduce +seducing +segment +seismic +seizing +seldom +selected +selection +selective +selector +self +seltzer +semantic +semester +semicolon +semifinal +seminar +semisoft +semisweet +senate +senator +send +senior +senorita +sensation +sensitive +sensitize +sensually +sensuous +sepia +september +septic +septum +sequel +sequence +sequester +series +sermon +serotonin +serpent +serrated +serve +service +serving +sesame +sessions +setback +setting +settle +settling +setup +sevenfold +seventeen +seventh +seventy +severity +shabby +shack +shaded +shadily +shadiness +shading +shadow 
+shady +shaft +shakable +shakily +shakiness +shaking +shaky +shale +shallot +shallow +shame +shampoo +shamrock +shank +shanty +shape +shaping +share +sharpener +sharper +sharpie +sharply +sharpness +shawl +sheath +shed +sheep +sheet +shelf +shell +shelter +shelve +shelving +sherry +shield +shifter +shifting +shiftless +shifty +shimmer +shimmy +shindig +shine +shingle +shininess +shining +shiny +ship +shirt +shivering +shock +shone +shoplift +shopper +shopping +shoptalk +shore +shortage +shortcake +shortcut +shorten +shorter +shorthand +shortlist +shortly +shortness +shorts +shortwave +shorty +shout +shove +showbiz +showcase +showdown +shower +showgirl +showing +showman +shown +showoff +showpiece +showplace +showroom +showy +shrank +shrapnel +shredder +shredding +shrewdly +shriek +shrill +shrimp +shrine +shrink +shrivel +shrouded +shrubbery +shrubs +shrug +shrunk +shucking +shudder +shuffle +shuffling +shun +shush +shut +shy +siamese +siberian +sibling +siding +sierra +siesta +sift +sighing +silenced +silencer +silent +silica +silicon +silk +silliness +silly +silo +silt +silver +similarly +simile +simmering +simple +simplify +simply +sincere +sincerity +singer +singing +single +singular +sinister +sinless +sinner +sinuous +sip +siren +sister +sitcom +sitter +sitting +situated +situation +sixfold +sixteen +sixth +sixties +sixtieth +sixtyfold +sizable +sizably +size +sizing +sizzle +sizzling +skater +skating +skedaddle +skeletal +skeleton +skeptic +sketch +skewed +skewer +skid +skied +skier +skies +skiing +skilled +skillet +skillful +skimmed +skimmer +skimming +skimpily +skincare +skinhead +skinless +skinning +skinny +skintight +skipper +skipping +skirmish +skirt +skittle +skydiver +skylight +skyline +skype +skyrocket +skyward +slab +slacked +slacker +slacking +slackness +slacks +slain +slam +slander +slang +slapping +slapstick +slashed +slashing +slate +slather +slaw +sled +sleek +sleep +sleet +sleeve +slept +sliceable +sliced +slicer +slicing +slick +slider +slideshow +sliding +slighted +slighting +slightly +slimness +slimy +slinging +slingshot +slinky +slip +slit +sliver +slobbery +slogan +sloped +sloping +sloppily +sloppy +slot +slouching +slouchy +sludge +slug +slum +slurp +slush +sly +small +smartly +smartness +smasher +smashing +smashup +smell +smelting +smile +smilingly +smirk +smite +smith +smitten +smock +smog +smoked +smokeless +smokiness +smoking +smoky +smolder +smooth +smother +smudge +smudgy +smuggler +smuggling +smugly +smugness +snack +snagged +snaking +snap +snare +snarl +snazzy +sneak +sneer +sneeze +sneezing +snide +sniff +snippet +snipping +snitch +snooper +snooze +snore +snoring +snorkel +snort +snout +snowbird +snowboard +snowbound +snowcap +snowdrift +snowdrop +snowfall +snowfield +snowflake +snowiness +snowless +snowman +snowplow +snowshoe +snowstorm +snowsuit +snowy +snub +snuff +snuggle +snugly +snugness +speak +spearfish +spearhead +spearman +spearmint +species +specimen +specked +speckled +specks +spectacle +spectator +spectrum +speculate +speech +speed +spellbind +speller +spelling +spendable +spender +spending +spent +spew +sphere +spherical +sphinx +spider +spied +spiffy +spill +spilt +spinach +spinal +spindle +spinner +spinning +spinout +spinster +spiny +spiral +spirited +spiritism +spirits +spiritual +splashed +splashing +splashy +splatter +spleen +splendid +splendor +splice +splicing +splinter +splotchy +splurge +spoilage +spoiled +spoiler +spoiling +spoils +spoken +spokesman +sponge +spongy +sponsor +spoof +spookily +spooky +spool +spoon +spore +sporting 
+sports +sporty +spotless +spotlight +spotted +spotter +spotting +spotty +spousal +spouse +spout +sprain +sprang +sprawl +spray +spree +sprig +spring +sprinkled +sprinkler +sprint +sprite +sprout +spruce +sprung +spry +spud +spur +sputter +spyglass +squabble +squad +squall +squander +squash +squatted +squatter +squatting +squeak +squealer +squealing +squeamish +squeegee +squeeze +squeezing +squid +squiggle +squiggly +squint +squire +squirt +squishier +squishy +stability +stabilize +stable +stack +stadium +staff +stage +staging +stagnant +stagnate +stainable +stained +staining +stainless +stalemate +staleness +stalling +stallion +stamina +stammer +stamp +stand +stank +staple +stapling +starboard +starch +stardom +stardust +starfish +stargazer +staring +stark +starless +starlet +starlight +starlit +starring +starry +starship +starter +starting +startle +startling +startup +starved +starving +stash +state +static +statistic +statue +stature +status +statute +statutory +staunch +stays +steadfast +steadier +steadily +steadying +steam +steed +steep +steerable +steering +steersman +stegosaur +stellar +stem +stench +stencil +step +stereo +sterile +sterility +sterilize +sterling +sternness +sternum +stew +stick +stiffen +stiffly +stiffness +stifle +stifling +stillness +stilt +stimulant +stimulate +stimuli +stimulus +stinger +stingily +stinging +stingray +stingy +stinking +stinky +stipend +stipulate +stir +stitch +stock +stoic +stoke +stole +stomp +stonewall +stoneware +stonework +stoning +stony +stood +stooge +stool +stoop +stoplight +stoppable +stoppage +stopped +stopper +stopping +stopwatch +storable +storage +storeroom +storewide +storm +stout +stove +stowaway +stowing +straddle +straggler +strained +strainer +straining +strangely +stranger +strangle +strategic +strategy +stratus +straw +stray +streak +stream +street +strength +strenuous +strep +stress +stretch +strewn +stricken +strict +stride +strife +strike +striking +strive +striving +strobe +strode +stroller +strongbox +strongly +strongman +struck +structure +strudel +struggle +strum +strung +strut +stubbed +stubble +stubbly +stubborn +stucco +stuck +student +studied +studio +study +stuffed +stuffing +stuffy +stumble +stumbling +stump +stung +stunned +stunner +stunning +stunt +stupor +sturdily +sturdy +styling +stylishly +stylist +stylized +stylus +suave +subarctic +subatomic +subdivide +subdued +subduing +subfloor +subgroup +subheader +subject +sublease +sublet +sublevel +sublime +submarine +submerge +submersed +submitter +subpanel +subpar +subplot +subprime +subscribe +subscript +subsector +subside +subsiding +subsidize +subsidy +subsoil +subsonic +substance +subsystem +subtext +subtitle +subtly +subtotal +subtract +subtype +suburb +subway +subwoofer +subzero +succulent +such +suction +sudden +sudoku +suds +sufferer +suffering +suffice +suffix +suffocate +suffrage +sugar +suggest +suing +suitable +suitably +suitcase +suitor +sulfate +sulfide +sulfite +sulfur +sulk +sullen +sulphate +sulphuric +sultry +superbowl +superglue +superhero +superior +superjet +superman +supermom +supernova +supervise +supper +supplier +supply +support +supremacy +supreme +surcharge +surely +sureness +surface +surfacing +surfboard +surfer +surgery +surgical +surging +surname +surpass +surplus +surprise +surreal +surrender +surrogate +surround +survey +survival +survive +surviving +survivor +sushi +suspect +suspend +suspense +sustained +sustainer +swab +swaddling +swagger +swampland +swan +swapping +swarm +sway +swear +sweat +sweep +swell +swept +swerve +swifter 
+swiftly +swiftness +swimmable +swimmer +swimming +swimsuit +swimwear +swinger +swinging +swipe +swirl +switch +swivel +swizzle +swooned +swoop +swoosh +swore +sworn +swung +sycamore +sympathy +symphonic +symphony +symptom +synapse +syndrome +synergy +synopses +synopsis +synthesis +synthetic +syrup +system +t-shirt +tabasco +tabby +tableful +tables +tablet +tableware +tabloid +tackiness +tacking +tackle +tackling +tacky +taco +tactful +tactical +tactics +tactile +tactless +tadpole +taekwondo +tag +tainted +take +taking +talcum +talisman +tall +talon +tamale +tameness +tamer +tamper +tank +tanned +tannery +tanning +tantrum +tapeless +tapered +tapering +tapestry +tapioca +tapping +taps +tarantula +target +tarmac +tarnish +tarot +tartar +tartly +tartness +task +tassel +taste +tastiness +tasting +tasty +tattered +tattle +tattling +tattoo +taunt +tavern +thank +that +thaw +theater +theatrics +thee +theft +theme +theology +theorize +thermal +thermos +thesaurus +these +thesis +thespian +thicken +thicket +thickness +thieving +thievish +thigh +thimble +thing +think +thinly +thinner +thinness +thinning +thirstily +thirsting +thirsty +thirteen +thirty +thong +thorn +those +thousand +thrash +thread +threaten +threefold +thrift +thrill +thrive +thriving +throat +throbbing +throng +throttle +throwaway +throwback +thrower +throwing +thud +thumb +thumping +thursday +thus +thwarting +thyself +tiara +tibia +tidal +tidbit +tidiness +tidings +tidy +tiger +tighten +tightly +tightness +tightrope +tightwad +tigress +tile +tiling +till +tilt +timid +timing +timothy +tinderbox +tinfoil +tingle +tingling +tingly +tinker +tinkling +tinsel +tinsmith +tint +tinwork +tiny +tipoff +tipped +tipper +tipping +tiptoeing +tiptop +tiring +tissue +trace +tracing +track +traction +tractor +trade +trading +tradition +traffic +tragedy +trailing +trailside +train +traitor +trance +tranquil +transfer +transform +translate +transpire +transport +transpose +trapdoor +trapeze +trapezoid +trapped +trapper +trapping +traps +trash +travel +traverse +travesty +tray +treachery +treading +treadmill +treason +treat +treble +tree +trekker +tremble +trembling +tremor +trench +trend +trespass +triage +trial +triangle +tribesman +tribunal +tribune +tributary +tribute +triceps +trickery +trickily +tricking +trickle +trickster +tricky +tricolor +tricycle +trident +tried +trifle +trifocals +trillion +trilogy +trimester +trimmer +trimming +trimness +trinity +trio +tripod +tripping +triumph +trivial +trodden +trolling +trombone +trophy +tropical +tropics +trouble +troubling +trough +trousers +trout +trowel +truce +truck +truffle +trump +trunks +trustable +trustee +trustful +trusting +trustless +truth +try +tubby +tubeless +tubular +tucking +tuesday +tug +tuition +tulip +tumble +tumbling +tummy +turban +turbine +turbofan +turbojet +turbulent +turf +turkey +turmoil +turret +turtle +tusk +tutor +tutu +tux +tweak +tweed +tweet +tweezers +twelve +twentieth +twenty +twerp +twice +twiddle +twiddling +twig +twilight +twine +twins +twirl +twistable +twisted +twister +twisting +twisty +twitch +twitter +tycoon +tying +tyke +udder +ultimate +ultimatum +ultra +umbilical +umbrella +umpire +unabashed +unable +unadorned +unadvised +unafraid +unaired +unaligned +unaltered +unarmored +unashamed +unaudited +unawake +unaware +unbaked +unbalance +unbeaten +unbend +unbent +unbiased +unbitten +unblended +unblessed +unblock +unbolted +unbounded +unboxed +unbraided +unbridle +unbroken +unbuckled +unbundle +unburned +unbutton +uncanny +uncapped +uncaring +uncertain +unchain 
+unchanged +uncharted +uncheck +uncivil +unclad +unclaimed +unclamped +unclasp +uncle +unclip +uncloak +unclog +unclothed +uncoated +uncoiled +uncolored +uncombed +uncommon +uncooked +uncork +uncorrupt +uncounted +uncouple +uncouth +uncover +uncross +uncrown +uncrushed +uncured +uncurious +uncurled +uncut +undamaged +undated +undaunted +undead +undecided +undefined +underage +underarm +undercoat +undercook +undercut +underdog +underdone +underfed +underfeed +underfoot +undergo +undergrad +underhand +underline +underling +undermine +undermost +underpaid +underpass +underpay +underrate +undertake +undertone +undertook +undertow +underuse +underwear +underwent +underwire +undesired +undiluted +undivided +undocked +undoing +undone +undrafted +undress +undrilled +undusted +undying +unearned +unearth +unease +uneasily +uneasy +uneatable +uneaten +unedited +unelected +unending +unengaged +unenvied +unequal +unethical +uneven +unexpired +unexposed +unfailing +unfair +unfasten +unfazed +unfeeling +unfiled +unfilled +unfitted +unfitting +unfixable +unfixed +unflawed +unfocused +unfold +unfounded +unframed +unfreeze +unfrosted +unfrozen +unfunded +unglazed +ungloved +unglue +ungodly +ungraded +ungreased +unguarded +unguided +unhappily +unhappy +unharmed +unhealthy +unheard +unhearing +unheated +unhelpful +unhidden +unhinge +unhitched +unholy +unhook +unicorn +unicycle +unified +unifier +uniformed +uniformly +unify +unimpeded +uninjured +uninstall +uninsured +uninvited +union +uniquely +unisexual +unison +unissued +unit +universal +universe +unjustly +unkempt +unkind +unknotted +unknowing +unknown +unlaced +unlatch +unlawful +unleaded +unlearned +unleash +unless +unleveled +unlighted +unlikable +unlimited +unlined +unlinked +unlisted +unlit +unlivable +unloaded +unloader +unlocked +unlocking +unlovable +unloved +unlovely +unloving +unluckily +unlucky +unmade +unmanaged +unmanned +unmapped +unmarked +unmasked +unmasking +unmatched +unmindful +unmixable +unmixed +unmolded +unmoral +unmovable +unmoved +unmoving +unnamable +unnamed +unnatural +unneeded +unnerve +unnerving +unnoticed +unopened +unopposed +unpack +unpadded +unpaid +unpainted +unpaired +unpaved +unpeeled +unpicked +unpiloted +unpinned +unplanned +unplanted +unpleased +unpledged +unplowed +unplug +unpopular +unproven +unquote +unranked +unrated +unraveled +unreached +unread +unreal +unreeling +unrefined +unrelated +unrented +unrest +unretired +unrevised +unrigged +unripe +unrivaled +unroasted +unrobed +unroll +unruffled +unruly +unrushed +unsaddle +unsafe +unsaid +unsalted +unsaved +unsavory +unscathed +unscented +unscrew +unsealed +unseated +unsecured +unseeing +unseemly +unseen +unselect +unselfish +unsent +unsettled +unshackle +unshaken +unshaved +unshaven +unsheathe +unshipped +unsightly +unsigned +unskilled +unsliced +unsmooth +unsnap +unsocial +unsoiled +unsold +unsolved +unsorted +unspoiled +unspoken +unstable +unstaffed +unstamped +unsteady +unsterile +unstirred +unstitch +unstopped +unstuck +unstuffed +unstylish +unsubtle +unsubtly +unsuited +unsure +unsworn +untagged +untainted +untaken +untamed +untangled +untapped +untaxed +unthawed +unthread +untidy +untie +until +untimed +untimely +untitled +untoasted +untold +untouched +untracked +untrained +untreated +untried +untrimmed +untrue +untruth +unturned +untwist +untying +unusable +unused +unusual +unvalued +unvaried +unvarying +unveiled +unveiling +unvented +unviable +unvisited +unvocal +unwanted +unwarlike +unwary +unwashed +unwatched +unweave +unwed +unwelcome +unwell +unwieldy 
+unwilling +unwind +unwired +unwitting +unwomanly +unworldly +unworn +unworried +unworthy +unwound +unwoven +unwrapped +unwritten +unzip +upbeat +upchuck +upcoming +upcountry +update +upfront +upgrade +upheaval +upheld +uphill +uphold +uplifted +uplifting +upload +upon +upper +upright +uprising +upriver +uproar +uproot +upscale +upside +upstage +upstairs +upstart +upstate +upstream +upstroke +upswing +uptake +uptight +uptown +upturned +upward +upwind +uranium +urban +urchin +urethane +urgency +urgent +urging +urologist +urology +usable +usage +useable +used +uselessly +user +usher +usual +utensil +utility +utilize +utmost +utopia +utter +vacancy +vacant +vacate +vacation +vagabond +vagrancy +vagrantly +vaguely +vagueness +valiant +valid +valium +valley +valuables +value +vanilla +vanish +vanity +vanquish +vantage +vaporizer +variable +variably +varied +variety +various +varmint +varnish +varsity +varying +vascular +vaseline +vastly +vastness +veal +vegan +veggie +vehicular +velcro +velocity +velvet +vendetta +vending +vendor +veneering +vengeful +venomous +ventricle +venture +venue +venus +verbalize +verbally +verbose +verdict +verify +verse +version +versus +vertebrae +vertical +vertigo +very +vessel +vest +veteran +veto +vexingly +viability +viable +vibes +vice +vicinity +victory +video +viewable +viewer +viewing +viewless +viewpoint +vigorous +village +villain +vindicate +vineyard +vintage +violate +violation +violator +violet +violin +viper +viral +virtual +virtuous +virus +visa +viscosity +viscous +viselike +visible +visibly +vision +visiting +visitor +visor +vista +vitality +vitalize +vitally +vitamins +vivacious +vividly +vividness +vixen +vocalist +vocalize +vocally +vocation +voice +voicing +void +volatile +volley +voltage +volumes +voter +voting +voucher +vowed +vowel +voyage +wackiness +wad +wafer +waffle +waged +wager +wages +waggle +wagon +wake +waking +walk +walmart +walnut +walrus +waltz +wand +wannabe +wanted +wanting +wasabi +washable +washbasin +washboard +washbowl +washcloth +washday +washed +washer +washhouse +washing +washout +washroom +washstand +washtub +wasp +wasting +watch +water +waviness +waving +wavy +whacking +whacky +wham +wharf +wheat +whenever +whiff +whimsical +whinny +whiny +whisking +whoever +whole +whomever +whoopee +whooping +whoops +why +wick +widely +widen +widget +widow +width +wieldable +wielder +wife +wifi +wikipedia +wildcard +wildcat +wilder +wildfire +wildfowl +wildland +wildlife +wildly +wildness +willed +willfully +willing +willow +willpower +wilt +wimp +wince +wincing +wind +wing +winking +winner +winnings +winter +wipe +wired +wireless +wiring +wiry +wisdom +wise +wish +wisplike +wispy +wistful +wizard +wobble +wobbling +wobbly +wok +wolf +wolverine +womanhood +womankind +womanless +womanlike +womanly +womb +woof +wooing +wool +woozy +word +work +worried +worrier +worrisome +worry +worsening +worshiper +worst +wound +woven +wow +wrangle +wrath +wreath +wreckage +wrecker +wrecking +wrench +wriggle +wriggly +wrinkle +wrinkly +wrist +writing +written +wrongdoer +wronged +wrongful +wrongly +wrongness +wrought +xbox +xerox +yahoo +yam +yanking +yapping +yard +yarn +yeah +yearbook +yearling +yearly +yearning +yeast +yelling +yelp +yen +yesterday +yiddish +yield +yin +yippee +yo-yo +yodel +yoga +yogurt +yonder +yoyo +yummy +zap +zealous +zebra +zen +zeppelin +zero +zestfully +zesty +zigzagged +zipfile +zipping +zippy +zips +zit +zodiac +zombie +zone +zoning +zookeeper +zoologist +zoology +zoom diff --git 
a/ansible/lib/python3.11/site-packages/passlib/_data/wordsets/eff_prefixed.txt b/ansible/lib/python3.11/site-packages/passlib/_data/wordsets/eff_prefixed.txt new file mode 100644 index 000000000..9ac732fe3 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/_data/wordsets/eff_prefixed.txt @@ -0,0 +1,1296 @@ +aardvark +abandoned +abbreviate +abdomen +abhorrence +abiding +abnormal +abrasion +absorbing +abundant +abyss +academy +accountant +acetone +achiness +acid +acoustics +acquire +acrobat +actress +acuteness +aerosol +aesthetic +affidavit +afloat +afraid +aftershave +again +agency +aggressor +aghast +agitate +agnostic +agonizing +agreeing +aidless +aimlessly +ajar +alarmclock +albatross +alchemy +alfalfa +algae +aliens +alkaline +almanac +alongside +alphabet +already +also +altitude +aluminum +always +amazingly +ambulance +amendment +amiable +ammunition +amnesty +amoeba +amplifier +amuser +anagram +anchor +android +anesthesia +angelfish +animal +anklet +announcer +anonymous +answer +antelope +anxiety +anyplace +aorta +apartment +apnea +apostrophe +apple +apricot +aquamarine +arachnid +arbitrate +ardently +arena +argument +aristocrat +armchair +aromatic +arrowhead +arsonist +artichoke +asbestos +ascend +aseptic +ashamed +asinine +asleep +asocial +asparagus +astronaut +asymmetric +atlas +atmosphere +atom +atrocious +attic +atypical +auctioneer +auditorium +augmented +auspicious +automobile +auxiliary +avalanche +avenue +aviator +avocado +awareness +awhile +awkward +awning +awoke +axially +azalea +babbling +backpack +badass +bagpipe +bakery +balancing +bamboo +banana +barracuda +basket +bathrobe +bazooka +blade +blender +blimp +blouse +blurred +boatyard +bobcat +body +bogusness +bohemian +boiler +bonnet +boots +borough +bossiness +bottle +bouquet +boxlike +breath +briefcase +broom +brushes +bubblegum +buckle +buddhist +buffalo +bullfrog +bunny +busboy +buzzard +cabin +cactus +cadillac +cafeteria +cage +cahoots +cajoling +cakewalk +calculator +camera +canister +capsule +carrot +cashew +cathedral +caucasian +caviar +ceasefire +cedar +celery +cement +census +ceramics +cesspool +chalkboard +cheesecake +chimney +chlorine +chopsticks +chrome +chute +cilantro +cinnamon +circle +cityscape +civilian +clay +clergyman +clipboard +clock +clubhouse +coathanger +cobweb +coconut +codeword +coexistent +coffeecake +cognitive +cohabitate +collarbone +computer +confetti +copier +cornea +cosmetics +cotton +couch +coverless +coyote +coziness +crawfish +crewmember +crib +croissant +crumble +crystal +cubical +cucumber +cuddly +cufflink +cuisine +culprit +cup +curry +cushion +cuticle +cybernetic +cyclist +cylinder +cymbal +cynicism +cypress +cytoplasm +dachshund +daffodil +dagger +dairy +dalmatian +dandelion +dartboard +dastardly +datebook +daughter +dawn +daytime +dazzler +dealer +debris +decal +dedicate +deepness +defrost +degree +dehydrator +deliverer +democrat +dentist +deodorant +depot +deranged +desktop +detergent +device +dexterity +diamond +dibs +dictionary +diffuser +digit +dilated +dimple +dinnerware +dioxide +diploma +directory +dishcloth +ditto +dividers +dizziness +doctor +dodge +doll +dominoes +donut +doorstep +dorsal +double +downstairs +dozed +drainpipe +dresser +driftwood +droppings +drum +dryer +dubiously +duckling +duffel +dugout +dumpster +duplex +durable +dustpan +dutiful +duvet +dwarfism +dwelling +dwindling +dynamite +dyslexia +eagerness +earlobe +easel +eavesdrop +ebook +eccentric +echoless +eclipse +ecosystem +ecstasy +edged +editor +educator +eelworm +eerie +effects +eggnog 
+egomaniac +ejection +elastic +elbow +elderly +elephant +elfishly +eliminator +elk +elliptical +elongated +elsewhere +elusive +elves +emancipate +embroidery +emcee +emerald +emission +emoticon +emperor +emulate +enactment +enchilada +endorphin +energy +enforcer +engine +enhance +enigmatic +enjoyably +enlarged +enormous +enquirer +enrollment +ensemble +entryway +enunciate +envoy +enzyme +epidemic +equipment +erasable +ergonomic +erratic +eruption +escalator +eskimo +esophagus +espresso +essay +estrogen +etching +eternal +ethics +etiquette +eucalyptus +eulogy +euphemism +euthanize +evacuation +evergreen +evidence +evolution +exam +excerpt +exerciser +exfoliate +exhale +exist +exorcist +explode +exquisite +exterior +exuberant +fabric +factory +faded +failsafe +falcon +family +fanfare +fasten +faucet +favorite +feasibly +february +federal +feedback +feigned +feline +femur +fence +ferret +festival +fettuccine +feudalist +feverish +fiberglass +fictitious +fiddle +figurine +fillet +finalist +fiscally +fixture +flashlight +fleshiness +flight +florist +flypaper +foamless +focus +foggy +folksong +fondue +footpath +fossil +fountain +fox +fragment +freeway +fridge +frosting +fruit +fryingpan +gadget +gainfully +gallstone +gamekeeper +gangway +garlic +gaslight +gathering +gauntlet +gearbox +gecko +gem +generator +geographer +gerbil +gesture +getaway +geyser +ghoulishly +gibberish +giddiness +giftshop +gigabyte +gimmick +giraffe +giveaway +gizmo +glasses +gleeful +glisten +glove +glucose +glycerin +gnarly +gnomish +goatskin +goggles +goldfish +gong +gooey +gorgeous +gosling +gothic +gourmet +governor +grape +greyhound +grill +groundhog +grumbling +guacamole +guerrilla +guitar +gullible +gumdrop +gurgling +gusto +gutless +gymnast +gynecology +gyration +habitat +hacking +haggard +haiku +halogen +hamburger +handgun +happiness +hardhat +hastily +hatchling +haughty +hazelnut +headband +hedgehog +hefty +heinously +helmet +hemoglobin +henceforth +herbs +hesitation +hexagon +hubcap +huddling +huff +hugeness +hullabaloo +human +hunter +hurricane +hushing +hyacinth +hybrid +hydrant +hygienist +hypnotist +ibuprofen +icepack +icing +iconic +identical +idiocy +idly +igloo +ignition +iguana +illuminate +imaging +imbecile +imitator +immigrant +imprint +iodine +ionosphere +ipad +iphone +iridescent +irksome +iron +irrigation +island +isotope +issueless +italicize +itemizer +itinerary +itunes +ivory +jabbering +jackrabbit +jaguar +jailhouse +jalapeno +jamboree +janitor +jarring +jasmine +jaundice +jawbreaker +jaywalker +jazz +jealous +jeep +jelly +jeopardize +jersey +jetski +jezebel +jiffy +jigsaw +jingling +jobholder +jockstrap +jogging +john +joinable +jokingly +journal +jovial +joystick +jubilant +judiciary +juggle +juice +jujitsu +jukebox +jumpiness +junkyard +juror +justifying +juvenile +kabob +kamikaze +kangaroo +karate +kayak +keepsake +kennel +kerosene +ketchup +khaki +kickstand +kilogram +kimono +kingdom +kiosk +kissing +kite +kleenex +knapsack +kneecap +knickers +koala +krypton +laboratory +ladder +lakefront +lantern +laptop +laryngitis +lasagna +latch +laundry +lavender +laxative +lazybones +lecturer +leftover +leggings +leisure +lemon +length +leopard +leprechaun +lettuce +leukemia +levers +lewdness +liability +library +licorice +lifeboat +lightbulb +likewise +lilac +limousine +lint +lioness +lipstick +liquid +listless +litter +liverwurst +lizard +llama +luau +lubricant +lucidity +ludicrous +luggage +lukewarm +lullaby +lumberjack +lunchbox +luridness +luscious +luxurious +lyrics +macaroni +maestro +magazine 
+mahogany +maimed +majority +makeover +malformed +mammal +mango +mapmaker +marbles +massager +matchstick +maverick +maximum +mayonnaise +moaning +mobilize +moccasin +modify +moisture +molecule +momentum +monastery +moonshine +mortuary +mosquito +motorcycle +mousetrap +movie +mower +mozzarella +muckiness +mudflow +mugshot +mule +mummy +mundane +muppet +mural +mustard +mutation +myriad +myspace +myth +nail +namesake +nanosecond +napkin +narrator +nastiness +natives +nautically +navigate +nearest +nebula +nectar +nefarious +negotiator +neither +nemesis +neoliberal +nephew +nervously +nest +netting +neuron +nevermore +nextdoor +nicotine +niece +nimbleness +nintendo +nirvana +nuclear +nugget +nuisance +nullify +numbing +nuptials +nursery +nutcracker +nylon +oasis +oat +obediently +obituary +object +obliterate +obnoxious +observer +obtain +obvious +occupation +oceanic +octopus +ocular +office +oftentimes +oiliness +ointment +older +olympics +omissible +omnivorous +oncoming +onion +onlooker +onstage +onward +onyx +oomph +opaquely +opera +opium +opossum +opponent +optical +opulently +oscillator +osmosis +ostrich +otherwise +ought +outhouse +ovation +oven +owlish +oxford +oxidize +oxygen +oyster +ozone +pacemaker +padlock +pageant +pajamas +palm +pamphlet +pantyhose +paprika +parakeet +passport +patio +pauper +pavement +payphone +pebble +peculiarly +pedometer +pegboard +pelican +penguin +peony +pepperoni +peroxide +pesticide +petroleum +pewter +pharmacy +pheasant +phonebook +phrasing +physician +plank +pledge +plotted +plug +plywood +pneumonia +podiatrist +poetic +pogo +poison +poking +policeman +poncho +popcorn +porcupine +postcard +poultry +powerboat +prairie +pretzel +princess +propeller +prune +pry +pseudo +psychopath +publisher +pucker +pueblo +pulley +pumpkin +punchbowl +puppy +purse +pushup +putt +puzzle +pyramid +python +quarters +quesadilla +quilt +quote +racoon +radish +ragweed +railroad +rampantly +rancidity +rarity +raspberry +ravishing +rearrange +rebuilt +receipt +reentry +refinery +register +rehydrate +reimburse +rejoicing +rekindle +relic +remote +renovator +reopen +reporter +request +rerun +reservoir +retriever +reunion +revolver +rewrite +rhapsody +rhetoric +rhino +rhubarb +rhyme +ribbon +riches +ridden +rigidness +rimmed +riptide +riskily +ritzy +riverboat +roamer +robe +rocket +romancer +ropelike +rotisserie +roundtable +royal +rubber +rudderless +rugby +ruined +rulebook +rummage +running +rupture +rustproof +sabotage +sacrifice +saddlebag +saffron +sainthood +saltshaker +samurai +sandworm +sapphire +sardine +sassy +satchel +sauna +savage +saxophone +scarf +scenario +schoolbook +scientist +scooter +scrapbook +sculpture +scythe +secretary +sedative +segregator +seismology +selected +semicolon +senator +septum +sequence +serpent +sesame +settler +severely +shack +shelf +shirt +shovel +shrimp +shuttle +shyness +siamese +sibling +siesta +silicon +simmering +singles +sisterhood +sitcom +sixfold +sizable +skateboard +skeleton +skies +skulk +skylight +slapping +sled +slingshot +sloth +slumbering +smartphone +smelliness +smitten +smokestack +smudge +snapshot +sneezing +sniff +snowsuit +snugness +speakers +sphinx +spider +splashing +sponge +sprout +spur +spyglass +squirrel +statue +steamboat +stingray +stopwatch +strawberry +student +stylus +suave +subway +suction +suds +suffocate +sugar +suitcase +sulphur +superstore +surfer +sushi +swan +sweatshirt +swimwear +sword +sycamore +syllable +symphony +synagogue +syringes +systemize +tablespoon +taco +tadpole +taekwondo +tagalong +takeout 
+tallness +tamale +tanned +tapestry +tarantula +tastebud +tattoo +tavern +thaw +theater +thimble +thorn +throat +thumb +thwarting +tiara +tidbit +tiebreaker +tiger +timid +tinsel +tiptoeing +tirade +tissue +tractor +tree +tripod +trousers +trucks +tryout +tubeless +tuesday +tugboat +tulip +tumbleweed +tupperware +turtle +tusk +tutorial +tuxedo +tweezers +twins +tyrannical +ultrasound +umbrella +umpire +unarmored +unbuttoned +uncle +underwear +unevenness +unflavored +ungloved +unhinge +unicycle +unjustly +unknown +unlocking +unmarked +unnoticed +unopened +unpaved +unquenched +unroll +unscrewing +untied +unusual +unveiled +unwrinkled +unyielding +unzip +upbeat +upcountry +update +upfront +upgrade +upholstery +upkeep +upload +uppercut +upright +upstairs +uptown +upwind +uranium +urban +urchin +urethane +urgent +urologist +username +usher +utensil +utility +utmost +utopia +utterance +vacuum +vagrancy +valuables +vanquished +vaporizer +varied +vaseline +vegetable +vehicle +velcro +vendor +vertebrae +vestibule +veteran +vexingly +vicinity +videogame +viewfinder +vigilante +village +vinegar +violin +viperfish +virus +visor +vitamins +vivacious +vixen +vocalist +vogue +voicemail +volleyball +voucher +voyage +vulnerable +waffle +wagon +wakeup +walrus +wanderer +wasp +water +waving +wheat +whisper +wholesaler +wick +widow +wielder +wifeless +wikipedia +wildcat +windmill +wipeout +wired +wishbone +wizardry +wobbliness +wolverine +womb +woolworker +workbasket +wound +wrangle +wreckage +wristwatch +wrongdoing +xerox +xylophone +yacht +yahoo +yard +yearbook +yesterday +yiddish +yield +yo-yo +yodel +yogurt +yuppie +zealot +zebra +zeppelin +zestfully +zigzagged +zillion +zipping +zirconium +zodiac +zombie +zookeeper +zucchini diff --git a/ansible/lib/python3.11/site-packages/passlib/_data/wordsets/eff_short.txt b/ansible/lib/python3.11/site-packages/passlib/_data/wordsets/eff_short.txt new file mode 100644 index 000000000..4c8baa4ce --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/_data/wordsets/eff_short.txt @@ -0,0 +1,1296 @@ +acid +acorn +acre +acts +afar +affix +aged +agent +agile +aging +agony +ahead +aide +aids +aim +ajar +alarm +alias +alibi +alien +alike +alive +aloe +aloft +aloha +alone +amend +amino +ample +amuse +angel +anger +angle +ankle +apple +april +apron +aqua +area +arena +argue +arise +armed +armor +army +aroma +array +arson +art +ashen +ashes +atlas +atom +attic +audio +avert +avoid +awake +award +awoke +axis +bacon +badge +bagel +baggy +baked +baker +balmy +banjo +barge +barn +bash +basil +bask +batch +bath +baton +bats +blade +blank +blast +blaze +bleak +blend +bless +blimp +blink +bloat +blob +blog +blot +blunt +blurt +blush +boast +boat +body +boil +bok +bolt +boned +boney +bonus +bony +book +booth +boots +boss +botch +both +boxer +breed +bribe +brick +bride +brim +bring +brink +brisk +broad +broil +broke +brook +broom +brush +buck +bud +buggy +bulge +bulk +bully +bunch +bunny +bunt +bush +bust +busy +buzz +cable +cache +cadet +cage +cake +calm +cameo +canal +candy +cane +canon +cape +card +cargo +carol +carry +carve +case +cash +cause +cedar +chain +chair +chant +chaos +charm +chase +cheek +cheer +chef +chess +chest +chew +chief +chili +chill +chip +chomp +chop +chow +chuck +chump +chunk +churn +chute +cider +cinch +city +civic +civil +clad +claim +clamp +clap +clash +clasp +class +claw +clay +clean +clear +cleat +cleft +clerk +click +cling +clink +clip +cloak +clock +clone +cloth +cloud +clump +coach +coast +coat +cod +coil +coke +cola +cold +colt +coma +come 
+comic +comma +cone +cope +copy +coral +cork +cost +cot +couch +cough +cover +cozy +craft +cramp +crane +crank +crate +crave +crawl +crazy +creme +crepe +crept +crib +cried +crisp +crook +crop +cross +crowd +crown +crumb +crush +crust +cub +cult +cupid +cure +curl +curry +curse +curve +curvy +cushy +cut +cycle +dab +dad +daily +dairy +daisy +dance +dandy +darn +dart +dash +data +date +dawn +deaf +deal +dean +debit +debt +debug +decaf +decal +decay +deck +decor +decoy +deed +delay +denim +dense +dent +depth +derby +desk +dial +diary +dice +dig +dill +dime +dimly +diner +dingy +disco +dish +disk +ditch +ditzy +dizzy +dock +dodge +doing +doll +dome +donor +donut +dose +dot +dove +down +dowry +doze +drab +drama +drank +draw +dress +dried +drift +drill +drive +drone +droop +drove +drown +drum +dry +duck +duct +dude +dug +duke +duo +dusk +dust +duty +dwarf +dwell +eagle +early +earth +easel +east +eaten +eats +ebay +ebony +ebook +echo +edge +eel +eject +elbow +elder +elf +elk +elm +elope +elude +elves +email +emit +empty +emu +enter +entry +envoy +equal +erase +error +erupt +essay +etch +evade +even +evict +evil +evoke +exact +exit +fable +faced +fact +fade +fall +false +fancy +fang +fax +feast +feed +femur +fence +fend +ferry +fetal +fetch +fever +fiber +fifth +fifty +film +filth +final +finch +fit +five +flag +flaky +flame +flap +flask +fled +flick +fling +flint +flip +flirt +float +flock +flop +floss +flyer +foam +foe +fog +foil +folic +folk +food +fool +found +fox +foyer +frail +frame +fray +fresh +fried +frill +frisk +from +front +frost +froth +frown +froze +fruit +gag +gains +gala +game +gap +gas +gave +gear +gecko +geek +gem +genre +gift +gig +gills +given +giver +glad +glass +glide +gloss +glove +glow +glue +goal +going +golf +gong +good +gooey +goofy +gore +gown +grab +grain +grant +grape +graph +grasp +grass +grave +gravy +gray +green +greet +grew +grid +grief +grill +grip +grit +groom +grope +growl +grub +grunt +guide +gulf +gulp +gummy +guru +gush +gut +guy +habit +half +halo +halt +happy +harm +hash +hasty +hatch +hate +haven +hazel +hazy +heap +heat +heave +hedge +hefty +help +herbs +hers +hub +hug +hula +hull +human +humid +hump +hung +hunk +hunt +hurry +hurt +hush +hut +ice +icing +icon +icy +igloo +image +ion +iron +islam +issue +item +ivory +ivy +jab +jam +jaws +jazz +jeep +jelly +jet +jiffy +job +jog +jolly +jolt +jot +joy +judge +juice +juicy +july +jumbo +jump +junky +juror +jury +keep +keg +kept +kick +kilt +king +kite +kitty +kiwi +knee +knelt +koala +kung +ladle +lady +lair +lake +lance +land +lapel +large +lash +lasso +last +latch +late +lazy +left +legal +lemon +lend +lens +lent +level +lever +lid +life +lift +lilac +lily +limb +limes +line +lint +lion +lip +list +lived +liver +lunar +lunch +lung +lurch +lure +lurk +lying +lyric +mace +maker +malt +mama +mango +manor +many +map +march +mardi +marry +mash +match +mate +math +moan +mocha +moist +mold +mom +moody +mop +morse +most +motor +motto +mount +mouse +mousy +mouth +move +movie +mower +mud +mug +mulch +mule +mull +mumbo +mummy +mural +muse +music +musky +mute +nacho +nag +nail +name +nanny +nap +navy +near +neat +neon +nerd +nest +net +next +niece +ninth +nutty +oak +oasis +oat +ocean +oil +old +olive +omen +onion +only +ooze +opal +open +opera +opt +otter +ouch +ounce +outer +oval +oven +owl +ozone +pace +pagan +pager +palm +panda +panic +pants +panty +paper +park +party +pasta +patch +path +patio +payer +pecan +penny +pep +perch +perky +perm +pest +petal +petri +petty +photo +plank +plant +plaza +plead +plot +plow 
+pluck +plug +plus +poach +pod +poem +poet +pogo +point +poise +poker +polar +polio +polka +polo +pond +pony +poppy +pork +poser +pouch +pound +pout +power +prank +press +print +prior +prism +prize +probe +prong +proof +props +prude +prune +pry +pug +pull +pulp +pulse +puma +punch +punk +pupil +puppy +purr +purse +push +putt +quack +quake +query +quiet +quill +quilt +quit +quota +quote +rabid +race +rack +radar +radio +raft +rage +raid +rail +rake +rally +ramp +ranch +range +rank +rant +rash +raven +reach +react +ream +rebel +recap +relax +relay +relic +remix +repay +repel +reply +rerun +reset +rhyme +rice +rich +ride +rigid +rigor +rinse +riot +ripen +rise +risk +ritzy +rival +river +roast +robe +robin +rock +rogue +roman +romp +rope +rover +royal +ruby +rug +ruin +rule +runny +rush +rust +rut +sadly +sage +said +saint +salad +salon +salsa +salt +same +sandy +santa +satin +sauna +saved +savor +sax +say +scale +scam +scan +scare +scarf +scary +scoff +scold +scoop +scoot +scope +score +scorn +scout +scowl +scrap +scrub +scuba +scuff +sect +sedan +self +send +sepia +serve +set +seven +shack +shade +shady +shaft +shaky +sham +shape +share +sharp +shed +sheep +sheet +shelf +shell +shine +shiny +ship +shirt +shock +shop +shore +shout +shove +shown +showy +shred +shrug +shun +shush +shut +shy +sift +silk +silly +silo +sip +siren +sixth +size +skate +skew +skid +skier +skies +skip +skirt +skit +sky +slab +slack +slain +slam +slang +slash +slate +slaw +sled +sleek +sleep +sleet +slept +slice +slick +slimy +sling +slip +slit +slob +slot +slug +slum +slurp +slush +small +smash +smell +smile +smirk +smog +snack +snap +snare +snarl +sneak +sneer +sniff +snore +snort +snout +snowy +snub +snuff +speak +speed +spend +spent +spew +spied +spill +spiny +spoil +spoke +spoof +spool +spoon +sport +spot +spout +spray +spree +spur +squad +squat +squid +stack +staff +stage +stain +stall +stamp +stand +stank +stark +start +stash +state +stays +steam +steep +stem +step +stew +stick +sting +stir +stock +stole +stomp +stony +stood +stool +stoop +stop +storm +stout +stove +straw +stray +strut +stuck +stud +stuff +stump +stung +stunt +suds +sugar +sulk +surf +sushi +swab +swan +swarm +sway +swear +sweat +sweep +swell +swept +swim +swing +swipe +swirl +swoop +swore +syrup +tacky +taco +tag +take +tall +talon +tamer +tank +taper +taps +tarot +tart +task +taste +tasty +taunt +thank +thaw +theft +theme +thigh +thing +think +thong +thorn +those +throb +thud +thumb +thump +thus +tiara +tidal +tidy +tiger +tile +tilt +tint +tiny +trace +track +trade +train +trait +trap +trash +tray +treat +tree +trek +trend +trial +tribe +trick +trio +trout +truce +truck +trump +trunk +try +tug +tulip +tummy +turf +tusk +tutor +tutu +tux +tweak +tweet +twice +twine +twins +twirl +twist +uncle +uncut +undo +unify +union +unit +untie +upon +upper +urban +used +user +usher +utter +value +vapor +vegan +venue +verse +vest +veto +vice +video +view +viral +virus +visa +visor +vixen +vocal +voice +void +volt +voter +vowel +wad +wafer +wager +wages +wagon +wake +walk +wand +wasp +watch +water +wavy +wheat +whiff +whole +whoop +wick +widen +widow +width +wife +wifi +wilt +wimp +wind +wing +wink +wipe +wired +wiry +wise +wish +wispy +wok +wolf +womb +wool +woozy +word +work +worry +wound +woven +wrath +wreck +wrist +xerox +yahoo +yam +yard +year +yeast +yelp +yield +yo-yo +yodel +yoga +yoyo +yummy +zebra +zero +zesty +zippy +zone +zoom diff --git a/ansible/lib/python3.11/site-packages/passlib/apache.py 
b/ansible/lib/python3.11/site-packages/passlib/apache.py new file mode 100644 index 000000000..a75f2cf3c --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/apache.py @@ -0,0 +1,1255 @@ +"""passlib.apache - apache password support""" +# XXX: relocate this to passlib.ext.apache? +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import os +from warnings import warn +# site +# pkg +from passlib import exc, registry +from passlib.context import CryptContext +from passlib.exc import ExpectedStringError +from passlib.hash import htdigest +from passlib.utils import render_bytes, to_bytes, is_ascii_codec +from passlib.utils.decor import deprecated_method +from passlib.utils.compat import join_bytes, unicode, BytesIO, PY3 +# local +__all__ = [ + 'HtpasswdFile', + 'HtdigestFile', +] + +#============================================================================= +# constants & support +#============================================================================= +_UNSET = object() + +_BCOLON = b":" +_BHASH = b"#" + +# byte values that aren't allowed in fields. +_INVALID_FIELD_CHARS = b":\n\r\t\x00" + +#: _CommonFile._source token types +_SKIPPED = "skipped" +_RECORD = "record" + +#============================================================================= +# common helpers +#============================================================================= +class _CommonFile(object): + """common framework for HtpasswdFile & HtdigestFile""" + #=================================================================== + # instance attrs + #=================================================================== + + # charset encoding used by file (defaults to utf-8) + encoding = None + + # whether users() and other public methods should return unicode or bytes? + # (defaults to False under PY2, True under PY3) + return_unicode = None + + # if bound to local file, these will be set. + _path = None # local file path + _mtime = None # mtime when last loaded, or 0 + + # if true, automatically save to local file after changes are made. + autosave = False + + # dict mapping key -> value for all records in database. + # (e.g. user => hash for Htpasswd) + _records = None + + #: list of tokens for recreating original file contents when saving. if present, + #: will be sequence of (_SKIPPED, b"whitespace/comments") and (_RECORD, ) tuples. + _source = None + + #=================================================================== + # alt constuctors + #=================================================================== + @classmethod + def from_string(cls, data, **kwds): + """create new object from raw string. + + :type data: unicode or bytes + :arg data: + database to load, as single string. + + :param \\*\\*kwds: + all other keywords are the same as in the class constructor + """ + if 'path' in kwds: + raise TypeError("'path' not accepted by from_string()") + self = cls(**kwds) + self.load_string(data) + return self + + @classmethod + def from_path(cls, path, **kwds): + """create new object from file, without binding object to file. 
+ + :type path: str + :arg path: + local filepath to load from + + :param \\*\\*kwds: + all other keywords are the same as in the class constructor + """ + self = cls(**kwds) + self.load(path) + return self + + #=================================================================== + # init + #=================================================================== + def __init__(self, path=None, new=False, autoload=True, autosave=False, + encoding="utf-8", return_unicode=PY3, + ): + # set encoding + if not encoding: + warn("``encoding=None`` is deprecated as of Passlib 1.6, " + "and will cause a ValueError in Passlib 1.8, " + "use ``return_unicode=False`` instead.", + DeprecationWarning, stacklevel=2) + encoding = "utf-8" + return_unicode = False + elif not is_ascii_codec(encoding): + # htpasswd/htdigest files assumes 1-byte chars, and use ":" separator, + # so only ascii-compatible encodings are allowed. + raise ValueError("encoding must be 7-bit ascii compatible") + self.encoding = encoding + + # set other attrs + self.return_unicode = return_unicode + self.autosave = autosave + self._path = path + self._mtime = 0 + + # init db + if not autoload: + warn("``autoload=False`` is deprecated as of Passlib 1.6, " + "and will be removed in Passlib 1.8, use ``new=True`` instead", + DeprecationWarning, stacklevel=2) + new = True + if path and not new: + self.load() + else: + self._records = {} + self._source = [] + + def __repr__(self): + tail = '' + if self.autosave: + tail += ' autosave=True' + if self._path: + tail += ' path=%r' % self._path + if self.encoding != "utf-8": + tail += ' encoding=%r' % self.encoding + return "<%s 0x%0x%s>" % (self.__class__.__name__, id(self), tail) + + # NOTE: ``path`` is a property so that ``_mtime`` is wiped when it's set. + + @property + def path(self): + return self._path + + @path.setter + def path(self, value): + if value != self._path: + self._mtime = 0 + self._path = value + + @property + def mtime(self): + """modify time when last loaded (if bound to a local file)""" + return self._mtime + + #=================================================================== + # loading + #=================================================================== + def load_if_changed(self): + """Reload from ``self.path`` only if file has changed since last load""" + if not self._path: + raise RuntimeError("%r is not bound to a local file" % self) + if self._mtime and self._mtime == os.path.getmtime(self._path): + return False + self.load() + return True + + def load(self, path=None, force=True): + """Load state from local file. + If no path is specified, attempts to load from ``self.path``. + + :type path: str + :arg path: local file to load from + + :type force: bool + :param force: + if ``force=False``, only load from ``self.path`` if file + has changed since last load. + + .. deprecated:: 1.6 + This keyword will be removed in Passlib 1.8; + Applications should use :meth:`load_if_changed` instead. + """ + if path is not None: + with open(path, "rb") as fh: + self._mtime = 0 + self._load_lines(fh) + elif not force: + warn("%(name)s.load(force=False) is deprecated as of Passlib 1.6," + "and will be removed in Passlib 1.8; " + "use %(name)s.load_if_changed() instead." 
% + dict(name=self.__class__.__name__), + DeprecationWarning, stacklevel=2) + return self.load_if_changed() + elif self._path: + with open(self._path, "rb") as fh: + self._mtime = os.path.getmtime(self._path) + self._load_lines(fh) + else: + raise RuntimeError("%s().path is not set, an explicit path is required" % + self.__class__.__name__) + return True + + def load_string(self, data): + """Load state from unicode or bytes string, replacing current state""" + data = to_bytes(data, self.encoding, "data") + self._mtime = 0 + self._load_lines(BytesIO(data)) + + def _load_lines(self, lines): + """load from sequence of lists""" + parse = self._parse_record + records = {} + source = [] + skipped = b'' + for idx, line in enumerate(lines): + # NOTE: per htpasswd source (https://github.com/apache/httpd/blob/trunk/support/htpasswd.c), + # lines with only whitespace, or with "#" as first non-whitespace char, + # are left alone / ignored. + tmp = line.lstrip() + if not tmp or tmp.startswith(_BHASH): + skipped += line + continue + + # parse valid line + key, value = parse(line, idx+1) + + # NOTE: if multiple entries for a key, we use the first one, + # which seems to match htpasswd source + if key in records: + log.warning("username occurs multiple times in source file: %r" % key) + skipped += line + continue + + # flush buffer of skipped whitespace lines + if skipped: + source.append((_SKIPPED, skipped)) + skipped = b'' + + # store new user line + records[key] = value + source.append((_RECORD, key)) + + # don't bother preserving trailing whitespace, but do preserve trailing comments + if skipped.rstrip(): + source.append((_SKIPPED, skipped)) + + # NOTE: not replacing ._records until parsing succeeds, so loading is atomic. + self._records = records + self._source = source + + def _parse_record(self, record, lineno): # pragma: no cover - abstract method + """parse line of file into (key, value) pair""" + raise NotImplementedError("should be implemented in subclass") + + def _set_record(self, key, value): + """ + helper for setting record which takes care of inserting source line if needed; + + :returns: + bool if key already present + """ + records = self._records + existing = (key in records) + records[key] = value + if not existing: + self._source.append((_RECORD, key)) + return existing + + #=================================================================== + # saving + #=================================================================== + def _autosave(self): + """subclass helper to call save() after any changes""" + if self.autosave and self._path: + self.save() + + def save(self, path=None): + """Save current state to file. + If no path is specified, attempts to save to ``self.path``. 
+ """ + if path is not None: + with open(path, "wb") as fh: + fh.writelines(self._iter_lines()) + elif self._path: + self.save(self._path) + self._mtime = os.path.getmtime(self._path) + else: + raise RuntimeError("%s().path is not set, cannot autosave" % + self.__class__.__name__) + + def to_string(self): + """Export current state as a string of bytes""" + return join_bytes(self._iter_lines()) + + # def clean(self): + # """ + # discard any comments or whitespace that were being preserved from the source file, + # and re-sort keys in alphabetical order + # """ + # self._source = [(_RECORD, key) for key in sorted(self._records)] + # self._autosave() + + def _iter_lines(self): + """iterator yielding lines of database""" + # NOTE: this relies on being an OrderedDict so that it outputs + # records in a deterministic order. + records = self._records + if __debug__: + pending = set(records) + for action, content in self._source: + if action == _SKIPPED: + # 'content' is whitespace/comments to write + yield content + else: + assert action == _RECORD + # 'content' is record key + if content not in records: + # record was deleted + # NOTE: doing it lazily like this so deleting & re-adding user + # preserves their original location in the file. + continue + yield self._render_record(content, records[content]) + if __debug__: + pending.remove(content) + if __debug__: + # sanity check that we actually wrote all the records + # (otherwise _source & _records are somehow out of sync) + assert not pending, "failed to write all records: missing=%r" % (pending,) + + def _render_record(self, key, value): # pragma: no cover - abstract method + """given key/value pair, encode as line of file""" + raise NotImplementedError("should be implemented in subclass") + + #=================================================================== + # field encoding + #=================================================================== + def _encode_user(self, user): + """user-specific wrapper for _encode_field()""" + return self._encode_field(user, "user") + + def _encode_realm(self, realm): # pragma: no cover - abstract method + """realm-specific wrapper for _encode_field()""" + return self._encode_field(realm, "realm") + + def _encode_field(self, value, param="field"): + """convert field to internal representation. + + internal representation is always bytes. byte strings are left as-is, + unicode strings encoding using file's default encoding (or ``utf-8`` + if no encoding has been specified). + + :raises UnicodeEncodeError: + if unicode value cannot be encoded using default encoding. + + :raises ValueError: + if resulting byte string contains a forbidden character, + or is too long (>255 bytes). + + :returns: + encoded identifer as bytes + """ + if isinstance(value, unicode): + value = value.encode(self.encoding) + elif not isinstance(value, bytes): + raise ExpectedStringError(value, param) + if len(value) > 255: + raise ValueError("%s must be at most 255 characters: %r" % + (param, value)) + if any(c in _INVALID_FIELD_CHARS for c in value): + raise ValueError("%s contains invalid characters: %r" % + (param, value,)) + return value + + def _decode_field(self, value): + """decode field from internal representation to format + returns by users() method, etc. + + :raises UnicodeDecodeError: + if unicode value cannot be decoded using default encoding. + (usually indicates wrong encoding set for file). + + :returns: + field as unicode or bytes, as appropriate. 
+ """ + assert isinstance(value, bytes), "expected value to be bytes" + if self.return_unicode: + return value.decode(self.encoding) + else: + return value + + # FIXME: htpasswd doc says passwords limited to 255 chars under Windows & MPE, + # and that longer ones are truncated. this may be side-effect of those + # platforms supporting the 'plaintext' scheme. these classes don't currently + # check for this. + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# htpasswd context +# +# This section sets up a CryptContexts to mimic what schemes Apache +# (and the htpasswd tool) should support on the current system. +# +# Apache has long-time supported some basic builtin schemes (listed below), +# as well as the host's crypt() method -- though it's limited to being able +# to *verify* any scheme using that method, but can only generate "des_crypt" hashes. +# +# Apache 2.4 added builtin bcrypt support (even for platforms w/o native support). +# c.f. http://httpd.apache.org/docs/2.4/programs/htpasswd.html vs the 2.2 docs. +#============================================================================= + +#: set of default schemes that (if chosen) should be using bcrypt, +#: but can't due to lack of bcrypt. +_warn_no_bcrypt = set() + +def _init_default_schemes(): + + #: pick strongest one for host + host_best = None + for name in ["bcrypt", "sha256_crypt"]: + if registry.has_os_crypt_support(name): + host_best = name + break + + # check if we have a bcrypt backend -- otherwise issue warning + # XXX: would like to not spam this unless the user *requests* apache 24 + bcrypt = "bcrypt" if registry.has_backend("bcrypt") else None + _warn_no_bcrypt.clear() + if not bcrypt: + _warn_no_bcrypt.update(["portable_apache_24", "host_apache_24", + "linux_apache_24", "portable", "host"]) + + defaults = dict( + # strongest hash builtin to specific apache version + portable_apache_24=bcrypt or "apr_md5_crypt", + portable_apache_22="apr_md5_crypt", + + # strongest hash across current host & specific apache version + host_apache_24=bcrypt or host_best or "apr_md5_crypt", + host_apache_22=host_best or "apr_md5_crypt", + + # strongest hash on a linux host + linux_apache_24=bcrypt or "sha256_crypt", + linux_apache_22="sha256_crypt", + ) + + # set latest-apache version aliases + # XXX: could check for apache install, and pick correct host 22/24 default? + # could reuse _detect_htpasswd() helper in UTs + defaults.update( + portable=defaults['portable_apache_24'], + host=defaults['host_apache_24'], + ) + return defaults + +#: dict mapping default alias -> appropriate scheme +htpasswd_defaults = _init_default_schemes() + +def _init_htpasswd_context(): + + # start with schemes built into apache + schemes = [ + # builtin support added in apache 2.4 + # (https://bz.apache.org/bugzilla/show_bug.cgi?id=49288) + "bcrypt", + + # support not "builtin" to apache, instead it requires support through host's crypt(). + # adding them here to allow editing htpasswd under windows and then deploying under unix. 
+ "sha256_crypt", + "sha512_crypt", + "des_crypt", + + # apache default as of 2.2.18, and still default in 2.4 + "apr_md5_crypt", + + # NOTE: apache says ONLY intended for transitioning htpasswd <-> ldap + "ldap_sha1", + + # NOTE: apache says ONLY supported on Windows, Netware, TPF + "plaintext" + ] + + # apache can verify anything supported by the native crypt(), + # though htpasswd tool can only generate a limited set of hashes. + # (this list may overlap w/ builtin apache schemes) + schemes.extend(registry.get_supported_os_crypt_schemes()) + + # hack to remove dups and sort into preferred order + preferred = schemes[:3] + ["apr_md5_crypt"] + schemes + schemes = sorted(set(schemes), key=preferred.index) + + # create context object + return CryptContext( + schemes=schemes, + + # NOTE: default will change to "portable" in passlib 2.0 + default=htpasswd_defaults['portable_apache_22'], + + # NOTE: bcrypt "2y" is required, "2b" isn't recognized by libapr (issue 95) + bcrypt__ident="2y", + ) + +#: CryptContext configured to match htpasswd +htpasswd_context = _init_htpasswd_context() + +#============================================================================= +# htpasswd editing +#============================================================================= + +class HtpasswdFile(_CommonFile): + """class for reading & writing Htpasswd files. + + The class constructor accepts the following arguments: + + :type path: filepath + :param path: + + Specifies path to htpasswd file, use to implicitly load from and save to. + + This class has two modes of operation: + + 1. It can be "bound" to a local file by passing a ``path`` to the class + constructor. In this case it will load the contents of the file when + created, and the :meth:`load` and :meth:`save` methods will automatically + load from and save to that file if they are called without arguments. + + 2. Alternately, it can exist as an independant object, in which case + :meth:`load` and :meth:`save` will require an explicit path to be + provided whenever they are called. As well, ``autosave`` behavior + will not be available. + + This feature is new in Passlib 1.6, and is the default if no + ``path`` value is provided to the constructor. + + This is also exposed as a readonly instance attribute. + + :type new: bool + :param new: + + Normally, if *path* is specified, :class:`HtpasswdFile` will + immediately load the contents of the file. However, when creating + a new htpasswd file, applications can set ``new=True`` so that + the existing file (if any) will not be loaded. + + .. versionadded:: 1.6 + This feature was previously enabled by setting ``autoload=False``. + That alias has been deprecated, and will be removed in Passlib 1.8 + + :type autosave: bool + :param autosave: + + Normally, any changes made to an :class:`HtpasswdFile` instance + will not be saved until :meth:`save` is explicitly called. However, + if ``autosave=True`` is specified, any changes made will be + saved to disk immediately (assuming *path* has been set). + + This is also exposed as a writeable instance attribute. + + :type encoding: str + :param encoding: + + Optionally specify character encoding used to read/write file + and hash passwords. Defaults to ``utf-8``, though ``latin-1`` + is the only other commonly encountered encoding. + + This is also exposed as a readonly instance attribute. + + :type default_scheme: str + :param default_scheme: + Optionally specify default scheme to use when encoding new passwords. 
+ + This can be any of the schemes with builtin Apache support, + OR natively supported by the host OS's :func:`crypt.crypt` function. + + * Builtin schemes include ``"bcrypt"`` (apache 2.4+), ``"apr_md5_crypt"`, + and ``"des_crypt"``. + + * Schemes commonly supported by Unix hosts + include ``"bcrypt"``, ``"sha256_crypt"``, and ``"des_crypt"``. + + In order to not have to sort out what you should use, + passlib offers a number of aliases, that will resolve + to the most appropriate scheme based on your needs: + + * ``"portable"``, ``"portable_apache_24"`` -- pick scheme that's portable across hosts + running apache >= 2.4. **This will be the default as of Passlib 2.0**. + + * ``"portable_apache_22"`` -- pick scheme that's portable across hosts + running apache >= 2.4. **This is the default up to Passlib 1.9**. + + * ``"host"``, ``"host_apache_24"`` -- pick strongest scheme supported by + apache >= 2.4 and/or host OS. + + * ``"host_apache_22"`` -- pick strongest scheme supported by + apache >= 2.2 and/or host OS. + + .. versionadded:: 1.6 + This keyword was previously named ``default``. That alias + has been deprecated, and will be removed in Passlib 1.8. + + .. versionchanged:: 1.6.3 + + Added support for ``"bcrypt"``, ``"sha256_crypt"``, and ``"portable"`` alias. + + .. versionchanged:: 1.7 + + Added apache 2.4 semantics, and additional aliases. + + :type context: :class:`~passlib.context.CryptContext` + :param context: + :class:`!CryptContext` instance used to create + and verify the hashes found in the htpasswd file. + The default value is a pre-built context which supports all + of the hashes officially allowed in an htpasswd file. + + This is also exposed as a readonly instance attribute. + + .. warning:: + + This option may be used to add support for non-standard hash + formats to an htpasswd file. However, the resulting file + will probably not be usable by another application, + and particularly not by Apache. + + :param autoload: + Set to ``False`` to prevent the constructor from automatically + loaded the file from disk. + + .. deprecated:: 1.6 + This has been replaced by the *new* keyword. + Instead of setting ``autoload=False``, you should use + ``new=True``. Support for this keyword will be removed + in Passlib 1.8. + + :param default: + Change the default algorithm used to hash new passwords. + + .. deprecated:: 1.6 + This has been renamed to *default_scheme* for clarity. + Support for this alias will be removed in Passlib 1.8. + + Loading & Saving + ================ + .. automethod:: load + .. automethod:: load_if_changed + .. automethod:: load_string + .. automethod:: save + .. automethod:: to_string + + Inspection + ================ + .. automethod:: users + .. automethod:: check_password + .. automethod:: get_hash + + Modification + ================ + .. automethod:: set_password + .. automethod:: delete + + Alternate Constructors + ====================== + .. automethod:: from_string + + Attributes + ========== + .. attribute:: path + + Path to local file that will be used as the default + for all :meth:`load` and :meth:`save` operations. + May be written to, initialized by the *path* constructor keyword. + + .. attribute:: autosave + + Writeable flag indicating whether changes will be automatically + written to *path*. + + Errors + ====== + :raises ValueError: + All of the methods in this class will raise a :exc:`ValueError` if + any user name contains a forbidden character (one of ``:\\r\\n\\t\\x00``), + or is longer than 255 characters. 
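As a rough usage sketch of the HtpasswdFile interface documented above (the path, user name, and password below are illustrative assumptions, not values taken from this change):

    from passlib.apache import HtpasswdFile

    # create a new htpasswd file bound to a local path, using the "portable" alias
    ht = HtpasswdFile("test.htpasswd", new=True, default_scheme="portable")
    ht.set_password("alice", "correct horse battery staple")   # False: user was added
    ht.save()                                                   # write records to disk

    # re-open the same file and verify credentials
    ht = HtpasswdFile("test.htpasswd")
    print(ht.users())                                   # ['alice']
    print(ht.check_password("alice", "wrong guess"))    # False
    print(ht.check_password("alice", "correct horse battery staple"))  # True
    print(ht.check_password("bob", "anything"))         # None -> no such user

set_password() returns False above because "alice" was newly added; calling it again for the same user would return True, matching the docstring's return-value contract.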
+ """ + #=================================================================== + # instance attrs + #=================================================================== + + # NOTE: _records map stores for the key, and for the value, + # both in bytes which use self.encoding + + #=================================================================== + # init & serialization + #=================================================================== + def __init__(self, path=None, default_scheme=None, context=htpasswd_context, + **kwds): + if 'default' in kwds: + warn("``default`` is deprecated as of Passlib 1.6, " + "and will be removed in Passlib 1.8, it has been renamed " + "to ``default_scheem``.", + DeprecationWarning, stacklevel=2) + default_scheme = kwds.pop("default") + if default_scheme: + if default_scheme in _warn_no_bcrypt: + warn("HtpasswdFile: no bcrypt backends available, " + "using fallback for default scheme %r" % default_scheme, + exc.PasslibSecurityWarning) + default_scheme = htpasswd_defaults.get(default_scheme, default_scheme) + context = context.copy(default=default_scheme) + self.context = context + super(HtpasswdFile, self).__init__(path, **kwds) + + def _parse_record(self, record, lineno): + # NOTE: should return (user, hash) tuple + result = record.rstrip().split(_BCOLON) + if len(result) != 2: + raise ValueError("malformed htpasswd file (error reading line %d)" + % lineno) + return result + + def _render_record(self, user, hash): + return render_bytes("%s:%s\n", user, hash) + + #=================================================================== + # public methods + #=================================================================== + + def users(self): + """ + Return list of all users in database + """ + return [self._decode_field(user) for user in self._records] + + ##def has_user(self, user): + ## "check whether entry is present for user" + ## return self._encode_user(user) in self._records + + ##def rename(self, old, new): + ## """rename user account""" + ## old = self._encode_user(old) + ## new = self._encode_user(new) + ## hash = self._records.pop(old) + ## self._records[new] = hash + ## self._autosave() + + def set_password(self, user, password): + """Set password for user; adds user if needed. + + :returns: + * ``True`` if existing user was updated. + * ``False`` if user account was added. + + .. versionchanged:: 1.6 + This method was previously called ``update``, it was renamed + to prevent ambiguity with the dictionary method. + The old alias is deprecated, and will be removed in Passlib 1.8. + """ + hash = self.context.hash(password) + return self.set_hash(user, hash) + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="set_password") + def update(self, user, password): + """set password for user""" + return self.set_password(user, password) + + def get_hash(self, user): + """Return hash stored for user, or ``None`` if user not found. + + .. versionchanged:: 1.6 + This method was previously named ``find``, it was renamed + for clarity. The old name is deprecated, and will be removed + in Passlib 1.8. + """ + try: + return self._records[self._encode_user(user)] + except KeyError: + return None + + def set_hash(self, user, hash): + """ + semi-private helper which allows writing a hash directly; + adds user if needed. + + .. warning:: + does not (currently) do any validation of the hash string + + .. 
versionadded:: 1.7 + """ + # assert self.context.identify(hash), "unrecognized hash format" + if PY3 and isinstance(hash, str): + hash = hash.encode(self.encoding) + user = self._encode_user(user) + existing = self._set_record(user, hash) + self._autosave() + return existing + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="get_hash") + def find(self, user): + """return hash for user""" + return self.get_hash(user) + + # XXX: rename to something more explicit, like delete_user()? + def delete(self, user): + """Delete user's entry. + + :returns: + * ``True`` if user deleted. + * ``False`` if user not found. + """ + try: + del self._records[self._encode_user(user)] + except KeyError: + return False + self._autosave() + return True + + def check_password(self, user, password): + """ + Verify password for specified user. + If algorithm marked as deprecated by CryptContext, will automatically be re-hashed. + + :returns: + * ``None`` if user not found. + * ``False`` if user found, but password does not match. + * ``True`` if user found and password matches. + + .. versionchanged:: 1.6 + This method was previously called ``verify``, it was renamed + to prevent ambiguity with the :class:`!CryptContext` method. + The old alias is deprecated, and will be removed in Passlib 1.8. + """ + user = self._encode_user(user) + hash = self._records.get(user) + if hash is None: + return None + if isinstance(password, unicode): + # NOTE: encoding password to match file, making the assumption + # that server will use same encoding to hash the password. + password = password.encode(self.encoding) + ok, new_hash = self.context.verify_and_update(password, hash) + if ok and new_hash is not None: + # rehash user's password if old hash was deprecated + assert user in self._records # otherwise would have to use ._set_record() + self._records[user] = new_hash + self._autosave() + return ok + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="check_password") + def verify(self, user, password): + """verify password for user""" + return self.check_password(user, password) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# htdigest editing +#============================================================================= +class HtdigestFile(_CommonFile): + """class for reading & writing Htdigest files. + + The class constructor accepts the following arguments: + + :type path: filepath + :param path: + + Specifies path to htdigest file, use to implicitly load from and save to. + + This class has two modes of operation: + + 1. It can be "bound" to a local file by passing a ``path`` to the class + constructor. In this case it will load the contents of the file when + created, and the :meth:`load` and :meth:`save` methods will automatically + load from and save to that file if they are called without arguments. + + 2. Alternately, it can exist as an independant object, in which case + :meth:`load` and :meth:`save` will require an explicit path to be + provided whenever they are called. As well, ``autosave`` behavior + will not be available. + + This feature is new in Passlib 1.6, and is the default if no + ``path`` value is provided to the constructor. + + This is also exposed as a readonly instance attribute. 
+ + :type default_realm: str + :param default_realm: + + If ``default_realm`` is set, all the :class:`HtdigestFile` + methods that require a realm will use this value if one is not + provided explicitly. If unset, they will raise an error stating + that an explicit realm is required. + + This is also exposed as a writeable instance attribute. + + .. versionadded:: 1.6 + + :type new: bool + :param new: + + Normally, if *path* is specified, :class:`HtdigestFile` will + immediately load the contents of the file. However, when creating + a new htpasswd file, applications can set ``new=True`` so that + the existing file (if any) will not be loaded. + + .. versionadded:: 1.6 + This feature was previously enabled by setting ``autoload=False``. + That alias has been deprecated, and will be removed in Passlib 1.8 + + :type autosave: bool + :param autosave: + + Normally, any changes made to an :class:`HtdigestFile` instance + will not be saved until :meth:`save` is explicitly called. However, + if ``autosave=True`` is specified, any changes made will be + saved to disk immediately (assuming *path* has been set). + + This is also exposed as a writeable instance attribute. + + :type encoding: str + :param encoding: + + Optionally specify character encoding used to read/write file + and hash passwords. Defaults to ``utf-8``, though ``latin-1`` + is the only other commonly encountered encoding. + + This is also exposed as a readonly instance attribute. + + :param autoload: + Set to ``False`` to prevent the constructor from automatically + loaded the file from disk. + + .. deprecated:: 1.6 + This has been replaced by the *new* keyword. + Instead of setting ``autoload=False``, you should use + ``new=True``. Support for this keyword will be removed + in Passlib 1.8. + + Loading & Saving + ================ + .. automethod:: load + .. automethod:: load_if_changed + .. automethod:: load_string + .. automethod:: save + .. automethod:: to_string + + Inspection + ========== + .. automethod:: realms + .. automethod:: users + .. automethod:: check_password(user[, realm], password) + .. automethod:: get_hash + + Modification + ============ + .. automethod:: set_password(user[, realm], password) + .. automethod:: delete + .. automethod:: delete_realm + + Alternate Constructors + ====================== + .. automethod:: from_string + + Attributes + ========== + .. attribute:: default_realm + + The default realm that will be used if one is not provided + to methods that require it. By default this is ``None``, + in which case an explicit realm must be provided for every + method call. Can be written to. + + .. attribute:: path + + Path to local file that will be used as the default + for all :meth:`load` and :meth:`save` operations. + May be written to, initialized by the *path* constructor keyword. + + .. attribute:: autosave + + Writeable flag indicating whether changes will be automatically + written to *path*. + + Errors + ====== + :raises ValueError: + All of the methods in this class will raise a :exc:`ValueError` if + any user name or realm contains a forbidden character (one of ``:\\r\\n\\t\\x00``), + or is longer than 255 characters. + """ + #=================================================================== + # instance attrs + #=================================================================== + + # NOTE: _records map stores (,) for the key, + # and as the value, all as bytes. + + # NOTE: unlike htpasswd, this class doesn't use a CryptContext, + # as only one hash format is supported: htdigest. 
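As a rough sketch of how the default_realm behavior described above plays out in practice, using the set_password/check_password methods that appear further down in this file (path, realm names, and passwords are illustrative assumptions):

    from passlib.apache import HtdigestFile

    hd = HtdigestFile("test.htdigest", new=True, default_realm="dev")
    hd.set_password("alice", "s3cret")              # realm omitted -> uses "dev"
    hd.set_password("bob", "ops", "hunter2")        # explicit realm "ops"
    hd.save()

    print(sorted(hd.realms()))                      # ['dev', 'ops']
    print(hd.users("ops"))                          # ['bob']
    print(hd.check_password("alice", "s3cret"))     # True  (default realm)
    print(hd.check_password("bob", "ops", "nope"))  # False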
+ + # optionally specify default realm that will be used if none + # is provided to a method call. otherwise realm is always required. + default_realm = None + + #=================================================================== + # init & serialization + #=================================================================== + def __init__(self, path=None, default_realm=None, **kwds): + self.default_realm = default_realm + super(HtdigestFile, self).__init__(path, **kwds) + + def _parse_record(self, record, lineno): + result = record.rstrip().split(_BCOLON) + if len(result) != 3: + raise ValueError("malformed htdigest file (error reading line %d)" + % lineno) + user, realm, hash = result + return (user, realm), hash + + def _render_record(self, key, hash): + user, realm = key + return render_bytes("%s:%s:%s\n", user, realm, hash) + + def _require_realm(self, realm): + if realm is None: + realm = self.default_realm + if realm is None: + raise TypeError("you must specify a realm explicitly, " + "or set the default_realm attribute") + return realm + + def _encode_realm(self, realm): + realm = self._require_realm(realm) + return self._encode_field(realm, "realm") + + def _encode_key(self, user, realm): + return self._encode_user(user), self._encode_realm(realm) + + #=================================================================== + # public methods + #=================================================================== + + def realms(self): + """Return list of all realms in database""" + realms = set(key[1] for key in self._records) + return [self._decode_field(realm) for realm in realms] + + def users(self, realm=None): + """Return list of all users in specified realm. + + * uses ``self.default_realm`` if no realm explicitly provided. + * returns empty list if realm not found. + """ + realm = self._encode_realm(realm) + return [self._decode_field(key[0]) for key in self._records + if key[1] == realm] + + ##def has_user(self, user, realm=None): + ## "check if user+realm combination exists" + ## return self._encode_key(user,realm) in self._records + + ##def rename_realm(self, old, new): + ## """rename all accounts in realm""" + ## old = self._encode_realm(old) + ## new = self._encode_realm(new) + ## keys = [key for key in self._records if key[1] == old] + ## for key in keys: + ## hash = self._records.pop(key) + ## self._set_record((key[0], new), hash) + ## self._autosave() + ## return len(keys) + + ##def rename(self, old, new, realm=None): + ## """rename user account""" + ## old = self._encode_user(old) + ## new = self._encode_user(new) + ## realm = self._encode_realm(realm) + ## hash = self._records.pop((old,realm)) + ## self._set_record((new, realm), hash) + ## self._autosave() + + def set_password(self, user, realm=None, password=_UNSET): + """Set password for user; adds user & realm if needed. + + If ``self.default_realm`` has been set, this may be called + with the syntax ``set_password(user, password)``, + otherwise it must be called with all three arguments: + ``set_password(user, realm, password)``. + + :returns: + * ``True`` if existing user was updated + * ``False`` if user account added. 
+ """ + if password is _UNSET: + # called w/ two args - (user, password), use default realm + realm, password = None, realm + realm = self._require_realm(realm) + hash = htdigest.hash(password, user, realm, encoding=self.encoding) + return self.set_hash(user, realm, hash) + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="set_password") + def update(self, user, realm, password): + """set password for user""" + return self.set_password(user, realm, password) + + def get_hash(self, user, realm=None): + """Return :class:`~passlib.hash.htdigest` hash stored for user. + + * uses ``self.default_realm`` if no realm explicitly provided. + * returns ``None`` if user or realm not found. + + .. versionchanged:: 1.6 + This method was previously named ``find``, it was renamed + for clarity. The old name is deprecated, and will be removed + in Passlib 1.8. + """ + key = self._encode_key(user, realm) + hash = self._records.get(key) + if hash is None: + return None + if PY3: + hash = hash.decode(self.encoding) + return hash + + def set_hash(self, user, realm=None, hash=_UNSET): + """ + semi-private helper which allows writing a hash directly; + adds user & realm if needed. + + If ``self.default_realm`` has been set, this may be called + with the syntax ``set_hash(user, hash)``, + otherwise it must be called with all three arguments: + ``set_hash(user, realm, hash)``. + + .. warning:: + does not (currently) do any validation of the hash string + + .. versionadded:: 1.7 + """ + if hash is _UNSET: + # called w/ two args - (user, hash), use default realm + realm, hash = None, realm + # assert htdigest.identify(hash), "unrecognized hash format" + if PY3 and isinstance(hash, str): + hash = hash.encode(self.encoding) + key = self._encode_key(user, realm) + existing = self._set_record(key, hash) + self._autosave() + return existing + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="get_hash") + def find(self, user, realm): + """return hash for user""" + return self.get_hash(user, realm) + + # XXX: rename to something more explicit, like delete_user()? + def delete(self, user, realm=None): + """Delete user's entry for specified realm. + + if realm is not specified, uses ``self.default_realm``. + + :returns: + * ``True`` if user deleted, + * ``False`` if user not found in realm. + """ + key = self._encode_key(user, realm) + try: + del self._records[key] + except KeyError: + return False + self._autosave() + return True + + def delete_realm(self, realm): + """Delete all users for specified realm. + + if realm is not specified, uses ``self.default_realm``. + + :returns: number of users deleted (0 if realm not found) + """ + realm = self._encode_realm(realm) + records = self._records + keys = [key for key in records if key[1] == realm] + for key in keys: + del records[key] + self._autosave() + return len(keys) + + def check_password(self, user, realm=None, password=_UNSET): + """Verify password for specified user + realm. + + If ``self.default_realm`` has been set, this may be called + with the syntax ``check_password(user, password)``, + otherwise it must be called with all three arguments: + ``check_password(user, realm, password)``. + + :returns: + * ``None`` if user or realm not found. + * ``False`` if user found, but password does not match. + * ``True`` if user found and password matches. + + .. versionchanged:: 1.6 + This method was previously called ``verify``, it was renamed + to prevent ambiguity with the :class:`!CryptContext` method. 
+ The old alias is deprecated, and will be removed in Passlib 1.8. + """ + if password is _UNSET: + # called w/ two args - (user, password), use default realm + realm, password = None, realm + user = self._encode_user(user) + realm = self._encode_realm(realm) + hash = self._records.get((user,realm)) + if hash is None: + return None + return htdigest.verify(password, hash, user, realm, + encoding=self.encoding) + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="check_password") + def verify(self, user, realm, password): + """verify password for user""" + return self.check_password(user, realm, password) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/apps.py b/ansible/lib/python3.11/site-packages/passlib/apps.py new file mode 100644 index 000000000..682bbff6f --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/apps.py @@ -0,0 +1,245 @@ +"""passlib.apps""" +#============================================================================= +# imports +#============================================================================= +# core +import logging; log = logging.getLogger(__name__) +from itertools import chain +# site +# pkg +from passlib import hash +from passlib.context import LazyCryptContext +from passlib.utils import sys_bits +# local +__all__ = [ + 'custom_app_context', + 'django_context', + 'ldap_context', 'ldap_nocrypt_context', + 'mysql_context', 'mysql4_context', 'mysql3_context', + 'phpass_context', + 'phpbb3_context', + 'postgres_context', +] + +#============================================================================= +# master containing all identifiable hashes +#============================================================================= +def _load_master_config(): + from passlib.registry import list_crypt_handlers + + # get master list + schemes = list_crypt_handlers() + + # exclude the ones we know have ambiguous or greedy identify() methods. 
+ excluded = [ + # frequently confused for eachother + 'bigcrypt', + 'crypt16', + + # no good identifiers + 'cisco_pix', + 'cisco_type7', + 'htdigest', + 'mysql323', + 'oracle10', + + # all have same size + 'lmhash', + 'msdcc', + 'msdcc2', + 'nthash', + + # plaintext handlers + 'plaintext', + 'ldap_plaintext', + + # disabled handlers + 'django_disabled', + 'unix_disabled', + 'unix_fallback', + ] + for name in excluded: + schemes.remove(name) + + # return config + return dict(schemes=schemes, default="sha256_crypt") +master_context = LazyCryptContext(onload=_load_master_config) + +#============================================================================= +# for quickly bootstrapping new custom applications +#============================================================================= +custom_app_context = LazyCryptContext( + # choose some reasonbly strong schemes + schemes=["sha512_crypt", "sha256_crypt"], + + # set some useful global options + default="sha256_crypt" if sys_bits < 64 else "sha512_crypt", + + # set a good starting point for rounds selection + sha512_crypt__min_rounds = 535000, + sha256_crypt__min_rounds = 535000, + + # if the admin user category is selected, make a much stronger hash, + admin__sha512_crypt__min_rounds = 1024000, + admin__sha256_crypt__min_rounds = 1024000, + ) + +#============================================================================= +# django +#============================================================================= + +#----------------------------------------------------------------------- +# 1.0 +#----------------------------------------------------------------------- + +_django10_schemes = [ + "django_salted_sha1", + "django_salted_md5", + "django_des_crypt", + "hex_md5", + "django_disabled", +] + +django10_context = LazyCryptContext( + schemes=_django10_schemes, + default="django_salted_sha1", + deprecated=["hex_md5"], +) + +#----------------------------------------------------------------------- +# 1.4 +#----------------------------------------------------------------------- + +_django14_schemes = [ + "django_pbkdf2_sha256", + "django_pbkdf2_sha1", + "django_bcrypt" +] + _django10_schemes + +django14_context = LazyCryptContext( + schemes=_django14_schemes, + deprecated=_django10_schemes, +) + +#----------------------------------------------------------------------- +# 1.6 +#----------------------------------------------------------------------- + +_django16_schemes = list(_django14_schemes) +_django16_schemes.insert(1, "django_bcrypt_sha256") +django16_context = LazyCryptContext( + schemes=_django16_schemes, + deprecated=_django10_schemes, +) + +#----------------------------------------------------------------------- +# 1.10 +#----------------------------------------------------------------------- + +_django_110_schemes = [ + "django_pbkdf2_sha256", + "django_pbkdf2_sha1", + "django_argon2", + "django_bcrypt", + "django_bcrypt_sha256", + "django_disabled", +] +django110_context = LazyCryptContext(schemes=_django_110_schemes) + +#----------------------------------------------------------------------- +# 2.1 +#----------------------------------------------------------------------- + +_django21_schemes = list(_django_110_schemes) +_django21_schemes.remove("django_bcrypt") +django21_context = LazyCryptContext(schemes=_django21_schemes) + +#----------------------------------------------------------------------- +# latest +#----------------------------------------------------------------------- + +# this will always point to latest version 
in passlib +django_context = django21_context + +#============================================================================= +# ldap +#============================================================================= + +#: standard ldap schemes +std_ldap_schemes = [ + "ldap_salted_sha512", + "ldap_salted_sha256", + "ldap_salted_sha1", + "ldap_salted_md5", + "ldap_sha1", + "ldap_md5", + "ldap_plaintext", +] + +# create context with all std ldap schemes EXCEPT crypt +ldap_nocrypt_context = LazyCryptContext(std_ldap_schemes) + +# create context with all possible std ldap + ldap crypt schemes +def _iter_ldap_crypt_schemes(): + from passlib.utils import unix_crypt_schemes + return ('ldap_' + name for name in unix_crypt_schemes) + +def _iter_ldap_schemes(): + """helper which iterates over supported std ldap schemes""" + return chain(std_ldap_schemes, _iter_ldap_crypt_schemes()) +ldap_context = LazyCryptContext(_iter_ldap_schemes()) + +### create context with all std ldap schemes + crypt schemes for localhost +##def _iter_host_ldap_schemes(): +## "helper which iterates over supported std ldap schemes" +## from passlib.handlers.ldap_digests import get_host_ldap_crypt_schemes +## return chain(std_ldap_schemes, get_host_ldap_crypt_schemes()) +##ldap_host_context = LazyCryptContext(_iter_host_ldap_schemes()) + +#============================================================================= +# mysql +#============================================================================= +mysql3_context = LazyCryptContext(["mysql323"]) +mysql4_context = LazyCryptContext(["mysql41", "mysql323"], deprecated="mysql323") +mysql_context = mysql4_context # tracks latest mysql version supported + +#============================================================================= +# postgres +#============================================================================= +postgres_context = LazyCryptContext(["postgres_md5"]) + +#============================================================================= +# phpass & variants +#============================================================================= +def _create_phpass_policy(**kwds): + """helper to choose default alg based on bcrypt availability""" + kwds['default'] = 'bcrypt' if hash.bcrypt.has_backend() else 'phpass' + return kwds + +phpass_context = LazyCryptContext( + schemes=["bcrypt", "phpass", "bsdi_crypt"], + onload=_create_phpass_policy, + ) + +phpbb3_context = LazyCryptContext(["phpass"], phpass__ident="H") + +# TODO: support the drupal phpass variants (see phpass homepage) + +#============================================================================= +# roundup +#============================================================================= + +_std_roundup_schemes = [ "ldap_hex_sha1", "ldap_hex_md5", "ldap_des_crypt", "roundup_plaintext" ] +roundup10_context = LazyCryptContext(_std_roundup_schemes) + +# NOTE: 'roundup15' really applies to roundup 1.4.17+ +roundup_context = roundup15_context = LazyCryptContext( + schemes=_std_roundup_schemes + [ "ldap_pbkdf2_sha1" ], + deprecated=_std_roundup_schemes, + default = "ldap_pbkdf2_sha1", + ldap_pbkdf2_sha1__default_rounds = 10000, + ) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/context.py b/ansible/lib/python3.11/site-packages/passlib/context.py new file mode 100644 index 000000000..bc3cbf50f --- /dev/null +++ 
b/ansible/lib/python3.11/site-packages/passlib/context.py @@ -0,0 +1,2637 @@ +"""passlib.context - CryptContext implementation""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import re +import logging; log = logging.getLogger(__name__) +import threading +import time +from warnings import warn +# site +# pkg +from passlib import exc +from passlib.exc import ExpectedStringError, ExpectedTypeError, PasslibConfigWarning +from passlib.registry import get_crypt_handler, _validate_handler_name +from passlib.utils import (handlers as uh, to_bytes, + to_unicode, splitcomma, + as_bool, timer, rng, getrandstr, + ) +from passlib.utils.binary import BASE64_CHARS +from passlib.utils.compat import (iteritems, num_types, irange, + PY2, PY3, unicode, SafeConfigParser, + NativeStringIO, BytesIO, + unicode_or_bytes_types, native_string_types, + ) +from passlib.utils.decor import deprecated_method, memoized_property +# local +__all__ = [ + 'CryptContext', + 'LazyCryptContext', + 'CryptPolicy', +] + +#============================================================================= +# support +#============================================================================= + +# private object to detect unset params +_UNSET = object() + +def _coerce_vary_rounds(value): + """parse vary_rounds string to percent as [0,1) float, or integer""" + if value.endswith("%"): + # XXX: deprecate this in favor of raw float? + return float(value.rstrip("%"))*.01 + try: + return int(value) + except ValueError: + return float(value) + +# set of options which aren't allowed to be set via policy +_forbidden_scheme_options = set(["salt"]) + # 'salt' - not allowed since a fixed salt would defeat the purpose. + +# dict containing funcs used to coerce strings to correct type for scheme option keys. +# NOTE: this isn't really needed any longer, since Handler.using() handles the actual parsing. +# keeping this around for now, though, since it makes context.to_dict() output cleaner. +_coerce_scheme_options = dict( + min_rounds=int, + max_rounds=int, + default_rounds=int, + vary_rounds=_coerce_vary_rounds, + salt_size=int, +) + +def _is_handler_registered(handler): + """detect if handler is registered or a custom handler""" + return get_crypt_handler(handler.name, None) is handler + +@staticmethod +def _always_needs_update(hash, secret=None): + """ + dummy function patched into handler.needs_update() by _CryptConfig + when hash alg has been deprecated for context. + """ + return True + +#: list of keys allowed under wildcard "all" scheme w/o a security warning. +_global_settings = set(["truncate_error", "vary_rounds"]) + +#============================================================================= +# crypt policy +#============================================================================= +_preamble = ("The CryptPolicy class has been deprecated as of " + "Passlib 1.6, and will be removed in Passlib 1.8. ") + +class CryptPolicy(object): + """ + .. deprecated:: 1.6 + This class has been deprecated, and will be removed in Passlib 1.8. + All of its functionality has been rolled into :class:`CryptContext`. + + This class previously stored the configuration options for the + CryptContext class. In the interest of interface simplification, + all of this class' functionality has been rolled into the CryptContext + class itself. 
+ The documentation for this class is now focused on documenting how to + migrate to the new api. Additionally, where possible, the deprecation + warnings issued by the CryptPolicy methods will list the replacement call + that should be used. + + Constructors + ============ + CryptPolicy objects can be constructed directly using any of + the keywords accepted by :class:`CryptContext`. Direct uses of the + :class:`!CryptPolicy` constructor should either pass the keywords + directly into the CryptContext constructor, or to :meth:`CryptContext.update` + if the policy object was being used to update an existing context object. + + In addition to passing in keywords directly, + CryptPolicy objects can be constructed by the following methods: + + .. automethod:: from_path + .. automethod:: from_string + .. automethod:: from_source + .. automethod:: from_sources + .. automethod:: replace + + Introspection + ============= + All of the informational methods provided by this class have been deprecated + by identical or similar methods in the :class:`CryptContext` class: + + .. automethod:: has_schemes + .. automethod:: schemes + .. automethod:: iter_handlers + .. automethod:: get_handler + .. automethod:: get_options + .. automethod:: handler_is_deprecated + .. automethod:: get_min_verify_time + + Exporting + ========= + .. automethod:: iter_config + .. automethod:: to_dict + .. automethod:: to_file + .. automethod:: to_string + + .. note:: + CryptPolicy are immutable. + Use the :meth:`replace` method to mutate existing instances. + + .. deprecated:: 1.6 + """ + #=================================================================== + # class methods + #=================================================================== + @classmethod + def from_path(cls, path, section="passlib", encoding="utf-8"): + """create a CryptPolicy instance from a local file. + + .. deprecated:: 1.6 + + Creating a new CryptContext from a file, which was previously done via + ``CryptContext(policy=CryptPolicy.from_path(path))``, can now be + done via ``CryptContext.from_path(path)``. + See :meth:`CryptContext.from_path` for details. + + Updating an existing CryptContext from a file, which was previously done + ``context.policy = CryptPolicy.from_path(path)``, can now be + done via ``context.load_path(path)``. + See :meth:`CryptContext.load_path` for details. + """ + warn(_preamble + + "Instead of ``CryptPolicy.from_path(path)``, " + "use ``CryptContext.from_path(path)`` " + " or ``context.load_path(path)`` for an existing CryptContext.", + DeprecationWarning, stacklevel=2) + return cls(_internal_context=CryptContext.from_path(path, section, + encoding)) + + @classmethod + def from_string(cls, source, section="passlib", encoding="utf-8"): + """create a CryptPolicy instance from a string. + + .. deprecated:: 1.6 + + Creating a new CryptContext from a string, which was previously done via + ``CryptContext(policy=CryptPolicy.from_string(data))``, can now be + done via ``CryptContext.from_string(data)``. + See :meth:`CryptContext.from_string` for details. + + Updating an existing CryptContext from a string, which was previously done + ``context.policy = CryptPolicy.from_string(data)``, can now be + done via ``context.load(data)``. + See :meth:`CryptContext.load` for details. 
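A minimal sketch of that replacement call, assuming an INI-style policy string of the kind to_string() produces (the scheme choices and rounds value below are illustrative, not taken from this change):

    from passlib.context import CryptContext

    config = (
        "[passlib]\n"
        "schemes = sha256_crypt, md5_crypt\n"
        "default = sha256_crypt\n"
        "deprecated = md5_crypt\n"
        "sha256_crypt__min_rounds = 535000\n"
    )

    ctx = CryptContext.from_string(config)
    h = ctx.hash("s3cret")
    print(ctx.verify("s3cret", h))   # True
    print(ctx.identify(h))           # 'sha256_crypt'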
+ """ + warn(_preamble + + "Instead of ``CryptPolicy.from_string(source)``, " + "use ``CryptContext.from_string(source)`` or " + "``context.load(source)`` for an existing CryptContext.", + DeprecationWarning, stacklevel=2) + return cls(_internal_context=CryptContext.from_string(source, section, + encoding)) + + @classmethod + def from_source(cls, source, _warn=True): + """create a CryptPolicy instance from some source. + + this method autodetects the source type, and invokes + the appropriate constructor automatically. it attempts + to detect whether the source is a configuration string, a filepath, + a dictionary, or an existing CryptPolicy instance. + + .. deprecated:: 1.6 + + Create a new CryptContext, which could previously be done via + ``CryptContext(policy=CryptPolicy.from_source(source))``, should + now be done using an explicit method: the :class:`CryptContext` + constructor itself, :meth:`CryptContext.from_path`, + or :meth:`CryptContext.from_string`. + + Updating an existing CryptContext, which could previously be done via + ``context.policy = CryptPolicy.from_source(source)``, should + now be done using an explicit method: :meth:`CryptContext.update`, + or :meth:`CryptContext.load`. + """ + if _warn: + warn(_preamble + + "Instead of ``CryptPolicy.from_source()``, " + "use ``CryptContext.from_string(path)`` " + " or ``CryptContext.from_path(source)``, as appropriate.", + DeprecationWarning, stacklevel=2) + if isinstance(source, CryptPolicy): + return source + elif isinstance(source, dict): + return cls(_internal_context=CryptContext(**source)) + elif not isinstance(source, (bytes,unicode)): + raise TypeError("source must be CryptPolicy, dict, config string, " + "or file path: %r" % (type(source),)) + elif any(c in source for c in "\n\r\t") or not source.strip(" \t./;:"): + return cls(_internal_context=CryptContext.from_string(source)) + else: + return cls(_internal_context=CryptContext.from_path(source)) + + @classmethod + def from_sources(cls, sources, _warn=True): + """create a CryptPolicy instance by merging multiple sources. + + each source is interpreted as by :meth:`from_source`, + and the results are merged together. + + .. deprecated:: 1.6 + Instead of using this method to merge multiple policies together, + a :class:`CryptContext` instance should be created, and then + the multiple sources merged together via :meth:`CryptContext.load`. + """ + if _warn: + warn(_preamble + + "Instead of ``CryptPolicy.from_sources()``, " + "use the various CryptContext constructors " + " followed by ``context.update()``.", + DeprecationWarning, stacklevel=2) + if len(sources) == 0: + raise ValueError("no sources specified") + if len(sources) == 1: + return cls.from_source(sources[0], _warn=False) + kwds = {} + for source in sources: + kwds.update(cls.from_source(source, _warn=False)._context.to_dict(resolve=True)) + return cls(_internal_context=CryptContext(**kwds)) + + def replace(self, *args, **kwds): + """create a new CryptPolicy, optionally updating parts of the + existing configuration. + + .. deprecated:: 1.6 + Callers of this method should :meth:`CryptContext.update` or + :meth:`CryptContext.copy` instead. 
+ """ + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "Instead of ``context.policy.replace()``, " + "use ``context.update()`` or ``context.copy()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().replace()``, " + "create a CryptContext instance and " + "use ``context.update()`` or ``context.copy()``.", + DeprecationWarning, stacklevel=2) + sources = [ self ] + if args: + sources.extend(args) + if kwds: + sources.append(kwds) + return CryptPolicy.from_sources(sources, _warn=False) + + #=================================================================== + # instance attrs + #=================================================================== + + # internal CryptContext we're wrapping to handle everything + # until this class is removed. + _context = None + + # flag indicating this is wrapper generated by the CryptContext.policy + # attribute, rather than one created independantly by the application. + _stub_policy = False + + #=================================================================== + # init + #=================================================================== + def __init__(self, *args, **kwds): + context = kwds.pop("_internal_context", None) + if context: + assert isinstance(context, CryptContext) + self._context = context + self._stub_policy = kwds.pop("_stub_policy", False) + assert not (args or kwds), "unexpected args: %r %r" % (args,kwds) + else: + if args: + if len(args) != 1: + raise TypeError("only one positional argument accepted") + if kwds: + raise TypeError("cannot specify positional arg and kwds") + kwds = args[0] + warn(_preamble + + "Instead of constructing a CryptPolicy instance, " + "create a CryptContext directly, or use ``context.update()`` " + "and ``context.load()`` to reconfigure existing CryptContext " + "instances.", + DeprecationWarning, stacklevel=2) + self._context = CryptContext(**kwds) + + #=================================================================== + # public interface for examining options + #=================================================================== + def has_schemes(self): + """return True if policy defines *any* schemes for use. + + .. deprecated:: 1.6 + applications should use ``bool(context.schemes())`` instead. + see :meth:`CryptContext.schemes`. + """ + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "Instead of ``context.policy.has_schemes()``, " + "use ``bool(context.schemes())``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().has_schemes()``, " + "create a CryptContext instance and " + "use ``bool(context.schemes())``.", + DeprecationWarning, stacklevel=2) + return bool(self._context.schemes()) + + def iter_handlers(self): + """return iterator over handlers defined in policy. + + .. deprecated:: 1.6 + applications should use ``context.schemes(resolve=True))`` instead. + see :meth:`CryptContext.schemes`. + """ + if self._stub_policy: + warn(_preamble + + "Instead of ``context.policy.iter_handlers()``, " + "use ``context.schemes(resolve=True)``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().iter_handlers()``, " + "create a CryptContext instance and " + "use ``context.schemes(resolve=True)``.", + DeprecationWarning, stacklevel=2) + return self._context.schemes(resolve=True, unconfigured=True) + + def schemes(self, resolve=False): + """return list of schemes defined in policy. + + .. 
deprecated:: 1.6 + applications should use :meth:`CryptContext.schemes` instead. + """ + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "Instead of ``context.policy.schemes()``, " + "use ``context.schemes()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().schemes()``, " + "create a CryptContext instance and " + "use ``context.schemes()``.", + DeprecationWarning, stacklevel=2) + return list(self._context.schemes(resolve=resolve, unconfigured=True)) + + def get_handler(self, name=None, category=None, required=False): + """return handler as specified by name, or default handler. + + .. deprecated:: 1.6 + applications should use :meth:`CryptContext.handler` instead, + though note that the ``required`` keyword has been removed, + and the new method will always act as if ``required=True``. + """ + if self._stub_policy: + warn(_preamble + + "Instead of ``context.policy.get_handler()``, " + "use ``context.handler()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().get_handler()``, " + "create a CryptContext instance and " + "use ``context.handler()``.", + DeprecationWarning, stacklevel=2) + # CryptContext.handler() doesn't support required=False, + # so wrapping it in try/except + try: + return self._context.handler(name, category, unconfigured=True) + except KeyError: + if required: + raise + else: + return None + + def get_min_verify_time(self, category=None): + """get min_verify_time setting for policy. + + .. deprecated:: 1.6 + min_verify_time option will be removed entirely in passlib 1.8 + + .. versionchanged:: 1.7 + this method now always returns the value automatically + calculated by :meth:`CryptContext.min_verify_time`, + any value specified by policy is ignored. + """ + warn("get_min_verify_time() and min_verify_time option is deprecated and ignored, " + "and will be removed in Passlib 1.8", DeprecationWarning, + stacklevel=2) + return 0 + + def get_options(self, name, category=None): + """return dictionary of options specific to a given handler. + + .. deprecated:: 1.6 + this method has no direct replacement in the 1.6 api, as there + is not a clearly defined use-case. however, examining the output of + :meth:`CryptContext.to_dict` should serve as the closest alternative. + """ + # XXX: might make a public replacement, but need more study of the use cases. + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "``context.policy.get_options()`` will no longer be available.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "``CryptPolicy().get_options()`` will no longer be available.", + DeprecationWarning, stacklevel=2) + if hasattr(name, "name"): + name = name.name + return self._context._config._get_record_options_with_flag(name, category)[0] + + def handler_is_deprecated(self, name, category=None): + """check if handler has been deprecated by policy. + + .. deprecated:: 1.6 + this method has no direct replacement in the 1.6 api, as there + is not a clearly defined use-case. however, examining the output of + :meth:`CryptContext.to_dict` should serve as the closest alternative. + """ + # XXX: might make a public replacement, but need more study of the use cases. 
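+        # illustrative near-equivalent under the newer api (mirrors the return
+        # statement below): ``context.handler(name, category).deprecated``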
+ if self._stub_policy: + warn(_preamble + + "``context.policy.handler_is_deprecated()`` will no longer be available.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "``CryptPolicy().handler_is_deprecated()`` will no longer be available.", + DeprecationWarning, stacklevel=2) + if hasattr(name, "name"): + name = name.name + return self._context.handler(name, category).deprecated + + #=================================================================== + # serialization + #=================================================================== + + def iter_config(self, ini=False, resolve=False): + """iterate over key/value pairs representing the policy object. + + .. deprecated:: 1.6 + applications should use :meth:`CryptContext.to_dict` instead. + """ + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "Instead of ``context.policy.iter_config()``, " + "use ``context.to_dict().items()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().iter_config()``, " + "create a CryptContext instance and " + "use ``context.to_dict().items()``.", + DeprecationWarning, stacklevel=2) + # hacked code that renders keys & values in manner that approximates + # old behavior. context.to_dict() is much cleaner. + context = self._context + if ini: + def render_key(key): + return context._render_config_key(key).replace("__", ".") + def render_value(value): + if isinstance(value, (list,tuple)): + value = ", ".join(value) + return value + resolve = False + else: + render_key = context._render_config_key + render_value = lambda value: value + return ( + (render_key(key), render_value(value)) + for key, value in context._config.iter_config(resolve) + ) + + def to_dict(self, resolve=False): + """export policy object as dictionary of options. + + .. deprecated:: 1.6 + applications should use :meth:`CryptContext.to_dict` instead. + """ + if self._stub_policy: + warn(_preamble + + "Instead of ``context.policy.to_dict()``, " + "use ``context.to_dict()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().to_dict()``, " + "create a CryptContext instance and " + "use ``context.to_dict()``.", + DeprecationWarning, stacklevel=2) + return self._context.to_dict(resolve) + + def to_file(self, stream, section="passlib"): # pragma: no cover -- deprecated & unused + """export policy to file. + + .. deprecated:: 1.6 + applications should use :meth:`CryptContext.to_string` instead, + and then write the output to a file as desired. + """ + if self._stub_policy: + warn(_preamble + + "Instead of ``context.policy.to_file(stream)``, " + "use ``stream.write(context.to_string())``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().to_file(stream)``, " + "create a CryptContext instance and " + "use ``stream.write(context.to_string())``.", + DeprecationWarning, stacklevel=2) + out = self._context.to_string(section=section) + if PY2: + out = out.encode("utf-8") + stream.write(out) + + def to_string(self, section="passlib", encoding=None): + """export policy to file. + + .. deprecated:: 1.6 + applications should use :meth:`CryptContext.to_string` instead. 
+ """ + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "Instead of ``context.policy.to_string()``, " + "use ``context.to_string()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().to_string()``, " + "create a CryptContext instance and " + "use ``context.to_string()``.", + DeprecationWarning, stacklevel=2) + out = self._context.to_string(section=section) + if encoding: + out = out.encode(encoding) + return out + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# _CryptConfig helper class +#============================================================================= +class _CryptConfig(object): + """parses, validates, and stores CryptContext config + + this is a helper used internally by CryptContext to handle + parsing, validation, and serialization of its config options. + split out from the main class, but not made public since + that just complicates interface too much (c.f. CryptPolicy) + + :arg source: config as dict mapping ``(cat,scheme,option) -> value`` + """ + #=================================================================== + # instance attrs + #=================================================================== + + # triple-nested dict which maps scheme -> category -> key -> value, + # storing all hash-specific options + _scheme_options = None + + # double-nested dict which maps key -> category -> value + # storing all CryptContext options + _context_options = None + + # tuple of handler objects + handlers = None + + # tuple of scheme objects in same order as handlers + schemes = None + + # tuple of categories in alphabetical order (not including None) + categories = None + + # set of all context keywords used by active schemes + context_kwds = None + + # dict mapping category -> default scheme + _default_schemes = None + + # dict mapping (scheme, category) -> custom handler + _records = None + + # dict mapping category -> list of custom handler instances for that category, + # in order of schemes(). 
populated on demand by _get_record_list() + _record_lists = None + + #=================================================================== + # constructor + #=================================================================== + def __init__(self, source): + self._init_scheme_list(source.get((None,None,"schemes"))) + self._init_options(source) + self._init_default_schemes() + self._init_records() + + def _init_scheme_list(self, data): + """initialize .handlers and .schemes attributes""" + handlers = [] + schemes = [] + if isinstance(data, native_string_types): + data = splitcomma(data) + for elem in data or (): + # resolve elem -> handler & scheme + if hasattr(elem, "name"): + handler = elem + scheme = handler.name + _validate_handler_name(scheme) + elif isinstance(elem, native_string_types): + handler = get_crypt_handler(elem) + scheme = handler.name + else: + raise TypeError("scheme must be name or CryptHandler, " + "not %r" % type(elem)) + + # check scheme name isn't already in use + if scheme in schemes: + raise KeyError("multiple handlers with same name: %r" % + (scheme,)) + + # add to handler list + handlers.append(handler) + schemes.append(scheme) + + self.handlers = tuple(handlers) + self.schemes = tuple(schemes) + + #=================================================================== + # lowlevel options + #=================================================================== + + #--------------------------------------------------------------- + # init lowlevel option storage + #--------------------------------------------------------------- + def _init_options(self, source): + """load config dict into internal representation, + and init .categories attr + """ + # prepare dicts & locals + norm_scheme_option = self._norm_scheme_option + norm_context_option = self._norm_context_option + self._scheme_options = scheme_options = {} + self._context_options = context_options = {} + categories = set() + + # load source config into internal storage + for (cat, scheme, key), value in iteritems(source): + categories.add(cat) + explicit_scheme = scheme + if not cat and not scheme and key in _global_settings: + # going forward, not using "__all__" format. instead... + # whitelisting set of keys which should be passed to (all) schemes, + # rather than passed to the CryptContext itself + scheme = "all" + if scheme: + # normalize scheme option + key, value = norm_scheme_option(key, value) + + # e.g. things like "min_rounds" should never be set cross-scheme + # this will be fatal under 2.0. + if scheme == "all" and key not in _global_settings: + warn("The '%s' option should be configured per-algorithm, and not set " + "globally in the context; This will be an error in Passlib 2.0" % + (key,), PasslibConfigWarning) + + # this scheme is going away in 2.0; + # but most keys deserve an extra warning since it impacts security. 
+ if explicit_scheme == "all": + warn("The 'all' scheme is deprecated as of Passlib 1.7, " + "and will be removed in Passlib 2.0; Please configure " + "options on a per-algorithm basis.", DeprecationWarning) + + # store in scheme_options + # map structure: scheme_options[scheme][category][key] = value + try: + category_map = scheme_options[scheme] + except KeyError: + scheme_options[scheme] = {cat: {key: value}} + else: + try: + option_map = category_map[cat] + except KeyError: + category_map[cat] = {key: value} + else: + option_map[key] = value + else: + # normalize context option + if cat and key == "schemes": + raise KeyError("'schemes' context option is not allowed " + "per category") + key, value = norm_context_option(cat, key, value) + if key == "min_verify_time": # ignored in 1.7, to be removed in 1.8 + continue + + # store in context_options + # map structure: context_options[key][category] = value + try: + category_map = context_options[key] + except KeyError: + context_options[key] = {cat: value} + else: + category_map[cat] = value + + # store list of configured categories + categories.discard(None) + self.categories = tuple(sorted(categories)) + + def _norm_scheme_option(self, key, value): + # check for invalid options + if key in _forbidden_scheme_options: + raise KeyError("%r option not allowed in CryptContext " + "configuration" % (key,)) + # coerce strings for certain fields (e.g. min_rounds uses ints) + if isinstance(value, native_string_types): + func = _coerce_scheme_options.get(key) + if func: + value = func(value) + return key, value + + def _norm_context_option(self, cat, key, value): + schemes = self.schemes + if key == "default": + if hasattr(value, "name"): + value = value.name + elif not isinstance(value, native_string_types): + raise ExpectedTypeError(value, "str", "default") + if schemes and value not in schemes: + raise KeyError("default scheme not found in policy") + elif key == "deprecated": + if isinstance(value, native_string_types): + value = splitcomma(value) + elif not isinstance(value, (list,tuple)): + raise ExpectedTypeError(value, "str or seq", "deprecated") + if 'auto' in value: + # XXX: have any statements been made about when this is default? + # should do it in 1.8 at latest. + if len(value) > 1: + raise ValueError("cannot list other schemes if " + "``deprecated=['auto']`` is used") + elif schemes: + # make sure list of deprecated schemes is subset of configured schemes + for scheme in value: + if not isinstance(scheme, native_string_types): + raise ExpectedTypeError(value, "str", "deprecated element") + if scheme not in schemes: + raise KeyError("deprecated scheme not found " + "in policy: %r" % (scheme,)) + elif key == "min_verify_time": + warn("'min_verify_time' was deprecated in Passlib 1.6, is " + "ignored in 1.7, and will be removed in 1.8", + DeprecationWarning) + elif key == "harden_verify": + warn("'harden_verify' is deprecated & ignored as of Passlib 1.7.1, " + " and will be removed in 1.8", + DeprecationWarning) + elif key != "schemes": + raise KeyError("unknown CryptContext keyword: %r" % (key,)) + return key, value + + #--------------------------------------------------------------- + # reading context options + #--------------------------------------------------------------- + def get_context_optionmap(self, key, _default={}): + """return dict mapping category->value for specific context option. + + .. warning:: treat return value as readonly! 
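+
+        illustrative return value for ``key="default"`` (shape only)::
+
+            {None: "sha256_crypt", "admin": "md5_crypt"}
+
+        here the ``None`` entry holds the global (uncategorized) value.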
+ """ + return self._context_options.get(key, _default) + + def get_context_option_with_flag(self, category, key): + """return value of specific option, handling category inheritance. + also returns flag indicating whether value is category-specific. + """ + try: + category_map = self._context_options[key] + except KeyError: + return None, False + value = category_map.get(None) + if category: + try: + alt = category_map[category] + except KeyError: + pass + else: + if value is None or alt != value: + return alt, True + return value, False + + #--------------------------------------------------------------- + # reading scheme options + #--------------------------------------------------------------- + def _get_scheme_optionmap(self, scheme, category, default={}): + """return all options for (scheme,category) combination + + .. warning:: treat return value as readonly! + """ + try: + return self._scheme_options[scheme][category] + except KeyError: + return default + + def get_base_handler(self, scheme): + return self.handlers[self.schemes.index(scheme)] + + @staticmethod + def expand_settings(handler): + setting_kwds = handler.setting_kwds + if 'rounds' in handler.setting_kwds: + # XXX: historically this extras won't be listed in setting_kwds + setting_kwds += uh.HasRounds.using_rounds_kwds + return setting_kwds + + # NOTE: this is only used by _get_record_options_with_flag()... + def get_scheme_options_with_flag(self, scheme, category): + """return composite dict of all options set for scheme. + includes options inherited from 'all' and from default category. + result can be modified. + returns (kwds, has_cat_specific_options) + """ + # start out with copy of global options + get_optionmap = self._get_scheme_optionmap + kwds = get_optionmap("all", None).copy() + has_cat_options = False + + # add in category-specific global options + if category: + defkwds = kwds.copy() # <-- used to detect category-specific options + kwds.update(get_optionmap("all", category)) + + # filter out global settings not supported by handler + allowed_settings = self.expand_settings(self.get_base_handler(scheme)) + for key in set(kwds).difference(allowed_settings): + kwds.pop(key) + if category: + for key in set(defkwds).difference(allowed_settings): + defkwds.pop(key) + + # add in default options for scheme + other = get_optionmap(scheme, None) + kwds.update(other) + + # load category-specific options for scheme + if category: + defkwds.update(other) + kwds.update(get_optionmap(scheme, category)) + + # compare default category options to see if there's anything + # category-specific + if kwds != defkwds: + has_cat_options = True + + return kwds, has_cat_options + + #=================================================================== + # deprecated & default schemes + #=================================================================== + def _init_default_schemes(self): + """initialize maps containing default scheme for each category. + + have to do this after _init_options(), since the default scheme + is affected by the list of deprecated schemes. 
+ """ + # init maps & locals + get_optionmap = self.get_context_optionmap + default_map = self._default_schemes = get_optionmap("default").copy() + dep_map = get_optionmap("deprecated") + schemes = self.schemes + if not schemes: + return + + # figure out default scheme + deps = dep_map.get(None) or () + default = default_map.get(None) + if not default: + for scheme in schemes: + if scheme not in deps: + default_map[None] = scheme + break + else: + raise ValueError("must have at least one non-deprecated scheme") + elif default in deps: + raise ValueError("default scheme cannot be deprecated") + + # figure out per-category default schemes, + for cat in self.categories: + cdeps = dep_map.get(cat, deps) + cdefault = default_map.get(cat, default) + if not cdefault: + for scheme in schemes: + if scheme not in cdeps: + default_map[cat] = scheme + break + else: + raise ValueError("must have at least one non-deprecated " + "scheme for %r category" % cat) + elif cdefault in cdeps: + raise ValueError("default scheme for %r category " + "cannot be deprecated" % cat) + + def default_scheme(self, category): + """return default scheme for specific category""" + defaults = self._default_schemes + try: + return defaults[category] + except KeyError: + pass + if not self.schemes: + raise KeyError("no hash schemes configured for this " + "CryptContext instance") + return defaults[None] + + def is_deprecated_with_flag(self, scheme, category): + """is scheme deprecated under particular category?""" + depmap = self.get_context_optionmap("deprecated") + def test(cat): + source = depmap.get(cat, depmap.get(None)) + if source is None: + return None + elif 'auto' in source: + return scheme != self.default_scheme(cat) + else: + return scheme in source + value = test(None) or False + if category: + alt = test(category) + if alt is not None and value != alt: + return alt, True + return value, False + + #=================================================================== + # CryptRecord objects + #=================================================================== + def _init_records(self): + # NOTE: this step handles final validation of settings, + # checking for violations against handler's internal invariants. + # this is why we create all the records now, + # so CryptContext throws error immediately rather than later. + self._record_lists = {} + records = self._records = {} + all_context_kwds = self.context_kwds = set() + get_options = self._get_record_options_with_flag + categories = (None,) + self.categories + for handler in self.handlers: + scheme = handler.name + all_context_kwds.update(handler.context_kwds) + for cat in categories: + kwds, has_cat_options = get_options(scheme, cat) + if cat is None or has_cat_options: + records[scheme, cat] = self._create_record(handler, cat, **kwds) + # NOTE: if handler has no category-specific opts, get_record() + # will automatically use the default category's record. + # NOTE: default records for specific category stored under the + # key (None,category); these are populated on-demand by get_record(). + + @staticmethod + def _create_record(handler, category=None, deprecated=False, **settings): + # create custom handler if needed. + try: + # XXX: relaxed=True is mostly here to retain backwards-compat behavior. + # could make this optional flag in future. 
+ subcls = handler.using(relaxed=True, **settings) + except TypeError as err: + m = re.match(r".* unexpected keyword argument '(.*)'$", str(err)) + if m and m.group(1) in settings: + # translate into KeyError, for backwards compat. + # XXX: push this down to GenericHandler.using() implementation? + key = m.group(1) + raise KeyError("keyword not supported by %s handler: %r" % + (handler.name, key)) + raise + + # using private attrs to store some extra metadata in custom handler + assert subcls is not handler, "expected unique variant of handler" + ##subcls._Context__category = category + subcls._Context__orig_handler = handler + subcls.deprecated = deprecated # attr reserved for this purpose + return subcls + + def _get_record_options_with_flag(self, scheme, category): + """return composite dict of options for given scheme + category. + + this is currently a private method, though some variant + of its output may eventually be made public. + + given a scheme & category, it returns two things: + a set of all the keyword options to pass to :meth:`_create_record`, + and a bool flag indicating whether any of these options + were specific to the named category. if this flag is false, + the options are identical to the options for the default category. + + the options dict includes all the scheme-specific settings, + as well as optional *deprecated* keyword. + """ + # get scheme options + kwds, has_cat_options = self.get_scheme_options_with_flag(scheme, category) + + # throw in deprecated flag + value, not_inherited = self.is_deprecated_with_flag(scheme, category) + if value: + kwds['deprecated'] = True + if not_inherited: + has_cat_options = True + + return kwds, has_cat_options + + def get_record(self, scheme, category): + """return record for specific scheme & category (cached)""" + # NOTE: this is part of the critical path shared by + # all of CryptContext's PasswordHash methods, + # hence all the caching and error checking. + + # quick lookup in cache + try: + return self._records[scheme, category] + except KeyError: + pass + + # type check + if category is not None and not isinstance(category, native_string_types): + if PY2 and isinstance(category, unicode): + # for compatibility with unicode-centric py2 apps + return self.get_record(scheme, category.encode("utf-8")) + raise ExpectedTypeError(category, "str or None", "category") + if scheme is not None and not isinstance(scheme, native_string_types): + raise ExpectedTypeError(scheme, "str or None", "scheme") + + # if scheme=None, + # use record for category's default scheme, and cache result. + if not scheme: + default = self.default_scheme(category) + assert default + record = self._records[None, category] = self.get_record(default, + category) + return record + + # if no record for (scheme, category), + # use record for (scheme, None), and cache result. 
+ if category: + try: + cache = self._records + record = cache[scheme, category] = cache[scheme, None] + return record + except KeyError: + pass + + # scheme not found in configuration for default category + raise KeyError("crypt algorithm not found in policy: %r" % (scheme,)) + + def _get_record_list(self, category=None): + """return list of records for category (cached) + + this is an internal helper used only by identify_record() + """ + # type check of category - handled by _get_record() + # quick lookup in cache + try: + return self._record_lists[category] + except KeyError: + pass + # cache miss - build list from scratch + value = self._record_lists[category] = [ + self.get_record(scheme, category) + for scheme in self.schemes + ] + return value + + def identify_record(self, hash, category, required=True): + """internal helper to identify appropriate custom handler for hash""" + # NOTE: this is part of the critical path shared by + # all of CryptContext's PasswordHash methods, + # hence all the caching and error checking. + # FIXME: if multiple hashes could match (e.g. lmhash vs nthash) + # this will only return first match. might want to do something + # about this in future, but for now only hashes with + # unique identifiers will work properly in a CryptContext. + # XXX: if all handlers have a unique prefix (e.g. all are MCF / LDAP), + # could use dict-lookup to speed up this search. + if not isinstance(hash, unicode_or_bytes_types): + raise ExpectedStringError(hash, "hash") + # type check of category - handled by _get_record_list() + for record in self._get_record_list(category): + if record.identify(hash): + return record + if not required: + return None + elif not self.schemes: + raise KeyError("no crypt algorithms supported") + else: + raise exc.UnknownHashError("hash could not be identified") + + @memoized_property + def disabled_record(self): + for record in self._get_record_list(None): + if record.is_disabled: + return record + raise RuntimeError("no disabled hasher present " + "(perhaps add 'unix_disabled' to list of schemes?)") + + #=================================================================== + # serialization + #=================================================================== + def iter_config(self, resolve=False): + """regenerate original config. + + this is an iterator which yields ``(cat,scheme,option),value`` items, + in the order they generally appear inside an INI file. + if interpreted as a dictionary, it should match the original + keywords passed to the CryptContext (aside from any canonization). + + it's mainly used as the internal backend for most of the public + serialization methods. + """ + # grab various bits of data + scheme_options = self._scheme_options + context_options = self._context_options + scheme_keys = sorted(scheme_options) + context_keys = sorted(context_options) + + # write loaded schemes (may differ from 'schemes' local var) + if 'schemes' in context_keys: + context_keys.remove("schemes") + value = self.handlers if resolve else self.schemes + if value: + yield (None, None, "schemes"), list(value) + + # then run through config for each user category + for cat in (None,) + self.categories: + + # write context options + for key in context_keys: + try: + value = context_options[key][cat] + except KeyError: + pass + else: + if isinstance(value, list): + value = list(value) + yield (cat, None, key), value + + # write per-scheme options for all schemes. 
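+            # illustrative shape of the per-scheme items yielded by the loop below:
+            #   ((None, "sha256_crypt", "min_rounds"), 2000)      <- default category
+            #   (("admin", "sha256_crypt", "min_rounds"), 4000)   <- "admin" category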
+ for scheme in scheme_keys: + try: + kwds = scheme_options[scheme][cat] + except KeyError: + pass + else: + for key in sorted(kwds): + yield (cat, scheme, key), kwds[key] + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# main CryptContext class +#============================================================================= +class CryptContext(object): + """Helper for hashing & verifying passwords using multiple algorithms. + + Instances of this class allow applications to choose a specific + set of hash algorithms which they wish to support, set limits and defaults + for the rounds and salt sizes those algorithms should use, flag + which algorithms should be deprecated, and automatically handle + migrating users to stronger hashes when they log in. + + Basic usage:: + + >>> ctx = CryptContext(schemes=[...]) + + See the Passlib online documentation for details and full documentation. + """ + # FIXME: altering the configuration of this object isn't threadsafe, + # but is generally only done during application init, so not a major + # issue (just yet). + + # XXX: would like some way to restrict the categories that are allowed, + # to restrict what the app OR the config can use. + + # XXX: add wrap/unwrap callback hooks so app can mutate hash format? + + # XXX: add method for detecting and warning user about schemes + # which don't have any good distinguishing marks? + # or greedy ones (unix_disabled, plaintext) which are not listed at the end? + + #=================================================================== + # instance attrs + #=================================================================== + + # _CryptConfig instance holding current parsed config + _config = None + + # copy of _config methods, stored in CryptContext instance for speed. + _get_record = None + _identify_record = None + + #=================================================================== + # secondary constructors + #=================================================================== + @classmethod + def _norm_source(cls, source): + """internal helper - accepts string, dict, or context""" + if isinstance(source, dict): + return cls(**source) + elif isinstance(source, cls): + return source + else: + self = cls() + self.load(source) + return self + + @classmethod + def from_string(cls, source, section="passlib", encoding="utf-8"): + """create new CryptContext instance from an INI-formatted string. + + :type source: unicode or bytes + :arg source: + string containing INI-formatted content. + + :type section: str + :param section: + option name of section to read from, defaults to ``"passlib"``. + + :type encoding: str + :arg encoding: + optional encoding used when source is bytes, defaults to ``"utf-8"``. + + :returns: + new :class:`CryptContext` instance, configured based on the + parameters in the *source* string. + + Usage example:: + + >>> from passlib.context import CryptContext + >>> context = CryptContext.from_string(''' + ... [passlib] + ... schemes = sha256_crypt, des_crypt + ... sha256_crypt__default_rounds = 30000 + ... ''') + + .. versionadded:: 1.6 + + .. seealso:: :meth:`to_string`, the inverse of this constructor. 
+ """ + if not isinstance(source, unicode_or_bytes_types): + raise ExpectedTypeError(source, "unicode or bytes", "source") + self = cls(_autoload=False) + self.load(source, section=section, encoding=encoding) + return self + + @classmethod + def from_path(cls, path, section="passlib", encoding="utf-8"): + """create new CryptContext instance from an INI-formatted file. + + this functions exactly the same as :meth:`from_string`, + except that it loads from a local file. + + :type path: str + :arg path: + path to local file containing INI-formatted config. + + :type section: str + :param section: + option name of section to read from, defaults to ``"passlib"``. + + :type encoding: str + :arg encoding: + encoding used to load file, defaults to ``"utf-8"``. + + :returns: + new CryptContext instance, configured based on the parameters + stored in the file *path*. + + .. versionadded:: 1.6 + + .. seealso:: :meth:`from_string` for an equivalent usage example. + """ + self = cls(_autoload=False) + self.load_path(path, section=section, encoding=encoding) + return self + + def copy(self, **kwds): + """Return copy of existing CryptContext instance. + + This function returns a new CryptContext instance whose configuration + is exactly the same as the original, with the exception that any keywords + passed in will take precedence over the original settings. + As an example:: + + >>> from passlib.context import CryptContext + + >>> # given an existing context... + >>> ctx1 = CryptContext(["sha256_crypt", "md5_crypt"]) + + >>> # copy can be used to make a clone, and update + >>> # some of the settings at the same time... + >>> ctx2 = custom_app_context.copy(default="md5_crypt") + + >>> # and the original will be unaffected by the change + >>> ctx1.default_scheme() + "sha256_crypt" + >>> ctx2.default_scheme() + "md5_crypt" + + .. versionadded:: 1.6 + This method was previously named :meth:`!replace`. That alias + has been deprecated, and will be removed in Passlib 1.8. + + .. seealso:: :meth:`update` + """ + # XXX: it would be faster to store ref to self._config, + # but don't want to share config objects til sure + # can rely on them being immutable. + other = CryptContext(_autoload=False) + other.load(self) + if kwds: + other.load(kwds, update=True) + return other + + def using(self, **kwds): + """ + alias for :meth:`copy`, to match PasswordHash.using() + """ + return self.copy(**kwds) + + def replace(self, **kwds): + """deprecated alias of :meth:`copy`""" + warn("CryptContext().replace() has been deprecated in Passlib 1.6, " + "and will be removed in Passlib 1.8, " + "it has been renamed to CryptContext().copy()", + DeprecationWarning, stacklevel=2) + return self.copy(**kwds) + + #=================================================================== + # init + #=================================================================== + def __init__(self, schemes=None, + # keyword only... + policy=_UNSET, # <-- deprecated + _autoload=True, **kwds): + # XXX: add ability to make flag certain contexts as immutable, + # e.g. the builtin passlib ones? + # XXX: add a name or import path for the contexts, to help out repr? 
+        if schemes is not None:
+            kwds['schemes'] = schemes
+        if policy is not _UNSET:
+            warn("The CryptContext ``policy`` keyword has been deprecated as of Passlib 1.6, "
+                 "and will be removed in Passlib 1.8; please use "
+                 "``CryptContext.from_string()`` or "
+                 "``CryptContext.from_path()`` instead.",
+                 DeprecationWarning)
+            if policy is None:
+                self.load(kwds)
+            elif isinstance(policy, CryptPolicy):
+                self.load(policy._context)
+                self.update(kwds)
+            else:
+                raise TypeError("policy must be a CryptPolicy instance")
+        elif _autoload:
+            self.load(kwds)
+        else:
+            assert not kwds, "_autoload=False and kwds are mutually exclusive"
+
+    # XXX: would this be useful?
+    ##def __str__(self):
+    ##    if PY3:
+    ##        return self.to_string()
+    ##    else:
+    ##        return self.to_string().encode("utf-8")
+
+    def __repr__(self):
+        return "<CryptContext at 0x%0x>" % id(self)
+
+    #===================================================================
+    # deprecated policy object
+    #===================================================================
+    def _get_policy(self):
+        # The CryptPolicy class has been deprecated, so to support any
+        # legacy accesses, we create a stub policy object so the .policy attr
+        # will continue to work.
+        #
+        # the code waits until the app accesses a specific policy object attribute
+        # before issuing a deprecation warning, so the developer gets a
+        # method-specific suggestion for how to upgrade.
+
+        # NOTE: making a copy of the context so the policy acts like a snapshot,
+        # to retain the pre-1.6 behavior.
+        return CryptPolicy(_internal_context=self.copy(), _stub_policy=True)
+
+    def _set_policy(self, policy):
+        warn("The CryptPolicy class and the ``context.policy`` attribute have "
+             "been deprecated as of Passlib 1.6, and will be removed in "
+             "Passlib 1.8; please use the ``context.load()`` and "
+             "``context.update()`` methods instead.",
+             DeprecationWarning, stacklevel=2)
+        if isinstance(policy, CryptPolicy):
+            self.load(policy._context)
+        else:
+            raise TypeError("expected CryptPolicy instance")
+
+    policy = property(_get_policy, _set_policy,
+                      doc="[deprecated] returns CryptPolicy instance "
+                          "tied to this CryptContext")
+
+    #===================================================================
+    # loading / updating configuration
+    #===================================================================
+    @staticmethod
+    def _parse_ini_stream(stream, section, filename):
+        """helper to read INI from a stream and extract the passlib section as a dict"""
+        # NOTE: this expects a unicode stream under py3,
+        # and a utf-8 bytes stream under py2,
+        # allowing the resulting dict to always use native strings.
+        p = SafeConfigParser()
+        if PY3:
+            # python 3.2 deprecated readfp in favor of read_file
+            p.read_file(stream, filename)
+        else:
+            p.readfp(stream, filename)
+        # XXX: could change load() to accept list of items,
+        # and skip intermediate dict creation
+        return dict(p.items(section))
+
+    def load_path(self, path, update=False, section="passlib", encoding="utf-8"):
+        """Load new configuration into CryptContext from a local file.
+
+        This function is a wrapper for :meth:`load` which
+        loads a configuration string from the local file *path*,
+        instead of an in-memory source. Its behavior and options
+        are otherwise identical to :meth:`!load` when provided with
+        an INI-formatted string.
+
+        ..
versionadded:: 1.6 + """ + def helper(stream): + kwds = self._parse_ini_stream(stream, section, path) + return self.load(kwds, update=update) + if PY3: + # decode to unicode, which load() expected under py3 + with open(path, "rt", encoding=encoding) as stream: + return helper(stream) + elif encoding in ["utf-8", "ascii"]: + # keep as utf-8 bytes, which load() expects under py2 + with open(path, "rb") as stream: + return helper(stream) + else: + # transcode to utf-8 bytes + with open(path, "rb") as fh: + tmp = fh.read().decode(encoding).encode("utf-8") + return helper(BytesIO(tmp)) + + def load(self, source, update=False, section="passlib", encoding="utf-8"): + """Load new configuration into CryptContext, replacing existing config. + + :arg source: + source of new configuration to load. + this value can be a number of different types: + + * a :class:`!dict` object, or compatible Mapping + + the key/value pairs will be interpreted the same + keywords for the :class:`CryptContext` class constructor. + + * a :class:`!unicode` or :class:`!bytes` string + + this will be interpreted as an INI-formatted file, + and appropriate key/value pairs will be loaded from + the specified *section*. + + * another :class:`!CryptContext` object. + + this will export a snapshot of its configuration + using :meth:`to_dict`. + + :type update: bool + :param update: + By default, :meth:`load` will replace the existing configuration + entirely. If ``update=True``, it will preserve any existing + configuration options that are not overridden by the new source, + much like the :meth:`update` method. + + :type section: str + :param section: + When parsing an INI-formatted string, :meth:`load` will look for + a section named ``"passlib"``. This option allows an alternate + section name to be used. Ignored when loading from a dictionary. + + :type encoding: str + :param encoding: + Encoding to use when **source** is bytes. + Defaults to ``"utf-8"``. Ignored when loading from a dictionary. + + .. deprecated:: 1.8 + + This keyword, and support for bytes input, will be dropped in Passlib 2.0 + + :raises TypeError: + * If the source cannot be identified. + * If an unknown / malformed keyword is encountered. + + :raises ValueError: + If an invalid keyword value is encountered. + + .. note:: + + If an error occurs during a :meth:`!load` call, the :class:`!CryptContext` + instance will be restored to the configuration it was in before + the :meth:`!load` call was made; this is to ensure it is + *never* left in an inconsistent state due to a load error. + + .. versionadded:: 1.6 + """ + #----------------------------------------------------------- + # autodetect source type, convert to dict + #----------------------------------------------------------- + parse_keys = True + if isinstance(source, unicode_or_bytes_types): + if PY3: + source = to_unicode(source, encoding, param="source") + else: + source = to_bytes(source, "utf-8", source_encoding=encoding, + param="source") + source = self._parse_ini_stream(NativeStringIO(source), section, + "") + elif isinstance(source, CryptContext): + # extract dict directly from config, so it can be merged later + source = dict(source._config.iter_config(resolve=True)) + parse_keys = False + elif not hasattr(source, "items"): + # mappings are left alone, otherwise throw an error. + raise ExpectedTypeError(source, "string or dict", "source") + + # XXX: add support for other iterable types, e.g. sequence of pairs? 
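+
+        # illustrative shape of ``source`` at this point when it came from an
+        # INI string or plain dict (keys use the ``cat__scheme__option`` format
+        # parsed below):
+        #   {"schemes": "sha256_crypt, des_crypt",
+        #    "admin__sha256_crypt__min_rounds": "4000"}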
+ + #----------------------------------------------------------- + # parse dict keys into (category, scheme, option) format, + # and merge with existing configuration if needed. + #----------------------------------------------------------- + if parse_keys: + parse = self._parse_config_key + source = dict((parse(key), value) + for key, value in iteritems(source)) + if update and self._config is not None: + # if updating, do nothing if source is empty, + if not source: + return + # otherwise overlay source on top of existing config + tmp = source + source = dict(self._config.iter_config(resolve=True)) + source.update(tmp) + + #----------------------------------------------------------- + # compile into _CryptConfig instance, and update state + #----------------------------------------------------------- + config = _CryptConfig(source) + self._config = config + self._reset_dummy_verify() + self._get_record = config.get_record + self._identify_record = config.identify_record + if config.context_kwds: + # (re-)enable method for this instance (in case ELSE clause below ran last load). + self.__dict__.pop("_strip_unused_context_kwds", None) + else: + # disable method for this instance, it's not needed. + self._strip_unused_context_kwds = None + + @staticmethod + def _parse_config_key(ckey): + """helper used to parse ``cat__scheme__option`` keys into a tuple""" + # split string into 1-3 parts + assert isinstance(ckey, native_string_types) + parts = ckey.replace(".", "__").split("__") + count = len(parts) + if count == 1: + cat, scheme, key = None, None, parts[0] + elif count == 2: + cat = None + scheme, key = parts + elif count == 3: + cat, scheme, key = parts + else: + raise TypeError("keys must have less than 3 separators: %r" % + (ckey,)) + # validate & normalize the parts + if cat == "default": + cat = None + elif not cat and cat is not None: + raise TypeError("empty category: %r" % ckey) + if scheme == "context": + scheme = None + elif not scheme and scheme is not None: + raise TypeError("empty scheme: %r" % ckey) + if not key: + raise TypeError("empty option: %r" % ckey) + return cat, scheme, key + + def update(self, *args, **kwds): + """Helper for quickly changing configuration. + + This acts much like the :meth:`!dict.update` method: + it updates the context's configuration, + replacing the original value(s) for the specified keys, + and preserving the rest. + It accepts any :ref:`keyword ` + accepted by the :class:`!CryptContext` constructor. + + .. versionadded:: 1.6 + + .. seealso:: :meth:`copy` + """ + if args: + if len(args) > 1: + raise TypeError("expected at most one positional argument") + if kwds: + raise TypeError("positional arg and keywords mutually exclusive") + self.load(args[0], update=True) + elif kwds: + self.load(kwds, update=True) + + # XXX: make this public? even just as flag to load? + # FIXME: this function suffered some bitrot in 1.6.1, + # will need to be updated before works again. + ##def _simplify(self): + ## "helper to remove redundant/unused options" + ## # don't do anything if no schemes are defined + ## if not self._schemes: + ## return + ## + ## def strip_items(target, filter): + ## keys = [key for key,value in iteritems(target) + ## if filter(key,value)] + ## for key in keys: + ## del target[key] + ## + ## # remove redundant default. + ## defaults = self._default_schemes + ## if defaults.get(None) == self._schemes[0]: + ## del defaults[None] + ## + ## # remove options for unused schemes. 
+ ## scheme_options = self._scheme_options + ## schemes = self._schemes + ("all",) + ## strip_items(scheme_options, lambda k,v: k not in schemes) + ## + ## # remove rendundant cat defaults. + ## cur = self.default_scheme() + ## strip_items(defaults, lambda k,v: k and v==cur) + ## + ## # remove redundant category deprecations. + ## # TODO: this should work w/ 'auto', but needs closer inspection + ## deprecated = self._deprecated_schemes + ## cur = self._deprecated_schemes.get(None) + ## strip_items(deprecated, lambda k,v: k and v==cur) + ## + ## # remove redundant category options. + ## for scheme, config in iteritems(scheme_options): + ## if None in config: + ## cur = config[None] + ## strip_items(config, lambda k,v: k and v==cur) + ## + ## # XXX: anything else? + + #=================================================================== + # reading configuration + #=================================================================== + def schemes(self, resolve=False, category=None, unconfigured=False): + """return schemes loaded into this CryptContext instance. + + :type resolve: bool + :arg resolve: + if ``True``, will return a tuple of :class:`~passlib.ifc.PasswordHash` + objects instead of their names. + + :returns: + returns tuple of the schemes configured for this context + via the *schemes* option. + + .. versionadded:: 1.6 + This was previously available as ``CryptContext().policy.schemes()`` + + .. seealso:: the :ref:`schemes ` option for usage example. + """ + # XXX: should resolv return records rather than handlers? + # or deprecate resolve keyword completely? + # offering up a .hashers Mapping in v1.8 would be great. + # NOTE: supporting 'category' and 'unconfigured' kwds as of 1.7 + # just to pass through to .handler(), but not documenting them... + # may not need to put them to use. + schemes = self._config.schemes + if resolve: + return tuple(self.handler(scheme, category, unconfigured=unconfigured) + for scheme in schemes) + else: + return schemes + + def default_scheme(self, category=None, resolve=False, unconfigured=False): + """return name of scheme that :meth:`hash` will use by default. + + :type resolve: bool + :arg resolve: + if ``True``, will return a :class:`~passlib.ifc.PasswordHash` + object instead of the name. + + :type category: str or None + :param category: + Optional :ref:`user category `. + If specified, this will return the catgory-specific default scheme instead. + + :returns: + name of the default scheme. + + .. seealso:: the :ref:`default ` option for usage example. + + .. versionadded:: 1.6 + + .. versionchanged:: 1.7 + + This now returns a hasher configured with any CryptContext-specific + options (custom rounds settings, etc). Previously this returned + the base hasher from :mod:`passlib.hash`. + """ + # XXX: deprecate this in favor of .handler() or whatever it's replaced with? + # NOTE: supporting 'unconfigured' kwds as of 1.7 + # just to pass through to .handler(), but not documenting them... + # may not need to put them to use. + hasher = self.handler(None, category, unconfigured=unconfigured) + return hasher if resolve else hasher.name + + # XXX: need to decide if exposing this would be useful in any way + ##def categories(self): + ## """return user-categories with algorithm-specific options in this CryptContext. + ## + ## this will always return a tuple. + ## if no categories besides the default category have been configured, + ## the tuple will be empty. 
+ ## """ + ## return self._config.categories + + # XXX: need to decide if exposing this would be useful to applications + # in any meaningful way that isn't already served by to_dict() + ##def options(self, scheme, category=None): + ## kwds, percat = self._config.get_options(scheme, category) + ## return kwds + + def handler(self, scheme=None, category=None, unconfigured=False): + """helper to resolve name of scheme -> :class:`~passlib.ifc.PasswordHash` object used by scheme. + + :arg scheme: + This should identify the scheme to lookup. + If omitted or set to ``None``, this will return the handler + for the default scheme. + + :arg category: + If a user category is specified, and no scheme is provided, + it will use the default for that category. + Otherwise this parameter is ignored. + + :param unconfigured: + + By default, this returns a handler object whose .hash() + and .needs_update() methods will honor the configured + provided by CryptContext. See ``unconfigured=True`` + to get the underlying handler from before any context-specific + configuration was applied. + + :raises KeyError: + If the scheme does not exist OR is not being used within this context. + + :returns: + :class:`~passlib.ifc.PasswordHash` object used to implement + the named scheme within this context (this will usually + be one of the objects from :mod:`passlib.hash`) + + .. versionadded:: 1.6 + This was previously available as ``CryptContext().policy.get_handler()`` + + .. versionchanged:: 1.7 + + This now returns a hasher configured with any CryptContext-specific + options (custom rounds settings, etc). Previously this returned + the base hasher from :mod:`passlib.hash`. + """ + try: + hasher = self._get_record(scheme, category) + if unconfigured: + return hasher._Context__orig_handler + else: + return hasher + except KeyError: + pass + if self._config.handlers: + raise KeyError("crypt algorithm not found in this " + "CryptContext instance: %r" % (scheme,)) + else: + raise KeyError("no crypt algorithms loaded in this " + "CryptContext instance") + + def _get_unregistered_handlers(self): + """check if any handlers in this context aren't in the global registry""" + return tuple(handler for handler in self._config.handlers + if not _is_handler_registered(handler)) + + @property + def context_kwds(self): + """ + return :class:`!set` containing union of all :ref:`contextual keywords ` + supported by the handlers in this context. + + .. versionadded:: 1.6.6 + """ + return self._config.context_kwds + + #=================================================================== + # exporting config + #=================================================================== + @staticmethod + def _render_config_key(key): + """convert 3-part config key to single string""" + cat, scheme, option = key + if cat: + return "%s__%s__%s" % (cat, scheme or "context", option) + elif scheme: + return "%s__%s" % (scheme, option) + else: + return option + + @staticmethod + def _render_ini_value(key, value): + """render value to string suitable for INI file""" + # convert lists to comma separated lists + # (mainly 'schemes' & 'deprecated') + if isinstance(value, (list,tuple)): + value = ", ".join(value) + + # convert numbers to strings + elif isinstance(value, num_types): + if isinstance(value, float) and key[2] == "vary_rounds": + value = ("%.2f" % value).rstrip("0") if value else "0" + else: + value = str(value) + + assert isinstance(value, native_string_types), \ + "expected string for key: %r %r" % (key, value) + + # escape any percent signs. 
+ return value.replace("%", "%%") + + def to_dict(self, resolve=False): + """Return current configuration as a dictionary. + + :type resolve: bool + :arg resolve: + if ``True``, the ``schemes`` key will contain a list of + a :class:`~passlib.ifc.PasswordHash` objects instead of just + their names. + + This method dumps the current configuration of the CryptContext + instance. The key/value pairs should be in the format accepted + by the :class:`!CryptContext` class constructor, in fact + ``CryptContext(**myctx.to_dict())`` will create an exact copy of ``myctx``. + As an example:: + + >>> # you can dump the configuration of any crypt context... + >>> from passlib.apps import ldap_nocrypt_context + >>> ldap_nocrypt_context.to_dict() + {'schemes': ['ldap_salted_sha1', + 'ldap_salted_md5', + 'ldap_sha1', + 'ldap_md5', + 'ldap_plaintext']} + + .. versionadded:: 1.6 + This was previously available as ``CryptContext().policy.to_dict()`` + + .. seealso:: the :ref:`context-serialization-example` example in the tutorial. + """ + # XXX: should resolve default to conditional behavior + # based on presence of unregistered handlers? + render_key = self._render_config_key + return dict((render_key(key), value) + for key, value in self._config.iter_config(resolve)) + + def _write_to_parser(self, parser, section): + """helper to write to ConfigParser instance""" + render_key = self._render_config_key + render_value = self._render_ini_value + parser.add_section(section) + for k,v in self._config.iter_config(): + v = render_value(k, v) + k = render_key(k) + parser.set(section, k, v) + + def to_string(self, section="passlib"): + """serialize to INI format and return as unicode string. + + :param section: + name of INI section to output, defaults to ``"passlib"``. + + :returns: + CryptContext configuration, serialized to a INI unicode string. + + This function acts exactly like :meth:`to_dict`, except that it + serializes all the contents into a single human-readable string, + which can be hand edited, and/or stored in a file. The + output of this method is accepted by :meth:`from_string`, + :meth:`from_path`, and :meth:`load`. As an example:: + + >>> # you can dump the configuration of any crypt context... + >>> from passlib.apps import ldap_nocrypt_context + >>> print ldap_nocrypt_context.to_string() + [passlib] + schemes = ldap_salted_sha1, ldap_salted_md5, ldap_sha1, ldap_md5, ldap_plaintext + + .. versionadded:: 1.6 + This was previously available as ``CryptContext().policy.to_string()`` + + .. seealso:: the :ref:`context-serialization-example` example in the tutorial. + """ + parser = SafeConfigParser() + self._write_to_parser(parser, section) + buf = NativeStringIO() + parser.write(buf) + unregistered = self._get_unregistered_handlers() + if unregistered: + buf.write(( + "# NOTE: the %s handler(s) are not registered with Passlib,\n" + "# this string may not correctly reproduce the current configuration.\n\n" + ) % ", ".join(repr(handler.name) for handler in unregistered)) + out = buf.getvalue() + if not PY3: + out = out.decode("utf-8") + return out + + # XXX: is this useful enough to enable? 
+ ##def write_to_path(self, path, section="passlib", update=False): + ## "write to INI file" + ## parser = ConfigParser() + ## if update and os.path.exists(path): + ## if not parser.read([path]): + ## raise EnvironmentError("failed to read existing file") + ## parser.remove_section(section) + ## self._write_to_parser(parser, section) + ## fh = file(path, "w") + ## parser.write(fh) + ## fh.close() + + #=================================================================== + # verify() hardening + # NOTE: this entire feature has been disabled. + # all contents of this section are NOOPs as of 1.7.1, + # and will be removed in 1.8. + #=================================================================== + + mvt_estimate_max_samples = 20 + mvt_estimate_min_samples = 10 + mvt_estimate_max_time = 2 + mvt_estimate_resolution = 0.01 + harden_verify = None + min_verify_time = 0 + + def reset_min_verify_time(self): + self._reset_dummy_verify() + + #=================================================================== + # password hash api + #=================================================================== + + # NOTE: all the following methods do is look up the appropriate + # custom handler for a given (scheme,category) combination, + # and hand off the real work to the handler itself, + # which is optimized for the specific (scheme,category) configuration. + # + # The custom handlers are cached inside the _CryptConfig + # instance stored in self._config, and are retrieved + # via get_record() and identify_record(). + # + # _get_record() and _identify_record() are references + # to _config methods of the same name, + # stored in CryptContext for speed. + + def _get_or_identify_record(self, hash, scheme=None, category=None): + """return record based on scheme, or failing that, by identifying hash""" + if scheme: + if not isinstance(hash, unicode_or_bytes_types): + raise ExpectedStringError(hash, "hash") + return self._get_record(scheme, category) + else: + # hash typecheck handled by identify_record() + return self._identify_record(hash, category) + + def _strip_unused_context_kwds(self, kwds, record): + """ + helper which removes any context keywords from **kwds** + that are known to be used by another scheme in this context, + but are NOT supported by handler specified by **record**. + + .. note:: + as optimization, load() will set this method to None on a per-instance basis + if there are no context kwds. + """ + if not kwds: + return + unused_kwds = self._config.context_kwds.difference(record.context_kwds) + for key in unused_kwds: + kwds.pop(key, None) + + def needs_update(self, hash, scheme=None, category=None, secret=None): + """Check if hash needs to be replaced for some reason, + in which case the secret should be re-hashed. + + This function is the core of CryptContext's support for hash migration: + This function takes in a hash string, and checks the scheme, + number of rounds, and other properties against the current policy. + It returns ``True`` if the hash is using a deprecated scheme, + or is otherwise outside of the bounds specified by the policy + (e.g. the number of rounds is lower than :ref:`min_rounds ` + configuration for that algorithm). + If so, the password should be re-hashed using :meth:`hash` + Otherwise, it will return ``False``. + + :type hash: unicode or bytes + :arg hash: + The hash string to examine. + + :type scheme: str or None + :param scheme: + + Optional scheme to use. Scheme must be one of the ones + configured for this context (see the + :ref:`schemes ` option). 
+ If no scheme is specified, it will be identified + based on the value of *hash*. + + .. deprecated:: 1.7 + + Support for this keyword is deprecated, and will be removed in Passlib 2.0. + + :type category: str or None + :param category: + Optional :ref:`user category `. + If specified, this will cause any category-specific defaults to + be used when determining if the hash needs to be updated + (e.g. is below the minimum rounds). + + :type secret: unicode, bytes, or None + :param secret: + Optional secret associated with the provided ``hash``. + This is not required, or even currently used for anything... + it's for forward-compatibility with any future + update checks that might need this information. + If provided, Passlib assumes the secret has already been + verified successfully against the hash. + + .. versionadded:: 1.6 + + :returns: ``True`` if hash should be replaced, otherwise ``False``. + + :raises ValueError: + If the hash did not match any of the configured :meth:`schemes`. + + .. versionadded:: 1.6 + This method was previously named :meth:`hash_needs_update`. + + .. seealso:: the :ref:`context-migration-example` example in the tutorial. + """ + if scheme is not None: + # TODO: offer replacement alternative. + # ``context.handler(scheme).needs_update()`` would work, + # but may deprecate .handler() in passlib 1.8. + warn("CryptContext.needs_update(): 'scheme' keyword is deprecated as of " + "Passlib 1.7, and will be removed in Passlib 2.0", + DeprecationWarning) + record = self._get_or_identify_record(hash, scheme, category) + return record.deprecated or record.needs_update(hash, secret=secret) + + @deprecated_method(deprecated="1.6", removed="2.0", replacement="CryptContext.needs_update()") + def hash_needs_update(self, hash, scheme=None, category=None): + """Legacy alias for :meth:`needs_update`. + + .. deprecated:: 1.6 + This method was renamed to :meth:`!needs_update` in version 1.6. + This alias will be removed in version 2.0, and should only + be used for compatibility with Passlib 1.3 - 1.5. + """ + return self.needs_update(hash, scheme, category) + + @deprecated_method(deprecated="1.7", removed="2.0") + def genconfig(self, scheme=None, category=None, **settings): + """Generate a config string for specified scheme. + + .. deprecated:: 1.7 + + This method will be removed in version 2.0, and should only + be used for compatibility with Passlib 1.3 - 1.6. + """ + record = self._get_record(scheme, category) + strip_unused = self._strip_unused_context_kwds + if strip_unused: + strip_unused(settings, record) + return record.genconfig(**settings) + + @deprecated_method(deprecated="1.7", removed="2.0") + def genhash(self, secret, config, scheme=None, category=None, **kwds): + """Generate hash for the specified secret using another hash. + + .. deprecated:: 1.7 + + This method will be removed in version 2.0, and should only + be used for compatibility with Passlib 1.3 - 1.6. + """ + record = self._get_or_identify_record(config, scheme, category) + strip_unused = self._strip_unused_context_kwds + if strip_unused: + strip_unused(kwds, record) + return record.genhash(secret, config, **kwds) + + def identify(self, hash, category=None, resolve=False, required=False, + unconfigured=False): + """Attempt to identify which algorithm the hash belongs to. + + Note that this will only consider the algorithms + currently configured for this context + (see the :ref:`schemes ` option). 
+ All registered algorithms will be checked, from first to last, + and whichever one positively identifies the hash first will be returned. + + :type hash: unicode or bytes + :arg hash: + The hash string to test. + + :type category: str or None + :param category: + Optional :ref:`user category `. + Ignored by this function, this parameter + is provided for symmetry with the other methods. + + :type resolve: bool + :param resolve: + If ``True``, returns the hash handler itself, + instead of the name of the hash. + + :type required: bool + :param required: + If ``True``, this will raise a ValueError if the hash + cannot be identified, instead of returning ``None``. + + :returns: + The handler which first identifies the hash, + or ``None`` if none of the algorithms identify the hash. + """ + record = self._identify_record(hash, category, required) + if record is None: + return None + elif resolve: + if unconfigured: + return record._Context__orig_handler + else: + return record + else: + return record.name + + def hash(self, secret, scheme=None, category=None, **kwds): + """run secret through selected algorithm, returning resulting hash. + + :type secret: unicode or bytes + :arg secret: + the password to hash. + + :type scheme: str or None + :param scheme: + + Optional scheme to use. Scheme must be one of the ones + configured for this context (see the + :ref:`schemes ` option). + If no scheme is specified, the configured default + will be used. + + .. deprecated:: 1.7 + + Support for this keyword is deprecated, and will be removed in Passlib 2.0. + + :type category: str or None + :param category: + Optional :ref:`user category `. + If specified, this will cause any category-specific defaults to + be used when hashing the password (e.g. different default scheme, + different default rounds values, etc). + + :param \\*\\*kwds: + All other keyword options are passed to the selected algorithm's + :meth:`PasswordHash.hash() ` method. + + :returns: + The secret as encoded by the specified algorithm and options. + The return value will always be a :class:`!str`. + + :raises TypeError, ValueError: + * If any of the arguments have an invalid type or value. + This includes any keywords passed to the underlying hash's + :meth:`PasswordHash.hash() ` method. + + .. seealso:: the :ref:`context-basic-example` example in the tutorial + """ + # XXX: could insert normalization to preferred unicode encoding here + if scheme is not None: + # TODO: offer replacement alternative. + # ``context.handler(scheme).hash()`` would work, + # but may deprecate .handler() in passlib 1.8. + warn("CryptContext.hash(): 'scheme' keyword is deprecated as of " + "Passlib 1.7, and will be removed in Passlib 2.0", + DeprecationWarning) + record = self._get_record(scheme, category) + strip_unused = self._strip_unused_context_kwds + if strip_unused: + strip_unused(kwds, record) + return record.hash(secret, **kwds) + + @deprecated_method(deprecated="1.7", removed="2.0", replacement="CryptContext.hash()") + def encrypt(self, *args, **kwds): + """ + Legacy alias for :meth:`hash`. + + .. deprecated:: 1.7 + This method was renamed to :meth:`!hash` in version 1.7. + This alias will be removed in version 2.0, and should only + be used for compatibility with Passlib 1.3 - 1.6. + """ + return self.hash(*args, **kwds) + + def verify(self, secret, hash, scheme=None, category=None, **kwds): + """verify secret against an existing hash. 
+ + If no scheme is specified, this will attempt to identify + the scheme based on the contents of the provided hash + (limited to the schemes configured for this context). + It will then check whether the password verifies against the hash. + + :type secret: unicode or bytes + :arg secret: + the secret to verify + + :type hash: unicode or bytes + :arg hash: + hash string to compare to + + if ``None`` is passed in, this will be treated as "never verifying" + + :type scheme: str + :param scheme: + Optionally force context to use specific scheme. + This is usually not needed, as most hashes can be unambiguously + identified. Scheme must be one of the ones configured + for this context + (see the :ref:`schemes ` option). + + .. deprecated:: 1.7 + + Support for this keyword is deprecated, and will be removed in Passlib 2.0. + + :type category: str or None + :param category: + Optional :ref:`user category ` string. + This is mainly used when generating new hashes, it has little + effect when verifying; this keyword is mainly provided for symmetry. + + :param \\*\\*kwds: + All additional keywords are passed to the appropriate handler, + and should match its :attr:`~passlib.ifc.PasswordHash.context_kwds`. + + :returns: + ``True`` if the password matched the hash, else ``False``. + + :raises ValueError: + * if the hash did not match any of the configured :meth:`schemes`. + + * if any of the arguments have an invalid value (this includes + any keywords passed to the underlying hash's + :meth:`PasswordHash.verify() ` method). + + :raises TypeError: + * if any of the arguments have an invalid type (this includes + any keywords passed to the underlying hash's + :meth:`PasswordHash.verify() ` method). + + .. seealso:: the :ref:`context-basic-example` example in the tutorial + """ + # XXX: could insert normalization to preferred unicode encoding here + # XXX: what about supporting a setter() callback ala django 1.4 ? + if scheme is not None: + # TODO: offer replacement alternative. + # ``context.handler(scheme).verify()`` would work, + # but may deprecate .handler() in passlib 1.8. + warn("CryptContext.verify(): 'scheme' keyword is deprecated as of " + "Passlib 1.7, and will be removed in Passlib 2.0", + DeprecationWarning) + if hash is None: + # convenience feature -- let apps pass in hash=None when user + # isn't found / has no hash; useful because it invokes dummy_verify() + self.dummy_verify() + return False + record = self._get_or_identify_record(hash, scheme, category) + strip_unused = self._strip_unused_context_kwds + if strip_unused: + strip_unused(kwds, record) + return record.verify(secret, hash, **kwds) + + def verify_and_update(self, secret, hash, scheme=None, category=None, **kwds): + """verify password and re-hash the password if needed, all in a single call. + + This is a convenience method which takes care of all the following: + first it verifies the password (:meth:`~CryptContext.verify`), if this is successfull + it checks if the hash needs updating (:meth:`~CryptContext.needs_update`), and if so, + re-hashes the password (:meth:`~CryptContext.hash`), returning the replacement hash. + This series of steps is a very common task for applications + which wish to update deprecated hashes, and this call takes + care of all 3 steps efficiently. + + :type secret: unicode or bytes + :arg secret: + the secret to verify + + :type secret: unicode or bytes + :arg hash: + hash string to compare to. 
+ + if ``None`` is passed in, this will be treated as "never verifying" + + :type scheme: str + :param scheme: + Optionally force context to use specific scheme. + This is usually not needed, as most hashes can be unambiguously + identified. Scheme must be one of the ones configured + for this context + (see the :ref:`schemes ` option). + + .. deprecated:: 1.7 + + Support for this keyword is deprecated, and will be removed in Passlib 2.0. + + :type category: str or None + :param category: + Optional :ref:`user category `. + If specified, this will cause any category-specific defaults to + be used if the password has to be re-hashed. + + :param \\*\\*kwds: + all additional keywords are passed to the appropriate handler, + and should match that hash's + :attr:`PasswordHash.context_kwds `. + + :returns: + This function returns a tuple containing two elements: + ``(verified, replacement_hash)``. The first is a boolean + flag indicating whether the password verified, + and the second an optional replacement hash. + The tuple will always match one of the following 3 cases: + + * ``(False, None)`` indicates the secret failed to verify. + * ``(True, None)`` indicates the secret verified correctly, + and the hash does not need updating. + * ``(True, str)`` indicates the secret verified correctly, + but the current hash needs to be updated. The :class:`!str` + will be the freshly generated hash, to replace the old one. + + :raises TypeError, ValueError: + For the same reasons as :meth:`verify`. + + .. seealso:: the :ref:`context-migration-example` example in the tutorial. + """ + # XXX: could insert normalization to preferred unicode encoding here. + if scheme is not None: + warn("CryptContext.verify(): 'scheme' keyword is deprecated as of " + "Passlib 1.7, and will be removed in Passlib 2.0", + DeprecationWarning) + if hash is None: + # convenience feature -- let apps pass in hash=None when user + # isn't found / has no hash; useful because it invokes dummy_verify() + self.dummy_verify() + return False, None + record = self._get_or_identify_record(hash, scheme, category) + strip_unused = self._strip_unused_context_kwds + if strip_unused and kwds: + clean_kwds = kwds.copy() + strip_unused(clean_kwds, record) + else: + clean_kwds = kwds + # XXX: if record is default scheme, could extend PasswordHash + # api to combine verify & needs_update to single call, + # potentially saving some round-trip parsing. + # but might make these codepaths more complex... + if not record.verify(secret, hash, **clean_kwds): + return False, None + elif record.deprecated or record.needs_update(hash, secret=secret): + # NOTE: we re-hash with default scheme, not current one. + return True, self.hash(secret, category=category, **kwds) + else: + return True, None + + #=================================================================== + # missing-user helper + #=================================================================== + + #: secret used for dummy_verify() + _dummy_secret = "too many secrets" + + @memoized_property + def _dummy_hash(self): + """ + precalculated hash for dummy_verify() to use + """ + return self.hash(self._dummy_secret) + + def _reset_dummy_verify(self): + """ + flush memoized values used by dummy_verify() + """ + type(self)._dummy_hash.clear_cache(self) + + def dummy_verify(self, elapsed=0): + """ + Helper that applications can call when user wasn't found, + in order to simulate time it would take to hash a password. 
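+
+         For example, an application might call this when a username lookup
+         fails (an illustrative sketch, assuming ``ctx`` is this CryptContext
+         and ``lookup_user`` is a hypothetical application helper, not part
+         of passlib)::
+
+             user = lookup_user(name)
+             if user is None:
+                 # keep timing close to the "user exists" path
+                 ctx.dummy_verify()
+                 return False
+             return ctx.verify(password, user.hash)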
+ + Runs verify() against a dummy hash, to simulate verification + of a real account password. + + :param elapsed: + + .. deprecated:: 1.7.1 + + this option is ignored, and will be removed in passlib 1.8. + + .. versionadded:: 1.7 + """ + self.verify(self._dummy_secret, self._dummy_hash) + return False + + #=================================================================== + # disabled hash support + #=================================================================== + + def is_enabled(self, hash): + """ + test if hash represents a usuable password -- + i.e. does not represent an unusuable password such as ``"!"``, + which is recognized by the :class:`~passlib.hash.unix_disabled` hash. + + :raises ValueError: + if the hash is not recognized + (typically solved by adding ``unix_disabled`` to the list of schemes). + """ + return not self._identify_record(hash, None).is_disabled + + def disable(self, hash=None): + """ + return a string to disable logins for user, + usually by returning a non-verifying string such as ``"!"``. + + :param hash: + Callers can optionally provide the account's existing hash. + Some disabled handlers (such as :class:`!unix_disabled`) + will encode this into the returned value, + so that it can be recovered via :meth:`enable`. + + :raises RuntimeError: + if this function is called w/o a disabled hasher + (such as :class:`~passlib.hash.unix_disabled`) included + in the list of schemes. + + :returns: + hash string which will be recognized as valid by the context, + but is guaranteed to not validate against *any* password. + """ + record = self._config.disabled_record + assert record.is_disabled + return record.disable(hash) + + def enable(self, hash): + """ + inverse of :meth:`disable` -- + attempts to recover original hash which was converted + by a :meth:`!disable` call into a disabled hash -- + thus restoring the user's original password. + + :raises ValueError: + if original hash not present, or if the disabled handler doesn't + support encoding the original hash (e.g. ``django_disabled``) + + :returns: + the original hash. + """ + record = self._identify_record(hash, None) + if record.is_disabled: + # XXX: should we throw error if result can't be identified by context? + return record.enable(hash) + else: + # hash wasn't a disabled hash, so return unchanged + return hash + + #=================================================================== + # eoc + #=================================================================== + +class LazyCryptContext(CryptContext): + """CryptContext subclass which doesn't load handlers until needed. + + This is a subclass of CryptContext which takes in a set of arguments + exactly like CryptContext, but won't import any handlers + (or even parse its arguments) until + the first time one of its methods is accessed. + + :arg schemes: + The first positional argument can be a list of schemes, or omitted, + just like CryptContext. + + :param onload: + + If a callable is passed in via this keyword, + it will be invoked at lazy-load time + with the following signature: + ``onload(**kwds) -> kwds``; + where ``kwds`` is all the additional kwds passed to LazyCryptContext. + It should perform any additional deferred initialization, + and return the final dict of options to be passed to CryptContext. + + .. versionadded:: 1.6 + + :param create_policy: + + .. deprecated:: 1.6 + This option will be removed in Passlib 1.8, + applications should use ``onload`` instead. 
+ + :param kwds: + + All additional keywords are passed to CryptContext; + or to the *onload* function (if provided). + + This is mainly used internally by modules such as :mod:`passlib.apps`, + which define a large number of contexts, but only a few of them will be needed + at any one time. Use of this class saves the memory needed to import + the specified handlers until the context instance is actually accessed. + As well, it allows constructing a context at *module-init* time, + but using :func:`!onload()` to provide dynamic configuration + at *application-run* time. + + .. note:: + This class is only useful if you're referencing handler objects by name, + and don't want them imported until runtime. If you want to have the config + validated before your application runs, or are passing in already-imported + handler instances, you should use :class:`CryptContext` instead. + + .. versionadded:: 1.4 + """ + _lazy_kwds = None + + # NOTE: the way this class works changed in 1.6. + # previously it just called _lazy_init() when ``.policy`` was + # first accessed. now that is done whenever any of the public + # attributes are accessed, and the class itself is changed + # to a regular CryptContext, to remove the overhead once it's unneeded. + + def __init__(self, schemes=None, **kwds): + if schemes is not None: + kwds['schemes'] = schemes + self._lazy_kwds = kwds + + def _lazy_init(self): + kwds = self._lazy_kwds + if 'create_policy' in kwds: + warn("The CryptPolicy class, and LazyCryptContext's " + "``create_policy`` keyword have been deprecated as of " + "Passlib 1.6, and will be removed in Passlib 1.8; " + "please use the ``onload`` keyword instead.", + DeprecationWarning) + create_policy = kwds.pop("create_policy") + result = create_policy(**kwds) + policy = CryptPolicy.from_source(result, _warn=False) + kwds = policy._context.to_dict() + elif 'onload' in kwds: + onload = kwds.pop("onload") + kwds = onload(**kwds) + del self._lazy_kwds + super(LazyCryptContext, self).__init__(**kwds) + self.__class__ = CryptContext + + def __getattribute__(self, attr): + if (not attr.startswith("_") or attr.startswith("__")) and \ + self._lazy_kwds is not None: + self._lazy_init() + return object.__getattribute__(self, attr) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/__init__.py b/ansible/lib/python3.11/site-packages/passlib/crypto/__init__.py new file mode 100644 index 000000000..89f54847e --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/crypto/__init__.py @@ -0,0 +1 @@ +"""passlib.crypto -- package containing cryptographic primitives used by passlib""" diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/crypto/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 000000000..cb9b01e03 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/crypto/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/__pycache__/_md4.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/crypto/__pycache__/_md4.cpython-311.pyc new file mode 100644 index 000000000..91b0c5b3c Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/crypto/__pycache__/_md4.cpython-311.pyc differ diff --git 
a/ansible/lib/python3.11/site-packages/passlib/crypto/__pycache__/des.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/crypto/__pycache__/des.cpython-311.pyc new file mode 100644 index 000000000..cf0cfcea8 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/crypto/__pycache__/des.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/__pycache__/digest.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/crypto/__pycache__/digest.cpython-311.pyc new file mode 100644 index 000000000..c01a2308e Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/crypto/__pycache__/digest.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/_blowfish/__init__.py b/ansible/lib/python3.11/site-packages/passlib/crypto/_blowfish/__init__.py new file mode 100644 index 000000000..1aa1c85f7 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/crypto/_blowfish/__init__.py @@ -0,0 +1,169 @@ +"""passlib.crypto._blowfish - pure-python eks-blowfish implementation for bcrypt + +This is a pure-python implementation of the EKS-Blowfish algorithm described by +Provos and Mazieres in `A Future-Adaptable Password Scheme +`_. + +This package contains two submodules: + +* ``_blowfish/base.py`` contains a class implementing the eks-blowfish algorithm + using easy-to-examine code. + +* ``_blowfish/unrolled.py`` contains a subclass which replaces some methods + of the original class with sped-up versions, mainly using unrolled loops + and local variables. this is the class which is actually used by + Passlib to perform BCrypt in pure python. + + This module is auto-generated by a script, ``_blowfish/_gen_files.py``. + +Status +------ +This implementation is usable, but is an order of magnitude too slow to be +usable with real security. For "ok" security, BCrypt hashes should have at +least 2**11 rounds (as of 2011). Assuming a desired response time <= 100ms, +this means a BCrypt implementation should get at least 20 rounds/ms in order +to be both usable *and* secure. On a 2 ghz cpu, this implementation gets +roughly 0.09 rounds/ms under CPython (220x too slow), and 1.9 rounds/ms +under PyPy (10x too slow). + +History +------- +While subsequently modified considerly for Passlib, this code was originally +based on `jBcrypt 0.2 `_, which was +released under the BSD license:: + + Copyright (c) 2006 Damien Miller + + Permission to use, copy, modify, and distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +""" +#============================================================================= +# imports +#============================================================================= +# core +from itertools import chain +import struct +# pkg +from passlib.utils import getrandbytes, rng +from passlib.utils.binary import bcrypt64 +from passlib.utils.compat import BytesIO, unicode, u, native_string_types +from passlib.crypto._blowfish.unrolled import BlowfishEngine +# local +__all__ = [ + 'BlowfishEngine', + 'raw_bcrypt', +] + +#============================================================================= +# bcrypt constants +#============================================================================= + +# bcrypt constant data "OrpheanBeholderScryDoubt" as 6 integers +BCRYPT_CDATA = [ + 0x4f727068, 0x65616e42, 0x65686f6c, + 0x64657253, 0x63727944, 0x6f756274 +] + +# struct used to encode ciphertext as digest (last output byte discarded) +digest_struct = struct.Struct(">6I") + +#============================================================================= +# base bcrypt helper +# +# interface designed only for use by passlib.handlers.bcrypt:BCrypt +# probably not suitable for other purposes +#============================================================================= +BNULL = b'\x00' + +def raw_bcrypt(password, ident, salt, log_rounds): + """perform central password hashing step in bcrypt scheme. + + :param password: the password to hash + :param ident: identifier w/ minor version (e.g. 2, 2a) + :param salt: the binary salt to use (encoded in bcrypt-base64) + :param log_rounds: the log2 of the number of rounds (as int) + :returns: bcrypt-base64 encoded checksum + """ + #=================================================================== + # parse inputs + #=================================================================== + + # parse ident + assert isinstance(ident, native_string_types) + add_null_padding = True + if ident == u('2a') or ident == u('2y') or ident == u('2b'): + pass + elif ident == u('2'): + add_null_padding = False + elif ident == u('2x'): + raise ValueError("crypt_blowfish's buggy '2x' hashes are not " + "currently supported") + else: + raise ValueError("unknown ident: %r" % (ident,)) + + # decode & validate salt + assert isinstance(salt, bytes) + salt = bcrypt64.decode_bytes(salt) + if len(salt) < 16: + raise ValueError("Missing salt bytes") + elif len(salt) > 16: + salt = salt[:16] + + # prepare password + assert isinstance(password, bytes) + if add_null_padding: + password += BNULL + + # validate rounds + if log_rounds < 4 or log_rounds > 31: + raise ValueError("Bad number of rounds") + + #=================================================================== + # + # run EKS-Blowfish algorithm + # + # This uses the "enhanced key schedule" step described by + # Provos and Mazieres in "A Future-Adaptable Password Scheme" + # http://www.openbsd.org/papers/bcrypt-paper.ps + # + #=================================================================== + + engine = BlowfishEngine() + + # convert password & salt into list of 18 32-bit integers (72 bytes total). + pass_words = engine.key_to_words(password) + salt_words = engine.key_to_words(salt) + + # truncate salt_words to original 16 byte salt, or loop won't wrap + # correctly when passed to .eks_salted_expand() + salt_words16 = salt_words[:4] + + # do EKS key schedule setup + engine.eks_salted_expand(pass_words, salt_words16) + + # apply password & salt keys to key schedule a bunch more times. 
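+ # (this is the "expensive" phase of EKS-Blowfish: 2**log_rounds iterations,
+ # each re-running the key schedule with the password and then with the salt)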
+ rounds = 1<> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) +""".strip() + +def render_encipher(write, indent=0): + for i in irange(0, 15, 2): + write(indent, """\ + # Feistel substitution on left word (round %(i)d) + r ^= %(left)s ^ p%(i1)d + + # Feistel substitution on right word (round %(i1)d) + l ^= %(right)s ^ p%(i2)d + """, i=i, i1=i+1, i2=i+2, + left=BFSTR, right=BFSTR.replace("l","r"), + ) + +def write_encipher_function(write, indent=0): + write(indent, """\ + def encipher(self, l, r): + \"""blowfish encipher a single 64-bit block encoded as two 32-bit ints\""" + + (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, + p10, p11, p12, p13, p14, p15, p16, p17) = self.P + S0, S1, S2, S3 = self.S + + l ^= p0 + + """) + render_encipher(write, indent+1) + + write(indent+1, """\ + + return r ^ p17, l + + """) + +def write_expand_function(write, indent=0): + write(indent, """\ + def expand(self, key_words): + \"""unrolled version of blowfish key expansion\""" + ##assert len(key_words) >= 18, "size of key_words must be >= 18" + + P, S = self.P, self.S + S0, S1, S2, S3 = S + + #============================================================= + # integrate key + #============================================================= + """) + for i in irange(18): + write(indent+1, """\ + p%(i)d = P[%(i)d] ^ key_words[%(i)d] + """, i=i) + write(indent+1, """\ + + #============================================================= + # update P + #============================================================= + + #------------------------------------------------ + # update P[0] and P[1] + #------------------------------------------------ + l, r = p0, 0 + + """) + + render_encipher(write, indent+1) + + write(indent+1, """\ + + p0, p1 = l, r = r ^ p17, l + + """) + + for i in irange(2, 18, 2): + write(indent+1, """\ + #------------------------------------------------ + # update P[%(i)d] and P[%(i1)d] + #------------------------------------------------ + l ^= p0 + + """, i=i, i1=i+1) + + render_encipher(write, indent+1) + + write(indent+1, """\ + p%(i)d, p%(i1)d = l, r = r ^ p17, l + + """, i=i, i1=i+1) + + write(indent+1, """\ + + #------------------------------------------------ + # save changes to original P array + #------------------------------------------------ + P[:] = (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, + p10, p11, p12, p13, p14, p15, p16, p17) + + #============================================================= + # update S + #============================================================= + + for box in S: + j = 0 + while j < 256: + l ^= p0 + + """) + + render_encipher(write, indent+3) + + write(indent+3, """\ + + box[j], box[j+1] = l, r = r ^ p17, l + j += 2 + """) + +#============================================================================= +# main +#============================================================================= + +def main(): + target = os.path.join(os.path.dirname(__file__), "unrolled.py") + fh = file(target, "w") + + def write(indent, msg, **kwds): + literal = kwds.pop("literal", False) + if kwds: + msg %= kwds + if not literal: + msg = textwrap.dedent(msg.rstrip(" ")) + if indent: + msg = indent_block(msg, " " * (indent*4)) + fh.write(msg) + + write(0, """\ + \"""passlib.crypto._blowfish.unrolled - unrolled loop implementation of bcrypt, + autogenerated by _gen_files.py + + currently this override the encipher() and expand() methods + with optimized versions, and leaves the other base.py methods alone. 
+ \""" + #================================================================= + # imports + #================================================================= + # pkg + from passlib.crypto._blowfish.base import BlowfishEngine as _BlowfishEngine + # local + __all__ = [ + "BlowfishEngine", + ] + #================================================================= + # + #================================================================= + class BlowfishEngine(_BlowfishEngine): + + """) + + write_encipher_function(write, indent=1) + write_expand_function(write, indent=1) + + write(0, """\ + #================================================================= + # eoc + #================================================================= + + #================================================================= + # eof + #================================================================= + """) + +if __name__ == "__main__": + main() + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/_blowfish/base.py b/ansible/lib/python3.11/site-packages/passlib/crypto/_blowfish/base.py new file mode 100644 index 000000000..7b4f2cb4c --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/crypto/_blowfish/base.py @@ -0,0 +1,441 @@ +"""passlib.crypto._blowfish.base - unoptimized pure-python blowfish engine""" +#============================================================================= +# imports +#============================================================================= +# core +import struct +# pkg +from passlib.utils import repeat_string +# local +__all__ = [ + "BlowfishEngine", +] + +#============================================================================= +# blowfish constants +#============================================================================= +BLOWFISH_P = BLOWFISH_S = None + +def _init_constants(): + global BLOWFISH_P, BLOWFISH_S + + # NOTE: blowfish's spec states these numbers are the hex representation + # of the fractional portion of PI, in order. 
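+ # (pi = 3.243f6a88 85a308d3 ... in hex, which is where the first entries of
+ # BLOWFISH_P below come from)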
+ + # Initial contents of key schedule - 18 integers + BLOWFISH_P = [ + 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, + 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, + 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, + 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, + 0x9216d5d9, 0x8979fb1b, + ] + + # all 4 blowfish S boxes in one array - 256 integers per S box + BLOWFISH_S = [ + # sbox 1 + [ + 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, + 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, + 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, + 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, + 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, + 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, + 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, + 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, + 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, + 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, + 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, + 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, + 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, + 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, + 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, + 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, + 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, + 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, + 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, + 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, + 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, + 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, + 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, + 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, + 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, + 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, + 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, + 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, + 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, + 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, + 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, + 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, + 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, + 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, + 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, + 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, + 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, + 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, + 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, + 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, + 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, + 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, + 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, + 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, + 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, + 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, + 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, + 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, + 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, + 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, + 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, + 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, + 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, + 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, + 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, + 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, + 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, + 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, + 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, + 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, + 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, + 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, + 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, + 
0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a, + ], + # sbox 2 + [ + 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, + 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, + 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, + 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, + 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, + 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, + 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, + 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, + 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, + 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, + 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, + 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, + 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, + 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, + 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, + 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, + 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, + 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, + 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, + 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, + 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, + 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, + 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, + 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, + 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, + 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, + 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, + 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, + 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, + 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, + 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, + 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, + 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, + 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, + 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, + 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, + 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, + 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, + 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, + 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, + 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, + 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, + 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, + 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, + 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, + 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, + 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, + 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, + 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, + 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, + 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, + 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, + 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, + 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, + 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, + 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, + 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, + 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, + 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, + 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, + 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, + 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, + 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, + 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7, + ], + # sbox 3 + [ + 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, + 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, + 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, + 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, + 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, + 0xbfbc09ec, 
0x03bd9785, 0x7fac6dd0, 0x31cb8504, + 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, + 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, + 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, + 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, + 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, + 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, + 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, + 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, + 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, + 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, + 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, + 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, + 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, + 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, + 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, + 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, + 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, + 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, + 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, + 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, + 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, + 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, + 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, + 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, + 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, + 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, + 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, + 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, + 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, + 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, + 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, + 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, + 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, + 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, + 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, + 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, + 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, + 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, + 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, + 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, + 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, + 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, + 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, + 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, + 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, + 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, + 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, + 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, + 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, + 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, + 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, + 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, + 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, + 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, + 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, + 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, + 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, + 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0, + ], + # sbox 4 + [ + 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, + 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, + 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, + 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, + 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, + 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, + 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, + 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, + 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, + 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, + 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, + 0xc72fefd3, 0xf752f7da, 0x3f046f69, 
0x77fa0a59, + 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, + 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, + 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, + 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, + 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, + 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, + 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, + 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, + 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, + 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, + 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, + 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, + 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, + 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, + 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, + 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, + 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, + 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, + 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, + 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, + 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, + 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, + 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, + 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, + 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, + 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, + 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, + 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, + 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, + 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, + 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, + 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, + 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, + 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, + 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, + 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, + 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, + 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, + 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, + 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, + 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, + 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, + 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, + 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, + 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, + 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, + 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, + 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, + 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, + 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, + 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, + 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6, + ] + ] + +#============================================================================= +# engine +#============================================================================= +class BlowfishEngine(object): + + def __init__(self): + if BLOWFISH_P is None: + _init_constants() + self.P = list(BLOWFISH_P) + self.S = [ list(box) for box in BLOWFISH_S ] + + #=================================================================== + # common helpers + #=================================================================== + @staticmethod + def key_to_words(data, size=18): + """convert data to tuple of 4-byte integers, repeating or + truncating data as needed to reach specified size""" + assert isinstance(data, bytes) + dlen = len(data) + if not dlen: + # return all zeros - original C code would just read the NUL after + # the password, so mimicing that behavior for this edge case. 
+ return [0]*size + + # repeat data until it fills up 4*size bytes + data = repeat_string(data, size<<2) + + # unpack + return struct.unpack(">%dI" % (size,), data) + + #=================================================================== + # blowfish routines + #=================================================================== + def encipher(self, l, r): + """loop version of blowfish encipher routine""" + P, S = self.P, self.S + l ^= P[0] + i = 1 + while i < 17: + # Feistel substitution on left word + r = ((((S[0][l >> 24] + S[1][(l >> 16) & 0xff]) ^ S[2][(l >> 8) & 0xff]) + + S[3][l & 0xff]) & 0xffffffff) ^ P[i] ^ r + # swap vars so even rounds do Feistel substition on right word + l, r = r, l + i += 1 + return r ^ P[17], l + + # NOTE: decipher is same as above, just with reversed(P) instead. + + def expand(self, key_words): + """perform stock Blowfish keyschedule setup""" + assert len(key_words) >= 18, "key_words must be at least as large as P" + P, S, encipher = self.P, self.S, self.encipher + + i = 0 + while i < 18: + P[i] ^= key_words[i] + i += 1 + + i = l = r = 0 + while i < 18: + P[i], P[i+1] = l,r = encipher(l,r) + i += 2 + + for box in S: + i = 0 + while i < 256: + box[i], box[i+1] = l,r = encipher(l,r) + i += 2 + + #=================================================================== + # eks-blowfish routines + #=================================================================== + def eks_salted_expand(self, key_words, salt_words): + """perform EKS' salted version of Blowfish keyschedule setup""" + # NOTE: this is the same as expand(), except for the addition + # of the operations involving *salt_words*. + + assert len(key_words) >= 18, "key_words must be at least as large as P" + salt_size = len(salt_words) + assert salt_size, "salt_words must not be empty" + assert not salt_size & 1, "salt_words must have even length" + P, S, encipher = self.P, self.S, self.encipher + + i = 0 + while i < 18: + P[i] ^= key_words[i] + i += 1 + + s = i = l = r = 0 + while i < 18: + l ^= salt_words[s] + r ^= salt_words[s+1] + s += 2 + if s == salt_size: + s = 0 + P[i], P[i+1] = l,r = encipher(l,r) # next() + i += 2 + + for box in S: + i = 0 + while i < 256: + l ^= salt_words[s] + r ^= salt_words[s+1] + s += 2 + if s == salt_size: + s = 0 + box[i], box[i+1] = l,r = encipher(l,r) # next() + i += 2 + + def eks_repeated_expand(self, key_words, salt_words, rounds): + """perform rounds stage of EKS keyschedule setup""" + expand = self.expand + n = 0 + while n < rounds: + expand(key_words) + expand(salt_words) + n += 1 + + def repeat_encipher(self, l, r, count): + """repeatedly apply encipher operation to a block""" + encipher = self.encipher + n = 0 + while n < count: + l, r = encipher(l, r) + n += 1 + return l, r + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/_blowfish/unrolled.py b/ansible/lib/python3.11/site-packages/passlib/crypto/_blowfish/unrolled.py new file mode 100644 index 000000000..4acf6e119 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/crypto/_blowfish/unrolled.py @@ -0,0 +1,771 @@ +"""passlib.crypto._blowfish.unrolled - unrolled loop implementation of bcrypt, +autogenerated by _gen_files.py + +currently this override the encipher() and 
expand() methods +with optimized versions, and leaves the other base.py methods alone. +""" +#============================================================================= +# imports +#============================================================================= +# pkg +from passlib.crypto._blowfish.base import BlowfishEngine as _BlowfishEngine +# local +__all__ = [ + "BlowfishEngine", +] +#============================================================================= +# +#============================================================================= +class BlowfishEngine(_BlowfishEngine): + + def encipher(self, l, r): + """blowfish encipher a single 64-bit block encoded as two 32-bit ints""" + + (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, + p10, p11, p12, p13, p14, p15, p16, p17) = self.P + S0, S1, S2, S3 = self.S + + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + + return r ^ p17, l + + def expand(self, key_words): + """unrolled version of blowfish key expansion""" + ##assert len(key_words) >= 18, "size of key_words must be >= 18" + + P, S = self.P, self.S + 
S0, S1, S2, S3 = S + + #============================================================= + # integrate key + #============================================================= + p0 = P[0] ^ key_words[0] + p1 = P[1] ^ key_words[1] + p2 = P[2] ^ key_words[2] + p3 = P[3] ^ key_words[3] + p4 = P[4] ^ key_words[4] + p5 = P[5] ^ key_words[5] + p6 = P[6] ^ key_words[6] + p7 = P[7] ^ key_words[7] + p8 = P[8] ^ key_words[8] + p9 = P[9] ^ key_words[9] + p10 = P[10] ^ key_words[10] + p11 = P[11] ^ key_words[11] + p12 = P[12] ^ key_words[12] + p13 = P[13] ^ key_words[13] + p14 = P[14] ^ key_words[14] + p15 = P[15] ^ key_words[15] + p16 = P[16] ^ key_words[16] + p17 = P[17] ^ key_words[17] + + #============================================================= + # update P + #============================================================= + + #------------------------------------------------ + # update P[0] and P[1] + #------------------------------------------------ + l, r = p0, 0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + + p0, p1 = l, r = r ^ p17, l + + 
#------------------------------------------------ + # update P[2] and P[3] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p2, p3 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[4] and P[5] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ 
S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p4, p5 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[6] and P[7] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel 
substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p6, p7 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[8] and P[9] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 
16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p8, p9 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[10] and P[11] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p10, p11 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[12] and P[13] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 
0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p12, p13 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[14] and P[15] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= 
((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p14, p15 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[16] and P[17] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + 
S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p16, p17 = l, r = r ^ p17, l + + + #------------------------------------------------ + # save changes to original P array + #------------------------------------------------ + P[:] = (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, + p10, p11, p12, p13, p14, p15, p16, p17) + + #============================================================= + # update S + #============================================================= + + for box in S: + j = 0 + while j < 256: + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + + box[j], box[j+1] = l, r = r ^ p17, l + j += 2 + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff 
--git a/ansible/lib/python3.11/site-packages/passlib/crypto/_md4.py b/ansible/lib/python3.11/site-packages/passlib/crypto/_md4.py new file mode 100644 index 000000000..bdc211fa2 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/crypto/_md4.py @@ -0,0 +1,244 @@ +""" +passlib.crypto._md4 -- fallback implementation of MD4 + +Helper implementing insecure and obsolete md4 algorithm. +used for NTHASH format, which is also insecure and broken, +since it's just md4(password). + +Implementated based on rfc at http://www.faqs.org/rfcs/rfc1320.html + +.. note:: + + This shouldn't be imported directly, it's merely used conditionally + by ``passlib.crypto.lookup_hash()`` when a native implementation can't be found. +""" + +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify +import struct +# site +from passlib.utils.compat import bascii_to_str, irange, PY3 +# local +__all__ = ["md4"] + +#============================================================================= +# utils +#============================================================================= +def F(x,y,z): + return (x&y) | ((~x) & z) + +def G(x,y,z): + return (x&y) | (x&z) | (y&z) + +##def H(x,y,z): +## return x ^ y ^ z + +MASK_32 = 2**32-1 + +#============================================================================= +# main class +#============================================================================= +class md4(object): + """pep-247 compatible implementation of MD4 hash algorithm + + .. attribute:: digest_size + + size of md4 digest in bytes (16 bytes) + + .. method:: update + + update digest by appending additional content + + .. method:: copy + + create clone of digest object, including current state + + .. method:: digest + + return bytes representing md4 digest of current content + + .. method:: hexdigest + + return hexadecimal version of digest + """ + # FIXME: make this follow hash object PEP better. 
+ # FIXME: this isn't threadsafe + + name = "md4" + digest_size = digestsize = 16 + block_size = 64 + + _count = 0 # number of 64-byte blocks processed so far (not including _buf) + _state = None # list of [a,b,c,d] 32 bit ints used as internal register + _buf = None # data processed in 64 byte blocks, this holds leftover from last update + + def __init__(self, content=None): + self._count = 0 + self._state = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476] + self._buf = b'' + if content: + self.update(content) + + # round 1 table - [abcd k s] + _round1 = [ + [0,1,2,3, 0,3], + [3,0,1,2, 1,7], + [2,3,0,1, 2,11], + [1,2,3,0, 3,19], + + [0,1,2,3, 4,3], + [3,0,1,2, 5,7], + [2,3,0,1, 6,11], + [1,2,3,0, 7,19], + + [0,1,2,3, 8,3], + [3,0,1,2, 9,7], + [2,3,0,1, 10,11], + [1,2,3,0, 11,19], + + [0,1,2,3, 12,3], + [3,0,1,2, 13,7], + [2,3,0,1, 14,11], + [1,2,3,0, 15,19], + ] + + # round 2 table - [abcd k s] + _round2 = [ + [0,1,2,3, 0,3], + [3,0,1,2, 4,5], + [2,3,0,1, 8,9], + [1,2,3,0, 12,13], + + [0,1,2,3, 1,3], + [3,0,1,2, 5,5], + [2,3,0,1, 9,9], + [1,2,3,0, 13,13], + + [0,1,2,3, 2,3], + [3,0,1,2, 6,5], + [2,3,0,1, 10,9], + [1,2,3,0, 14,13], + + [0,1,2,3, 3,3], + [3,0,1,2, 7,5], + [2,3,0,1, 11,9], + [1,2,3,0, 15,13], + ] + + # round 3 table - [abcd k s] + _round3 = [ + [0,1,2,3, 0,3], + [3,0,1,2, 8,9], + [2,3,0,1, 4,11], + [1,2,3,0, 12,15], + + [0,1,2,3, 2,3], + [3,0,1,2, 10,9], + [2,3,0,1, 6,11], + [1,2,3,0, 14,15], + + [0,1,2,3, 1,3], + [3,0,1,2, 9,9], + [2,3,0,1, 5,11], + [1,2,3,0, 13,15], + + [0,1,2,3, 3,3], + [3,0,1,2, 11,9], + [2,3,0,1, 7,11], + [1,2,3,0, 15,15], + ] + + def _process(self, block): + """process 64 byte block""" + # unpack block into 16 32-bit ints + X = struct.unpack("<16I", block) + + # clone state + orig = self._state + state = list(orig) + + # round 1 - F function - (x&y)|(~x & z) + for a,b,c,d,k,s in self._round1: + t = (state[a] + F(state[b],state[c],state[d]) + X[k]) & MASK_32 + state[a] = ((t<<s) & MASK_32) + (t>>(32-s)) + + # round 2 - G function + for a,b,c,d,k,s in self._round2: + t = (state[a] + G(state[b],state[c],state[d]) + X[k] + 0x5a827999) & MASK_32 + state[a] = ((t<<s) & MASK_32) + (t>>(32-s)) + + # round 3 - H function - x ^ y ^ z + for a,b,c,d,k,s in self._round3: + t = (state[a] + (state[b] ^ state[c] ^ state[d]) + X[k] + 0x6ed9eba1) & MASK_32 + state[a] = ((t<<s) & MASK_32) + (t>>(32-s)) + + # add back into original state + for i in irange(4): + orig[i] = (orig[i]+state[i]) & MASK_32 + + def update(self, content): + if not isinstance(content, bytes): + if PY3: + raise TypeError("expected bytes") + else: + # replicate behavior of hashlib under py2 + content = content.encode("ascii") + buf = self._buf + if buf: + content = buf + content + idx = 0 + end = len(content) + while True: + next = idx + 64 + if next <= end: + self._process(content[idx:next]) + self._count += 1 + idx = next + else: + self._buf = content[idx:] + return + + def copy(self): + other = md4() + other._count = self._count + other._state = list(self._state) + other._buf = self._buf + return other + + def digest(self): + # NOTE: backing up state so we can restore it after _process is called, + # in case object is updated again (this is only attr altered by this method) + orig = list(self._state) + + # final block: buf + 0x80, + # then 0x00 padding until congruent w/ 56 mod 64 bytes + # then last 8 bytes = msg length in bits + buf = self._buf + msglen = self._count*512 + len(buf)*8 + block = buf + b'\x80' + b'\x00' * ((119-len(buf)) % 64) + \ + struct.pack("<2I", msglen & MASK_32, (msglen>>32) & MASK_32) + if len(block) == 128: +
self._process(block[:64]) + self._process(block[64:]) + else: + assert len(block) == 64 + self._process(block) + + # render digest & restore un-finalized state + out = struct.pack("<4I", *self._state) + self._state = orig + return out + + def hexdigest(self): + return bascii_to_str(hexlify(self.digest())) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/des.py b/ansible/lib/python3.11/site-packages/passlib/crypto/des.py new file mode 100644 index 000000000..3f87aef3b --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/crypto/des.py @@ -0,0 +1,848 @@ +"""passlib.crypto.des -- DES block encryption routines + +History +======= +These routines (which have since been drastically modified for python) +are based on a Java implementation of the des-crypt algorithm, +found at ``_. + +The copyright & license for that source is as follows:: + + UnixCrypt.java 0.9 96/11/25 + Copyright (c) 1996 Aki Yoshida. All rights reserved. + Permission to use, copy, modify and distribute this software + for non-commercial or commercial purposes and without fee is + hereby granted provided that this copyright notice appears in + all copies. + + --- + + Unix crypt(3C) utility + @version 0.9, 11/25/96 + @author Aki Yoshida + + --- + + modified April 2001 + by Iris Van den Broeke, Daniel Deville + + --- + Unix Crypt. + Implements the one way cryptography used by Unix systems for + simple password protection. + @version $Id: UnixCrypt2.txt,v 1.1.1.1 2005/09/13 22:20:13 christos Exp $ + @author Greg Wilkins (gregw) + +The netbsd des-crypt implementation has some nice notes on how this all works - + http://fxr.googlebit.com/source/lib/libcrypt/crypt.c?v=NETBSD-CURRENT +""" + +# TODO: could use an accelerated C version of this module to speed up lmhash, +# des-crypt, and ext-des-crypt + +#============================================================================= +# imports +#============================================================================= +# core +import struct +# pkg +from passlib import exc +from passlib.utils.compat import join_byte_values, byte_elem_value, \ + irange, irange, int_types +# local +__all__ = [ + "expand_des_key", + "des_encrypt_block", +] + +#============================================================================= +# constants +#============================================================================= + +# masks/upper limits for various integer sizes +INT_24_MASK = 0xffffff +INT_56_MASK = 0xffffffffffffff +INT_64_MASK = 0xffffffffffffffff + +# mask to clear parity bits from 64-bit key +_KDATA_MASK = 0xfefefefefefefefe +_KPARITY_MASK = 0x0101010101010101 + +# mask used to setup key schedule +_KS_MASK = 0xfcfcfcfcffffffff + +#============================================================================= +# static DES tables +#============================================================================= + +# placeholders filled in by _load_tables() +PCXROT = IE3264 = SPE = CF6464 = None + +def _load_tables(): + """delay loading tables until they are actually needed""" + global PCXROT, IE3264, SPE, CF6464 + + #--------------------------------------------------------------- + # Initial key schedule permutation + # PC1ROT - bit reverse, then 
PC1, then Rotate, then PC2 + #--------------------------------------------------------------- + # NOTE: this was reordered from original table to make perm3264 logic simpler + PC1ROT=( + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000002000, 0x0000000000002000, + 0x0000000000000020, 0x0000000000000020, 0x0000000000002020, 0x0000000000002020, + 0x0000000000000400, 0x0000000000000400, 0x0000000000002400, 0x0000000000002400, + 0x0000000000000420, 0x0000000000000420, 0x0000000000002420, 0x0000000000002420, ), + ( 0x0000000000000000, 0x2000000000000000, 0x0000000400000000, 0x2000000400000000, + 0x0000800000000000, 0x2000800000000000, 0x0000800400000000, 0x2000800400000000, + 0x0008000000000000, 0x2008000000000000, 0x0008000400000000, 0x2008000400000000, + 0x0008800000000000, 0x2008800000000000, 0x0008800400000000, 0x2008800400000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000040, 0x0000000000000040, + 0x0000000020000000, 0x0000000020000000, 0x0000000020000040, 0x0000000020000040, + 0x0000000000200000, 0x0000000000200000, 0x0000000000200040, 0x0000000000200040, + 0x0000000020200000, 0x0000000020200000, 0x0000000020200040, 0x0000000020200040, ), + ( 0x0000000000000000, 0x0002000000000000, 0x0800000000000000, 0x0802000000000000, + 0x0100000000000000, 0x0102000000000000, 0x0900000000000000, 0x0902000000000000, + 0x4000000000000000, 0x4002000000000000, 0x4800000000000000, 0x4802000000000000, + 0x4100000000000000, 0x4102000000000000, 0x4900000000000000, 0x4902000000000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000040000, 0x0000000000040000, + 0x0000020000000000, 0x0000020000000000, 0x0000020000040000, 0x0000020000040000, + 0x0000000000000004, 0x0000000000000004, 0x0000000000040004, 0x0000000000040004, + 0x0000020000000004, 0x0000020000000004, 0x0000020000040004, 0x0000020000040004, ), + ( 0x0000000000000000, 0x0000400000000000, 0x0200000000000000, 0x0200400000000000, + 0x0080000000000000, 0x0080400000000000, 0x0280000000000000, 0x0280400000000000, + 0x0000008000000000, 0x0000408000000000, 0x0200008000000000, 0x0200408000000000, + 0x0080008000000000, 0x0080408000000000, 0x0280008000000000, 0x0280408000000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000010000000, 0x0000000010000000, + 0x0000000000001000, 0x0000000000001000, 0x0000000010001000, 0x0000000010001000, + 0x0000000040000000, 0x0000000040000000, 0x0000000050000000, 0x0000000050000000, + 0x0000000040001000, 0x0000000040001000, 0x0000000050001000, 0x0000000050001000, ), + ( 0x0000000000000000, 0x0000001000000000, 0x0000080000000000, 0x0000081000000000, + 0x1000000000000000, 0x1000001000000000, 0x1000080000000000, 0x1000081000000000, + 0x0004000000000000, 0x0004001000000000, 0x0004080000000000, 0x0004081000000000, + 0x1004000000000000, 0x1004001000000000, 0x1004080000000000, 0x1004081000000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000080, 0x0000000000000080, + 0x0000000000080000, 0x0000000000080000, 0x0000000000080080, 0x0000000000080080, + 0x0000000000800000, 0x0000000000800000, 0x0000000000800080, 0x0000000000800080, + 0x0000000000880000, 0x0000000000880000, 0x0000000000880080, 0x0000000000880080, ), + ( 0x0000000000000000, 0x0000000008000000, 0x0000002000000000, 0x0000002008000000, + 0x0000100000000000, 0x0000100008000000, 0x0000102000000000, 0x0000102008000000, + 0x0000200000000000, 0x0000200008000000, 0x0000202000000000, 0x0000202008000000, + 0x0000300000000000, 0x0000300008000000, 0x0000302000000000, 0x0000302008000000, ), + ( 0x0000000000000000, 
0x0000000000000000, 0x0000000000400000, 0x0000000000400000, + 0x0000000004000000, 0x0000000004000000, 0x0000000004400000, 0x0000000004400000, + 0x0000000000000800, 0x0000000000000800, 0x0000000000400800, 0x0000000000400800, + 0x0000000004000800, 0x0000000004000800, 0x0000000004400800, 0x0000000004400800, ), + ( 0x0000000000000000, 0x0000000000008000, 0x0040000000000000, 0x0040000000008000, + 0x0000004000000000, 0x0000004000008000, 0x0040004000000000, 0x0040004000008000, + 0x8000000000000000, 0x8000000000008000, 0x8040000000000000, 0x8040000000008000, + 0x8000004000000000, 0x8000004000008000, 0x8040004000000000, 0x8040004000008000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000004000, 0x0000000000004000, + 0x0000000000000008, 0x0000000000000008, 0x0000000000004008, 0x0000000000004008, + 0x0000000000000010, 0x0000000000000010, 0x0000000000004010, 0x0000000000004010, + 0x0000000000000018, 0x0000000000000018, 0x0000000000004018, 0x0000000000004018, ), + ( 0x0000000000000000, 0x0000000200000000, 0x0001000000000000, 0x0001000200000000, + 0x0400000000000000, 0x0400000200000000, 0x0401000000000000, 0x0401000200000000, + 0x0020000000000000, 0x0020000200000000, 0x0021000000000000, 0x0021000200000000, + 0x0420000000000000, 0x0420000200000000, 0x0421000000000000, 0x0421000200000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000010000000000, 0x0000010000000000, + 0x0000000100000000, 0x0000000100000000, 0x0000010100000000, 0x0000010100000000, + 0x0000000000100000, 0x0000000000100000, 0x0000010000100000, 0x0000010000100000, + 0x0000000100100000, 0x0000000100100000, 0x0000010100100000, 0x0000010100100000, ), + ( 0x0000000000000000, 0x0000000080000000, 0x0000040000000000, 0x0000040080000000, + 0x0010000000000000, 0x0010000080000000, 0x0010040000000000, 0x0010040080000000, + 0x0000000800000000, 0x0000000880000000, 0x0000040800000000, 0x0000040880000000, + 0x0010000800000000, 0x0010000880000000, 0x0010040800000000, 0x0010040880000000, ), + ) + #--------------------------------------------------------------- + # Subsequent key schedule rotation permutations + # PC2ROT - PC2 inverse, then Rotate, then PC2 + #--------------------------------------------------------------- + # NOTE: this was reordered from original table to make perm3264 logic simpler + PC2ROTA=( + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000200000, 0x0000000000200000, 0x0000000000200000, 0x0000000000200000, + 0x0000000004000000, 0x0000000004000000, 0x0000000004000000, 0x0000000004000000, + 0x0000000004200000, 0x0000000004200000, 0x0000000004200000, 0x0000000004200000, ), + ( 0x0000000000000000, 0x0000000000000800, 0x0000010000000000, 0x0000010000000800, + 0x0000000000002000, 0x0000000000002800, 0x0000010000002000, 0x0000010000002800, + 0x0000000010000000, 0x0000000010000800, 0x0000010010000000, 0x0000010010000800, + 0x0000000010002000, 0x0000000010002800, 0x0000010010002000, 0x0000010010002800, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000100000000, 0x0000000100000000, 0x0000000100000000, 0x0000000100000000, + 0x0000000000800000, 0x0000000000800000, 0x0000000000800000, 0x0000000000800000, + 0x0000000100800000, 0x0000000100800000, 0x0000000100800000, 0x0000000100800000, ), + ( 0x0000000000000000, 0x0000020000000000, 0x0000000080000000, 0x0000020080000000, + 0x0000000000400000, 0x0000020000400000, 0x0000000080400000, 0x0000020080400000, + 0x0000000008000000, 0x0000020008000000, 0x0000000088000000, 0x0000020088000000, + 
0x0000000008400000, 0x0000020008400000, 0x0000000088400000, 0x0000020088400000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000040, 0x0000000000000040, 0x0000000000000040, 0x0000000000000040, + 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, + 0x0000000000001040, 0x0000000000001040, 0x0000000000001040, 0x0000000000001040, ), + ( 0x0000000000000000, 0x0000000000000010, 0x0000000000000400, 0x0000000000000410, + 0x0000000000000080, 0x0000000000000090, 0x0000000000000480, 0x0000000000000490, + 0x0000000040000000, 0x0000000040000010, 0x0000000040000400, 0x0000000040000410, + 0x0000000040000080, 0x0000000040000090, 0x0000000040000480, 0x0000000040000490, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, + 0x0000000000100000, 0x0000000000100000, 0x0000000000100000, 0x0000000000100000, + 0x0000000000180000, 0x0000000000180000, 0x0000000000180000, 0x0000000000180000, ), + ( 0x0000000000000000, 0x0000000000040000, 0x0000000000000020, 0x0000000000040020, + 0x0000000000000004, 0x0000000000040004, 0x0000000000000024, 0x0000000000040024, + 0x0000000200000000, 0x0000000200040000, 0x0000000200000020, 0x0000000200040020, + 0x0000000200000004, 0x0000000200040004, 0x0000000200000024, 0x0000000200040024, ), + ( 0x0000000000000000, 0x0000000000000008, 0x0000000000008000, 0x0000000000008008, + 0x0010000000000000, 0x0010000000000008, 0x0010000000008000, 0x0010000000008008, + 0x0020000000000000, 0x0020000000000008, 0x0020000000008000, 0x0020000000008008, + 0x0030000000000000, 0x0030000000000008, 0x0030000000008000, 0x0030000000008008, ), + ( 0x0000000000000000, 0x0000400000000000, 0x0000080000000000, 0x0000480000000000, + 0x0000100000000000, 0x0000500000000000, 0x0000180000000000, 0x0000580000000000, + 0x4000000000000000, 0x4000400000000000, 0x4000080000000000, 0x4000480000000000, + 0x4000100000000000, 0x4000500000000000, 0x4000180000000000, 0x4000580000000000, ), + ( 0x0000000000000000, 0x0000000000004000, 0x0000000020000000, 0x0000000020004000, + 0x0001000000000000, 0x0001000000004000, 0x0001000020000000, 0x0001000020004000, + 0x0200000000000000, 0x0200000000004000, 0x0200000020000000, 0x0200000020004000, + 0x0201000000000000, 0x0201000000004000, 0x0201000020000000, 0x0201000020004000, ), + ( 0x0000000000000000, 0x1000000000000000, 0x0004000000000000, 0x1004000000000000, + 0x0002000000000000, 0x1002000000000000, 0x0006000000000000, 0x1006000000000000, + 0x0000000800000000, 0x1000000800000000, 0x0004000800000000, 0x1004000800000000, + 0x0002000800000000, 0x1002000800000000, 0x0006000800000000, 0x1006000800000000, ), + ( 0x0000000000000000, 0x0040000000000000, 0x2000000000000000, 0x2040000000000000, + 0x0000008000000000, 0x0040008000000000, 0x2000008000000000, 0x2040008000000000, + 0x0000001000000000, 0x0040001000000000, 0x2000001000000000, 0x2040001000000000, + 0x0000009000000000, 0x0040009000000000, 0x2000009000000000, 0x2040009000000000, ), + ( 0x0000000000000000, 0x0400000000000000, 0x8000000000000000, 0x8400000000000000, + 0x0000002000000000, 0x0400002000000000, 0x8000002000000000, 0x8400002000000000, + 0x0100000000000000, 0x0500000000000000, 0x8100000000000000, 0x8500000000000000, + 0x0100002000000000, 0x0500002000000000, 0x8100002000000000, 0x8500002000000000, ), + ( 0x0000000000000000, 0x0000800000000000, 0x0800000000000000, 0x0800800000000000, + 0x0000004000000000, 0x0000804000000000, 
0x0800004000000000, 0x0800804000000000, + 0x0000000400000000, 0x0000800400000000, 0x0800000400000000, 0x0800800400000000, + 0x0000004400000000, 0x0000804400000000, 0x0800004400000000, 0x0800804400000000, ), + ( 0x0000000000000000, 0x0080000000000000, 0x0000040000000000, 0x0080040000000000, + 0x0008000000000000, 0x0088000000000000, 0x0008040000000000, 0x0088040000000000, + 0x0000200000000000, 0x0080200000000000, 0x0000240000000000, 0x0080240000000000, + 0x0008200000000000, 0x0088200000000000, 0x0008240000000000, 0x0088240000000000, ), + ) + + # NOTE: this was reordered from original table to make perm3264 logic simpler + PC2ROTB=( + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000400, 0x0000000000000400, 0x0000000000000400, 0x0000000000000400, + 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, + 0x0000000000080400, 0x0000000000080400, 0x0000000000080400, 0x0000000000080400, ), + ( 0x0000000000000000, 0x0000000000800000, 0x0000000000004000, 0x0000000000804000, + 0x0000000080000000, 0x0000000080800000, 0x0000000080004000, 0x0000000080804000, + 0x0000000000040000, 0x0000000000840000, 0x0000000000044000, 0x0000000000844000, + 0x0000000080040000, 0x0000000080840000, 0x0000000080044000, 0x0000000080844000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000008, 0x0000000000000008, 0x0000000000000008, 0x0000000000000008, + 0x0000000040000000, 0x0000000040000000, 0x0000000040000000, 0x0000000040000000, + 0x0000000040000008, 0x0000000040000008, 0x0000000040000008, 0x0000000040000008, ), + ( 0x0000000000000000, 0x0000000020000000, 0x0000000200000000, 0x0000000220000000, + 0x0000000000000080, 0x0000000020000080, 0x0000000200000080, 0x0000000220000080, + 0x0000000000100000, 0x0000000020100000, 0x0000000200100000, 0x0000000220100000, + 0x0000000000100080, 0x0000000020100080, 0x0000000200100080, 0x0000000220100080, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000002000, 0x0000000000002000, 0x0000000000002000, 0x0000000000002000, + 0x0000020000000000, 0x0000020000000000, 0x0000020000000000, 0x0000020000000000, + 0x0000020000002000, 0x0000020000002000, 0x0000020000002000, 0x0000020000002000, ), + ( 0x0000000000000000, 0x0000000000000800, 0x0000000100000000, 0x0000000100000800, + 0x0000000010000000, 0x0000000010000800, 0x0000000110000000, 0x0000000110000800, + 0x0000000000000004, 0x0000000000000804, 0x0000000100000004, 0x0000000100000804, + 0x0000000010000004, 0x0000000010000804, 0x0000000110000004, 0x0000000110000804, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, + 0x0000000000000010, 0x0000000000000010, 0x0000000000000010, 0x0000000000000010, + 0x0000000000001010, 0x0000000000001010, 0x0000000000001010, 0x0000000000001010, ), + ( 0x0000000000000000, 0x0000000000000040, 0x0000010000000000, 0x0000010000000040, + 0x0000000000200000, 0x0000000000200040, 0x0000010000200000, 0x0000010000200040, + 0x0000000000008000, 0x0000000000008040, 0x0000010000008000, 0x0000010000008040, + 0x0000000000208000, 0x0000000000208040, 0x0000010000208000, 0x0000010000208040, ), + ( 0x0000000000000000, 0x0000000004000000, 0x0000000008000000, 0x000000000c000000, + 0x0400000000000000, 0x0400000004000000, 0x0400000008000000, 0x040000000c000000, + 0x8000000000000000, 0x8000000004000000, 0x8000000008000000, 0x800000000c000000, + 
0x8400000000000000, 0x8400000004000000, 0x8400000008000000, 0x840000000c000000, ), + ( 0x0000000000000000, 0x0002000000000000, 0x0200000000000000, 0x0202000000000000, + 0x1000000000000000, 0x1002000000000000, 0x1200000000000000, 0x1202000000000000, + 0x0008000000000000, 0x000a000000000000, 0x0208000000000000, 0x020a000000000000, + 0x1008000000000000, 0x100a000000000000, 0x1208000000000000, 0x120a000000000000, ), + ( 0x0000000000000000, 0x0000000000400000, 0x0000000000000020, 0x0000000000400020, + 0x0040000000000000, 0x0040000000400000, 0x0040000000000020, 0x0040000000400020, + 0x0800000000000000, 0x0800000000400000, 0x0800000000000020, 0x0800000000400020, + 0x0840000000000000, 0x0840000000400000, 0x0840000000000020, 0x0840000000400020, ), + ( 0x0000000000000000, 0x0080000000000000, 0x0000008000000000, 0x0080008000000000, + 0x2000000000000000, 0x2080000000000000, 0x2000008000000000, 0x2080008000000000, + 0x0020000000000000, 0x00a0000000000000, 0x0020008000000000, 0x00a0008000000000, + 0x2020000000000000, 0x20a0000000000000, 0x2020008000000000, 0x20a0008000000000, ), + ( 0x0000000000000000, 0x0000002000000000, 0x0000040000000000, 0x0000042000000000, + 0x4000000000000000, 0x4000002000000000, 0x4000040000000000, 0x4000042000000000, + 0x0000400000000000, 0x0000402000000000, 0x0000440000000000, 0x0000442000000000, + 0x4000400000000000, 0x4000402000000000, 0x4000440000000000, 0x4000442000000000, ), + ( 0x0000000000000000, 0x0000004000000000, 0x0000200000000000, 0x0000204000000000, + 0x0000080000000000, 0x0000084000000000, 0x0000280000000000, 0x0000284000000000, + 0x0000800000000000, 0x0000804000000000, 0x0000a00000000000, 0x0000a04000000000, + 0x0000880000000000, 0x0000884000000000, 0x0000a80000000000, 0x0000a84000000000, ), + ( 0x0000000000000000, 0x0000000800000000, 0x0000000400000000, 0x0000000c00000000, + 0x0000100000000000, 0x0000100800000000, 0x0000100400000000, 0x0000100c00000000, + 0x0010000000000000, 0x0010000800000000, 0x0010000400000000, 0x0010000c00000000, + 0x0010100000000000, 0x0010100800000000, 0x0010100400000000, 0x0010100c00000000, ), + ( 0x0000000000000000, 0x0100000000000000, 0x0001000000000000, 0x0101000000000000, + 0x0000001000000000, 0x0100001000000000, 0x0001001000000000, 0x0101001000000000, + 0x0004000000000000, 0x0104000000000000, 0x0005000000000000, 0x0105000000000000, + 0x0004001000000000, 0x0104001000000000, 0x0005001000000000, 0x0105001000000000, ), + ) + #--------------------------------------------------------------- + # PCXROT - PC1ROT, PC2ROTA, PC2ROTB listed in order + # of the PC1 rotation schedule, as used by des_setkey + #--------------------------------------------------------------- + ##ROTATES = (1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1) + ##PCXROT = ( + ## PC1ROT, PC2ROTA, PC2ROTB, PC2ROTB, + ## PC2ROTB, PC2ROTB, PC2ROTB, PC2ROTB, + ## PC2ROTA, PC2ROTB, PC2ROTB, PC2ROTB, + ## PC2ROTB, PC2ROTB, PC2ROTB, PC2ROTA, + ## ) + + # NOTE: modified PCXROT to contain entrys broken into pairs, + # to help generate them in format best used by encoder. 
+ PCXROT = ( + (PC1ROT, PC2ROTA), (PC2ROTB, PC2ROTB), + (PC2ROTB, PC2ROTB), (PC2ROTB, PC2ROTB), + (PC2ROTA, PC2ROTB), (PC2ROTB, PC2ROTB), + (PC2ROTB, PC2ROTB), (PC2ROTB, PC2ROTA), + ) + + #--------------------------------------------------------------- + # Bit reverse, intial permupation, expantion + # Initial permutation/expansion table + #--------------------------------------------------------------- + # NOTE: this was reordered from original table to make perm3264 logic simpler + IE3264=( + ( 0x0000000000000000, 0x0000000000800800, 0x0000000000008008, 0x0000000000808808, + 0x0000008008000000, 0x0000008008800800, 0x0000008008008008, 0x0000008008808808, + 0x0000000080080000, 0x0000000080880800, 0x0000000080088008, 0x0000000080888808, + 0x0000008088080000, 0x0000008088880800, 0x0000008088088008, 0x0000008088888808, ), + ( 0x0000000000000000, 0x0080080000000000, 0x0000800800000000, 0x0080880800000000, + 0x0800000000000080, 0x0880080000000080, 0x0800800800000080, 0x0880880800000080, + 0x8008000000000000, 0x8088080000000000, 0x8008800800000000, 0x8088880800000000, + 0x8808000000000080, 0x8888080000000080, 0x8808800800000080, 0x8888880800000080, ), + ( 0x0000000000000000, 0x0000000000001000, 0x0000000000000010, 0x0000000000001010, + 0x0000000010000000, 0x0000000010001000, 0x0000000010000010, 0x0000000010001010, + 0x0000000000100000, 0x0000000000101000, 0x0000000000100010, 0x0000000000101010, + 0x0000000010100000, 0x0000000010101000, 0x0000000010100010, 0x0000000010101010, ), + ( 0x0000000000000000, 0x0000100000000000, 0x0000001000000000, 0x0000101000000000, + 0x1000000000000000, 0x1000100000000000, 0x1000001000000000, 0x1000101000000000, + 0x0010000000000000, 0x0010100000000000, 0x0010001000000000, 0x0010101000000000, + 0x1010000000000000, 0x1010100000000000, 0x1010001000000000, 0x1010101000000000, ), + ( 0x0000000000000000, 0x0000000000002000, 0x0000000000000020, 0x0000000000002020, + 0x0000000020000000, 0x0000000020002000, 0x0000000020000020, 0x0000000020002020, + 0x0000000000200000, 0x0000000000202000, 0x0000000000200020, 0x0000000000202020, + 0x0000000020200000, 0x0000000020202000, 0x0000000020200020, 0x0000000020202020, ), + ( 0x0000000000000000, 0x0000200000000000, 0x0000002000000000, 0x0000202000000000, + 0x2000000000000000, 0x2000200000000000, 0x2000002000000000, 0x2000202000000000, + 0x0020000000000000, 0x0020200000000000, 0x0020002000000000, 0x0020202000000000, + 0x2020000000000000, 0x2020200000000000, 0x2020002000000000, 0x2020202000000000, ), + ( 0x0000000000000000, 0x0000000000004004, 0x0400000000000040, 0x0400000000004044, + 0x0000000040040000, 0x0000000040044004, 0x0400000040040040, 0x0400000040044044, + 0x0000000000400400, 0x0000000000404404, 0x0400000000400440, 0x0400000000404444, + 0x0000000040440400, 0x0000000040444404, 0x0400000040440440, 0x0400000040444444, ), + ( 0x0000000000000000, 0x0000400400000000, 0x0000004004000000, 0x0000404404000000, + 0x4004000000000000, 0x4004400400000000, 0x4004004004000000, 0x4004404404000000, + 0x0040040000000000, 0x0040440400000000, 0x0040044004000000, 0x0040444404000000, + 0x4044040000000000, 0x4044440400000000, 0x4044044004000000, 0x4044444404000000, ), + ) + + #--------------------------------------------------------------- + # Table that combines the S, P, and E operations. 
+ #--------------------------------------------------------------- + SPE=( + ( 0x0080088008200000, 0x0000008008000000, 0x0000000000200020, 0x0080088008200020, + 0x0000000000200000, 0x0080088008000020, 0x0000008008000020, 0x0000000000200020, + 0x0080088008000020, 0x0080088008200000, 0x0000008008200000, 0x0080080000000020, + 0x0080080000200020, 0x0000000000200000, 0x0000000000000000, 0x0000008008000020, + 0x0000008008000000, 0x0000000000000020, 0x0080080000200000, 0x0080088008000000, + 0x0080088008200020, 0x0000008008200000, 0x0080080000000020, 0x0080080000200000, + 0x0000000000000020, 0x0080080000000000, 0x0080088008000000, 0x0000008008200020, + 0x0080080000000000, 0x0080080000200020, 0x0000008008200020, 0x0000000000000000, + 0x0000000000000000, 0x0080088008200020, 0x0080080000200000, 0x0000008008000020, + 0x0080088008200000, 0x0000008008000000, 0x0080080000000020, 0x0080080000200000, + 0x0000008008200020, 0x0080080000000000, 0x0080088008000000, 0x0000000000200020, + 0x0080088008000020, 0x0000000000000020, 0x0000000000200020, 0x0000008008200000, + 0x0080088008200020, 0x0080088008000000, 0x0000008008200000, 0x0080080000200020, + 0x0000000000200000, 0x0080080000000020, 0x0000008008000020, 0x0000000000000000, + 0x0000008008000000, 0x0000000000200000, 0x0080080000200020, 0x0080088008200000, + 0x0000000000000020, 0x0000008008200020, 0x0080080000000000, 0x0080088008000020, ), + ( 0x1000800810004004, 0x0000000000000000, 0x0000800810000000, 0x0000000010004004, + 0x1000000000004004, 0x1000800800000000, 0x0000800800004004, 0x0000800810000000, + 0x0000800800000000, 0x1000000010004004, 0x1000000000000000, 0x0000800800004004, + 0x1000000010000000, 0x0000800810004004, 0x0000000010004004, 0x1000000000000000, + 0x0000000010000000, 0x1000800800004004, 0x1000000010004004, 0x0000800800000000, + 0x1000800810000000, 0x0000000000004004, 0x0000000000000000, 0x1000000010000000, + 0x1000800800004004, 0x1000800810000000, 0x0000800810004004, 0x1000000000004004, + 0x0000000000004004, 0x0000000010000000, 0x1000800800000000, 0x1000800810004004, + 0x1000000010000000, 0x0000800810004004, 0x0000800800004004, 0x1000800810000000, + 0x1000800810004004, 0x1000000010000000, 0x1000000000004004, 0x0000000000000000, + 0x0000000000004004, 0x1000800800000000, 0x0000000010000000, 0x1000000010004004, + 0x0000800800000000, 0x0000000000004004, 0x1000800810000000, 0x1000800800004004, + 0x0000800810004004, 0x0000800800000000, 0x0000000000000000, 0x1000000000004004, + 0x1000000000000000, 0x1000800810004004, 0x0000800810000000, 0x0000000010004004, + 0x1000000010004004, 0x0000000010000000, 0x1000800800000000, 0x0000800800004004, + 0x1000800800004004, 0x1000000000000000, 0x0000000010004004, 0x0000800810000000, ), + ( 0x0000000000400410, 0x0010004004400400, 0x0010000000000000, 0x0010000000400410, + 0x0000004004000010, 0x0000000000400400, 0x0010000000400410, 0x0010004004000000, + 0x0010000000400400, 0x0000004004000000, 0x0000004004400400, 0x0000000000000010, + 0x0010004004400410, 0x0010000000000010, 0x0000000000000010, 0x0000004004400410, + 0x0000000000000000, 0x0000004004000010, 0x0010004004400400, 0x0010000000000000, + 0x0010000000000010, 0x0010004004400410, 0x0000004004000000, 0x0000000000400410, + 0x0000004004400410, 0x0010000000400400, 0x0010004004000010, 0x0000004004400400, + 0x0010004004000000, 0x0000000000000000, 0x0000000000400400, 0x0010004004000010, + 0x0010004004400400, 0x0010000000000000, 0x0000000000000010, 0x0000004004000000, + 0x0010000000000010, 0x0000004004000010, 0x0000004004400400, 0x0010000000400410, + 0x0000000000000000, 
0x0010004004400400, 0x0010004004000000, 0x0000004004400410, + 0x0000004004000010, 0x0000000000400400, 0x0010004004400410, 0x0000000000000010, + 0x0010004004000010, 0x0000000000400410, 0x0000000000400400, 0x0010004004400410, + 0x0000004004000000, 0x0010000000400400, 0x0010000000400410, 0x0010004004000000, + 0x0010000000400400, 0x0000000000000000, 0x0000004004400410, 0x0010000000000010, + 0x0000000000400410, 0x0010004004000010, 0x0010000000000000, 0x0000004004400400, ), + ( 0x0800100040040080, 0x0000100000001000, 0x0800000000000080, 0x0800100040041080, + 0x0000000000000000, 0x0000000040041000, 0x0800100000001080, 0x0800000040040080, + 0x0000100040041000, 0x0800000000001080, 0x0000000000001000, 0x0800100000000080, + 0x0800000000001080, 0x0800100040040080, 0x0000000040040000, 0x0000000000001000, + 0x0800000040041080, 0x0000100040040000, 0x0000100000000000, 0x0800000000000080, + 0x0000100040040000, 0x0800100000001080, 0x0000000040041000, 0x0000100000000000, + 0x0800100000000080, 0x0000000000000000, 0x0800000040040080, 0x0000100040041000, + 0x0000100000001000, 0x0800000040041080, 0x0800100040041080, 0x0000000040040000, + 0x0800000040041080, 0x0800100000000080, 0x0000000040040000, 0x0800000000001080, + 0x0000100040040000, 0x0000100000001000, 0x0800000000000080, 0x0000000040041000, + 0x0800100000001080, 0x0000000000000000, 0x0000100000000000, 0x0800000040040080, + 0x0000000000000000, 0x0800000040041080, 0x0000100040041000, 0x0000100000000000, + 0x0000000000001000, 0x0800100040041080, 0x0800100040040080, 0x0000000040040000, + 0x0800100040041080, 0x0800000000000080, 0x0000100000001000, 0x0800100040040080, + 0x0800000040040080, 0x0000100040040000, 0x0000000040041000, 0x0800100000001080, + 0x0800100000000080, 0x0000000000001000, 0x0800000000001080, 0x0000100040041000, ), + ( 0x0000000000800800, 0x0000001000000000, 0x0040040000000000, 0x2040041000800800, + 0x2000001000800800, 0x0040040000800800, 0x2040041000000000, 0x0000001000800800, + 0x0000001000000000, 0x2000000000000000, 0x2000000000800800, 0x0040041000000000, + 0x2040040000800800, 0x2000001000800800, 0x0040041000800800, 0x0000000000000000, + 0x0040041000000000, 0x0000000000800800, 0x2000001000000000, 0x2040040000000000, + 0x0040040000800800, 0x2040041000000000, 0x0000000000000000, 0x2000000000800800, + 0x2000000000000000, 0x2040040000800800, 0x2040041000800800, 0x2000001000000000, + 0x0000001000800800, 0x0040040000000000, 0x2040040000000000, 0x0040041000800800, + 0x0040041000800800, 0x2040040000800800, 0x2000001000000000, 0x0000001000800800, + 0x0000001000000000, 0x2000000000000000, 0x2000000000800800, 0x0040040000800800, + 0x0000000000800800, 0x0040041000000000, 0x2040041000800800, 0x0000000000000000, + 0x2040041000000000, 0x0000000000800800, 0x0040040000000000, 0x2000001000000000, + 0x2040040000800800, 0x0040040000000000, 0x0000000000000000, 0x2040041000800800, + 0x2000001000800800, 0x0040041000800800, 0x2040040000000000, 0x0000001000000000, + 0x0040041000000000, 0x2000001000800800, 0x0040040000800800, 0x2040040000000000, + 0x2000000000000000, 0x2040041000000000, 0x0000001000800800, 0x2000000000800800, ), + ( 0x4004000000008008, 0x4004000020000000, 0x0000000000000000, 0x0000200020008008, + 0x4004000020000000, 0x0000200000000000, 0x4004200000008008, 0x0000000020000000, + 0x4004200000000000, 0x4004200020008008, 0x0000200020000000, 0x0000000000008008, + 0x0000200000008008, 0x4004000000008008, 0x0000000020008008, 0x4004200020000000, + 0x0000000020000000, 0x4004200000008008, 0x4004000020008008, 0x0000000000000000, + 0x0000200000000000, 
0x4004000000000000, 0x0000200020008008, 0x4004000020008008, + 0x4004200020008008, 0x0000000020008008, 0x0000000000008008, 0x4004200000000000, + 0x4004000000000000, 0x0000200020000000, 0x4004200020000000, 0x0000200000008008, + 0x4004200000000000, 0x0000000000008008, 0x0000200000008008, 0x4004200020000000, + 0x0000200020008008, 0x4004000020000000, 0x0000000000000000, 0x0000200000008008, + 0x0000000000008008, 0x0000200000000000, 0x4004000020008008, 0x0000000020000000, + 0x4004000020000000, 0x4004200020008008, 0x0000200020000000, 0x4004000000000000, + 0x4004200020008008, 0x0000200020000000, 0x0000000020000000, 0x4004200000008008, + 0x4004000000008008, 0x0000000020008008, 0x4004200020000000, 0x0000000000000000, + 0x0000200000000000, 0x4004000000008008, 0x4004200000008008, 0x0000200020008008, + 0x0000000020008008, 0x4004200000000000, 0x4004000000000000, 0x4004000020008008, ), + ( 0x0000400400000000, 0x0020000000000000, 0x0020000000100000, 0x0400000000100040, + 0x0420400400100040, 0x0400400400000040, 0x0020400400000000, 0x0000000000000000, + 0x0000000000100000, 0x0420000000100040, 0x0420000000000040, 0x0000400400100000, + 0x0400000000000040, 0x0020400400100000, 0x0000400400100000, 0x0420000000000040, + 0x0420000000100040, 0x0000400400000000, 0x0400400400000040, 0x0420400400100040, + 0x0000000000000000, 0x0020000000100000, 0x0400000000100040, 0x0020400400000000, + 0x0400400400100040, 0x0420400400000040, 0x0020400400100000, 0x0400000000000040, + 0x0420400400000040, 0x0400400400100040, 0x0020000000000000, 0x0000000000100000, + 0x0420400400000040, 0x0000400400100000, 0x0400400400100040, 0x0420000000000040, + 0x0000400400000000, 0x0020000000000000, 0x0000000000100000, 0x0400400400100040, + 0x0420000000100040, 0x0420400400000040, 0x0020400400000000, 0x0000000000000000, + 0x0020000000000000, 0x0400000000100040, 0x0400000000000040, 0x0020000000100000, + 0x0000000000000000, 0x0420000000100040, 0x0020000000100000, 0x0020400400000000, + 0x0420000000000040, 0x0000400400000000, 0x0420400400100040, 0x0000000000100000, + 0x0020400400100000, 0x0400000000000040, 0x0400400400000040, 0x0420400400100040, + 0x0400000000100040, 0x0020400400100000, 0x0000400400100000, 0x0400400400000040, ), + ( 0x8008000080082000, 0x0000002080082000, 0x8008002000000000, 0x0000000000000000, + 0x0000002000002000, 0x8008000080080000, 0x0000000080082000, 0x8008002080082000, + 0x8008000000000000, 0x0000000000002000, 0x0000002080080000, 0x8008002000000000, + 0x8008002080080000, 0x8008002000002000, 0x8008000000002000, 0x0000000080082000, + 0x0000002000000000, 0x8008002080080000, 0x8008000080080000, 0x0000002000002000, + 0x8008002080082000, 0x8008000000002000, 0x0000000000000000, 0x0000002080080000, + 0x0000000000002000, 0x0000000080080000, 0x8008002000002000, 0x8008000080082000, + 0x0000000080080000, 0x0000002000000000, 0x0000002080082000, 0x8008000000000000, + 0x0000000080080000, 0x0000002000000000, 0x8008000000002000, 0x8008002080082000, + 0x8008002000000000, 0x0000000000002000, 0x0000000000000000, 0x0000002080080000, + 0x8008000080082000, 0x8008002000002000, 0x0000002000002000, 0x8008000080080000, + 0x0000002080082000, 0x8008000000000000, 0x8008000080080000, 0x0000002000002000, + 0x8008002080082000, 0x0000000080080000, 0x0000000080082000, 0x8008000000002000, + 0x0000002080080000, 0x8008002000000000, 0x8008002000002000, 0x0000000080082000, + 0x8008000000000000, 0x0000002080082000, 0x8008002080080000, 0x0000000000000000, + 0x0000000000002000, 0x8008000080082000, 0x0000002000000000, 0x8008002080080000, ), + ) + + 
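# --- editor's sketch: the "combined table" idea, illustrative only, not part of the patch ---
# The SPE table above folds the DES S-box, P-permutation and E-expansion into one
# 64-entry lookup per S-box, so a round becomes eight table lookups XORed together.
# A minimal standalone sketch of the same precomputation trick, using a made-up
# 2-bit "S-box" and permutation (toy values, chosen only to show the technique):

def _toy_sbox(x):
    # hypothetical 2-bit substitution step
    return (0b10, 0b01, 0b11, 0b00)[x & 0b11]

def _toy_perm(x):
    # hypothetical permutation step: swap the two bits
    return ((x & 0b01) << 1) | ((x & 0b10) >> 1)

# precompute once: a single table lookup now performs both steps
_TOY_SP = tuple(_toy_perm(_toy_sbox(i)) for i in range(4))

assert all(_TOY_SP[i] == _toy_perm(_toy_sbox(i)) for i in range(4))
# --- end editor's sketch ---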
#--------------------------------------------------------------- + # compressed/interleaved => final permutation table + # Compression, final permutation, bit reverse + #--------------------------------------------------------------- + # NOTE: this was reordered from original table to make perm6464 logic simpler + CF6464=( + ( 0x0000000000000000, 0x0000002000000000, 0x0000200000000000, 0x0000202000000000, + 0x0020000000000000, 0x0020002000000000, 0x0020200000000000, 0x0020202000000000, + 0x2000000000000000, 0x2000002000000000, 0x2000200000000000, 0x2000202000000000, + 0x2020000000000000, 0x2020002000000000, 0x2020200000000000, 0x2020202000000000, ), + ( 0x0000000000000000, 0x0000000200000000, 0x0000020000000000, 0x0000020200000000, + 0x0002000000000000, 0x0002000200000000, 0x0002020000000000, 0x0002020200000000, + 0x0200000000000000, 0x0200000200000000, 0x0200020000000000, 0x0200020200000000, + 0x0202000000000000, 0x0202000200000000, 0x0202020000000000, 0x0202020200000000, ), + ( 0x0000000000000000, 0x0000000000000020, 0x0000000000002000, 0x0000000000002020, + 0x0000000000200000, 0x0000000000200020, 0x0000000000202000, 0x0000000000202020, + 0x0000000020000000, 0x0000000020000020, 0x0000000020002000, 0x0000000020002020, + 0x0000000020200000, 0x0000000020200020, 0x0000000020202000, 0x0000000020202020, ), + ( 0x0000000000000000, 0x0000000000000002, 0x0000000000000200, 0x0000000000000202, + 0x0000000000020000, 0x0000000000020002, 0x0000000000020200, 0x0000000000020202, + 0x0000000002000000, 0x0000000002000002, 0x0000000002000200, 0x0000000002000202, + 0x0000000002020000, 0x0000000002020002, 0x0000000002020200, 0x0000000002020202, ), + ( 0x0000000000000000, 0x0000008000000000, 0x0000800000000000, 0x0000808000000000, + 0x0080000000000000, 0x0080008000000000, 0x0080800000000000, 0x0080808000000000, + 0x8000000000000000, 0x8000008000000000, 0x8000800000000000, 0x8000808000000000, + 0x8080000000000000, 0x8080008000000000, 0x8080800000000000, 0x8080808000000000, ), + ( 0x0000000000000000, 0x0000000800000000, 0x0000080000000000, 0x0000080800000000, + 0x0008000000000000, 0x0008000800000000, 0x0008080000000000, 0x0008080800000000, + 0x0800000000000000, 0x0800000800000000, 0x0800080000000000, 0x0800080800000000, + 0x0808000000000000, 0x0808000800000000, 0x0808080000000000, 0x0808080800000000, ), + ( 0x0000000000000000, 0x0000000000000080, 0x0000000000008000, 0x0000000000008080, + 0x0000000000800000, 0x0000000000800080, 0x0000000000808000, 0x0000000000808080, + 0x0000000080000000, 0x0000000080000080, 0x0000000080008000, 0x0000000080008080, + 0x0000000080800000, 0x0000000080800080, 0x0000000080808000, 0x0000000080808080, ), + ( 0x0000000000000000, 0x0000000000000008, 0x0000000000000800, 0x0000000000000808, + 0x0000000000080000, 0x0000000000080008, 0x0000000000080800, 0x0000000000080808, + 0x0000000008000000, 0x0000000008000008, 0x0000000008000800, 0x0000000008000808, + 0x0000000008080000, 0x0000000008080008, 0x0000000008080800, 0x0000000008080808, ), + ( 0x0000000000000000, 0x0000001000000000, 0x0000100000000000, 0x0000101000000000, + 0x0010000000000000, 0x0010001000000000, 0x0010100000000000, 0x0010101000000000, + 0x1000000000000000, 0x1000001000000000, 0x1000100000000000, 0x1000101000000000, + 0x1010000000000000, 0x1010001000000000, 0x1010100000000000, 0x1010101000000000, ), + ( 0x0000000000000000, 0x0000000100000000, 0x0000010000000000, 0x0000010100000000, + 0x0001000000000000, 0x0001000100000000, 0x0001010000000000, 0x0001010100000000, + 0x0100000000000000, 0x0100000100000000, 0x0100010000000000, 
0x0100010100000000, + 0x0101000000000000, 0x0101000100000000, 0x0101010000000000, 0x0101010100000000, ), + ( 0x0000000000000000, 0x0000000000000010, 0x0000000000001000, 0x0000000000001010, + 0x0000000000100000, 0x0000000000100010, 0x0000000000101000, 0x0000000000101010, + 0x0000000010000000, 0x0000000010000010, 0x0000000010001000, 0x0000000010001010, + 0x0000000010100000, 0x0000000010100010, 0x0000000010101000, 0x0000000010101010, ), + ( 0x0000000000000000, 0x0000000000000001, 0x0000000000000100, 0x0000000000000101, + 0x0000000000010000, 0x0000000000010001, 0x0000000000010100, 0x0000000000010101, + 0x0000000001000000, 0x0000000001000001, 0x0000000001000100, 0x0000000001000101, + 0x0000000001010000, 0x0000000001010001, 0x0000000001010100, 0x0000000001010101, ), + ( 0x0000000000000000, 0x0000004000000000, 0x0000400000000000, 0x0000404000000000, + 0x0040000000000000, 0x0040004000000000, 0x0040400000000000, 0x0040404000000000, + 0x4000000000000000, 0x4000004000000000, 0x4000400000000000, 0x4000404000000000, + 0x4040000000000000, 0x4040004000000000, 0x4040400000000000, 0x4040404000000000, ), + ( 0x0000000000000000, 0x0000000400000000, 0x0000040000000000, 0x0000040400000000, + 0x0004000000000000, 0x0004000400000000, 0x0004040000000000, 0x0004040400000000, + 0x0400000000000000, 0x0400000400000000, 0x0400040000000000, 0x0400040400000000, + 0x0404000000000000, 0x0404000400000000, 0x0404040000000000, 0x0404040400000000, ), + ( 0x0000000000000000, 0x0000000000000040, 0x0000000000004000, 0x0000000000004040, + 0x0000000000400000, 0x0000000000400040, 0x0000000000404000, 0x0000000000404040, + 0x0000000040000000, 0x0000000040000040, 0x0000000040004000, 0x0000000040004040, + 0x0000000040400000, 0x0000000040400040, 0x0000000040404000, 0x0000000040404040, ), + ( 0x0000000000000000, 0x0000000000000004, 0x0000000000000400, 0x0000000000000404, + 0x0000000000040000, 0x0000000000040004, 0x0000000000040400, 0x0000000000040404, + 0x0000000004000000, 0x0000000004000004, 0x0000000004000400, 0x0000000004000404, + 0x0000000004040000, 0x0000000004040004, 0x0000000004040400, 0x0000000004040404, ), + ) + #=================================================================== + # eof _load_tables() + #=================================================================== + +#============================================================================= +# support +#============================================================================= + +def _permute(c, p): + """Returns the permutation of the given 32-bit or 64-bit code with + the specified permutation table.""" + # NOTE: only difference between 32 & 64 bit permutations + # is that len(p)==8 for 32 bit, and len(p)==16 for 64 bit. + out = 0 + for r in p: + out |= r[c&0xf] + c >>= 4 + return out + +#============================================================================= +# packing & unpacking +#============================================================================= +# FIXME: more properly named _uint8_struct... 
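# --- editor's sketch: how the 64/56-bit packing below works, illustrative only, not part of the patch ---
# The helpers that follow serialize 64-bit values with a single big-endian ">Q"
# Struct; 56-bit values reuse it by dropping / prepending the always-zero high
# byte. A standalone demonstration of that trick (example values only):

import struct as _struct_demo

_demo64 = _struct_demo.Struct(">Q")                  # 8-byte big-endian unsigned int
assert _demo64.pack(0x0123456789ABCDEF) == b"\x01\x23\x45\x67\x89\xab\xcd\xef"

_val56 = 0x23456789ABCDEF                            # fits in 56 bits
assert _demo64.pack(_val56)[1:] == b"\x23\x45\x67\x89\xab\xcd\xef"             # "pack56"
assert _demo64.unpack(b"\x00" + b"\x23\x45\x67\x89\xab\xcd\xef")[0] == _val56  # "unpack56"
# --- end editor's sketch ---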
+_uint64_struct = struct.Struct(">Q") + +def _pack64(value): + return _uint64_struct.pack(value) + +def _unpack64(value): + return _uint64_struct.unpack(value)[0] + +def _pack56(value): + return _uint64_struct.pack(value)[1:] + +def _unpack56(value): + return _uint64_struct.unpack(b'\x00' + value)[0] + +#============================================================================= +# 56->64 key manipulation +#============================================================================= + +##def expand_7bit(value): +## "expand 7-bit integer => 7-bits + 1 odd-parity bit" +## # parity calc adapted from 32-bit even parity alg found at +## # http://graphics.stanford.edu/~seander/bithacks.html#ParityParallel +## assert 0 <= value < 0x80, "value out of range" +## return (value<<1) | (0x9669 >> ((value ^ (value >> 4)) & 0xf)) & 1 + +_EXPAND_ITER = irange(49,-7,-7) + +def expand_des_key(key): + """convert DES key from 7 bytes to 8 bytes (by inserting empty parity bits)""" + if isinstance(key, bytes): + if len(key) != 7: + raise ValueError("key must be 7 bytes in size") + elif isinstance(key, int_types): + if key < 0 or key > INT_56_MASK: + raise ValueError("key must be 56-bit non-negative integer") + return _unpack64(expand_des_key(_pack56(key))) + else: + raise exc.ExpectedTypeError(key, "bytes or int", "key") + key = _unpack56(key) + # NOTE: the following would insert correctly-valued parity bits in each key, + # but the parity bit would just be ignored in des_encrypt_block(), + # so not bothering to use it. + # XXX: could make parity-restoring optionally available via flag + ##return join_byte_values(expand_7bit((key >> shift) & 0x7f) + ## for shift in _EXPAND_ITER) + return join_byte_values(((key>>shift) & 0x7f)<<1 for shift in _EXPAND_ITER) + +def shrink_des_key(key): + """convert DES key from 8 bytes to 7 bytes (by discarding the parity bits)""" + if isinstance(key, bytes): + if len(key) != 8: + raise ValueError("key must be 8 bytes in size") + return _pack56(shrink_des_key(_unpack64(key))) + elif isinstance(key, int_types): + if key < 0 or key > INT_64_MASK: + raise ValueError("key must be 64-bit non-negative integer") + else: + raise exc.ExpectedTypeError(key, "bytes or int", "key") + key >>= 1 + result = 0 + offset = 0 + while offset < 56: + result |= (key & 0x7f)<<offset + key >>= 8 + offset += 7 + assert not (result & ~INT_64_MASK) + return result + +#============================================================================= +# des encryption +#============================================================================= +def des_encrypt_block(key, input, salt=0, rounds=1): + """encrypt single block of data using DES, operates on 8-byte strings. + + :arg key: + DES key as 7 byte string, or 8 byte string with parity bits + (parity bit values are ignored). + + :arg input: + plaintext block to encrypt, as 8 byte string. + + :arg salt: + Optional 24-bit integer used to mutate the base DES algorithm in a + manner specific to :class:`~passlib.hash.des_crypt` and its variants. + The default value ``0`` provides the normal (unsalted) DES behavior. + The salt functions as follows: + if the ``i``'th bit of ``salt`` is set, + bits ``i`` and ``i+24`` are swapped in the DES E-box output. + + :arg rounds: + Optional number of rounds to apply the DES key schedule. + The default (``rounds=1``) provides the normal DES behavior, + but :class:`~passlib.hash.des_crypt` and its variants use + alternate rounds values. + + :raises TypeError: if any of the provided args are of the wrong type.
+ :raises ValueError: + if any of the input blocks are the wrong size, + or the salt/rounds values are out of range. + + :returns: + resulting 8-byte ciphertext block. + """ + # validate & unpack key + if isinstance(key, bytes): + if len(key) == 7: + key = expand_des_key(key) + elif len(key) != 8: + raise ValueError("key must be 7 or 8 bytes") + key = _unpack64(key) + else: + raise exc.ExpectedTypeError(key, "bytes", "key") + + # validate & unpack input + if isinstance(input, bytes): + if len(input) != 8: + raise ValueError("input block must be 8 bytes") + input = _unpack64(input) + else: + raise exc.ExpectedTypeError(input, "bytes", "input") + + # hand things off to other func + result = des_encrypt_int_block(key, input, salt, rounds) + + # repack result + return _pack64(result) + +def des_encrypt_int_block(key, input, salt=0, rounds=1): + """encrypt single block of data using DES, operates on 64-bit integers. + + this function is essentially the same as :func:`des_encrypt_block`, + except that it operates on integers, and will NOT automatically + expand 56-bit keys if provided (since there's no way to detect them). + + :arg key: + DES key as 64-bit integer (the parity bits are ignored). + + :arg input: + input block as 64-bit integer + + :arg salt: + optional 24-bit integer used to mutate the base DES algorithm. + defaults to ``0`` (no mutation applied). + + :arg rounds: + optional number of rounds of to apply the DES key schedule. + defaults to ``1``. + + :raises TypeError: if any of the provided args are of the wrong type. + :raises ValueError: + if any of the input blocks are the wrong size, + or the salt/rounds values are out of range. + + :returns: + resulting ciphertext as 64-bit integer. + """ + #--------------------------------------------------------------- + # input validation + #--------------------------------------------------------------- + + # validate salt, rounds + if rounds < 1: + raise ValueError("rounds must be positive integer") + if salt < 0 or salt > INT_24_MASK: + raise ValueError("salt must be 24-bit non-negative integer") + + # validate & unpack key + if not isinstance(key, int_types): + raise exc.ExpectedTypeError(key, "int", "key") + elif key < 0 or key > INT_64_MASK: + raise ValueError("key must be 64-bit non-negative integer") + + # validate & unpack input + if not isinstance(input, int_types): + raise exc.ExpectedTypeError(input, "int", "input") + elif input < 0 or input > INT_64_MASK: + raise ValueError("input must be 64-bit non-negative integer") + + #--------------------------------------------------------------- + # DES setup + #--------------------------------------------------------------- + # load tables if not already done + global SPE, PCXROT, IE3264, CF6464 + if PCXROT is None: + _load_tables() + + # load SPE into local vars to speed things up and remove an array access call + SPE0, SPE1, SPE2, SPE3, SPE4, SPE5, SPE6, SPE7 = SPE + + # NOTE: parity bits are ignored completely + # (UTs do fuzz testing to ensure this) + + # generate key schedule + # NOTE: generation was modified to output two elements at a time, + # so that per-round loop could do two passes at once. 
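# --- editor's sketch: why the key schedule is consumed in (even, odd) pairs ---
# Illustrative only, not part of the patch. A classic Feistel round swaps the two
# halves after every step; consuming two subkeys per iteration lets the inner loop
# update L and then R in place without swapping after every round, as the code
# below does. Toy Feistel network (made-up round function, not DES) showing the
# two forms agree for an even-length schedule:

def _toy_f(half, subkey):
    # hypothetical round function
    return (half * 31 + subkey) & 0xFFFF

def _feistel_swapping(L, R, schedule):
    for k in schedule:
        L, R = R, L ^ _toy_f(R, k)
    return L, R

def _feistel_pairwise(L, R, schedule):
    # assumes an even number of subkeys, consumed two at a time
    for k_even, k_odd in zip(schedule[0::2], schedule[1::2]):
        L ^= _toy_f(R, k_even)
        R ^= _toy_f(L, k_odd)
    return L, R

_toy_sched = (0x0123, 0x4567, 0x89AB, 0xCDEF)
assert _feistel_swapping(0x1111, 0x2222, _toy_sched) == _feistel_pairwise(0x1111, 0x2222, _toy_sched)
# --- end editor's sketch ---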
+ def _iter_key_schedule(ks_odd): + """given 64-bit key, iterates over the 8 (even,odd) key schedule pairs""" + for p_even, p_odd in PCXROT: + ks_even = _permute(ks_odd, p_even) + ks_odd = _permute(ks_even, p_odd) + yield ks_even & _KS_MASK, ks_odd & _KS_MASK + ks_list = list(_iter_key_schedule(key)) + + # expand 24 bit salt -> 32 bit per des_crypt & bsdi_crypt + salt = ( + ((salt & 0x00003f) << 26) | + ((salt & 0x000fc0) << 12) | + ((salt & 0x03f000) >> 2) | + ((salt & 0xfc0000) >> 16) + ) + + # init L & R + if input == 0: + L = R = 0 + else: + L = ((input >> 31) & 0xaaaaaaaa) | (input & 0x55555555) + L = _permute(L, IE3264) + + R = ((input >> 32) & 0xaaaaaaaa) | ((input >> 1) & 0x55555555) + R = _permute(R, IE3264) + + #--------------------------------------------------------------- + # main DES loop - run for specified number of rounds + #--------------------------------------------------------------- + while rounds: + rounds -= 1 + + # run over each part of the schedule, 2 parts at a time + for ks_even, ks_odd in ks_list: + k = ((R>>32) ^ R) & salt # use the salt to flip specific bits + B = (k<<32) ^ k ^ R ^ ks_even + + L ^= (SPE0[(B>>58)&0x3f] ^ SPE1[(B>>50)&0x3f] ^ + SPE2[(B>>42)&0x3f] ^ SPE3[(B>>34)&0x3f] ^ + SPE4[(B>>26)&0x3f] ^ SPE5[(B>>18)&0x3f] ^ + SPE6[(B>>10)&0x3f] ^ SPE7[(B>>2)&0x3f]) + + k = ((L>>32) ^ L) & salt # use the salt to flip specific bits + B = (k<<32) ^ k ^ L ^ ks_odd + + R ^= (SPE0[(B>>58)&0x3f] ^ SPE1[(B>>50)&0x3f] ^ + SPE2[(B>>42)&0x3f] ^ SPE3[(B>>34)&0x3f] ^ + SPE4[(B>>26)&0x3f] ^ SPE5[(B>>18)&0x3f] ^ + SPE6[(B>>10)&0x3f] ^ SPE7[(B>>2)&0x3f]) + + # swap L and R + L, R = R, L + + #--------------------------------------------------------------- + # return final result + #--------------------------------------------------------------- + C = ( + ((L>>3) & 0x0f0f0f0f00000000) + | + ((L<<33) & 0xf0f0f0f000000000) + | + ((R>>35) & 0x000000000f0f0f0f) + | + ((R<<1) & 0x00000000f0f0f0f0) + ) + return _permute(C, CF6464) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/digest.py b/ansible/lib/python3.11/site-packages/passlib/crypto/digest.py new file mode 100644 index 000000000..90e0cad56 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/crypto/digest.py @@ -0,0 +1,1057 @@ +"""passlib.crypto.digest -- crytographic helpers used by the password hashes in passlib + +.. 
versionadded:: 1.7 +""" +#============================================================================= +# imports +#============================================================================= +from __future__ import division +# core +import hashlib +import logging; log = logging.getLogger(__name__) +try: + # new in py3.4 + from hashlib import pbkdf2_hmac as _stdlib_pbkdf2_hmac + if _stdlib_pbkdf2_hmac.__module__ == "hashlib": + # builtin pure-python backends are slightly faster than stdlib's pure python fallback, + # so only using stdlib's version if it's backed by openssl's pbkdf2_hmac() + log.debug("ignoring pure-python hashlib.pbkdf2_hmac()") + _stdlib_pbkdf2_hmac = None +except ImportError: + _stdlib_pbkdf2_hmac = None +import re +import os +from struct import Struct +from warnings import warn +# site +try: + # https://pypi.python.org/pypi/fastpbkdf2/ + from fastpbkdf2 import pbkdf2_hmac as _fast_pbkdf2_hmac +except ImportError: + _fast_pbkdf2_hmac = None +# pkg +from passlib import exc +from passlib.utils import join_bytes, to_native_str, join_byte_values, to_bytes, \ + SequenceMixin, as_bool +from passlib.utils.compat import irange, int_types, unicode_or_bytes_types, PY3, error_from +from passlib.utils.decor import memoized_property +# local +__all__ = [ + # hash utils + "lookup_hash", + "HashInfo", + "norm_hash_name", + + # hmac utils + "compile_hmac", + + # kdfs + "pbkdf1", + "pbkdf2_hmac", +] + +#============================================================================= +# generic constants +#============================================================================= + +#: max 32-bit value +MAX_UINT32 = (1 << 32) - 1 + +#: max 64-bit value +MAX_UINT64 = (1 << 64) - 1 + +#============================================================================= +# hash utils +#============================================================================= + +#: list of known hash names, used by lookup_hash()'s _norm_hash_name() helper +_known_hash_names = [ + # format: (hashlib/ssl name, iana name or standin, other known aliases ...) + + #---------------------------------------------------- + # hashes with official IANA-assigned names + # (as of 2012-03 - http://www.iana.org/assignments/hash-function-text-names) + #---------------------------------------------------- + ("md2", "md2"), # NOTE: openssl dropped md2 support in v1.0.0 + ("md5", "md5"), + ("sha1", "sha-1"), + ("sha224", "sha-224", "sha2-224"), + ("sha256", "sha-256", "sha2-256"), + ("sha384", "sha-384", "sha2-384"), + ("sha512", "sha-512", "sha2-512"), + + # TODO: add sha3 to this table. + + #---------------------------------------------------- + # hashlib/ssl-supported hashes without official IANA names, + # (hopefully-) compatible stand-ins have been chosen. + #---------------------------------------------------- + + ("blake2b", "blake-2b"), + ("blake2s", "blake-2s"), + ("md4", "md4"), + # NOTE: there was an older "ripemd" and "ripemd-128", + # but python 2.7+ resolves "ripemd" -> "ripemd160", + # so treating "ripemd" as alias here. + ("ripemd160", "ripemd-160", "ripemd"), +] + + +#: dict mapping hashlib names to hardcoded digest info; +#: so this is available even when hashes aren't present. 
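# --- editor's note: where the hardcoded sizes below come from, illustrative only, not part of the patch ---
# The (digest_size, block_size) pairs in the table that follows simply mirror what
# hashlib reports when the algorithm is available on the current interpreter:

import hashlib as _hashlib_demo

assert (_hashlib_demo.sha256().digest_size, _hashlib_demo.sha256().block_size) == (32, 64)
assert (_hashlib_demo.sha512().digest_size, _hashlib_demo.sha512().block_size) == (64, 128)
# --- end editor's note ---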
+_fallback_info = { + # name: (digest_size, block_size) + 'blake2b': (64, 128), + 'blake2s': (32, 64), + 'md4': (16, 64), + 'md5': (16, 64), + 'sha1': (20, 64), + 'sha224': (28, 64), + 'sha256': (32, 64), + 'sha384': (48, 128), + 'sha3_224': (28, 144), + 'sha3_256': (32, 136), + 'sha3_384': (48, 104), + 'sha3_512': (64, 72), + 'sha512': (64, 128), + 'shake128': (16, 168), + 'shake256': (32, 136), +} + + +def _gen_fallback_info(): + """ + internal helper used to generate ``_fallback_info`` dict. + currently only run manually to update the above list; + not invoked at runtime. + """ + out = {} + for alg in sorted(hashlib.algorithms_available | set(["md4"])): + info = lookup_hash(alg) + out[info.name] = (info.digest_size, info.block_size) + return out + + +#: cache of hash info instances used by lookup_hash() +_hash_info_cache = {} + +def _get_hash_aliases(name): + """ + internal helper used by :func:`lookup_hash` -- + normalize arbitrary hash name to hashlib format. + if name not recognized, returns dummy record and issues a warning. + + :arg name: + unnormalized name + + :returns: + tuple with 2+ elements: ``(hashlib_name, iana_name|None, ... 0+ aliases)``. + """ + + # normalize input + orig = name + if not isinstance(name, str): + name = to_native_str(name, 'utf-8', 'hash name') + name = re.sub("[_ /]", "-", name.strip().lower()) + if name.startswith("scram-"): # helper for SCRAM protocol (see passlib.handlers.scram) + name = name[6:] + if name.endswith("-plus"): + name = name[:-5] + + # look through standard names and known aliases + def check_table(name): + for row in _known_hash_names: + if name in row: + return row + result = check_table(name) + if result: + return result + + # try to clean name up some more + m = re.match(r"(?i)^(?P<name>[a-z]+)-?(?P<rev>\d)?-?(?P<size>\d{3,4})?$", name) + if m: + # roughly follows "SHA2-256" style format, normalize representation, + # and check the table. + iana_name, rev, size = m.group("name", "rev", "size") + if rev: + iana_name += rev + hashlib_name = iana_name + if size: + iana_name += "-" + size + if rev: + hashlib_name += "_" + hashlib_name += size + result = check_table(iana_name) + if result: + return result + + # not found in table, but roughly recognize format. use names we built up as fallback. + log.info("normalizing unrecognized hash name %r => %r / %r", + orig, hashlib_name, iana_name) + + else: + # just can't make sense of it. return something + iana_name = name + hashlib_name = name.replace("-", "_") + log.warning("normalizing unrecognized hash name and format %r => %r / %r", + orig, hashlib_name, iana_name) + + return hashlib_name, iana_name + + +def _get_hash_const(name): + """ + internal helper used by :func:`lookup_hash` -- + lookup hash constructor by name + + :arg name: + name (normalized to hashlib format, e.g. ``"sha256"``) + + :returns: + hash constructor, e.g. ``hashlib.sha256()``; + or None if hash can't be located. + """ + # check hashlib.<name> for an efficient constructor + if not name.startswith("_") and name not in ("new", "algorithms"): + try: + return getattr(hashlib, name) + except AttributeError: + pass + + # check hashlib.new() in case SSL supports the digest + new_ssl_hash = hashlib.new + try: + # new() should throw ValueError if alg is unknown + new_ssl_hash(name, b"") + except ValueError: + pass + else: + # create wrapper function + # XXX: is there a faster way to wrap this?
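# --- editor's note on the XXX above, illustrative only, not part of the patch ---
# One lighter alternative to the explicit wrapper below is functools.partial,
# e.g. functools.partial(hashlib.new, name); the explicit def is kept so the
# wrapper can carry a proper __name__ / __module__ / __doc__, which a plain
# partial object does not provide. Standalone sketch ("sha256" is just an
# example digest name):

import functools as _functools_demo
import hashlib as _hashlib_demo2

_const_demo = _functools_demo.partial(_hashlib_demo2.new, "sha256")
assert _const_demo(b"abc").digest() == _hashlib_demo2.sha256(b"abc").digest()
# --- end editor's note ---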
+ def const(msg=b""): + return new_ssl_hash(name, msg) + const.__name__ = name + const.__module__ = "hashlib" + const.__doc__ = ("wrapper for hashlib.new(%r),\n" + "generated by passlib.crypto.digest.lookup_hash()") % name + return const + + # use builtin md4 as fallback when not supported by hashlib + if name == "md4": + from passlib.crypto._md4 import md4 + return md4 + + # XXX: any other modules / registries we should check? + # TODO: add pysha3 support. + + return None + + +def lookup_hash(digest, # *, + return_unknown=False, required=True): + """ + Returns a :class:`HashInfo` record containing information about a given hash function. + Can be used to look up a hash constructor by name, normalize hash name representation, etc. + + :arg digest: + This can be any of: + + * A string containing a :mod:`!hashlib` digest name (e.g. ``"sha256"``), + * A string containing an IANA-assigned hash name, + * A digest constructor function (e.g. ``hashlib.sha256``). + + Case is ignored, underscores are converted to hyphens, + and various other cleanups are made. + + :param required: + By default (True), this function will throw an :exc:`~passlib.exc.UnknownHashError` if no hash constructor + can be found, or if the hash is not actually available. + + If this flag is False, it will instead return a dummy :class:`!HashInfo` record + which will defer throwing the error until it's constructor function is called. + This is mainly used by :func:`norm_hash_name`. + + :param return_unknown: + + .. deprecated:: 1.7.3 + + deprecated, and will be removed in passlib 2.0. + this acts like inverse of **required**. + + :returns HashInfo: + :class:`HashInfo` instance containing information about specified digest. + + Multiple calls resolving to the same hash should always + return the same :class:`!HashInfo` instance. + """ + # check for cached entry + cache = _hash_info_cache + try: + return cache[digest] + except (KeyError, TypeError): + # NOTE: TypeError is to catch 'TypeError: unhashable type' (e.g. HashInfo) + pass + + # legacy alias + if return_unknown: + required = False + + # resolve ``digest`` to ``const`` & ``name_record`` + cache_by_name = True + if isinstance(digest, unicode_or_bytes_types): + # normalize name + name_list = _get_hash_aliases(digest) + name = name_list[0] + assert name + + # if name wasn't normalized to hashlib format, + # get info for normalized name and reuse it. + if name != digest: + info = lookup_hash(name, required=required) + cache[digest] = info + return info + + # else look up constructor + # NOTE: may return None, which is handled by HashInfo constructor + const = _get_hash_const(name) + + # if mock fips mode is enabled, replace with dummy constructor + # (to replicate how it would behave on a real fips system). + if const and mock_fips_mode and name not in _fips_algorithms: + def const(source=b""): + raise ValueError("%r disabled for fips by passlib set_mock_fips_mode()" % name) + + elif isinstance(digest, HashInfo): + # handle border case where HashInfo is passed in. + return digest + + elif callable(digest): + # try to lookup digest based on it's self-reported name + # (which we trust to be the canonical "hashlib" name) + const = digest + name_list = _get_hash_aliases(const().name) + name = name_list[0] + other_const = _get_hash_const(name) + if other_const is None: + # this is probably a third-party digest we don't know about, + # so just pass it on through, and register reverse lookup for it's name. 
+ pass + + elif other_const is const: + # if we got back same constructor, this is just a known stdlib constructor, + # which was passed in before we had cached it by name. proceed normally. + pass + + else: + # if we got back different object, then ``const`` is something else + # (such as a mock object), in which case we want to skip caching it by name, + # as that would conflict with real hash. + cache_by_name = False + + else: + raise exc.ExpectedTypeError(digest, "digest name or constructor", "digest") + + # create new instance + info = HashInfo(const=const, names=name_list, required=required) + + # populate cache + if const is not None: + cache[const] = info + if cache_by_name: + for name in name_list: + if name: # (skips iana name if it's empty) + assert cache.get(name) in [None, info], "%r already in cache" % name + cache[name] = info + return info + +#: UT helper for clearing internal cache +lookup_hash.clear_cache = _hash_info_cache.clear + + +def norm_hash_name(name, format="hashlib"): + """Normalize hash function name (convenience wrapper for :func:`lookup_hash`). + + :arg name: + Original hash function name. + + This name can be a Python :mod:`~hashlib` digest name, + a SCRAM mechanism name, IANA assigned hash name, etc. + Case is ignored, and underscores are converted to hyphens. + + :param format: + Naming convention to normalize to. + Possible values are: + + * ``"hashlib"`` (the default) - normalizes name to be compatible + with Python's :mod:`!hashlib`. + + * ``"iana"`` - normalizes name to IANA-assigned hash function name. + For hashes which IANA hasn't assigned a name for, this issues a warning, + and then uses a heuristic to return a "best guess" name. + + :returns: + Hash name, returned as native :class:`!str`. + """ + info = lookup_hash(name, required=False) + if info.unknown: + warn("norm_hash_name(): " + info.error_text, exc.PasslibRuntimeWarning) + if format == "hashlib": + return info.name + elif format == "iana": + return info.iana_name + else: + raise ValueError("unknown format: %r" % (format,)) + + +class HashInfo(SequenceMixin): + """ + Record containing information about a given hash algorithm, as returned :func:`lookup_hash`. + + This class exposes the following attributes: + + .. autoattribute:: const + .. autoattribute:: digest_size + .. autoattribute:: block_size + .. autoattribute:: name + .. autoattribute:: iana_name + .. autoattribute:: aliases + .. autoattribute:: supported + + This object can also be treated a 3-element sequence + containing ``(const, digest_size, block_size)``. + """ + #========================================================================= + # instance attrs + #========================================================================= + + #: Canonical / hashlib-compatible name (e.g. ``"sha256"``). + name = None + + #: IANA assigned name (e.g. ``"sha-256"``), may be ``None`` if unknown. + iana_name = None + + #: Tuple of other known aliases (may be empty) + aliases = () + + #: Hash constructor function (e.g. :func:`hashlib.sha256`) + const = None + + #: Hash's digest size + digest_size = None + + #: Hash's block size + block_size = None + + #: set when hash isn't available, will be filled in with string containing error text + #: that const() will raise. 
+ error_text = None + + #: set when error_text is due to hash algorithm being completely unknown + #: (not just unavailable on current system) + unknown = False + + #========================================================================= + # init + #========================================================================= + + def __init__(self, # *, + const, names, required=True): + """ + initialize new instance. + :arg const: + hash constructor + :arg names: + list of 2+ names. should be list of ``(name, iana_name, ... 0+ aliases)``. + names must be lower-case. only iana name may be None. + """ + # init names + name = self.name = names[0] + self.iana_name = names[1] + self.aliases = names[2:] + + def use_stub_const(msg): + """ + helper that installs stub constructor which throws specified error . + """ + def const(source=b""): + raise exc.UnknownHashError(msg, name) + if required: + # if caller only wants supported digests returned, + # just throw error immediately... + const() + assert "shouldn't get here" + self.error_text = msg + self.const = const + try: + self.digest_size, self.block_size = _fallback_info[name] + except KeyError: + pass + + # handle "constructor not available" case + if const is None: + if names in _known_hash_names: + msg = "unsupported hash: %r" % name + else: + msg = "unknown hash: %r" % name + self.unknown = True + use_stub_const(msg) + # TODO: load in preset digest size info for known hashes. + return + + # create hash instance to inspect + try: + hash = const() + except ValueError as err: + # per issue 116, FIPS compliant systems will have a constructor; + # but it will throw a ValueError with this message. As of 1.7.3, + # translating this into DisabledHashError. + # "ValueError: error:060800A3:digital envelope routines:EVP_DigestInit_ex:disabled for fips" + if "disabled for fips" in str(err).lower(): + msg = "%r hash disabled for fips" % name + else: + msg = "internal error in %r constructor\n(%s: %s)" % (name, type(err).__name__, err) + use_stub_const(msg) + return + + # store stats about hash + self.const = const + self.digest_size = hash.digest_size + self.block_size = hash.block_size + + # do sanity check on digest size + if len(hash.digest()) != hash.digest_size: + raise RuntimeError("%r constructor failed sanity check" % self.name) + + # do sanity check on name. + if hash.name != self.name: + warn("inconsistent digest name: %r resolved to %r, which reports name as %r" % + (self.name, const, hash.name), exc.PasslibRuntimeWarning) + + #========================================================================= + # methods + #========================================================================= + def __repr__(self): + return " digest output``. + + However, if ``multipart=True``, the returned function has the signature + ``hmac() -> update, finalize``, where ``update(msg)`` may be called multiple times, + and ``finalize() -> digest_output`` may be repeatedly called at any point to + calculate the HMAC digest so far. + + The returned object will also have a ``digest_info`` attribute, containing + a :class:`lookup_hash` instance for the specified digest. + + This function exists, and has the weird signature it does, in order to squeeze as + provide as much efficiency as possible, by omitting much of the setup cost + and features of the stdlib :mod:`hmac` module. 
+ """ + # all the following was adapted from stdlib's hmac module + + # resolve digest (cached) + digest_info = lookup_hash(digest) + const, digest_size, block_size = digest_info + assert block_size >= 16, "block size too small" + + # prepare key + if not isinstance(key, bytes): + key = to_bytes(key, param="key") + klen = len(key) + if klen > block_size: + key = const(key).digest() + klen = digest_size + if klen < block_size: + key += b'\x00' * (block_size - klen) + + # create pre-initialized hash constructors + _inner_copy = const(key.translate(_TRANS_36)).copy + _outer_copy = const(key.translate(_TRANS_5C)).copy + + if multipart: + # create multi-part function + # NOTE: this is slightly slower than the single-shot version, + # and should only be used if needed. + def hmac(): + """generated by compile_hmac(multipart=True)""" + inner = _inner_copy() + def finalize(): + outer = _outer_copy() + outer.update(inner.digest()) + return outer.digest() + return inner.update, finalize + else: + + # single-shot function + def hmac(msg): + """generated by compile_hmac()""" + inner = _inner_copy() + inner.update(msg) + outer = _outer_copy() + outer.update(inner.digest()) + return outer.digest() + + # add info attr + hmac.digest_info = digest_info + return hmac + +#============================================================================= +# pbkdf1 +#============================================================================= +def pbkdf1(digest, secret, salt, rounds, keylen=None): + """pkcs#5 password-based key derivation v1.5 + + :arg digest: + digest name or constructor. + + :arg secret: + secret to use when generating the key. + may be :class:`!bytes` or :class:`unicode` (encoded using UTF-8). + + :arg salt: + salt string to use when generating key. + may be :class:`!bytes` or :class:`unicode` (encoded using UTF-8). + + :param rounds: + number of rounds to use to generate key. + + :arg keylen: + number of bytes to generate (if omitted / ``None``, uses digest's native size) + + :returns: + raw :class:`bytes` of generated key + + .. note:: + + This algorithm has been deprecated, new code should use PBKDF2. + Among other limitations, ``keylen`` cannot be larger + than the digest size of the specified hash. + """ + # resolve digest + const, digest_size, block_size = lookup_hash(digest) + + # validate secret & salt + secret = to_bytes(secret, param="secret") + salt = to_bytes(salt, param="salt") + + # validate rounds + if not isinstance(rounds, int_types): + raise exc.ExpectedTypeError(rounds, "int", "rounds") + if rounds < 1: + raise ValueError("rounds must be at least 1") + + # validate keylen + if keylen is None: + keylen = digest_size + elif not isinstance(keylen, int_types): + raise exc.ExpectedTypeError(keylen, "int or None", "keylen") + elif keylen < 0: + raise ValueError("keylen must be at least 0") + elif keylen > digest_size: + raise ValueError("keylength too large for digest: %r > %r" % + (keylen, digest_size)) + + # main pbkdf1 loop + block = secret + salt + for _ in irange(rounds): + block = const(block).digest() + return block[:keylen] + +#============================================================================= +# pbkdf2 +#============================================================================= + +_pack_uint32 = Struct(">L").pack + +def pbkdf2_hmac(digest, secret, salt, rounds, keylen=None): + """pkcs#5 password-based key derivation v2.0 using HMAC + arbitrary digest. + + :arg digest: + digest name or constructor. + + :arg secret: + passphrase to use to generate key. 
+ may be :class:`!bytes` or :class:`unicode` (encoded using UTF-8). + + :arg salt: + salt string to use when generating key. + may be :class:`!bytes` or :class:`unicode` (encoded using UTF-8). + + :param rounds: + number of rounds to use to generate key. + + :arg keylen: + number of bytes to generate. + if omitted / ``None``, will use digest's native output size. + + :returns: + raw bytes of generated key + + .. versionchanged:: 1.7 + + This function will use the first available of the following backends: + + * `fastpbk2 `_ + * :func:`hashlib.pbkdf2_hmac` (only available in py2 >= 2.7.8, and py3 >= 3.4) + * builtin pure-python backend + + See :data:`passlib.crypto.digest.PBKDF2_BACKENDS` to determine + which backend(s) are in use. + """ + # validate secret & salt + secret = to_bytes(secret, param="secret") + salt = to_bytes(salt, param="salt") + + # resolve digest + digest_info = lookup_hash(digest) + digest_size = digest_info.digest_size + + # validate rounds + if not isinstance(rounds, int_types): + raise exc.ExpectedTypeError(rounds, "int", "rounds") + if rounds < 1: + raise ValueError("rounds must be at least 1") + + # validate keylen + if keylen is None: + keylen = digest_size + elif not isinstance(keylen, int_types): + raise exc.ExpectedTypeError(keylen, "int or None", "keylen") + elif keylen < 1: + # XXX: could allow keylen=0, but want to be compat w/ stdlib + raise ValueError("keylen must be at least 1") + + # find smallest block count s.t. keylen <= block_count * digest_size; + # make sure block count won't overflow (per pbkdf2 spec) + # this corresponds to throwing error if keylen > digest_size * MAX_UINT32 + # NOTE: stdlib will throw error at lower bound (keylen > MAX_SINT32) + # NOTE: have do this before other backends checked, since fastpbkdf2 raises wrong error + # (InvocationError, not OverflowError) + block_count = (keylen + digest_size - 1) // digest_size + if block_count > MAX_UINT32: + raise OverflowError("keylen too long for digest") + + # + # check for various high-speed backends + # + + # ~3x faster than pure-python backend + # NOTE: have to do this after above guards since fastpbkdf2 lacks bounds checks. + if digest_info.supported_by_fastpbkdf2: + return _fast_pbkdf2_hmac(digest_info.name, secret, salt, rounds, keylen) + + # ~1.4x faster than pure-python backend + # NOTE: have to do this after fastpbkdf2 since hashlib-ssl is slower, + # will support larger number of hashes. 
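# --- editor's usage sketch: backend-independent output ---
# Illustrative only; assumes the passlib.crypto.digest module added by this patch
# is importable. Whichever backend is selected here, pbkdf2_hmac() must agree with
# any other PBKDF2-HMAC implementation for the same parameters, e.g. the stdlib
# (parameter values below are arbitrary examples):

import hashlib as _pk_hashlib
from passlib.crypto.digest import pbkdf2_hmac as _pbkdf2_hmac_demo

assert _pbkdf2_hmac_demo("sha256", b"password", b"salt", 1000, 32) == \
       _pk_hashlib.pbkdf2_hmac("sha256", b"password", b"salt", 1000, 32)
# --- end editor's sketch ---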
+ if digest_info.supported_by_hashlib_pbkdf2: + return _stdlib_pbkdf2_hmac(digest_info.name, secret, salt, rounds, keylen) + + # + # otherwise use our own implementation + # + + # generated keyed hmac + keyed_hmac = compile_hmac(digest, secret) + + # get helper to calculate pbkdf2 inner loop efficiently + calc_block = _get_pbkdf2_looper(digest_size) + + # assemble & return result + return join_bytes( + calc_block(keyed_hmac, keyed_hmac(salt + _pack_uint32(i)), rounds) + for i in irange(1, block_count + 1) + )[:keylen] + +#------------------------------------------------------------------------------------- +# pick best choice for pure-python helper +# TODO: consider some alternatives, such as C-accelerated xor_bytes helper if available +#------------------------------------------------------------------------------------- +# NOTE: this env var is only present to support the admin/benchmark_pbkdf2 script +_force_backend = os.environ.get("PASSLIB_PBKDF2_BACKEND") or "any" + +if PY3 and _force_backend in ["any", "from-bytes"]: + from functools import partial + + def _get_pbkdf2_looper(digest_size): + return partial(_pbkdf2_looper, digest_size) + + def _pbkdf2_looper(digest_size, keyed_hmac, digest, rounds): + """ + py3-only implementation of pbkdf2 inner loop; + uses 'int.from_bytes' + integer XOR + """ + from_bytes = int.from_bytes + BIG = "big" # endianess doesn't matter, just has to be consistent + accum = from_bytes(digest, BIG) + for _ in irange(rounds - 1): + digest = keyed_hmac(digest) + accum ^= from_bytes(digest, BIG) + return accum.to_bytes(digest_size, BIG) + + _builtin_backend = "from-bytes" + +elif _force_backend in ["any", "unpack", "from-bytes"]: + from struct import Struct + from passlib.utils import sys_bits + + _have_64_bit = (sys_bits >= 64) + + #: cache used by _get_pbkdf2_looper + _looper_cache = {} + + def _get_pbkdf2_looper(digest_size): + """ + We want a helper function which performs equivalent of the following:: + + def helper(keyed_hmac, digest, rounds): + accum = digest + for _ in irange(rounds - 1): + digest = keyed_hmac(digest) + accum ^= digest + return accum + + However, no efficient way to implement "bytes ^ bytes" in python. + Instead, using approach where we dynamically compile a helper function based + on digest size. Instead of a single `accum` var, this helper breaks the digest + into a series of integers. + + It stores these in a series of`accum_` vars, and performs `accum ^= digest` + by unpacking digest and perform xor for each "accum_ ^= digest_". + this keeps everything in locals, avoiding excessive list creation, encoding or decoding, + etc. + + :param digest_size: + digest size to compile for, in bytes. (must be multiple of 4). + + :return: + helper function with call signature outlined above. 
+ """ + # + # cache helpers + # + try: + return _looper_cache[digest_size] + except KeyError: + pass + + # + # figure out most efficient struct format to unpack digest into list of native ints + # + if _have_64_bit and not digest_size & 0x7: + # digest size multiple of 8, on a 64 bit system -- use array of UINT64 + count = (digest_size >> 3) + fmt = "=%dQ" % count + elif not digest_size & 0x3: + if _have_64_bit: + # digest size multiple of 4, on a 64 bit system -- use array of UINT64 + 1 UINT32 + count = (digest_size >> 3) + fmt = "=%dQI" % count + count += 1 + else: + # digest size multiple of 4, on a 32 bit system -- use array of UINT32 + count = (digest_size >> 2) + fmt = "=%dI" % count + else: + # stopping here, cause no known hashes have digest size that isn't multiple of 4 bytes. + # if needed, could go crazy w/ "H" & "B" + raise NotImplementedError("unsupported digest size: %d" % digest_size) + struct = Struct(fmt) + + # + # build helper source + # + tdict = dict( + digest_size=digest_size, + accum_vars=", ".join("acc_%d" % i for i in irange(count)), + digest_vars=", ".join("dig_%d" % i for i in irange(count)), + ) + + # head of function + source = ( + "def helper(keyed_hmac, digest, rounds):\n" + " '''pbkdf2 loop helper for digest_size={digest_size}'''\n" + " unpack_digest = struct.unpack\n" + " {accum_vars} = unpack_digest(digest)\n" + " for _ in irange(1, rounds):\n" + " digest = keyed_hmac(digest)\n" + " {digest_vars} = unpack_digest(digest)\n" + ).format(**tdict) + + # xor digest + for i in irange(count): + source += " acc_%d ^= dig_%d\n" % (i, i) + + # return result + source += " return struct.pack({accum_vars})\n".format(**tdict) + + # + # compile helper + # + code = compile(source, "", "exec") + gdict = dict(irange=irange, struct=struct) + ldict = dict() + eval(code, gdict, ldict) + helper = ldict['helper'] + if __debug__: + helper.__source__ = source + + # + # store in cache + # + _looper_cache[digest_size] = helper + return helper + + _builtin_backend = "unpack" + +else: + assert _force_backend in ["any", "hexlify"] + + # XXX: older & slower approach that used int(hexlify()), + # keeping it around for a little while just for benchmarking. 
+ + from binascii import hexlify as _hexlify + from passlib.utils import int_to_bytes + + def _get_pbkdf2_looper(digest_size): + return _pbkdf2_looper + + def _pbkdf2_looper(keyed_hmac, digest, rounds): + hexlify = _hexlify + accum = int(hexlify(digest), 16) + for _ in irange(rounds - 1): + digest = keyed_hmac(digest) + accum ^= int(hexlify(digest), 16) + return int_to_bytes(accum, len(digest)) + + _builtin_backend = "hexlify" + +# helper for benchmark script -- disable hashlib, fastpbkdf2 support if builtin requested +if _force_backend == _builtin_backend: + _fast_pbkdf2_hmac = _stdlib_pbkdf2_hmac = None + +# expose info about what backends are active +PBKDF2_BACKENDS = [b for b in [ + "fastpbkdf2" if _fast_pbkdf2_hmac else None, + "hashlib-ssl" if _stdlib_pbkdf2_hmac else None, + "builtin-" + _builtin_backend +] if b] + +# *very* rough estimate of relative speed (compared to sha256 using 'unpack' backend on 64bit arch) +if "fastpbkdf2" in PBKDF2_BACKENDS: + PBKDF2_SPEED_FACTOR = 3 +elif "hashlib-ssl" in PBKDF2_BACKENDS: + PBKDF2_SPEED_FACTOR = 1.4 +else: + # remaining backends have *some* difference in performance, but not enough to matter + PBKDF2_SPEED_FACTOR = 1 + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__init__.py b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__init__.py new file mode 100644 index 000000000..c71873abb --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__init__.py @@ -0,0 +1,281 @@ +""" +passlib.utils.scrypt -- scrypt hash frontend and help utilities + +XXX: add this module to public docs? +""" +#========================================================================== +# imports +#========================================================================== +from __future__ import absolute_import +# core +import logging; log = logging.getLogger(__name__) +from warnings import warn +# pkg +from passlib import exc +from passlib.utils import to_bytes +from passlib.utils.compat import PYPY +# local +__all__ =[ + "validate", + "scrypt", +] + +#========================================================================== +# config validation +#========================================================================== + +#: internal global constant for setting stdlib scrypt's maxmem (int bytes). +#: set to -1 to auto-calculate (see _load_stdlib_backend() below) +#: set to 0 for openssl default (32mb according to python docs) +#: TODO: standardize this across backends, and expose support via scrypt hash config; +#: currently not very configurable, and only applies to stdlib backend. +SCRYPT_MAXMEM = -1 + +#: max output length in bytes +MAX_KEYLEN = ((1 << 32) - 1) * 32 + +#: max ``r * p`` limit +MAX_RP = (1 << 30) - 1 + +# TODO: unittests for this function +def validate(n, r, p): + """ + helper which validates a set of scrypt config parameters. + scrypt will take ``O(n * r * p)`` time and ``O(n * r)`` memory. + limitations are that ``n = 2**``, ``n < 2**(16*r)``, ``r * p < 2 ** 30``. 
+ + :param n: scrypt rounds + :param r: scrypt block size + :param p: scrypt parallel factor + """ + if r < 1: + raise ValueError("r must be > 0: r=%r" % r) + + if p < 1: + raise ValueError("p must be > 0: p=%r" % p) + + if r * p > MAX_RP: + # pbkdf2-hmac-sha256 limitation - it will be requested to generate ``p*(2*r)*64`` bytes, + # but pbkdf2 can do max of (2**31-1) blocks, and sha-256 has 32 byte block size... + # so ``(2**31-1)*32 >= p*r*128`` -> ``r*p < 2**30`` + raise ValueError("r * p must be < 2**30: r=%r, p=%r" % (r,p)) + + if n < 2 or n & (n - 1): + raise ValueError("n must be > 1, and a power of 2: n=%r" % n) + + return True + + +UINT32_SIZE = 4 + + +def estimate_maxmem(n, r, p, fudge=1.05): + """ + calculate memory required for parameter combination. + assumes parameters have already been validated. + + .. warning:: + this is derived from OpenSSL's scrypt maxmem formula; + and may not be correct for other implementations + (additional buffers, different parallelism tradeoffs, etc). + """ + # XXX: expand to provide upper bound for diff backends, or max across all of them? + # NOTE: openssl's scrypt() enforces it's maxmem parameter based on calc located at + # , ending in line containing "Blen + Vlen > maxmem" + # using the following formula: + # Blen = p * 128 * r + # Vlen = 32 * r * (N + 2) * sizeof(uint32_t) + # total_bytes = Blen + Vlen + maxmem = r * (128 * p + 32 * (n + 2) * UINT32_SIZE) + # add fudge factor so we don't have off-by-one mismatch w/ openssl + maxmem = int(maxmem * fudge) + return maxmem + + +# TODO: configuration picker (may need psutil for full effect) + +#========================================================================== +# hash frontend +#========================================================================== + +#: backend function used by scrypt(), filled in by _set_backend() +_scrypt = None + +#: name of backend currently in use, exposed for informational purposes. +backend = None + +def scrypt(secret, salt, n, r, p=1, keylen=32): + """run SCrypt key derivation function using specified parameters. + + :arg secret: + passphrase string (unicode is encoded to bytes using utf-8). + + :arg salt: + salt string (unicode is encoded to bytes using utf-8). + + :arg n: + integer 'N' parameter + + :arg r: + integer 'r' parameter + + :arg p: + integer 'p' parameter + + :arg keylen: + number of bytes of key to generate. + defaults to 32 (the internal block size). + + :returns: + a *keylen*-sized bytes instance + + SCrypt imposes a number of constraints on it's input parameters: + + * ``r * p < 2**30`` -- due to a limitation of PBKDF2-HMAC-SHA256. + * ``keylen < (2**32 - 1) * 32`` -- due to a limitation of PBKDF2-HMAC-SHA256. + * ``n`` must a be a power of 2, and > 1 -- internal limitation of scrypt() implementation + + :raises ValueError: if the provided parameters are invalid (see constraints above). + + .. warning:: + + Unless the third-party ``scrypt ``_ package + is installed, passlib will use a builtin pure-python implementation of scrypt, + which is *considerably* slower (and thus requires a much lower / less secure + ``n`` value in order to be usuable). Installing the :mod:`!scrypt` package + is strongly recommended. 
+ """ + validate(n, r, p) + secret = to_bytes(secret, param="secret") + salt = to_bytes(salt, param="salt") + if keylen < 1: + raise ValueError("keylen must be at least 1") + if keylen > MAX_KEYLEN: + raise ValueError("keylen too large, must be <= %d" % MAX_KEYLEN) + return _scrypt(secret, salt, n, r, p, keylen) + + +def _load_builtin_backend(): + """ + Load pure-python scrypt implementation built into passlib. + """ + slowdown = 10 if PYPY else 100 + warn("Using builtin scrypt backend, which is %dx slower than is required " + "for adequate security. Installing scrypt support (via 'pip install scrypt') " + "is strongly recommended" % slowdown, exc.PasslibSecurityWarning) + from ._builtin import ScryptEngine + return ScryptEngine.execute + + +def _load_cffi_backend(): + """ + Try to import the ctypes-based scrypt hash function provided by the + ``scrypt ``_ package. + """ + try: + from scrypt import hash + return hash + except ImportError: + pass + # not available, but check to see if package present but outdated / not installed right + try: + import scrypt + except ImportError as err: + if "scrypt" not in str(err): + # e.g. if cffi isn't set up right + # user should try importing scrypt explicitly to diagnose problem. + warn("'scrypt' package failed to import correctly (possible installation issue?)", + exc.PasslibWarning) + # else: package just isn't installed + else: + warn("'scrypt' package is too old (lacks ``hash()`` method)", exc.PasslibWarning) + return None + + +def _load_stdlib_backend(): + """ + Attempt to load stdlib scrypt() implement and return wrapper. + Returns None if not found. + """ + try: + # new in python 3.6, if compiled with openssl >= 1.1 + from hashlib import scrypt as stdlib_scrypt + except ImportError: + return None + + def stdlib_scrypt_wrapper(secret, salt, n, r, p, keylen): + # work out appropriate "maxmem" parameter + # + # TODO: would like to enforce a single "maxmem" policy across all backends; + # and maybe expose this via scrypt hasher config. + # + # for now, since parameters should all be coming from internally-controlled sources + # (password hashes), using policy of "whatever memory the parameters needs". + # furthermore, since stdlib scrypt is only place that needs this, + # currently calculating exactly what maxmem needs to make things work for stdlib call. + # as hack, this can be overriden via SCRYPT_MAXMEM above, + # would like to formalize all of this. + maxmem = SCRYPT_MAXMEM + if maxmem < 0: + maxmem = estimate_maxmem(n, r, p) + return stdlib_scrypt(password=secret, salt=salt, n=n, r=r, p=p, dklen=keylen, + maxmem=maxmem) + + return stdlib_scrypt_wrapper + + +#: list of potential backends +backend_values = ("stdlib", "scrypt", "builtin") + +#: dict mapping backend name -> loader +_backend_loaders = dict( + stdlib=_load_stdlib_backend, + scrypt=_load_cffi_backend, # XXX: rename backend constant to "cffi"? + builtin=_load_builtin_backend, +) + + +def _set_backend(name, dryrun=False): + """ + set backend for scrypt(). if name not specified, loads first available. + + :raises ~passlib.exc.MissingBackendError: if backend can't be found + + .. 
note:: mainly intended to be called by unittests, and scrypt hash handler + """ + if name == "any": + return + elif name == "default": + for name in backend_values: + try: + return _set_backend(name, dryrun=dryrun) + except exc.MissingBackendError: + continue + raise exc.MissingBackendError("no scrypt backends available") + else: + loader = _backend_loaders.get(name) + if not loader: + raise ValueError("unknown scrypt backend: %r" % (name,)) + hash = loader() + if not hash: + raise exc.MissingBackendError("scrypt backend %r not available" % name) + if dryrun: + return + global _scrypt, backend + backend = name + _scrypt = hash + +# initialize backend +_set_backend("default") + + +def _has_backend(name): + try: + _set_backend(name, dryrun=True) + return True + except exc.MissingBackendError: + return False + +#========================================================================== +# eof +#========================================================================== diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 000000000..3ddaeba37 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__pycache__/_builtin.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__pycache__/_builtin.cpython-311.pyc new file mode 100644 index 000000000..8d8371670 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__pycache__/_builtin.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__pycache__/_gen_files.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__pycache__/_gen_files.cpython-311.pyc new file mode 100644 index 000000000..335d12f6f Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__pycache__/_gen_files.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__pycache__/_salsa.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__pycache__/_salsa.cpython-311.pyc new file mode 100644 index 000000000..d258c9e9b Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/__pycache__/_salsa.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/_builtin.py b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/_builtin.py new file mode 100644 index 000000000..e9bb305d2 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/_builtin.py @@ -0,0 +1,244 @@ +"""passlib.utils.scrypt._builtin -- scrypt() kdf in pure-python""" +#========================================================================== +# imports +#========================================================================== +# core +import operator +import struct +# pkg +from passlib.utils.compat import izip +from passlib.crypto.digest import pbkdf2_hmac +from passlib.crypto.scrypt._salsa import salsa20 +# local +__all__ =[ + "ScryptEngine", +] + +#========================================================================== +# scrypt engine +#========================================================================== +class ScryptEngine(object): + """ + helper class used to run scrypt kdf, see 
scrypt() for frontend + + .. warning:: + this class does NO validation of the input ranges or types. + + it's not intended to be used directly, + but only as a backend for :func:`passlib.utils.scrypt.scrypt()`. + """ + #================================================================= + # instance attrs + #================================================================= + + # primary scrypt config parameters + n = 0 + r = 0 + p = 0 + + # derived values & objects + smix_bytes = 0 + iv_bytes = 0 + bmix_len = 0 + bmix_half_len = 0 + bmix_struct = None + integerify = None + + #================================================================= + # frontend + #================================================================= + @classmethod + def execute(cls, secret, salt, n, r, p, keylen): + """create engine & run scrypt() hash calculation""" + return cls(n, r, p).run(secret, salt, keylen) + + #================================================================= + # init + #================================================================= + def __init__(self, n, r, p): + # store config + self.n = n + self.r = r + self.p = p + self.smix_bytes = r << 7 # num bytes in smix input - 2*r*16*4 + self.iv_bytes = self.smix_bytes * p + self.bmix_len = bmix_len = r << 5 # length of bmix block list - 32*r integers + self.bmix_half_len = r << 4 + assert struct.calcsize("I") == 4 + self.bmix_struct = struct.Struct("<" + str(bmix_len) + "I") + + # use optimized bmix for certain cases + if r == 1: + self.bmix = self._bmix_1 + + # pick best integerify function - integerify(bmix_block) should + # take last 64 bytes of block and return a little-endian integer. + # since it's immediately converted % n, we only have to extract + # the first 32 bytes if n < 2**32 - which due to the current + # internal representation, is already unpacked as a 32-bit int. + if n <= 0xFFFFffff: + integerify = operator.itemgetter(-16) + else: + assert n <= 0xFFFFffffFFFFffff + ig1 = operator.itemgetter(-16) + ig2 = operator.itemgetter(-17) + def integerify(X): + return ig1(X) | (ig2(X)<<32) + self.integerify = integerify + + #================================================================= + # frontend + #================================================================= + def run(self, secret, salt, keylen): + """ + run scrypt kdf for specified secret, salt, and keylen + + .. note:: + + * time cost is ``O(n * r * p)`` + * mem cost is ``O(n * r)`` + """ + # stretch salt into initial byte array via pbkdf2 + iv_bytes = self.iv_bytes + input = pbkdf2_hmac("sha256", secret, salt, rounds=1, keylen=iv_bytes) + + # split initial byte array into 'p' mflen-sized chunks, + # and run each chunk through smix() to generate output chunk. + smix = self.smix + if self.p == 1: + output = smix(input) + else: + # XXX: *could* use threading here, if really high p values encountered, + # but would tradeoff for more memory usage. + smix_bytes = self.smix_bytes + output = b''.join( + smix(input[offset:offset+smix_bytes]) + for offset in range(0, iv_bytes, smix_bytes) + ) + + # stretch final byte array into output via pbkdf2 + return pbkdf2_hmac("sha256", secret, output, rounds=1, keylen=keylen) + + #================================================================= + # smix() helper + #================================================================= + def smix(self, input): + """run SCrypt smix function on a single input block + + :arg input: + byte string containing input data. + interpreted as 32*r little endian 4 byte integers. 
+ + :returns: + byte string containing output data + derived by mixing input using n & r parameters. + + .. note:: time & mem cost are both ``O(n * r)`` + """ + # gather locals + bmix = self.bmix + bmix_struct = self.bmix_struct + integerify = self.integerify + n = self.n + + # parse input into 32*r integers ('X' in scrypt source) + # mem cost -- O(r) + buffer = list(bmix_struct.unpack(input)) + + # starting with initial buffer contents, derive V s.t. + # V[0]=initial_buffer ... V[i] = bmix(V[i-1], V[i-1]) ... V[n-1] = bmix(V[n-2], V[n-2]) + # final buffer contents should equal bmix(V[n-1], V[n-1]) + # + # time cost -- O(n * r) -- n loops, bmix is O(r) + # mem cost -- O(n * r) -- V is n-element array of r-element tuples + # NOTE: could do time / memory tradeoff to shrink size of V + def vgen(): + i = 0 + while i < n: + last = tuple(buffer) + yield last + bmix(last, buffer) + i += 1 + V = list(vgen()) + + # generate result from X & V. + # + # time cost -- O(n * r) -- loops n times, calls bmix() which has O(r) time cost + # mem cost -- O(1) -- allocates nothing, calls bmix() which has O(1) mem cost + get_v_elem = V.__getitem__ + n_mask = n - 1 + i = 0 + while i < n: + j = integerify(buffer) & n_mask + result = tuple(a ^ b for a, b in izip(buffer, get_v_elem(j))) + bmix(result, buffer) + i += 1 + + # # NOTE: we could easily support arbitrary values of ``n``, not just powers of 2, + # # but very few implementations have that ability, so not enabling it for now... + # if not n_is_log_2: + # while i < n: + # j = integerify(buffer) % n + # tmp = tuple(a^b for a,b in izip(buffer, get_v_elem(j))) + # bmix(tmp,buffer) + # i += 1 + + # repack tmp + return bmix_struct.pack(*buffer) + + #================================================================= + # bmix() helper + #================================================================= + def bmix(self, source, target): + """ + block mixing function used by smix() + uses salsa20/8 core to mix block contents. + + :arg source: + source to read from. + should be list of 32*r 4-byte integers + (2*r salsa20 blocks). + + :arg target: + target to write to. + should be list with same size as source. + the existing value of this buffer is ignored. + + .. warning:: + + this operates *in place* on target, + so source & target should NOT be same list. + + .. note:: + + * time cost is ``O(r)`` -- loops 16*r times, salsa20() has ``O(1)`` cost. + + * memory cost is ``O(1)`` -- salsa20() uses 16 x uint4, + all other operations done in-place. + """ + ## assert source is not target + # Y[-1] = B[2r-1], Y[i] = hash( Y[i-1] xor B[i]) + # B' <-- (Y_0, Y_2 ... Y_{2r-2}, Y_1, Y_3 ... 
Y_{2r-1}) */ + half = self.bmix_half_len # 16*r out of 32*r - start of Y_1 + tmp = source[-16:] # 'X' in scrypt source + siter = iter(source) + j = 0 + while j < half: + jn = j+16 + target[j:jn] = tmp = salsa20(a ^ b for a, b in izip(tmp, siter)) + target[half+j:half+jn] = tmp = salsa20(a ^ b for a, b in izip(tmp, siter)) + j = jn + + def _bmix_1(self, source, target): + """special bmix() method optimized for ``r=1`` case""" + B = source[16:] + target[:16] = tmp = salsa20(a ^ b for a, b in izip(B, iter(source))) + target[16:] = salsa20(a ^ b for a, b in izip(tmp, B)) + + #================================================================= + # eoc + #================================================================= + +#========================================================================== +# eof +#========================================================================== diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/_gen_files.py b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/_gen_files.py new file mode 100644 index 000000000..55ddfae3b --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/_gen_files.py @@ -0,0 +1,154 @@ +"""passlib.utils.scrypt._gen_files - meta script that generates _salsa.py""" +#========================================================================== +# imports +#========================================================================== +# core +import os +# pkg +# local +#========================================================================== +# constants +#========================================================================== + +_SALSA_OPS = [ + # row = (target idx, source idx 1, source idx 2, rotate) + # interpreted as salsa operation over uint32... + # target = (source1+source2)<> (32 - (b)))) + ##x[ 4] ^= R(x[ 0]+x[12], 7); x[ 8] ^= R(x[ 4]+x[ 0], 9); + ##x[12] ^= R(x[ 8]+x[ 4],13); x[ 0] ^= R(x[12]+x[ 8],18); + ( 4, 0, 12, 7), + ( 8, 4, 0, 9), + ( 12, 8, 4, 13), + ( 0, 12, 8, 18), + + ##x[ 9] ^= R(x[ 5]+x[ 1], 7); x[13] ^= R(x[ 9]+x[ 5], 9); + ##x[ 1] ^= R(x[13]+x[ 9],13); x[ 5] ^= R(x[ 1]+x[13],18); + ( 9, 5, 1, 7), + ( 13, 9, 5, 9), + ( 1, 13, 9, 13), + ( 5, 1, 13, 18), + + ##x[14] ^= R(x[10]+x[ 6], 7); x[ 2] ^= R(x[14]+x[10], 9); + ##x[ 6] ^= R(x[ 2]+x[14],13); x[10] ^= R(x[ 6]+x[ 2],18); + ( 14, 10, 6, 7), + ( 2, 14, 10, 9), + ( 6, 2, 14, 13), + ( 10, 6, 2, 18), + + ##x[ 3] ^= R(x[15]+x[11], 7); x[ 7] ^= R(x[ 3]+x[15], 9); + ##x[11] ^= R(x[ 7]+x[ 3],13); x[15] ^= R(x[11]+x[ 7],18); + ( 3, 15, 11, 7), + ( 7, 3, 15, 9), + ( 11, 7, 3, 13), + ( 15, 11, 7, 18), + + ##/* Operate on rows. 
*/ + ##x[ 1] ^= R(x[ 0]+x[ 3], 7); x[ 2] ^= R(x[ 1]+x[ 0], 9); + ##x[ 3] ^= R(x[ 2]+x[ 1],13); x[ 0] ^= R(x[ 3]+x[ 2],18); + ( 1, 0, 3, 7), + ( 2, 1, 0, 9), + ( 3, 2, 1, 13), + ( 0, 3, 2, 18), + + ##x[ 6] ^= R(x[ 5]+x[ 4], 7); x[ 7] ^= R(x[ 6]+x[ 5], 9); + ##x[ 4] ^= R(x[ 7]+x[ 6],13); x[ 5] ^= R(x[ 4]+x[ 7],18); + ( 6, 5, 4, 7), + ( 7, 6, 5, 9), + ( 4, 7, 6, 13), + ( 5, 4, 7, 18), + + ##x[11] ^= R(x[10]+x[ 9], 7); x[ 8] ^= R(x[11]+x[10], 9); + ##x[ 9] ^= R(x[ 8]+x[11],13); x[10] ^= R(x[ 9]+x[ 8],18); + ( 11, 10, 9, 7), + ( 8, 11, 10, 9), + ( 9, 8, 11, 13), + ( 10, 9, 8, 18), + + ##x[12] ^= R(x[15]+x[14], 7); x[13] ^= R(x[12]+x[15], 9); + ##x[14] ^= R(x[13]+x[12],13); x[15] ^= R(x[14]+x[13],18); + ( 12, 15, 14, 7), + ( 13, 12, 15, 9), + ( 14, 13, 12, 13), + ( 15, 14, 13, 18), +] + +def main(): + target = os.path.join(os.path.dirname(__file__), "_salsa.py") + fh = file(target, "w") + write = fh.write + + VNAMES = ["v%d" % i for i in range(16)] + + PAD = " " * 4 + PAD2 = " " * 8 + PAD3 = " " * 12 + TLIST = ", ".join("b%d" % i for i in range(16)) + VLIST = ", ".join(VNAMES) + kwds = dict( + VLIST=VLIST, + TLIST=TLIST, + ) + + write('''\ +"""passlib.utils.scrypt._salsa - salsa 20/8 core, autogenerated by _gen_salsa.py""" +#================================================================= +# salsa function +#================================================================= + +def salsa20(input): + \"""apply the salsa20/8 core to the provided input + + :args input: input list containing 16 32-bit integers + :returns: result list containing 16 32-bit integers + \""" + + %(TLIST)s = input + %(VLIST)s = \\ + %(TLIST)s + + i = 0 + while i < 4: +''' % kwds) + + for idx, (target, source1, source2, rotate) in enumerate(_SALSA_OPS): + write('''\ + # salsa op %(idx)d: [%(it)d] ^= ([%(is1)d]+[%(is2)d])<<<%(rot1)d + t = (%(src1)s + %(src2)s) & 0xffffffff + %(dst)s ^= ((t & 0x%(rmask)08x) << %(rot1)d) | (t >> %(rot2)d) + +''' % dict( + idx=idx, is1 = source1, is2=source2, it=target, + src1=VNAMES[source1], + src2=VNAMES[source2], + dst=VNAMES[target], + rmask=(1<<(32-rotate))-1, + rot1=rotate, + rot2=32-rotate, + )) + + write('''\ + i += 1 + +''') + + for idx in range(16): + write(PAD + "b%d = (b%d + v%d) & 0xffffffff\n" % (idx,idx,idx)) + + write('''\ + + return %(TLIST)s + +#================================================================= +# eof +#================================================================= +''' % kwds) + +if __name__ == "__main__": + main() + +#========================================================================== +# eof +#========================================================================== diff --git a/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/_salsa.py b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/_salsa.py new file mode 100644 index 000000000..9112732e8 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/crypto/scrypt/_salsa.py @@ -0,0 +1,170 @@ +"""passlib.utils.scrypt._salsa - salsa 20/8 core, autogenerated by _gen_salsa.py""" +#================================================================= +# salsa function +#================================================================= + +def salsa20(input): + """apply the salsa20/8 core to the provided input + + :args input: input list containing 16 32-bit integers + :returns: result list containing 16 32-bit integers + """ + + b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15 = input + v0, v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15 = \ + 
b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15 + + i = 0 + while i < 4: + # salsa op 0: [4] ^= ([0]+[12])<<<7 + t = (v0 + v12) & 0xffffffff + v4 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 1: [8] ^= ([4]+[0])<<<9 + t = (v4 + v0) & 0xffffffff + v8 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 2: [12] ^= ([8]+[4])<<<13 + t = (v8 + v4) & 0xffffffff + v12 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 3: [0] ^= ([12]+[8])<<<18 + t = (v12 + v8) & 0xffffffff + v0 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 4: [9] ^= ([5]+[1])<<<7 + t = (v5 + v1) & 0xffffffff + v9 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 5: [13] ^= ([9]+[5])<<<9 + t = (v9 + v5) & 0xffffffff + v13 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 6: [1] ^= ([13]+[9])<<<13 + t = (v13 + v9) & 0xffffffff + v1 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 7: [5] ^= ([1]+[13])<<<18 + t = (v1 + v13) & 0xffffffff + v5 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 8: [14] ^= ([10]+[6])<<<7 + t = (v10 + v6) & 0xffffffff + v14 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 9: [2] ^= ([14]+[10])<<<9 + t = (v14 + v10) & 0xffffffff + v2 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 10: [6] ^= ([2]+[14])<<<13 + t = (v2 + v14) & 0xffffffff + v6 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 11: [10] ^= ([6]+[2])<<<18 + t = (v6 + v2) & 0xffffffff + v10 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 12: [3] ^= ([15]+[11])<<<7 + t = (v15 + v11) & 0xffffffff + v3 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 13: [7] ^= ([3]+[15])<<<9 + t = (v3 + v15) & 0xffffffff + v7 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 14: [11] ^= ([7]+[3])<<<13 + t = (v7 + v3) & 0xffffffff + v11 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 15: [15] ^= ([11]+[7])<<<18 + t = (v11 + v7) & 0xffffffff + v15 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 16: [1] ^= ([0]+[3])<<<7 + t = (v0 + v3) & 0xffffffff + v1 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 17: [2] ^= ([1]+[0])<<<9 + t = (v1 + v0) & 0xffffffff + v2 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 18: [3] ^= ([2]+[1])<<<13 + t = (v2 + v1) & 0xffffffff + v3 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 19: [0] ^= ([3]+[2])<<<18 + t = (v3 + v2) & 0xffffffff + v0 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 20: [6] ^= ([5]+[4])<<<7 + t = (v5 + v4) & 0xffffffff + v6 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 21: [7] ^= ([6]+[5])<<<9 + t = (v6 + v5) & 0xffffffff + v7 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 22: [4] ^= ([7]+[6])<<<13 + t = (v7 + v6) & 0xffffffff + v4 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 23: [5] ^= ([4]+[7])<<<18 + t = (v4 + v7) & 0xffffffff + v5 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 24: [11] ^= ([10]+[9])<<<7 + t = (v10 + v9) & 0xffffffff + v11 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 25: [8] ^= ([11]+[10])<<<9 + t = (v11 + v10) & 0xffffffff + v8 ^= ((t & 0x007fffff) << 9) | (t >> 23) + + # salsa op 26: [9] ^= ([8]+[11])<<<13 + t = (v8 + v11) & 0xffffffff + v9 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 27: [10] ^= ([9]+[8])<<<18 + t = (v9 + v8) & 0xffffffff + v10 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + # salsa op 28: [12] ^= ([15]+[14])<<<7 + t = (v15 + v14) & 0xffffffff + v12 ^= ((t & 0x01ffffff) << 7) | (t >> 25) + + # salsa op 29: [13] ^= ([12]+[15])<<<9 + t = (v12 + v15) & 0xffffffff + v13 ^= ((t & 0x007fffff) << 9) | (t 
>> 23) + + # salsa op 30: [14] ^= ([13]+[12])<<<13 + t = (v13 + v12) & 0xffffffff + v14 ^= ((t & 0x0007ffff) << 13) | (t >> 19) + + # salsa op 31: [15] ^= ([14]+[13])<<<18 + t = (v14 + v13) & 0xffffffff + v15 ^= ((t & 0x00003fff) << 18) | (t >> 14) + + i += 1 + + b0 = (b0 + v0) & 0xffffffff + b1 = (b1 + v1) & 0xffffffff + b2 = (b2 + v2) & 0xffffffff + b3 = (b3 + v3) & 0xffffffff + b4 = (b4 + v4) & 0xffffffff + b5 = (b5 + v5) & 0xffffffff + b6 = (b6 + v6) & 0xffffffff + b7 = (b7 + v7) & 0xffffffff + b8 = (b8 + v8) & 0xffffffff + b9 = (b9 + v9) & 0xffffffff + b10 = (b10 + v10) & 0xffffffff + b11 = (b11 + v11) & 0xffffffff + b12 = (b12 + v12) & 0xffffffff + b13 = (b13 + v13) & 0xffffffff + b14 = (b14 + v14) & 0xffffffff + b15 = (b15 + v15) & 0xffffffff + + return b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15 + +#================================================================= +# eof +#================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/exc.py b/ansible/lib/python3.11/site-packages/passlib/exc.py new file mode 100644 index 000000000..755c7dcca --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/exc.py @@ -0,0 +1,397 @@ +"""passlib.exc -- exceptions & warnings raised by passlib""" +#============================================================================= +# exceptions +#============================================================================= +class UnknownBackendError(ValueError): + """ + Error raised if multi-backend handler doesn't recognize backend name. + Inherits from :exc:`ValueError`. + + .. versionadded:: 1.7 + """ + def __init__(self, hasher, backend): + self.hasher = hasher + self.backend = backend + message = "%s: unknown backend: %r" % (hasher.name, backend) + ValueError.__init__(self, message) + + +# XXX: add a PasslibRuntimeError as base for Missing/Internal/Security runtime errors? + + +class MissingBackendError(RuntimeError): + """Error raised if multi-backend handler has no available backends; + or if specifically requested backend is not available. + + :exc:`!MissingBackendError` derives + from :exc:`RuntimeError`, since it usually indicates + lack of an external library or OS feature. + This is primarily raised by handlers which depend on + external libraries (which is currently just + :class:`~passlib.hash.bcrypt`). + """ + + +class InternalBackendError(RuntimeError): + """ + Error raised if something unrecoverable goes wrong with backend call; + such as if ``crypt.crypt()`` returning a malformed hash. + + .. versionadded:: 1.7.3 + """ + + +class PasswordValueError(ValueError): + """ + Error raised if a password can't be hashed / verified for various reasons. + This exception derives from the builtin :exc:`!ValueError`. + + May be thrown directly when password violates internal invariants of hasher + (e.g. some don't support NULL characters). Hashers may also throw more specific subclasses, + such as :exc:`!PasswordSizeError`. + + .. versionadded:: 1.7.3 + """ + pass + + +class PasswordSizeError(PasswordValueError): + """ + Error raised if a password exceeds the maximum size allowed + by Passlib (by default, 4096 characters); or if password exceeds + a hash-specific size limitation. + + This exception derives from :exc:`PasswordValueError` (above). + + Many password hash algorithms take proportionately larger amounts of time and/or + memory depending on the size of the password provided. 
This could present + a potential denial of service (DOS) situation if a maliciously large + password is provided to an application. Because of this, Passlib enforces + a maximum size limit, but one which should be *much* larger + than any legitimate password. :exc:`PasswordSizeError` derives + from :exc:`!ValueError`. + + .. note:: + Applications wishing to use a different limit should set the + ``PASSLIB_MAX_PASSWORD_SIZE`` environmental variable before + Passlib is loaded. The value can be any large positive integer. + + .. attribute:: max_size + + indicates the maximum allowed size. + + .. versionadded:: 1.6 + """ + + max_size = None + + def __init__(self, max_size, msg=None): + self.max_size = max_size + if msg is None: + msg = "password exceeds maximum allowed size" + PasswordValueError.__init__(self, msg) + + # this also prevents a glibc crypt segfault issue, detailed here ... + # http://www.openwall.com/lists/oss-security/2011/11/15/1 + +class PasswordTruncateError(PasswordSizeError): + """ + Error raised if password would be truncated by hash. + This derives from :exc:`PasswordSizeError` (above). + + Hashers such as :class:`~passlib.hash.bcrypt` can be configured to raises + this error by setting ``truncate_error=True``. + + .. attribute:: max_size + + indicates the maximum allowed size. + + .. versionadded:: 1.7 + """ + + def __init__(self, cls, msg=None): + if msg is None: + msg = ("Password too long (%s truncates to %d characters)" % + (cls.name, cls.truncate_size)) + PasswordSizeError.__init__(self, cls.truncate_size, msg) + + +class PasslibSecurityError(RuntimeError): + """ + Error raised if critical security issue is detected + (e.g. an attempt is made to use a vulnerable version of a bcrypt backend). + + .. versionadded:: 1.6.3 + """ + + +class TokenError(ValueError): + """ + Base error raised by v:mod:`passlib.totp` when + a token can't be parsed / isn't valid / etc. + Derives from :exc:`!ValueError`. + + Usually one of the more specific subclasses below will be raised: + + * :class:`MalformedTokenError` -- invalid chars, too few digits + * :class:`InvalidTokenError` -- no match found + * :class:`UsedTokenError` -- match found, but token already used + + .. versionadded:: 1.7 + """ + + #: default message to use if none provided -- subclasses may fill this in + _default_message = 'Token not acceptable' + + def __init__(self, msg=None, *args, **kwds): + if msg is None: + msg = self._default_message + ValueError.__init__(self, msg, *args, **kwds) + + +class MalformedTokenError(TokenError): + """ + Error raised by :mod:`passlib.totp` when a token isn't formatted correctly + (contains invalid characters, wrong number of digits, etc) + """ + _default_message = "Unrecognized token" + + +class InvalidTokenError(TokenError): + """ + Error raised by :mod:`passlib.totp` when a token is formatted correctly, + but doesn't match any tokens within valid range. + """ + _default_message = "Token did not match" + + +class UsedTokenError(TokenError): + """ + Error raised by :mod:`passlib.totp` if a token is reused. + Derives from :exc:`TokenError`. + + .. autoattribute:: expire_time + + .. versionadded:: 1.7 + """ + _default_message = "Token has already been used, please wait for another." + + #: optional value indicating when current counter period will end, + #: and a new token can be generated. 
+ expire_time = None + + def __init__(self, *args, **kwds): + self.expire_time = kwds.pop("expire_time", None) + TokenError.__init__(self, *args, **kwds) + + +class UnknownHashError(ValueError): + """ + Error raised by :class:`~passlib.crypto.lookup_hash` if hash name is not recognized. + This exception derives from :exc:`!ValueError`. + + As of version 1.7.3, this may also be raised if hash algorithm is known, + but has been disabled due to FIPS mode (message will include phrase "disabled for fips"). + + As of version 1.7.4, this may be raised if a :class:`~passlib.context.CryptContext` + is unable to identify the algorithm used by a password hash. + + .. versionadded:: 1.7 + + .. versionchanged: 1.7.3 + added 'message' argument. + + .. versionchanged:: 1.7.4 + altered call signature. + """ + def __init__(self, message=None, value=None): + self.value = value + if message is None: + message = "unknown hash algorithm: %r" % value + self.message = message + ValueError.__init__(self, message, value) + + def __str__(self): + return self.message + + +#============================================================================= +# warnings +#============================================================================= +class PasslibWarning(UserWarning): + """base class for Passlib's user warnings, + derives from the builtin :exc:`UserWarning`. + + .. versionadded:: 1.6 + """ + +# XXX: there's only one reference to this class, and it will go away in 2.0; +# so can probably remove this along with this / roll this into PasslibHashWarning. +class PasslibConfigWarning(PasslibWarning): + """Warning issued when non-fatal issue is found related to the configuration + of a :class:`~passlib.context.CryptContext` instance. + + This occurs primarily in one of two cases: + + * The CryptContext contains rounds limits which exceed the hard limits + imposed by the underlying algorithm. + * An explicit rounds value was provided which exceeds the limits + imposed by the CryptContext. + + In both of these cases, the code will perform correctly & securely; + but the warning is issued as a sign the configuration may need updating. + + .. versionadded:: 1.6 + """ + +class PasslibHashWarning(PasslibWarning): + """Warning issued when non-fatal issue is found with parameters + or hash string passed to a passlib hash class. + + This occurs primarily in one of two cases: + + * A rounds value or other setting was explicitly provided which + exceeded the handler's limits (and has been clamped + by the :ref:`relaxed` flag). + + * A malformed hash string was encountered which (while parsable) + should be re-encoded. + + .. versionadded:: 1.6 + """ + +class PasslibRuntimeWarning(PasslibWarning): + """Warning issued when something unexpected happens during runtime. + + The fact that it's a warning instead of an error means Passlib + was able to correct for the issue, but that it's anomalous enough + that the developers would love to hear under what conditions it occurred. + + .. versionadded:: 1.6 + """ + +class PasslibSecurityWarning(PasslibWarning): + """Special warning issued when Passlib encounters something + that might affect security. + + .. versionadded:: 1.6 + """ + +#============================================================================= +# error constructors +# +# note: these functions are used by the hashes in Passlib to raise common +# error messages. 
They are currently just functions which return ValueError, +# rather than subclasses of ValueError, since the specificity isn't needed +# yet; and who wants to import a bunch of error classes when catching +# ValueError will do? +#============================================================================= + +def _get_name(handler): + return handler.name if handler else "" + +#------------------------------------------------------------------------ +# generic helpers +#------------------------------------------------------------------------ +def type_name(value): + """return pretty-printed string containing name of value's type""" + cls = value.__class__ + if cls.__module__ and cls.__module__ not in ["__builtin__", "builtins"]: + return "%s.%s" % (cls.__module__, cls.__name__) + elif value is None: + return 'None' + else: + return cls.__name__ + +def ExpectedTypeError(value, expected, param): + """error message when param was supposed to be one type, but found another""" + # NOTE: value is never displayed, since it may sometimes be a password. + name = type_name(value) + return TypeError("%s must be %s, not %s" % (param, expected, name)) + +def ExpectedStringError(value, param): + """error message when param was supposed to be unicode or bytes""" + return ExpectedTypeError(value, "unicode or bytes", param) + +#------------------------------------------------------------------------ +# hash/verify parameter errors +#------------------------------------------------------------------------ +def MissingDigestError(handler=None): + """raised when verify() method gets passed config string instead of hash""" + name = _get_name(handler) + return ValueError("expected %s hash, got %s config string instead" % + (name, name)) + +def NullPasswordError(handler=None): + """raised by OS crypt() supporting hashes, which forbid NULLs in password""" + name = _get_name(handler) + return PasswordValueError("%s does not allow NULL bytes in password" % name) + +#------------------------------------------------------------------------ +# errors when parsing hashes +#------------------------------------------------------------------------ +def InvalidHashError(handler=None): + """error raised if unrecognized hash provided to handler""" + return ValueError("not a valid %s hash" % _get_name(handler)) + +def MalformedHashError(handler=None, reason=None): + """error raised if recognized-but-malformed hash provided to handler""" + text = "malformed %s hash" % _get_name(handler) + if reason: + text = "%s (%s)" % (text, reason) + return ValueError(text) + +def ZeroPaddedRoundsError(handler=None): + """error raised if hash was recognized but contained zero-padded rounds field""" + return MalformedHashError(handler, "zero-padded rounds") + +#------------------------------------------------------------------------ +# settings / hash component errors +#------------------------------------------------------------------------ +def ChecksumSizeError(handler, raw=False): + """error raised if hash was recognized, but checksum was wrong size""" + # TODO: if handler.use_defaults is set, this came from app-provided value, + # not from parsing a hash string, might want different error msg. 
+ checksum_size = handler.checksum_size + unit = "bytes" if raw else "chars" + reason = "checksum must be exactly %d %s" % (checksum_size, unit) + return MalformedHashError(handler, reason) + +#============================================================================= +# sensitive info helpers +#============================================================================= + +#: global flag, set temporarily by UTs to allow debug_only_repr() to display sensitive values. +ENABLE_DEBUG_ONLY_REPR = False + + +def debug_only_repr(value, param="hash"): + """ + helper used to display sensitive data (hashes etc) within error messages. + currently returns placeholder test UNLESS unittests are running, + in which case the real value is displayed. + + mainly useful to prevent hashes / secrets from being exposed in production tracebacks; + while still being visible from test failures. + + NOTE: api subject to change, may formalize this more in the future. + """ + if ENABLE_DEBUG_ONLY_REPR or value is None or isinstance(value, bool): + return repr(value) + return "<%s %s value omitted>" % (param, type(value)) + + +def CryptBackendError(handler, config, hash, # * + source="crypt.crypt()"): + """ + helper to generate standard message when ``crypt.crypt()`` returns invalid result. + takes care of automatically masking contents of config & hash outside of UTs. + """ + name = _get_name(handler) + msg = "%s returned invalid %s hash: config=%s hash=%s" % \ + (source, name, debug_only_repr(config), debug_only_repr(hash)) + raise InternalBackendError(msg) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/ext/__init__.py b/ansible/lib/python3.11/site-packages/passlib/ext/__init__.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/ext/__init__.py @@ -0,0 +1 @@ + diff --git a/ansible/lib/python3.11/site-packages/passlib/ext/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/ext/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 000000000..895142c08 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/ext/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/ext/django/__init__.py b/ansible/lib/python3.11/site-packages/passlib/ext/django/__init__.py new file mode 100644 index 000000000..2dc9b2821 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/ext/django/__init__.py @@ -0,0 +1,6 @@ +"""passlib.ext.django.models -- monkeypatch django hashing framework + +this plugin monkeypatches django's hashing framework +so that it uses a passlib context object, allowing handling of arbitrary +hashes in Django databases. 
+""" diff --git a/ansible/lib/python3.11/site-packages/passlib/ext/django/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/ext/django/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 000000000..e866f4355 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/ext/django/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/ext/django/__pycache__/models.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/ext/django/__pycache__/models.cpython-311.pyc new file mode 100644 index 000000000..c81977e74 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/ext/django/__pycache__/models.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/ext/django/__pycache__/utils.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/ext/django/__pycache__/utils.cpython-311.pyc new file mode 100644 index 000000000..220f9b970 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/ext/django/__pycache__/utils.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/ext/django/models.py b/ansible/lib/python3.11/site-packages/passlib/ext/django/models.py new file mode 100644 index 000000000..e766c2db4 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/ext/django/models.py @@ -0,0 +1,36 @@ +"""passlib.ext.django.models -- monkeypatch django hashing framework""" +#============================================================================= +# imports +#============================================================================= +# core +# site +# pkg +from passlib.context import CryptContext +from passlib.ext.django.utils import DjangoContextAdapter +# local +__all__ = ["password_context"] + +#============================================================================= +# global attrs +#============================================================================= + +#: adapter instance used to drive most of this +adapter = DjangoContextAdapter() + +# the context object which this patches contrib.auth to use for password hashing. +# configuration controlled by ``settings.PASSLIB_CONFIG``. 
+password_context = adapter.context + +#: hook callers should use if context is changed +context_changed = adapter.reset_hashers + +#============================================================================= +# main code +#============================================================================= + +# load config & install monkeypatch +adapter.load_model() + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/ext/django/utils.py b/ansible/lib/python3.11/site-packages/passlib/ext/django/utils.py new file mode 100644 index 000000000..2f8a2efc5 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/ext/django/utils.py @@ -0,0 +1,1276 @@ +"""passlib.ext.django.utils - helper functions used by this plugin""" +#============================================================================= +# imports +#============================================================================= +# core +from functools import update_wrapper, wraps +import logging; log = logging.getLogger(__name__) +import sys +import weakref +from warnings import warn +# site +try: + from django import VERSION as DJANGO_VERSION + log.debug("found django %r installation", DJANGO_VERSION) +except ImportError: + log.debug("django installation not found") + DJANGO_VERSION = () +# pkg +from passlib import exc, registry +from passlib.context import CryptContext +from passlib.exc import PasslibRuntimeWarning +from passlib.utils.compat import get_method_function, iteritems, OrderedDict, unicode +from passlib.utils.decor import memoized_property +# local +__all__ = [ + "DJANGO_VERSION", + "MIN_DJANGO_VERSION", + "get_preset_config", + "quirks", +] + +#: minimum version supported by passlib.ext.django +MIN_DJANGO_VERSION = (1, 8) + +#============================================================================= +# quirk detection +#============================================================================= + +class quirks: + + #: django check_password() started throwing error on encoded=None + #: (really identify_hasher did) + none_causes_check_password_error = DJANGO_VERSION >= (2, 1) + + #: django is_usable_password() started returning True for password = {None, ""} values. + empty_is_usable_password = DJANGO_VERSION >= (2, 1) + + #: django is_usable_password() started returning True for non-hash strings in 2.1 + invalid_is_usable_password = DJANGO_VERSION >= (2, 1) + +#============================================================================= +# default policies +#============================================================================= + +# map preset names -> passlib.app attrs +_preset_map = { + "django-1.0": "django10_context", + "django-1.4": "django14_context", + "django-1.6": "django16_context", + "django-latest": "django_context", +} + +def get_preset_config(name): + """Returns configuration string for one of the preset strings + supported by the ``PASSLIB_CONFIG`` setting. + Currently supported presets: + + * ``"passlib-default"`` - default config used by this release of passlib. + * ``"django-default"`` - config matching currently installed django version. + * ``"django-latest"`` - config matching newest django version (currently same as ``"django-1.6"``). 
+ * ``"django-1.0"`` - config used by stock Django 1.0 - 1.3 installs + * ``"django-1.4"`` - config used by stock Django 1.4 installs + * ``"django-1.6"`` - config used by stock Django 1.6 installs + """ + # TODO: add preset which includes HASHERS + PREFERRED_HASHERS, + # after having imported any custom hashers. e.g. "django-current" + if name == "django-default": + if not DJANGO_VERSION: + raise ValueError("can't resolve django-default preset, " + "django not installed") + name = "django-1.6" + if name == "passlib-default": + return PASSLIB_DEFAULT + try: + attr = _preset_map[name] + except KeyError: + raise ValueError("unknown preset config name: %r" % name) + import passlib.apps + return getattr(passlib.apps, attr).to_string() + +# default context used by passlib 1.6 +PASSLIB_DEFAULT = """ +[passlib] + +; list of schemes supported by configuration +; currently all django 1.6, 1.4, and 1.0 hashes, +; and three common modular crypt format hashes. +schemes = + django_pbkdf2_sha256, django_pbkdf2_sha1, django_bcrypt, django_bcrypt_sha256, + django_salted_sha1, django_salted_md5, django_des_crypt, hex_md5, + sha512_crypt, bcrypt, phpass + +; default scheme to use for new hashes +default = django_pbkdf2_sha256 + +; hashes using these schemes will automatically be re-hashed +; when the user logs in (currently all django 1.0 hashes) +deprecated = + django_pbkdf2_sha1, django_salted_sha1, django_salted_md5, + django_des_crypt, hex_md5 + +; sets some common options, including minimum rounds for two primary hashes. +; if a hash has less than this number of rounds, it will be re-hashed. +sha512_crypt__min_rounds = 80000 +django_pbkdf2_sha256__min_rounds = 10000 + +; set somewhat stronger iteration counts for ``User.is_staff`` +staff__sha512_crypt__default_rounds = 100000 +staff__django_pbkdf2_sha256__default_rounds = 12500 + +; and even stronger ones for ``User.is_superuser`` +superuser__sha512_crypt__default_rounds = 120000 +superuser__django_pbkdf2_sha256__default_rounds = 15000 +""" + +#============================================================================= +# helpers +#============================================================================= + +#: prefix used to shoehorn passlib's handler names into django hasher namespace +PASSLIB_WRAPPER_PREFIX = "passlib_" + +#: prefix used by all the django-specific hash formats in passlib; +#: all of these hashes should have a ``.django_name`` attribute. +DJANGO_COMPAT_PREFIX = "django_" + +#: set of hashes w/o "django_" prefix, but which also expose ``.django_name``. +_other_django_hashes = set(["hex_md5"]) + +def _wrap_method(method): + """wrap method object in bare function""" + @wraps(method) + def wrapper(*args, **kwds): + return method(*args, **kwds) + return wrapper + +#============================================================================= +# translator +#============================================================================= +class DjangoTranslator(object): + """ + Object which helps translate passlib hasher objects / names + to and from django hasher objects / names. + + These methods are wrapped in a class so that results can be cached, + but with the ability to have independant caches, since django hasher + names may / may not correspond to the same instance (or even class). 
+ """ + #============================================================================= + # instance attrs + #============================================================================= + + #: CryptContext instance + #: (if any -- generally only set by DjangoContextAdapter subclass) + context = None + + #: internal cache of passlib hasher -> django hasher instance. + #: key stores weakref to passlib hasher. + _django_hasher_cache = None + + #: special case -- unsalted_sha1 + _django_unsalted_sha1 = None + + #: internal cache of django name -> passlib hasher + #: value stores weakrefs to passlib hasher. + _passlib_hasher_cache = None + + #============================================================================= + # init + #============================================================================= + + def __init__(self, context=None, **kwds): + super(DjangoTranslator, self).__init__(**kwds) + if context is not None: + self.context = context + + self._django_hasher_cache = weakref.WeakKeyDictionary() + self._passlib_hasher_cache = weakref.WeakValueDictionary() + + def reset_hashers(self): + self._django_hasher_cache.clear() + self._passlib_hasher_cache.clear() + self._django_unsalted_sha1 = None + + def _get_passlib_hasher(self, passlib_name): + """ + resolve passlib hasher by name, using context if available. + """ + context = self.context + if context is None: + return registry.get_crypt_handler(passlib_name) + else: + return context.handler(passlib_name) + + #============================================================================= + # resolve passlib hasher -> django hasher + #============================================================================= + + def passlib_to_django_name(self, passlib_name): + """ + Convert passlib hasher / name to Django hasher name. + """ + return self.passlib_to_django(passlib_name).algorithm + + # XXX: add option (in class, or call signature) to always return a wrapper, + # rather than native builtin -- would let HashersTest check that + # our own wrapper + implementations are matching up with their tests. + def passlib_to_django(self, passlib_hasher, cached=True): + """ + Convert passlib hasher / name to Django hasher. + + :param passlib_hasher: + passlib hasher / name + + :returns: + django hasher instance + """ + # resolve names to hasher + if not hasattr(passlib_hasher, "name"): + passlib_hasher = self._get_passlib_hasher(passlib_hasher) + + # check cache + if cached: + cache = self._django_hasher_cache + try: + return cache[passlib_hasher] + except KeyError: + pass + result = cache[passlib_hasher] = \ + self.passlib_to_django(passlib_hasher, cached=False) + return result + + # find native equivalent, and return wrapper if there isn't one + django_name = getattr(passlib_hasher, "django_name", None) + if django_name: + return self._create_django_hasher(django_name) + else: + return _PasslibHasherWrapper(passlib_hasher) + + _builtin_django_hashers = dict( + md5="MD5PasswordHasher", + ) + + if DJANGO_VERSION > (2, 1): + # present but disabled by default as of django 2.1; not sure when added, + # so not listing it by default. + _builtin_django_hashers.update( + bcrypt="BCryptPasswordHasher", + ) + + def _create_django_hasher(self, django_name): + """ + helper to create new django hasher by name. + wraps underlying django methods. 
+ """ + # if we haven't patched django, can use it directly + module = sys.modules.get("passlib.ext.django.models") + if module is None or not module.adapter.patched: + from django.contrib.auth.hashers import get_hasher + try: + return get_hasher(django_name) + except ValueError as err: + if not str(err).startswith("Unknown password hashing algorithm"): + raise + else: + # We've patched django's get_hashers(), so calling django's get_hasher() + # or get_hashers_by_algorithm() would only land us back here. + # As non-ideal workaround, have to use original get_hashers(), + get_hashers = module.adapter._manager.getorig("django.contrib.auth.hashers:get_hashers").__wrapped__ + for hasher in get_hashers(): + if hasher.algorithm == django_name: + return hasher + + # hardcode a few for cases where get_hashers() lookup won't work + # (mainly, hashers that are present in django, but disabled by their default config) + path = self._builtin_django_hashers.get(django_name) + if path: + if "." not in path: + path = "django.contrib.auth.hashers." + path + from django.utils.module_loading import import_string + return import_string(path)() + + raise ValueError("unknown hasher: %r" % django_name) + + #============================================================================= + # reverse django -> passlib + #============================================================================= + + def django_to_passlib_name(self, django_name): + """ + Convert Django hasher / name to Passlib hasher name. + """ + return self.django_to_passlib(django_name).name + + def django_to_passlib(self, django_name, cached=True): + """ + Convert Django hasher / name to Passlib hasher / name. + If present, CryptContext will be checked instead of main registry. + + :param django_name: + Django hasher class or algorithm name. + "default" allowed if context provided. + + :raises ValueError: + if can't resolve hasher. + + :returns: + passlib hasher or name + """ + # check for django hasher + if hasattr(django_name, "algorithm"): + + # check for passlib adapter + if isinstance(django_name, _PasslibHasherWrapper): + return django_name.passlib_handler + + # resolve django hasher -> name + django_name = django_name.algorithm + + # check cache + if cached: + cache = self._passlib_hasher_cache + try: + return cache[django_name] + except KeyError: + pass + result = cache[django_name] = \ + self.django_to_passlib(django_name, cached=False) + return result + + # check if it's an obviously-wrapped name + if django_name.startswith(PASSLIB_WRAPPER_PREFIX): + passlib_name = django_name[len(PASSLIB_WRAPPER_PREFIX):] + return self._get_passlib_hasher(passlib_name) + + # resolve default + if django_name == "default": + context = self.context + if context is None: + raise TypeError("can't determine default scheme w/ context") + return context.handler() + + # special case: Django uses a separate hasher for "sha1$$digest" + # hashes (unsalted_sha1) and "sha1$salt$digest" (sha1); + # but passlib uses "django_salted_sha1" for both of these. + if django_name == "unsalted_sha1": + django_name = "sha1" + + # resolve name + # XXX: bother caching these lists / mapping? + # not needed in long-term due to cache above. 
+ context = self.context + if context is None: + # check registry + # TODO: should make iteration via registry easier + candidates = ( + registry.get_crypt_handler(passlib_name) + for passlib_name in registry.list_crypt_handlers() + if passlib_name.startswith(DJANGO_COMPAT_PREFIX) or + passlib_name in _other_django_hashes + ) + else: + # check context + candidates = context.schemes(resolve=True) + for handler in candidates: + if getattr(handler, "django_name", None) == django_name: + return handler + + # give up + # NOTE: this should only happen for custom django hashers that we don't + # know the equivalents for. _HasherHandler (below) is work in + # progress that would allow us to at least return a wrapper. + raise ValueError("can't translate django name to passlib name: %r" % + (django_name,)) + + #============================================================================= + # django hasher lookup + #============================================================================= + + def resolve_django_hasher(self, django_name, cached=True): + """ + Take in a django algorithm name, return django hasher. + """ + # check for django hasher + if hasattr(django_name, "algorithm"): + return django_name + + # resolve to passlib hasher + passlib_hasher = self.django_to_passlib(django_name, cached=cached) + + # special case: Django uses a separate hasher for "sha1$$digest" + # hashes (unsalted_sha1) and "sha1$salt$digest" (sha1); + # but passlib uses "django_salted_sha1" for both of these. + # XXX: this isn't ideal way to handle this. would like to do something + # like pass "django_variant=django_name" into passlib_to_django(), + # and have it cache separate hasher there. + # but that creates a LOT of complication in it's cache structure, + # for what is just one special case. + if django_name == "unsalted_sha1" and passlib_hasher.name == "django_salted_sha1": + if not cached: + return self._create_django_hasher(django_name) + result = self._django_unsalted_sha1 + if result is None: + result = self._django_unsalted_sha1 = self._create_django_hasher(django_name) + return result + + # lookup corresponding django hasher + return self.passlib_to_django(passlib_hasher, cached=cached) + + #============================================================================= + # eoc + #============================================================================= + +#============================================================================= +# adapter +#============================================================================= +class DjangoContextAdapter(DjangoTranslator): + """ + Object which tries to adapt a Passlib CryptContext object, + using a Django-hasher compatible API. + + When installed in django, :mod:`!passlib.ext.django` will create + an instance of this class, and then monkeypatch the appropriate + methods into :mod:`!django.contrib.auth` and other appropriate places. 
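# --- reviewer sketch (not part of this patch): the candidate scan django_to_passlib()
# --- performs above, using two hypothetical stand-in handler classes; the name pairs
# --- mirror passlib's django_pbkdf2_sha256 / django_bcrypt_sha256 handlers.
class Pbkdf2Stub:
    name = "django_pbkdf2_sha256"
    django_name = "pbkdf2_sha256"

class BcryptStub:
    name = "django_bcrypt_sha256"
    django_name = "bcrypt_sha256"

def django_to_passlib_stub(django_name, candidates=(Pbkdf2Stub, BcryptStub)):
    for handler in candidates:
        if getattr(handler, "django_name", None) == django_name:
            return handler
    raise ValueError("can't translate django name to passlib name: %r" % (django_name,))

print(django_to_passlib_stub("bcrypt_sha256").name)   # django_bcrypt_sha256
# --- end reviewer sketch ---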
+ """ + #============================================================================= + # instance attrs + #============================================================================= + + #: CryptContext instance we're wrapping + context = None + + #: ref to original make_password(), + #: needed to generate usuable passwords that match django + _orig_make_password = None + + #: ref to django helper of this name -- not monkeypatched + is_password_usable = None + + #: PatchManager instance used to track installation + _manager = None + + #: whether config=disabled flag was set + enabled = True + + #: patch status + patched = False + + #============================================================================= + # init + #============================================================================= + def __init__(self, context=None, get_user_category=None, **kwds): + + # init log + self.log = logging.getLogger(__name__ + ".DjangoContextAdapter") + + # init parent, filling in default context object + if context is None: + context = CryptContext() + super(DjangoContextAdapter, self).__init__(context=context, **kwds) + + # setup user category + if get_user_category: + assert callable(get_user_category) + self.get_user_category = get_user_category + + # install lru cache wrappers + try: + from functools import lru_cache # new py32 + except ImportError: + from django.utils.lru_cache import lru_cache # py2 compat, removed in django 3 (or earlier?) + self.get_hashers = lru_cache()(self.get_hashers) + + # get copy of original make_password + from django.contrib.auth.hashers import make_password + if make_password.__module__.startswith("passlib."): + make_password = _PatchManager.peek_unpatched_func(make_password) + self._orig_make_password = make_password + + # get other django helpers + from django.contrib.auth.hashers import is_password_usable + self.is_password_usable = is_password_usable + + # init manager + mlog = logging.getLogger(__name__ + ".DjangoContextAdapter._manager") + self._manager = _PatchManager(log=mlog) + + def reset_hashers(self): + """ + Wrapper to manually reset django's hasher lookup cache + """ + # resets cache for .get_hashers() & .get_hashers_by_algorithm() + from django.contrib.auth.hashers import reset_hashers + reset_hashers(setting="PASSWORD_HASHERS") + + # reset internal caches + super(DjangoContextAdapter, self).reset_hashers() + + #============================================================================= + # django hashers helpers -- hasher lookup + #============================================================================= + + # lru_cache()'ed by init + def get_hashers(self): + """ + Passlib replacement for get_hashers() -- + Return list of available django hasher classes + """ + passlib_to_django = self.passlib_to_django + return [passlib_to_django(hasher) + for hasher in self.context.schemes(resolve=True)] + + def get_hasher(self, algorithm="default"): + """ + Passlib replacement for get_hasher() -- + Return django hasher by name + """ + return self.resolve_django_hasher(algorithm) + + def identify_hasher(self, encoded): + """ + Passlib replacement for identify_hasher() -- + Identify django hasher based on hash. + """ + handler = self.context.identify(encoded, resolve=True, required=True) + if handler.name == "django_salted_sha1" and encoded.startswith("sha1$$"): + # Django uses a separate hasher for "sha1$$digest" hashes, but + # passlib identifies it as belonging to "sha1$salt$digest" handler. + # We want to resolve to correct django hasher. 
+ return self.get_hasher("unsalted_sha1") + return self.passlib_to_django(handler) + + #============================================================================= + # django.contrib.auth.hashers helpers -- password helpers + #============================================================================= + + def make_password(self, password, salt=None, hasher="default"): + """ + Passlib replacement for make_password() + """ + if password is None: + return self._orig_make_password(None) + # NOTE: relying on hasher coming from context, and thus having + # context-specific config baked into it. + passlib_hasher = self.django_to_passlib(hasher) + if "salt" not in passlib_hasher.setting_kwds: + # ignore salt param even if preset + pass + elif hasher.startswith("unsalted_"): + # Django uses a separate 'unsalted_sha1' hasher for "sha1$$digest", + # but passlib just reuses it's "sha1" handler ("sha1$salt$digest"). To make + # this work, have to explicitly tell the sha1 handler to use an empty salt. + passlib_hasher = passlib_hasher.using(salt="") + elif salt: + # Django make_password() autogenerates a salt if salt is bool False (None / ''), + # so we only pass the keyword on if there's actually a fixed salt. + passlib_hasher = passlib_hasher.using(salt=salt) + return passlib_hasher.hash(password) + + def check_password(self, password, encoded, setter=None, preferred="default"): + """ + Passlib replacement for check_password() + """ + # XXX: this currently ignores "preferred" keyword, since its purpose + # was for hash migration, and that's handled by the context. + # XXX: honor "none_causes_check_password_error" quirk for django 2.2+? + # seems safer to return False. + if password is None or not self.is_password_usable(encoded): + return False + + # verify password + context = self.context + try: + correct = context.verify(password, encoded) + except exc.UnknownHashError: + # As of django 1.5, unidentifiable hashes returns False + # (side-effect of django issue 18453) + return False + + if not (correct and setter): + return correct + + # check if we need to rehash + if preferred == "default": + if not context.needs_update(encoded, secret=password): + return correct + else: + # Django's check_password() won't call setter() on a + # 'preferred' alg, even if it's otherwise deprecated. To try and + # replicate this behavior if preferred is set, we look up the + # passlib hasher, and call it's original needs_update() method. + # TODO: Solve redundancy that verify() call + # above is already identifying hash. + hasher = self.django_to_passlib(preferred) + if (hasher.identify(encoded) and + not hasher.needs_update(encoded, secret=password)): + # alg is 'preferred' and hash itself doesn't need updating, + # so nothing to do. + return correct + # else: either hash isn't preferred, or it needs updating. 
+ + # call setter to rehash + setter(password) + return correct + + #============================================================================= + # django users helpers + #============================================================================= + + def user_check_password(self, user, password): + """ + Passlib replacement for User.check_password() + """ + if password is None: + return False + hash = user.password + if not self.is_password_usable(hash): + return False + cat = self.get_user_category(user) + try: + ok, new_hash = self.context.verify_and_update(password, hash, category=cat) + except exc.UnknownHashError: + # As of django 1.5, unidentifiable hashes returns False + # (side-effect of django issue 18453) + return False + if ok and new_hash is not None: + # migrate to new hash if needed. + user.password = new_hash + user.save() + return ok + + def user_set_password(self, user, password): + """ + Passlib replacement for User.set_password() + """ + if password is None: + user.set_unusable_password() + else: + cat = self.get_user_category(user) + user.password = self.context.hash(password, category=cat) + + def get_user_category(self, user): + """ + Helper for hashing passwords per-user -- + figure out the CryptContext category for specified Django user object. + .. note:: + This may be overridden via PASSLIB_GET_CATEGORY django setting + """ + if user.is_superuser: + return "superuser" + elif user.is_staff: + return "staff" + else: + return None + + #============================================================================= + # patch control + #============================================================================= + + HASHERS_PATH = "django.contrib.auth.hashers" + MODELS_PATH = "django.contrib.auth.models" + USER_CLASS_PATH = MODELS_PATH + ":User" + FORMS_PATH = "django.contrib.auth.forms" + + #: list of locations to patch + patch_locations = [ + # + # User object + # NOTE: could leave defaults alone, but want to have user available + # so that we can support get_user_category() + # + (USER_CLASS_PATH + ".check_password", "user_check_password", dict(method=True)), + (USER_CLASS_PATH + ".set_password", "user_set_password", dict(method=True)), + + # + # Hashers module + # + (HASHERS_PATH + ":", "check_password"), + (HASHERS_PATH + ":", "make_password"), + (HASHERS_PATH + ":", "get_hashers"), + (HASHERS_PATH + ":", "get_hasher"), + (HASHERS_PATH + ":", "identify_hasher"), + + # + # Patch known imports from hashers module + # + (MODELS_PATH + ":", "check_password"), + (MODELS_PATH + ":", "make_password"), + (FORMS_PATH + ":", "get_hasher"), + (FORMS_PATH + ":", "identify_hasher"), + + ] + + def install_patch(self): + """ + Install monkeypatch to replace django hasher framework. + """ + # don't reapply + log = self.log + if self.patched: + log.warning("monkeypatching already applied, refusing to reapply") + return False + + # version check + if DJANGO_VERSION < MIN_DJANGO_VERSION: + raise RuntimeError("passlib.ext.django requires django >= %s" % + (MIN_DJANGO_VERSION,)) + + # log start + log.debug("preparing to monkeypatch django ...") + + # run through patch locations + manager = self._manager + for record in self.patch_locations: + if len(record) == 2: + record += ({},) + target, source, opts = record + if target.endswith((":", ",")): + target += source + value = getattr(self, source) + if opts.get("method"): + # have to wrap our method in a function, + # since we're installing it in a class *as* a method + # XXX: make this a flag for .patch()? 
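# --- reviewer sketch (not part of this patch): the verify-and-upgrade-on-login flow
# --- that user_check_password() above builds on. The scheme choices are arbitrary
# --- examples, not the adapter's actual policy.
from passlib.context import CryptContext

ctx = CryptContext(schemes=["pbkdf2_sha256", "md5_crypt"], deprecated=["md5_crypt"])
stored = ctx.handler("md5_crypt").hash("hunter2")      # legacy hash on record

ok, new_hash = ctx.verify_and_update("hunter2", stored)
if ok and new_hash is not None:
    stored = new_hash       # mirrors: user.password = new_hash; user.save()
print(ok, stored.startswith("$pbkdf2-sha256$"))        # True True
# --- end reviewer sketch ---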
+ value = _wrap_method(value) + manager.patch(target, value) + + # reset django's caches (e.g. get_hash_by_algorithm) + self.reset_hashers() + + # done! + self.patched = True + log.debug("... finished monkeypatching django") + return True + + def remove_patch(self): + """ + Remove monkeypatch from django hasher framework. + As precaution in case there are lingering refs to context, + context object will be wiped. + + .. warning:: + This may cause problems if any other Django modules have imported + their own copies of the patched functions, though the patched + code has been designed to throw an error as soon as possible in + this case. + """ + log = self.log + manager = self._manager + + if self.patched: + log.debug("removing django monkeypatching...") + manager.unpatch_all(unpatch_conflicts=True) + self.context.load({}) + self.patched = False + self.reset_hashers() + log.debug("...finished removing django monkeypatching") + return True + + if manager.isactive(): # pragma: no cover -- sanity check + log.warning("reverting partial monkeypatching of django...") + manager.unpatch_all() + self.context.load({}) + self.reset_hashers() + log.debug("...finished removing django monkeypatching") + return True + + log.debug("django not monkeypatched") + return False + + #============================================================================= + # loading config + #============================================================================= + + def load_model(self): + """ + Load configuration from django, and install patch. + """ + self._load_settings() + if self.enabled: + try: + self.install_patch() + except: + # try to undo what we can + self.remove_patch() + raise + else: + if self.patched: # pragma: no cover -- sanity check + log.error("didn't expect monkeypatching would be applied!") + self.remove_patch() + log.debug("passlib.ext.django loaded") + + def _load_settings(self): + """ + Update settings from django + """ + from django.conf import settings + + # TODO: would like to add support for inheriting config from a preset + # (or from existing hasher state) and letting PASSLIB_CONFIG + # be an update, not a replacement. + + # TODO: wrap and import any custom hashers as passlib handlers, + # so they could be used in the passlib config. 
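# --- reviewer sketch (not part of this patch): a standalone restatement of how
# --- install_patch() above normalizes the patch_locations records -- 2-tuples gain an
# --- empty options dict, and a target ending in ":" has the attribute name appended.
def normalize(record):
    if len(record) == 2:
        record += ({},)
    target, source, opts = record
    if target.endswith(":"):
        target += source
    return target, source, opts

print(normalize(("django.contrib.auth.hashers:", "check_password")))
# -> ('django.contrib.auth.hashers:check_password', 'check_password', {})
print(normalize(("django.contrib.auth.models:User.set_password", "user_set_password", dict(method=True))))
# --- end reviewer sketch ---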
+ + # load config from settings + _UNSET = object() + config = getattr(settings, "PASSLIB_CONFIG", _UNSET) + if config is _UNSET: + # XXX: should probably deprecate this alias + config = getattr(settings, "PASSLIB_CONTEXT", _UNSET) + if config is _UNSET: + config = "passlib-default" + if config is None: + warn("setting PASSLIB_CONFIG=None is deprecated, " + "and support will be removed in Passlib 1.8, " + "use PASSLIB_CONFIG='disabled' instead.", + DeprecationWarning) + config = "disabled" + elif not isinstance(config, (unicode, bytes, dict)): + raise exc.ExpectedTypeError(config, "str or dict", "PASSLIB_CONFIG") + + # load custom category func (if any) + get_category = getattr(settings, "PASSLIB_GET_CATEGORY", None) + if get_category and not callable(get_category): + raise exc.ExpectedTypeError(get_category, "callable", "PASSLIB_GET_CATEGORY") + + # check if we've been disabled + if config == "disabled": + self.enabled = False + return + else: + self.__dict__.pop("enabled", None) + + # resolve any preset aliases + if isinstance(config, str) and '\n' not in config: + config = get_preset_config(config) + + # setup category func + if get_category: + self.get_user_category = get_category + else: + self.__dict__.pop("get_category", None) + + # setup context + self.context.load(config) + self.reset_hashers() + + #============================================================================= + # eof + #============================================================================= + +#============================================================================= +# wrapping passlib handlers as django hashers +#============================================================================= +_GEN_SALT_SIGNAL = "--!!!generate-new-salt!!!--" + +class ProxyProperty(object): + """helper that proxies another attribute""" + + def __init__(self, attr): + self.attr = attr + + def __get__(self, obj, cls): + if obj is None: + cls = obj + return getattr(obj, self.attr) + + def __set__(self, obj, value): + setattr(obj, self.attr, value) + + def __delete__(self, obj): + delattr(obj, self.attr) + + +class _PasslibHasherWrapper(object): + """ + adapter which which wraps a :cls:`passlib.ifc.PasswordHash` class, + and provides an interface compatible with the Django hasher API. + + :param passlib_handler: + passlib hash handler (e.g. :cls:`passlib.hash.sha256_crypt`. + """ + #===================================================================== + # instance attrs + #===================================================================== + + #: passlib handler that we're adapting. + passlib_handler = None + + # NOTE: 'rounds' attr will store variable rounds, IF handler supports it. + # 'iterations' will act as proxy, for compatibility with django pbkdf2 hashers. + # rounds = None + # iterations = None + + #===================================================================== + # init + #===================================================================== + def __init__(self, passlib_handler): + # init handler + if getattr(passlib_handler, "django_name", None): + raise ValueError("handlers that reflect an official django " + "hasher shouldn't be wrapped: %r" % + (passlib_handler.name,)) + if passlib_handler.is_disabled: + # XXX: could this be implemented? 
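# --- reviewer sketch (not part of this patch): a hypothetical settings.py fragment
# --- showing the value types _load_settings() above accepts for PASSLIB_CONFIG --
# --- a preset alias, an explicit policy string, or "disabled". Values are examples only.

# option 1: preset alias resolved via get_preset_config()
# PASSLIB_CONFIG = "passlib-default"

# option 2: explicit CryptContext policy string
PASSLIB_CONFIG = """
[passlib]
schemes = django_pbkdf2_sha256, django_bcrypt_sha256
deprecated = auto
"""

# option 3: turn the integration off
# PASSLIB_CONFIG = "disabled"

def PASSLIB_GET_CATEGORY(user):
    # same shape as the default get_user_category() shown earlier
    if user.is_superuser:
        return "superuser"
    if user.is_staff:
        return "staff"
    return None
# --- end reviewer sketch ---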
+ raise ValueError("can't wrap disabled-hash handlers: %r" % + (passlib_handler.name)) + self.passlib_handler = passlib_handler + + # init rounds support + if self._has_rounds: + self.rounds = passlib_handler.default_rounds + self.iterations = ProxyProperty("rounds") + + #===================================================================== + # internal methods + #===================================================================== + def __repr__(self): + return "" % self.passlib_handler + + #===================================================================== + # internal properties + #===================================================================== + + @memoized_property + def __name__(self): + return "Passlib_%s_PasswordHasher" % self.passlib_handler.name.title() + + @memoized_property + def _has_rounds(self): + return "rounds" in self.passlib_handler.setting_kwds + + @memoized_property + def _translate_kwds(self): + """ + internal helper for safe_summary() -- + used to translate passlib hash options -> django keywords + """ + out = dict(checksum="hash") + if self._has_rounds and "pbkdf2" in self.passlib_handler.name: + out['rounds'] = 'iterations' + return out + + #===================================================================== + # hasher properties + #===================================================================== + + @memoized_property + def algorithm(self): + return PASSLIB_WRAPPER_PREFIX + self.passlib_handler.name + + #===================================================================== + # hasher api + #===================================================================== + def salt(self): + # NOTE: passlib's handler.hash() should generate new salt each time, + # so this just returns a special constant which tells + # encode() (below) not to pass a salt keyword along. + return _GEN_SALT_SIGNAL + + def verify(self, password, encoded): + return self.passlib_handler.verify(password, encoded) + + def encode(self, password, salt=None, rounds=None, iterations=None): + kwds = {} + if salt is not None and salt != _GEN_SALT_SIGNAL: + kwds['salt'] = salt + if self._has_rounds: + if rounds is not None: + kwds['rounds'] = rounds + elif iterations is not None: + kwds['rounds'] = iterations + else: + kwds['rounds'] = self.rounds + elif rounds is not None or iterations is not None: + warn("%s.hash(): 'rounds' and 'iterations' are ignored" % self.__name__) + handler = self.passlib_handler + if kwds: + handler = handler.using(**kwds) + return handler.hash(password) + + def safe_summary(self, encoded): + from django.contrib.auth.hashers import mask_hash + from django.utils.translation import ugettext_noop as _ + handler = self.passlib_handler + items = [ + # since this is user-facing, we're reporting passlib's name, + # without the distracting PASSLIB_HASHER_PREFIX prepended. + (_('algorithm'), handler.name), + ] + if hasattr(handler, "parsehash"): + kwds = handler.parsehash(encoded, sanitize=mask_hash) + for key, value in iteritems(kwds): + key = self._translate_kwds.get(key, key) + items.append((_(key), value)) + return OrderedDict(items) + + def must_update(self, encoded): + # TODO: would like access CryptContext, would need caller to pass it to get_passlib_hasher(). 
+ # for now (as of passlib 1.6.6), replicating django policy that this returns True + # if 'encoded' hash has different rounds value from self.rounds + if self._has_rounds: + # XXX: could cache this subclass somehow (would have to intercept writes to self.rounds) + # TODO: always call subcls/handler.needs_update() in case there's other things to check + subcls = self.passlib_handler.using(min_rounds=self.rounds, max_rounds=self.rounds) + if subcls.needs_update(encoded): + return True + return False + + #===================================================================== + # eoc + #===================================================================== + +#============================================================================= +# adapting django hashers -> passlib handlers +#============================================================================= +# TODO: this code probably halfway works, mainly just needs +# a routine to read HASHERS and PREFERRED_HASHER. + +##from passlib.registry import register_crypt_handler +##from passlib.utils import classproperty, to_native_str, to_unicode +##from passlib.utils.compat import unicode +## +## +##class _HasherHandler(object): +## "helper for wrapping Hasher instances as passlib handlers" +## # FIXME: this generic wrapper doesn't handle custom settings +## # FIXME: genconfig / genhash not supported. +## +## def __init__(self, hasher): +## self.django_hasher = hasher +## if hasattr(hasher, "iterations"): +## # assume encode() accepts an "iterations" parameter. +## # fake min/max rounds +## self.min_rounds = 1 +## self.max_rounds = 0xFFFFffff +## self.default_rounds = self.django_hasher.iterations +## self.setting_kwds += ("rounds",) +## +## # hasher instance - filled in by constructor +## django_hasher = None +## +## setting_kwds = ("salt",) +## context_kwds = () +## +## @property +## def name(self): +## # XXX: need to make sure this wont' collide w/ builtin django hashes. +## # maybe by renaming this to django compatible aliases? +## return DJANGO_PASSLIB_PREFIX + self.django_name +## +## @property +## def django_name(self): +## # expose this so hasher_to_passlib_name() extracts original name +## return self.django_hasher.algorithm +## +## @property +## def ident(self): +## # this should always be correct, as django relies on ident prefix. +## return unicode(self.django_name + "$") +## +## @property +## def identify(self, hash): +## # this should always work, as django relies on ident prefix. +## return to_unicode(hash, "latin-1", "hash").startswith(self.ident) +## +## @property +## def hash(self, secret, salt=None, **kwds): +## # NOTE: from how make_password() is coded, all hashers +## # should have salt param. but only some will have +## # 'iterations' parameter. 
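# --- reviewer sketch (not part of this patch): the rounds-pinning idea behind
# --- must_update() above, shown with pbkdf2_sha256 (any rounds-bearing passlib hash
# --- works; the round counts are arbitrary examples).
from passlib.hash import pbkdf2_sha256

configured_rounds = 29000
legacy_hash = pbkdf2_sha256.using(rounds=10000).hash("hunter2")

pinned = pbkdf2_sha256.using(min_rounds=configured_rounds, max_rounds=configured_rounds)
print(pinned.needs_update(legacy_hash))   # True -- stored rounds differ from the pinned value
# --- end reviewer sketch ---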
+## opts = {} +## if 'rounds' in self.setting_kwds and 'rounds' in kwds: +## opts['iterations'] = kwds.pop("rounds") +## if kwds: +## raise TypeError("unexpected keyword arguments: %r" % list(kwds)) +## if isinstance(secret, unicode): +## secret = secret.encode("utf-8") +## if salt is None: +## salt = self.django_hasher.salt() +## return to_native_str(self.django_hasher(secret, salt, **opts)) +## +## @property +## def verify(self, secret, hash): +## hash = to_native_str(hash, "utf-8", "hash") +## if isinstance(secret, unicode): +## secret = secret.encode("utf-8") +## return self.django_hasher.verify(secret, hash) +## +##def register_hasher(hasher): +## handler = _HasherHandler(hasher) +## register_crypt_handler(handler) +## return handler + +#============================================================================= +# monkeypatch helpers +#============================================================================= +# private singleton indicating lack-of-value +_UNSET = object() + +class _PatchManager(object): + """helper to manage monkeypatches and run sanity checks""" + + # NOTE: this could easily use a dict interface, + # but keeping it distinct to make clear that it's not a dict, + # since it has important side-effects. + + #=================================================================== + # init and support + #=================================================================== + def __init__(self, log=None): + # map of key -> (original value, patched value) + # original value may be _UNSET + self.log = log or logging.getLogger(__name__ + "._PatchManager") + self._state = {} + + def isactive(self): + return bool(self._state) + + # bool value tests if any patches are currently applied. + # NOTE: this behavior is deprecated in favor of .isactive + __bool__ = __nonzero__ = isactive + + def _import_path(self, path): + """retrieve obj and final attribute name from resource path""" + name, attr = path.split(":") + obj = __import__(name, fromlist=[attr], level=0) + while '.' 
in attr: + head, attr = attr.split(".", 1) + obj = getattr(obj, head) + return obj, attr + + @staticmethod + def _is_same_value(left, right): + """check if two values are the same (stripping method wrappers, etc)""" + return get_method_function(left) == get_method_function(right) + + #=================================================================== + # reading + #=================================================================== + def _get_path(self, key, default=_UNSET): + obj, attr = self._import_path(key) + return getattr(obj, attr, default) + + def get(self, path, default=None): + """return current value for path""" + return self._get_path(path, default) + + def getorig(self, path, default=None): + """return original (unpatched) value for path""" + try: + value, _ = self._state[path] + except KeyError: + value = self._get_path(path) + return default if value is _UNSET else value + + def check_all(self, strict=False): + """run sanity check on all keys, issue warning if out of sync""" + same = self._is_same_value + for path, (orig, expected) in iteritems(self._state): + if same(self._get_path(path), expected): + continue + msg = "another library has patched resource: %r" % path + if strict: + raise RuntimeError(msg) + else: + warn(msg, PasslibRuntimeWarning) + + #=================================================================== + # patching + #=================================================================== + def _set_path(self, path, value): + obj, attr = self._import_path(path) + if value is _UNSET: + if hasattr(obj, attr): + delattr(obj, attr) + else: + setattr(obj, attr, value) + + def patch(self, path, value, wrap=False): + """monkeypatch object+attr at <path> to have <value>, stores original""" + assert value != _UNSET + current = self._get_path(path) + try: + orig, expected = self._state[path] + except KeyError: + self.log.debug("patching resource: %r", path) + orig = current + else: + self.log.debug("modifying resource: %r", path) + if not self._is_same_value(current, expected): + warn("overriding resource another library has patched: %r" + % path, PasslibRuntimeWarning) + if wrap: + assert callable(value) + wrapped = orig + wrapped_by = value + def wrapper(*args, **kwds): + return wrapped_by(wrapped, *args, **kwds) + update_wrapper(wrapper, value) + value = wrapper + if callable(value): + # needed by DjangoContextAdapter init + get_method_function(value)._patched_original_value = orig + self._set_path(path, value) + self._state[path] = (orig, value) + + @classmethod + def peek_unpatched_func(cls, value): + return value._patched_original_value + + ##def patch_many(self, **kwds): + ## "override specified resources with new values" + ## for path, value in iteritems(kwds): + ## self.patch(path, value) + + def monkeypatch(self, parent, name=None, enable=True, wrap=False): + """function decorator which patches function of same name in <parent>""" + def builder(func): + if enable: + sep = "."
if ":" in parent else ":" + path = parent + sep + (name or func.__name__) + self.patch(path, func, wrap=wrap) + return func + if callable(name): + # called in non-decorator mode + func = name + name = None + builder(func) + return None + return builder + + #=================================================================== + # unpatching + #=================================================================== + def unpatch(self, path, unpatch_conflicts=True): + try: + orig, expected = self._state[path] + except KeyError: + return + current = self._get_path(path) + self.log.debug("unpatching resource: %r", path) + if not self._is_same_value(current, expected): + if unpatch_conflicts: + warn("reverting resource another library has patched: %r" + % path, PasslibRuntimeWarning) + else: + warn("not reverting resource another library has patched: %r" + % path, PasslibRuntimeWarning) + del self._state[path] + return + self._set_path(path, orig) + del self._state[path] + + def unpatch_all(self, **kwds): + for key in list(self._state): + self.unpatch(key, **kwds) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__init__.py b/ansible/lib/python3.11/site-packages/passlib/handlers/__init__.py new file mode 100644 index 000000000..0a0338c86 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/__init__.py @@ -0,0 +1 @@ +"""passlib.handlers -- holds implementations of all passlib's builtin hash formats""" diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 000000000..872a257bb Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/argon2.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/argon2.cpython-311.pyc new file mode 100644 index 000000000..2ad8392a9 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/argon2.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/bcrypt.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/bcrypt.cpython-311.pyc new file mode 100644 index 000000000..e89de5829 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/bcrypt.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/cisco.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/cisco.cpython-311.pyc new file mode 100644 index 000000000..8e13b7339 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/cisco.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/des_crypt.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/des_crypt.cpython-311.pyc new file mode 100644 index 000000000..54b48a4cf Binary files /dev/null and 
b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/des_crypt.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/digests.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/digests.cpython-311.pyc new file mode 100644 index 000000000..6d1f4535a Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/digests.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/django.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/django.cpython-311.pyc new file mode 100644 index 000000000..f985671a4 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/django.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/fshp.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/fshp.cpython-311.pyc new file mode 100644 index 000000000..7dbfa1ce3 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/fshp.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/ldap_digests.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/ldap_digests.cpython-311.pyc new file mode 100644 index 000000000..5654de445 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/ldap_digests.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/md5_crypt.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/md5_crypt.cpython-311.pyc new file mode 100644 index 000000000..e6a1e2e7f Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/md5_crypt.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/misc.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/misc.cpython-311.pyc new file mode 100644 index 000000000..296cf9c76 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/misc.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/mssql.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/mssql.cpython-311.pyc new file mode 100644 index 000000000..069c51982 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/mssql.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/mysql.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/mysql.cpython-311.pyc new file mode 100644 index 000000000..d794acfe6 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/mysql.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/oracle.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/oracle.cpython-311.pyc new file mode 100644 index 000000000..d1c207cef Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/oracle.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/pbkdf2.cpython-311.pyc 
b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/pbkdf2.cpython-311.pyc new file mode 100644 index 000000000..502602ec0 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/pbkdf2.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/phpass.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/phpass.cpython-311.pyc new file mode 100644 index 000000000..ca13407bf Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/phpass.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/postgres.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/postgres.cpython-311.pyc new file mode 100644 index 000000000..53bc25625 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/postgres.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/roundup.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/roundup.cpython-311.pyc new file mode 100644 index 000000000..54fb2c1ce Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/roundup.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/scram.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/scram.cpython-311.pyc new file mode 100644 index 000000000..1736c59df Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/scram.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/scrypt.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/scrypt.cpython-311.pyc new file mode 100644 index 000000000..569f243ff Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/scrypt.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/sha1_crypt.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/sha1_crypt.cpython-311.pyc new file mode 100644 index 000000000..628fa264a Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/sha1_crypt.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/sha2_crypt.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/sha2_crypt.cpython-311.pyc new file mode 100644 index 000000000..b5bdd8c61 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/sha2_crypt.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/sun_md5_crypt.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/sun_md5_crypt.cpython-311.pyc new file mode 100644 index 000000000..c476ed961 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/sun_md5_crypt.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/windows.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/windows.cpython-311.pyc new file mode 100644 index 000000000..f8e014486 Binary files /dev/null and 
b/ansible/lib/python3.11/site-packages/passlib/handlers/__pycache__/windows.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/argon2.py b/ansible/lib/python3.11/site-packages/passlib/handlers/argon2.py new file mode 100644 index 000000000..4a5691b45 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/argon2.py @@ -0,0 +1,1009 @@ +"""passlib.handlers.argon2 -- argon2 password hash wrapper + +References +========== +* argon2 + - home: https://github.com/P-H-C/phc-winner-argon2 + - whitepaper: https://github.com/P-H-C/phc-winner-argon2/blob/master/argon2-specs.pdf +* argon2 cffi wrapper + - pypi: https://pypi.python.org/pypi/argon2_cffi + - home: https://github.com/hynek/argon2_cffi +* argon2 pure python + - pypi: https://pypi.python.org/pypi/argon2pure + - home: https://github.com/bwesterb/argon2pure +""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement, absolute_import +# core +import logging +log = logging.getLogger(__name__) +import re +import types +from warnings import warn +# site +_argon2_cffi = None # loaded below +_argon2pure = None # dynamically imported by _load_backend_argon2pure() +# pkg +from passlib import exc +from passlib.crypto.digest import MAX_UINT32 +from passlib.utils import classproperty, to_bytes, render_bytes +from passlib.utils.binary import b64s_encode, b64s_decode +from passlib.utils.compat import u, unicode, bascii_to_str, uascii_to_str, PY2 +import passlib.utils.handlers as uh +# local +__all__ = [ + "argon2", +] + +#============================================================================= +# helpers +#============================================================================= + +# NOTE: when adding a new argon2 hash type, need to do the following: +# * add TYPE_XXX constant, and add to ALL_TYPES +# * make sure "_backend_type_map" constructors handle it correctly for all backends +# * make sure _hash_regex & _ident_regex (below) support type string. +# * add reference vectors for testing. + +#: argon2 type constants -- subclasses handle mapping these to backend-specific type constants. +#: (should be lowercase, to match representation in hash string) +TYPE_I = u("i") +TYPE_D = u("d") +TYPE_ID = u("id") # new 2016-10-29; passlib 1.7.2 requires backends new enough for support + +#: list of all known types; first (supported) type will be used as default. +ALL_TYPES = (TYPE_ID, TYPE_I, TYPE_D) +ALL_TYPES_SET = set(ALL_TYPES) + +#============================================================================= +# import argon2 package (https://pypi.python.org/pypi/argon2_cffi) +#============================================================================= + +# import cffi package +# NOTE: we try to do this even if caller is going to use argon2pure, +# so that we can always use the libargon2 default settings when possible. +_argon2_cffi_error = None +try: + import argon2 as _argon2_cffi +except ImportError: + _argon2_cffi = None +else: + if not hasattr(_argon2_cffi, "Type"): + # they have incompatible "argon2" package installed, instead of "argon2_cffi" package. + _argon2_cffi_error = ( + "'argon2' module points to unsupported 'argon2' pypi package; " + "please install 'argon2-cffi' instead." 
+ ) + _argon2_cffi = None + elif not hasattr(_argon2_cffi, "low_level"): + # they have pre-v16 argon2_cffi package + _argon2_cffi_error = "'argon2-cffi' is too old, please update to argon2_cffi >= 18.2.0" + _argon2_cffi = None + +# init default settings for our hasher class -- +# if we have argon2_cffi >= 16.0, use their default hasher settings, otherwise use static default +if hasattr(_argon2_cffi, "PasswordHasher"): + # use cffi's default settings + _default_settings = _argon2_cffi.PasswordHasher() + _default_version = _argon2_cffi.low_level.ARGON2_VERSION +else: + # use fallback settings (for no backend, or argon2pure) + class _DummyCffiHasher: + """ + dummy object to use as source of defaults when argon2_cffi isn't present. + this tries to mimic the attributes of ``argon2.PasswordHasher()`` which the rest of + this module reads. + + .. note:: values last synced w/ argon2 19.2 as of 2019-11-09 + """ + time_cost = 2 + memory_cost = 512 + parallelism = 2 + salt_len = 16 + hash_len = 16 + # NOTE: "type" attribute added in argon2_cffi v18.2; but currently not reading it + # type = _argon2_cffi.Type.ID + + _default_settings = _DummyCffiHasher() + _default_version = 0x13 # v1.9 + +#============================================================================= +# handler +#============================================================================= +class _Argon2Common(uh.SubclassBackendMixin, uh.ParallelismMixin, + uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, + uh.GenericHandler): + """ + Base class which implements brunt of Argon2 code. + This is then subclassed by the various backends, + to override w/ backend-specific methods. + + When a backend is loaded, the bases of the 'argon2' class proper + are modified to prepend the correct backend-specific subclass. + """ + #=================================================================== + # class attrs + #=================================================================== + + #------------------------ + # PasswordHash + #------------------------ + + name = "argon2" + setting_kwds = ("salt", + "salt_size", + "salt_len", # 'salt_size' alias for compat w/ argon2 package + "rounds", + "time_cost", # 'rounds' alias for compat w/ argon2 package + "memory_cost", + "parallelism", + "digest_size", + "hash_len", # 'digest_size' alias for compat w/ argon2 package + "type", # the type of argon2 hash used + ) + + # TODO: could support the optional 'data' parameter, + # but need to research the uses, what a more descriptive name would be, + # and deal w/ fact that argon2_cffi 16.1 doesn't currently support it. 
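# --- reviewer sketch (not part of this patch): checking which backend the import logic
# --- above ends up with on a given machine. get_backend() is passlib's standard
# --- backend-selection call; it raises MissingBackendError when neither argon2_cffi
# --- nor argon2pure is installed.
from passlib import exc
from passlib.hash import argon2

try:
    print("argon2 backend:", argon2.get_backend())   # e.g. "argon2_cffi" or "argon2pure"
except exc.MissingBackendError:
    print("no argon2 backend installed; 'pip install argon2_cffi' provides one")
# --- end reviewer sketch ---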
+ # (argon2_pure does though) + + #------------------------ + # GenericHandler + #------------------------ + + # NOTE: ident -- all argon2 hashes start with "$argon2$" + # XXX: could programmaticaly generate "ident_values" string from ALL_TYPES above + + checksum_size = _default_settings.hash_len + + #: force parsing these kwds + _always_parse_settings = uh.GenericHandler._always_parse_settings + \ + ("type",) + + #: exclude these kwds from parsehash() result (most are aliases for other keys) + _unparsed_settings = uh.GenericHandler._unparsed_settings + \ + ("salt_len", "time_cost", "hash_len", "digest_size") + + #------------------------ + # HasSalt + #------------------------ + default_salt_size = _default_settings.salt_len + min_salt_size = 8 + max_salt_size = MAX_UINT32 + + #------------------------ + # HasRounds + # TODO: once rounds limit logic is factored out, + # make 'rounds' and 'cost' an alias for 'time_cost' + #------------------------ + default_rounds = _default_settings.time_cost + min_rounds = 1 + max_rounds = MAX_UINT32 + rounds_cost = "linear" + + #------------------------ + # ParalleismMixin + #------------------------ + max_parallelism = (1 << 24) - 1 # from argon2.h / ARGON2_MAX_LANES + + #------------------------ + # custom + #------------------------ + + #: max version support + #: NOTE: this is dependant on the backend, and initialized/modified by set_backend() + max_version = _default_version + + #: minimum version before needs_update() marks the hash; if None, defaults to max_version + min_desired_version = None + + #: minimum valid memory_cost + min_memory_cost = 8 # from argon2.h / ARGON2_MIN_MEMORY + + #: maximum number of threads (-1=unlimited); + #: number of threads used by .hash() will be min(parallelism, max_threads) + max_threads = -1 + + #: global flag signalling argon2pure backend to use threads + #: rather than subprocesses. + pure_use_threads = False + + #: internal helper used to store mapping of TYPE_XXX constants -> backend-specific type constants; + #: this is populated by _load_backend_mixin(); and used to detect which types are supported. + #: XXX: could expose keys as class-level .supported_types property? + _backend_type_map = {} + + @classproperty + def type_values(cls): + """ + return tuple of types supported by this backend + + .. versionadded:: 1.7.2 + """ + cls.get_backend() # make sure backend is loaded + return tuple(cls._backend_type_map) + + #=================================================================== + # instance attrs + #=================================================================== + + #: argon2 hash type, one of ALL_TYPES -- class value controls the default + #: .. versionadded:: 1.7.2 + type = TYPE_ID + + #: parallelism setting -- class value controls the default + parallelism = _default_settings.parallelism + + #: hash version (int) + #: NOTE: this is modified by set_backend() + version = _default_version + + #: memory cost -- class value controls the default + memory_cost = _default_settings.memory_cost + + @property + def type_d(self): + """ + flag indicating a Type D hash + + .. 
deprecated:: 1.7.2; will be removed in passlib 2.0 + """ + return self.type == TYPE_D + + #: optional secret data + data = None + + #=================================================================== + # variant constructor + #=================================================================== + + @classmethod + def using(cls, type=None, memory_cost=None, salt_len=None, time_cost=None, digest_size=None, + checksum_size=None, hash_len=None, max_threads=None, **kwds): + # support aliases which match argon2 naming convention + if time_cost is not None: + if "rounds" in kwds: + raise TypeError("'time_cost' and 'rounds' are mutually exclusive") + kwds['rounds'] = time_cost + + if salt_len is not None: + if "salt_size" in kwds: + raise TypeError("'salt_len' and 'salt_size' are mutually exclusive") + kwds['salt_size'] = salt_len + + if hash_len is not None: + if digest_size is not None: + raise TypeError("'hash_len' and 'digest_size' are mutually exclusive") + digest_size = hash_len + + if checksum_size is not None: + if digest_size is not None: + raise TypeError("'checksum_size' and 'digest_size' are mutually exclusive") + digest_size = checksum_size + + # create variant + subcls = super(_Argon2Common, cls).using(**kwds) + + # set type + if type is not None: + subcls.type = subcls._norm_type(type) + + # set checksum size + relaxed = kwds.get("relaxed") + if digest_size is not None: + if isinstance(digest_size, uh.native_string_types): + digest_size = int(digest_size) + # NOTE: this isn't *really* digest size minimum, but want to enforce secure minimum. + subcls.checksum_size = uh.norm_integer(subcls, digest_size, min=16, max=MAX_UINT32, + param="digest_size", relaxed=relaxed) + + # set memory cost + if memory_cost is not None: + if isinstance(memory_cost, uh.native_string_types): + memory_cost = int(memory_cost) + subcls.memory_cost = subcls._norm_memory_cost(memory_cost, relaxed=relaxed) + + # validate constraints + subcls._validate_constraints(subcls.memory_cost, subcls.parallelism) + + # set max threads + if max_threads is not None: + if isinstance(max_threads, uh.native_string_types): + max_threads = int(max_threads) + if max_threads < 1 and max_threads != -1: + raise ValueError("max_threads (%d) must be -1 (unlimited), or at least 1." % + (max_threads,)) + subcls.max_threads = max_threads + + return subcls + + @classmethod + def _validate_constraints(cls, memory_cost, parallelism): + # NOTE: this is used by class & instance, hence passing in via arguments. + # could switch and make this a hybrid method. 
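# --- reviewer sketch (not part of this patch): the alias handling in using() above,
# --- seen from the caller's side. Requires an installed argon2 backend; parameter
# --- values are arbitrary examples.
from passlib.hash import argon2

custom = argon2.using(type="id", time_cost=3, memory_cost=65536, parallelism=4)
h = custom.hash("hunter2")
print(h.startswith("$argon2id$"))    # True
print(custom.verify("hunter2", h))   # True
# --- end reviewer sketch ---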
+ min_memory_cost = 8 * parallelism + if memory_cost < min_memory_cost: + raise ValueError("%s: memory_cost (%d) is too low, must be at least " + "8 * parallelism (8 * %d = %d)" % + (cls.name, memory_cost, + parallelism, min_memory_cost)) + + #=================================================================== + # public api + #=================================================================== + + #: shorter version of _hash_regex, used to quickly identify hashes + _ident_regex = re.compile(r"^\$argon2[a-z]+\$") + + @classmethod + def identify(cls, hash): + hash = uh.to_unicode_for_identify(hash) + return cls._ident_regex.match(hash) is not None + + # hash(), verify(), genhash() -- implemented by backend subclass + + #=================================================================== + # hash parsing / rendering + #=================================================================== + + # info taken from source of decode_string() function in the argon2 reference + # implementation (the phc-winner-argon2 repo linked in the module docstring). + # + # hash format: + # $argon2<type>[$v=<version>]$m=<memory_cost>,t=<time_cost>,p=<parallelism>[,keyid=<keyid>][,data=<data>][$<salt>[$<digest>]] + # + # NOTE: as of 2016-6-17, the official source (above) lists the "keyid" param in the comments, + # but the actual source of decode_string & encode_string don't mention it at all. + # we're supporting parsing it, but throw NotImplementedError if encountered. + # + # sample hashes: + # v1.0: '$argon2i$m=512,t=2,p=2$5VtWOO3cGWYQHEMaYGbsfQ$AcmqasQgW/wI6wAHAMk4aQ' + # v1.3: '$argon2i$v=19$m=512,t=2,p=2$5VtWOO3cGWYQHEMaYGbsfQ$AcmqasQgW/wI6wAHAMk4aQ' + + #: regex to parse argon hash + _hash_regex = re.compile(br""" + ^ + \$argon2(?P<type>[a-z]+)\$ + (?: + v=(?P<version>\d+) + \$ + )? + m=(?P<memory_cost>\d+) + , + t=(?P<time_cost>\d+) + , + p=(?P<parallelism>\d+) + (?: + ,keyid=(?P<keyid>[^,$]+) + )? + (?: + ,data=(?P<data>[^,$]+) + )? + (?: + \$ + (?P<salt>[^$]+) + (?: + \$ + (?P<digest>.+) + )? + )? + $ + """, re.X) + + @classmethod + def from_string(cls, hash): + # NOTE: assuming hash will be unicode, or use ascii-compatible encoding.
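# --- reviewer sketch (not part of this patch): the identify() fast path above only
# --- checks the "$argon2<type>$" prefix, so it works even without an argon2 backend.
# --- The sample string is the v1.3 reference hash quoted in the comments above.
from passlib.hash import argon2

sample = "$argon2i$v=19$m=512,t=2,p=2$5VtWOO3cGWYQHEMaYGbsfQ$AcmqasQgW/wI6wAHAMk4aQ"
print(argon2.identify(sample))         # True
print(argon2.identify("$2b$12$x"))     # False -- bcrypt-style prefix, not argon2
# --- end reviewer sketch ---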
+ # TODO: switch to working w/ str or unicode + if isinstance(hash, unicode): + hash = hash.encode("utf-8") + if not isinstance(hash, bytes): + raise exc.ExpectedStringError(hash, "hash") + m = cls._hash_regex.match(hash) + if not m: + raise exc.MalformedHashError(cls) + type, version, memory_cost, time_cost, parallelism, keyid, data, salt, digest = \ + m.group("type", "version", "memory_cost", "time_cost", "parallelism", + "keyid", "data", "salt", "digest") + if keyid: + raise NotImplementedError("argon2 'keyid' parameter not supported") + return cls( + type=type.decode("ascii"), + version=int(version) if version else 0x10, + memory_cost=int(memory_cost), + rounds=int(time_cost), + parallelism=int(parallelism), + salt=b64s_decode(salt) if salt else None, + data=b64s_decode(data) if data else None, + checksum=b64s_decode(digest) if digest else None, + ) + + def to_string(self): + version = self.version + if version == 0x10: + vstr = "" + else: + vstr = "v=%d$" % version + + data = self.data + if data: + kdstr = ",data=" + bascii_to_str(b64s_encode(self.data)) + else: + kdstr = "" + + # NOTE: 'keyid' param currently not supported + return "$argon2%s$%sm=%d,t=%d,p=%d%s$%s$%s" % ( + uascii_to_str(self.type), + vstr, + self.memory_cost, + self.rounds, + self.parallelism, + kdstr, + bascii_to_str(b64s_encode(self.salt)), + bascii_to_str(b64s_encode(self.checksum)), + ) + + #=================================================================== + # init + #=================================================================== + def __init__(self, type=None, type_d=False, version=None, memory_cost=None, data=None, **kwds): + + # handle deprecated kwds + if type_d: + warn('argon2 `type_d=True` keyword is deprecated, and will be removed in passlib 2.0; ' + 'please use ``type="d"`` instead') + assert type is None + type = TYPE_D + + # TODO: factor out variable checksum size support into a mixin. + # set checksum size to specific value before _norm_checksum() is called + checksum = kwds.get("checksum") + if checksum is not None: + self.checksum_size = len(checksum) + + # call parent + super(_Argon2Common, self).__init__(**kwds) + + # init type + if type is None: + assert uh.validate_default_value(self, self.type, self._norm_type, param="type") + else: + self.type = self._norm_type(type) + + # init version + if version is None: + assert uh.validate_default_value(self, self.version, self._norm_version, + param="version") + else: + self.version = self._norm_version(version) + + # init memory cost + if memory_cost is None: + assert uh.validate_default_value(self, self.memory_cost, self._norm_memory_cost, + param="memory_cost") + else: + self.memory_cost = self._norm_memory_cost(memory_cost) + + # init data + if data is None: + assert self.data is None + else: + if not isinstance(data, bytes): + raise uh.exc.ExpectedTypeError(data, "bytes", "data") + self.data = data + + #------------------------------------------------------------------- + # parameter guards + #------------------------------------------------------------------- + + @classmethod + def _norm_type(cls, value): + # type check + if not isinstance(value, unicode): + if PY2 and isinstance(value, bytes): + value = value.decode('ascii') + else: + raise uh.exc.ExpectedTypeError(value, "str", "type") + + # check if type is valid + if value in ALL_TYPES_SET: + return value + + # translate from uppercase + temp = value.lower() + if temp in ALL_TYPES_SET: + return temp + + # failure! 
+ raise ValueError("unknown argon2 hash type: %r" % (value,)) + + @classmethod + def _norm_version(cls, version): + if not isinstance(version, uh.int_types): + raise uh.exc.ExpectedTypeError(version, "integer", "version") + + # minimum valid version + if version < 0x13 and version != 0x10: + raise ValueError("invalid argon2 hash version: %d" % (version,)) + + # check this isn't past backend's max version + backend = cls.get_backend() + if version > cls.max_version: + raise ValueError("%s: hash version 0x%X not supported by %r backend " + "(max version is 0x%X); try updating or switching backends" % + (cls.name, version, backend, cls.max_version)) + return version + + @classmethod + def _norm_memory_cost(cls, memory_cost, relaxed=False): + return uh.norm_integer(cls, memory_cost, min=cls.min_memory_cost, + param="memory_cost", relaxed=relaxed) + + #=================================================================== + # digest calculation + #=================================================================== + + # NOTE: _calc_checksum implemented by backend subclass + + @classmethod + def _get_backend_type(cls, value): + """ + helper to resolve backend constant from type + """ + try: + return cls._backend_type_map[value] + except KeyError: + pass + # XXX: pick better error class? + msg = "unsupported argon2 hash (type %r not supported by %s backend)" % \ + (value, cls.get_backend()) + raise ValueError(msg) + + #=================================================================== + # hash migration + #=================================================================== + + def _calc_needs_update(self, **kwds): + cls = type(self) + if self.type != cls.type: + return True + minver = cls.min_desired_version + if minver is None or minver > cls.max_version: + minver = cls.max_version + if self.version < minver: + # version is too old. + return True + if self.memory_cost != cls.memory_cost: + return True + if self.checksum_size != cls.checksum_size: + return True + return super(_Argon2Common, self)._calc_needs_update(**kwds) + + #=================================================================== + # backend loading + #=================================================================== + + _no_backend_suggestion = " -- recommend you install one (e.g. 'pip install argon2_cffi')" + + @classmethod + def _finalize_backend_mixin(mixin_cls, name, dryrun): + """ + helper called by from backend mixin classes' _load_backend_mixin() -- + invoked after backend imports have been loaded, and performs + feature detection & testing common to all backends. + """ + # check argon2 version + max_version = mixin_cls.max_version + assert isinstance(max_version, int) and max_version >= 0x10 + if max_version < 0x13: + warn("%r doesn't support argon2 v1.3, and should be upgraded" % name, + uh.exc.PasslibSecurityWarning) + + # prefer best available type + for type in ALL_TYPES: + if type in mixin_cls._backend_type_map: + mixin_cls.type = type + break + else: + warn("%r lacks support for all known hash types" % name, uh.exc.PasslibRuntimeWarning) + # NOTE: class will just throw "unsupported argon2 hash" error if they try to use it... + mixin_cls.type = TYPE_ID + + return True + + @classmethod + def _adapt_backend_error(cls, err, hash=None, self=None): + """ + internal helper invoked when backend has hash/verification error; + used to adapt to passlib message. + """ + backend = cls.get_backend() + + # parse hash to throw error if format was invalid, parameter out of range, etc. 
+ if self is None and hash is not None: + self = cls.from_string(hash) + + # check constraints on parsed object + # XXX: could move this to __init__, but not needed by needs_update calls + if self is not None: + self._validate_constraints(self.memory_cost, self.parallelism) + + # as of cffi 16.1, lacks support in hash_secret(), so genhash() will get here. + # as of cffi 16.2, support removed from verify_secret() as well. + if backend == "argon2_cffi" and self.data is not None: + raise NotImplementedError("argon2_cffi backend doesn't support the 'data' parameter") + + # fallback to reporting a malformed hash + text = str(err) + if text not in [ + "Decoding failed" # argon2_cffi's default message + ]: + reason = "%s reported: %s: hash=%r" % (backend, text, hash) + else: + reason = repr(hash) + raise exc.MalformedHashError(cls, reason=reason) + + #=================================================================== + # eoc + #=================================================================== + +#----------------------------------------------------------------------- +# stub backend +#----------------------------------------------------------------------- +class _NoBackend(_Argon2Common): + """ + mixin used before any backend has been loaded. + contains stubs that force loading of one of the available backends. + """ + #=================================================================== + # primary methods + #=================================================================== + @classmethod + def hash(cls, secret): + cls._stub_requires_backend() + return cls.hash(secret) + + @classmethod + def verify(cls, secret, hash): + cls._stub_requires_backend() + return cls.verify(secret, hash) + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genhash(cls, secret, config): + cls._stub_requires_backend() + return cls.genhash(secret, config) + + #=================================================================== + # digest calculation + #=================================================================== + def _calc_checksum(self, secret): + # NOTE: since argon2_cffi takes care of rendering hash, + # _calc_checksum() is only used by the argon2pure backend. 
+ self._stub_requires_backend() + # NOTE: have to use super() here so that we don't recursively + # call subclass's wrapped _calc_checksum + return super(argon2, self)._calc_checksum(secret) + + #=================================================================== + # eoc + #=================================================================== + +#----------------------------------------------------------------------- +# argon2_cffi backend +#----------------------------------------------------------------------- +class _CffiBackend(_Argon2Common): + """ + argon2_cffi backend + """ + #=================================================================== + # backend loading + #=================================================================== + + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + # make sure we write info to base class's __dict__, not that of a subclass + assert mixin_cls is _CffiBackend + + # we automatically import this at top, so just grab info + if _argon2_cffi is None: + if _argon2_cffi_error: + raise exc.PasslibSecurityError(_argon2_cffi_error) + return False + max_version = _argon2_cffi.low_level.ARGON2_VERSION + log.debug("detected 'argon2_cffi' backend, version %r, with support for 0x%x argon2 hashes", + _argon2_cffi.__version__, max_version) + + # build type map + TypeEnum = _argon2_cffi.Type + type_map = {} + for type in ALL_TYPES: + try: + type_map[type] = getattr(TypeEnum, type.upper()) + except AttributeError: + # TYPE_ID support not added until v18.2 + assert type not in (TYPE_I, TYPE_D), "unexpected missing type: %r" % type + mixin_cls._backend_type_map = type_map + + # set version info, and run common setup + mixin_cls.version = mixin_cls.max_version = max_version + return mixin_cls._finalize_backend_mixin(name, dryrun) + + #=================================================================== + # primary methods + #=================================================================== + @classmethod + def hash(cls, secret): + # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9. + uh.validate_secret(secret) + secret = to_bytes(secret, "utf-8") + # XXX: doesn't seem to be a way to make this honor max_threads + try: + return bascii_to_str(_argon2_cffi.low_level.hash_secret( + type=cls._get_backend_type(cls.type), + memory_cost=cls.memory_cost, + time_cost=cls.default_rounds, + parallelism=cls.parallelism, + salt=to_bytes(cls._generate_salt()), + hash_len=cls.checksum_size, + secret=secret, + )) + except _argon2_cffi.exceptions.HashingError as err: + raise cls._adapt_backend_error(err) + + #: helper for verify() method below -- maps prefixes to type constants + _byte_ident_map = dict((render_bytes(b"$argon2%s$", type.encode("ascii")), type) + for type in ALL_TYPES) + + @classmethod + def verify(cls, secret, hash): + # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9. 
+ uh.validate_secret(secret) + secret = to_bytes(secret, "utf-8") + hash = to_bytes(hash, "ascii") + + # read type from start of hash + # NOTE: don't care about malformed strings, lowlevel will throw error for us + type = cls._byte_ident_map.get(hash[:1+hash.find(b"$", 1)], TYPE_I) + type_code = cls._get_backend_type(type) + + # XXX: doesn't seem to be a way to make this honor max_threads + try: + result = _argon2_cffi.low_level.verify_secret(hash, secret, type_code) + assert result is True + return True + except _argon2_cffi.exceptions.VerifyMismatchError: + return False + except _argon2_cffi.exceptions.VerificationError as err: + raise cls._adapt_backend_error(err, hash=hash) + + # NOTE: deprecated, will be removed in 2.0 + @classmethod + def genhash(cls, secret, config): + # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9. + uh.validate_secret(secret) + secret = to_bytes(secret, "utf-8") + self = cls.from_string(config) + # XXX: doesn't seem to be a way to make this honor max_threads + try: + result = bascii_to_str(_argon2_cffi.low_level.hash_secret( + type=cls._get_backend_type(self.type), + memory_cost=self.memory_cost, + time_cost=self.rounds, + parallelism=self.parallelism, + salt=to_bytes(self.salt), + hash_len=self.checksum_size, + secret=secret, + version=self.version, + )) + except _argon2_cffi.exceptions.HashingError as err: + raise cls._adapt_backend_error(err, hash=config) + if self.version == 0x10: + # workaround: argon2 0x13 always returns "v=" segment, even for 0x10 hashes + result = result.replace("$v=16$", "$") + return result + + #=================================================================== + # digest calculation + #=================================================================== + def _calc_checksum(self, secret): + raise AssertionError("shouldn't be called under argon2_cffi backend") + + #=================================================================== + # eoc + #=================================================================== + +#----------------------------------------------------------------------- +# argon2pure backend +#----------------------------------------------------------------------- +class _PureBackend(_Argon2Common): + """ + argon2pure backend + """ + #=================================================================== + # backend loading + #=================================================================== + + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + # make sure we write info to base class's __dict__, not that of a subclass + assert mixin_cls is _PureBackend + + # import argon2pure + global _argon2pure + try: + import argon2pure as _argon2pure + except ImportError: + return False + + # get default / max supported version -- added in v1.2.2 + try: + from argon2pure import ARGON2_DEFAULT_VERSION as max_version + except ImportError: + log.warning("detected 'argon2pure' backend, but package is too old " + "(passlib requires argon2pure >= 1.2.3)") + return False + + log.debug("detected 'argon2pure' backend, with support for 0x%x argon2 hashes", + max_version) + + if not dryrun: + warn("Using argon2pure backend, which is 100x+ slower than is required " + "for adequate security. 
Installing argon2_cffi (via 'pip install argon2_cffi') " + "is strongly recommended", exc.PasslibSecurityWarning) + + # build type map + type_map = {} + for type in ALL_TYPES: + try: + type_map[type] = getattr(_argon2pure, "ARGON2" + type.upper()) + except AttributeError: + # TYPE_ID support not added until v1.3 + assert type not in (TYPE_I, TYPE_D), "unexpected missing type: %r" % type + mixin_cls._backend_type_map = type_map + + mixin_cls.version = mixin_cls.max_version = max_version + return mixin_cls._finalize_backend_mixin(name, dryrun) + + #=================================================================== + # primary methods + #=================================================================== + + # NOTE: this backend uses default .hash() & .verify() implementations. + + #=================================================================== + # digest calculation + #=================================================================== + def _calc_checksum(self, secret): + # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9. + uh.validate_secret(secret) + secret = to_bytes(secret, "utf-8") + kwds = dict( + password=secret, + salt=self.salt, + time_cost=self.rounds, + memory_cost=self.memory_cost, + parallelism=self.parallelism, + tag_length=self.checksum_size, + type_code=self._get_backend_type(self.type), + version=self.version, + ) + if self.max_threads > 0: + kwds['threads'] = self.max_threads + if self.pure_use_threads: + kwds['use_threads'] = True + if self.data: + kwds['associated_data'] = self.data + # NOTE: should return raw bytes + # NOTE: this may raise _argon2pure.Argon2ParameterError, + # but it if does that, there's a bug in our own parameter checking code. + try: + return _argon2pure.argon2(**kwds) + except _argon2pure.Argon2Error as err: + raise self._adapt_backend_error(err, self=self) + + #=================================================================== + # eoc + #=================================================================== + +class argon2(_NoBackend, _Argon2Common): + """ + This class implements the Argon2 password hash [#argon2-home]_, and follows the :ref:`password-hash-api`. + + Argon2 supports a variable-length salt, and variable time & memory cost, + and a number of other configurable parameters. + + The :meth:`~passlib.ifc.PasswordHash.replace` method accepts the following optional keywords: + + :type type: str + :param type: + Specify the type of argon2 hash to generate. + Can be one of "ID", "I", "D". + + This defaults to "ID" if supported by the backend, otherwise "I". + + :type salt: str + :param salt: + Optional salt string. + If specified, the length must be between 0-1024 bytes. + If not specified, one will be auto-generated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + This corresponds linearly to the amount of time hashing will take. + + :type time_cost: int + :param time_cost: + An alias for **rounds**, for compatibility with underlying argon2 library. + + :param int memory_cost: + Defines the memory usage in kibibytes. + This corresponds linearly to the amount of memory hashing will take. + + :param int parallelism: + Defines the parallelization factor. + *NOTE: this will affect the resulting hash value.* + + :param int digest_size: + Length of the digest in bytes. + + :param int max_threads: + Maximum number of threads that will be used. 
+ -1 means unlimited; otherwise hashing will use ``min(parallelism, max_threads)`` threads. + + .. note:: + + This option is currently only honored by the argon2pure backend. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionchanged:: 1.7.2 + + Added the "type" keyword, and support for type "D" and "ID" hashes. + (Prior versions could verify type "D" hashes, but not generate them). + + .. todo:: + + * Support configurable threading limits. + """ + #============================================================================= + # backend + #============================================================================= + + # NOTE: the brunt of the argon2 class is implemented in _Argon2Common. + # there are then subclass for each backend (e.g. _PureBackend), + # these are dynamically prepended to this class's bases + # in order to load the appropriate backend. + + #: list of potential backends + backends = ("argon2_cffi", "argon2pure") + + #: flag that this class's bases should be modified by SubclassBackendMixin + _backend_mixin_target = True + + #: map of backend -> mixin class, used by _get_backend_loader() + _backend_mixin_map = { + None: _NoBackend, + "argon2_cffi": _CffiBackend, + "argon2pure": _PureBackend, + } + + #============================================================================= + # + #============================================================================= + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/bcrypt.py b/ansible/lib/python3.11/site-packages/passlib/handlers/bcrypt.py new file mode 100644 index 000000000..b83b1107e --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/bcrypt.py @@ -0,0 +1,1243 @@ +"""passlib.bcrypt -- implementation of OpenBSD's BCrypt algorithm. + +TODO: + +* support 2x and altered-2a hashes? 
+ http://www.openwall.com/lists/oss-security/2011/06/27/9 + +* deal with lack of PY3-compatibile c-ext implementation +""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement, absolute_import +# core +from base64 import b64encode +from hashlib import sha256 +import os +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +_bcrypt = None # dynamically imported by _load_backend_bcrypt() +_pybcrypt = None # dynamically imported by _load_backend_pybcrypt() +_bcryptor = None # dynamically imported by _load_backend_bcryptor() +# pkg +_builtin_bcrypt = None # dynamically imported by _load_backend_builtin() +from passlib.crypto.digest import compile_hmac +from passlib.exc import PasslibHashWarning, PasslibSecurityWarning, PasslibSecurityError +from passlib.utils import safe_crypt, repeat_string, to_bytes, parse_version, \ + rng, getrandstr, test_crypt, to_unicode, \ + utf8_truncate, utf8_repeat_string, crypt_accepts_bytes +from passlib.utils.binary import bcrypt64 +from passlib.utils.compat import get_unbound_method_function +from passlib.utils.compat import u, uascii_to_str, unicode, str_to_uascii, PY3, error_from +import passlib.utils.handlers as uh + +# local +__all__ = [ + "bcrypt", +] + +#============================================================================= +# support funcs & constants +#============================================================================= +IDENT_2 = u("$2$") +IDENT_2A = u("$2a$") +IDENT_2X = u("$2x$") +IDENT_2Y = u("$2y$") +IDENT_2B = u("$2b$") +_BNULL = b'\x00' + +# reference hash of "test", used in various self-checks +TEST_HASH_2A = "$2a$04$5BJqKfqMQvV7nS.yUguNcueVirQqDBGaLXSqj.rs.pZPlNR0UX/HK" + +def _detect_pybcrypt(): + """ + internal helper which tries to distinguish pybcrypt vs bcrypt. + + :returns: + True if cext-based py-bcrypt, + False if ffi-based bcrypt, + None if 'bcrypt' module not found. + + .. versionchanged:: 1.6.3 + + Now assuming bcrypt installed, unless py-bcrypt explicitly detected. + Previous releases assumed py-bcrypt by default. + + Making this change since py-bcrypt is (apparently) unmaintained and static, + whereas bcrypt is being actively maintained, and it's internal structure may shift. + """ + # NOTE: this is also used by the unittests. + + # check for module. + try: + import bcrypt + except ImportError: + # XXX: this is ignoring case where py-bcrypt's "bcrypt._bcrypt" C Ext fails to import; + # would need to inspect actual ImportError message to catch that. + return None + + # py-bcrypt has a "._bcrypt.__version__" attribute (confirmed for v0.1 - 0.4), + # which bcrypt lacks (confirmed for v1.0 - 2.0) + # "._bcrypt" alone isn't sufficient, since bcrypt 2.0 now has that attribute. + try: + from bcrypt._bcrypt import __version__ + except ImportError: + return False + return True + +#============================================================================= +# backend mixins +#============================================================================= +class _BcryptCommon(uh.SubclassBackendMixin, uh.TruncateMixin, uh.HasManyIdents, + uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """ + Base class which implements brunt of BCrypt code. + This is then subclassed by the various backends, + to override w/ backend-specific methods. 
+ + When a backend is loaded, the bases of the 'bcrypt' class proper + are modified to prepend the correct backend-specific subclass. + """ + #=================================================================== + # class attrs + #=================================================================== + + #-------------------- + # PasswordHash + #-------------------- + name = "bcrypt" + setting_kwds = ("salt", "rounds", "ident", "truncate_error") + + #-------------------- + # GenericHandler + #-------------------- + checksum_size = 31 + checksum_chars = bcrypt64.charmap + + #-------------------- + # HasManyIdents + #-------------------- + default_ident = IDENT_2B + ident_values = (IDENT_2, IDENT_2A, IDENT_2X, IDENT_2Y, IDENT_2B) + ident_aliases = {u("2"): IDENT_2, u("2a"): IDENT_2A, u("2y"): IDENT_2Y, + u("2b"): IDENT_2B} + + #-------------------- + # HasSalt + #-------------------- + min_salt_size = max_salt_size = 22 + salt_chars = bcrypt64.charmap + + # NOTE: 22nd salt char must be in restricted set of ``final_salt_chars``, not full set above. + final_salt_chars = ".Oeu" # bcrypt64._padinfo2[1] + + #-------------------- + # HasRounds + #-------------------- + default_rounds = 12 # current passlib default + min_rounds = 4 # minimum from bcrypt specification + max_rounds = 31 # 32-bit integer limit (since real_rounds=1< class + + # NOTE: set_backend() will execute the ._load_backend_mixin() + # of the matching mixin class, which will handle backend detection + + # appended to HasManyBackends' "no backends available" error message + _no_backend_suggestion = " -- recommend you install one (e.g. 'pip install bcrypt')" + + @classmethod + def _finalize_backend_mixin(mixin_cls, backend, dryrun): + """ + helper called by from backend mixin classes' _load_backend_mixin() -- + invoked after backend imports have been loaded, and performs + feature detection & testing common to all backends. + """ + #---------------------------------------------------------------- + # setup helpers + #---------------------------------------------------------------- + assert mixin_cls is bcrypt._backend_mixin_map[backend], \ + "_configure_workarounds() invoked from wrong class" + + if mixin_cls._workrounds_initialized: + return True + + verify = mixin_cls.verify + + err_types = (ValueError, uh.exc.MissingBackendError) + if _bcryptor: + err_types += (_bcryptor.engine.SaltError,) + + def safe_verify(secret, hash): + """verify() wrapper which traps 'unknown identifier' errors""" + try: + return verify(secret, hash) + except err_types: + # backends without support for given ident will throw various + # errors about unrecognized version: + # os_crypt -- internal code below throws + # - PasswordValueError if there's encoding issue w/ password. + # - InternalBackendError if crypt fails for unknown reason + # (trapped below so we can debug it) + # pybcrypt, bcrypt -- raises ValueError + # bcryptor -- raises bcryptor.engine.SaltError + return NotImplemented + except uh.exc.InternalBackendError: + # _calc_checksum() code may also throw CryptBackendError + # if correct hash isn't returned (e.g. 2y hash converted to 2b, + # such as happens with bcrypt 3.0.0) + log.debug("trapped unexpected response from %r backend: verify(%r, %r):", + backend, secret, hash, exc_info=True) + return NotImplemented + + def assert_lacks_8bit_bug(ident): + """ + helper to check for cryptblowfish 8bit bug (fixed in 2y/2b); + even though it's not known to be present in any of passlib's backends. 
+ this is treated as FATAL, because it can easily result in seriously malformed hashes, + and we can't correct for it ourselves. + + test cases from + reference hash is the incorrectly generated $2x$ hash taken from above url + """ + # NOTE: passlib 1.7.2 and earlier used the commented-out LATIN-1 test vector to detect + # this bug; but python3's crypt.crypt() only supports unicode inputs (and + # always encodes them as UTF8 before passing to crypt); so passlib 1.7.3 + # switched to the UTF8-compatible test vector below. This one's bug_hash value + # ("$2x$...rcAS") was drawn from the same openwall source (above); and the correct + # hash ("$2a$...X6eu") was generated by passing the raw bytes to python2's + # crypt.crypt() using OpenBSD 6.7 (hash confirmed as same for $2a$ & $2b$). + + # LATIN-1 test vector + # secret = b"\xA3" + # bug_hash = ident.encode("ascii") + b"05$/OK.fbVrR/bpIqNJ5ianF.CE5elHaaO4EbggVDjb8P19RukzXSM3e" + # correct_hash = ident.encode("ascii") + b"05$/OK.fbVrR/bpIqNJ5ianF.Sa7shbm4.OzKpvFnX1pQLmQW96oUlCq" + + # UTF-8 test vector + secret = b"\xd1\x91" # aka "\u0451" + bug_hash = ident.encode("ascii") + b"05$6bNw2HLQYeqHYyBfLMsv/OiwqTymGIGzFsA4hOTWebfehXHNprcAS" + correct_hash = ident.encode("ascii") + b"05$6bNw2HLQYeqHYyBfLMsv/OUcZd0LKP39b87nBw3.S2tVZSqiQX6eu" + + if verify(secret, bug_hash): + # NOTE: this only EVER be observed in (broken) 2a and (backward-compat) 2x hashes + # generated by crypt_blowfish library. 2y/2b hashes should not have the bug + # (but we check w/ them anyways). + raise PasslibSecurityError( + "passlib.hash.bcrypt: Your installation of the %r backend is vulnerable to " + "the crypt_blowfish 8-bit bug (CVE-2011-2483) under %r hashes, " + "and should be upgraded or replaced with another backend" % (backend, ident)) + + # it doesn't have wraparound bug, but make sure it *does* verify against the correct + # hash, or we're in some weird third case! + if not verify(secret, correct_hash): + raise RuntimeError("%s backend failed to verify %s 8bit hash" % (backend, ident)) + + def detect_wrap_bug(ident): + """ + check for bsd wraparound bug (fixed in 2b) + this is treated as a warning, because it's rare in the field, + and pybcrypt (as of 2015-7-21) is unpatched, but some people may be stuck with it. + + test cases from + + NOTE: reference hash is of password "0"*72 + + NOTE: if in future we need to deliberately create hashes which have this bug, + can use something like 'hashpw(repeat_string(secret[:((1+secret) % 256) or 1]), 72)' + """ + # check if it exhibits wraparound bug + secret = (b"0123456789"*26)[:255] + bug_hash = ident.encode("ascii") + b"04$R1lJ2gkNaoPGdafE.H.16.nVyh2niHsGJhayOHLMiXlI45o8/DU.6" + if verify(secret, bug_hash): + return True + + # if it doesn't have wraparound bug, make sure it *does* handle things + # correctly -- or we're in some weird third case. 
+ correct_hash = ident.encode("ascii") + b"04$R1lJ2gkNaoPGdafE.H.16.1MKHPvmKwryeulRe225LKProWYwt9Oi" + if not verify(secret, correct_hash): + raise RuntimeError("%s backend failed to verify %s wraparound hash" % (backend, ident)) + + return False + + def assert_lacks_wrap_bug(ident): + if not detect_wrap_bug(ident): + return + # should only see in 2a, later idents should NEVER exhibit this bug: + # * 2y implementations should have been free of it + # * 2b was what (supposedly) fixed it + raise RuntimeError("%s backend unexpectedly has wraparound bug for %s" % (backend, ident)) + + #---------------------------------------------------------------- + # check for old 20 support + #---------------------------------------------------------------- + test_hash_20 = b"$2$04$5BJqKfqMQvV7nS.yUguNcuRfMMOXK0xPWavM7pOzjEi5ze5T1k8/S" + result = safe_verify("test", test_hash_20) + if result is NotImplemented: + mixin_cls._lacks_20_support = True + log.debug("%r backend lacks $2$ support, enabling workaround", backend) + elif not result: + raise RuntimeError("%s incorrectly rejected $2$ hash" % backend) + + #---------------------------------------------------------------- + # check for 2a support + #---------------------------------------------------------------- + result = safe_verify("test", TEST_HASH_2A) + if result is NotImplemented: + # 2a support is required, and should always be present + raise RuntimeError("%s lacks support for $2a$ hashes" % backend) + elif not result: + raise RuntimeError("%s incorrectly rejected $2a$ hash" % backend) + else: + assert_lacks_8bit_bug(IDENT_2A) + if detect_wrap_bug(IDENT_2A): + if backend == "os_crypt": + # don't make this a warning for os crypt (e.g. openbsd); + # they'll have proper 2b implementation which will be used for new hashes. + # so even if we didn't have a workaround, this bug wouldn't be a concern. + log.debug("%r backend has $2a$ bsd wraparound bug, enabling workaround", backend) + else: + # installed library has the bug -- want to let users know, + # so they can upgrade it to something better (e.g. bcrypt cffi library) + warn("passlib.hash.bcrypt: Your installation of the %r backend is vulnerable to " + "the bsd wraparound bug, " + "and should be upgraded or replaced with another backend " + "(enabling workaround for now)." % backend, + uh.exc.PasslibSecurityWarning) + mixin_cls._has_2a_wraparound_bug = True + + #---------------------------------------------------------------- + # check for 2y support + #---------------------------------------------------------------- + test_hash_2y = TEST_HASH_2A.replace("2a", "2y") + result = safe_verify("test", test_hash_2y) + if result is NotImplemented: + mixin_cls._lacks_2y_support = True + log.debug("%r backend lacks $2y$ support, enabling workaround", backend) + elif not result: + raise RuntimeError("%s incorrectly rejected $2y$ hash" % backend) + else: + # NOTE: Not using this as fallback candidate, + # lacks wide enough support across implementations. 
+ assert_lacks_8bit_bug(IDENT_2Y) + assert_lacks_wrap_bug(IDENT_2Y) + + #---------------------------------------------------------------- + # TODO: check for 2x support + #---------------------------------------------------------------- + + #---------------------------------------------------------------- + # check for 2b support + #---------------------------------------------------------------- + test_hash_2b = TEST_HASH_2A.replace("2a", "2b") + result = safe_verify("test", test_hash_2b) + if result is NotImplemented: + mixin_cls._lacks_2b_support = True + log.debug("%r backend lacks $2b$ support, enabling workaround", backend) + elif not result: + raise RuntimeError("%s incorrectly rejected $2b$ hash" % backend) + else: + mixin_cls._fallback_ident = IDENT_2B + assert_lacks_8bit_bug(IDENT_2B) + assert_lacks_wrap_bug(IDENT_2B) + + # set flag so we don't have to run this again + mixin_cls._workrounds_initialized = True + return True + + #=================================================================== + # digest calculation + #=================================================================== + + # _calc_checksum() defined by backends + + def _prepare_digest_args(self, secret): + """ + common helper for backends to implement _calc_checksum(). + takes in secret, returns (secret, ident) pair, + """ + return self._norm_digest_args(secret, self.ident, new=self.use_defaults) + + @classmethod + def _norm_digest_args(cls, secret, ident, new=False): + # make sure secret is unicode + require_valid_utf8_bytes = cls._require_valid_utf8_bytes + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + elif require_valid_utf8_bytes: + # if backend requires utf8 bytes (os_crypt); + # make sure input actually is utf8, or don't bother enabling utf-8 specific helpers. + try: + secret.decode("utf-8") + except UnicodeDecodeError: + # XXX: could just throw PasswordValueError here, backend will just do that + # when _calc_digest() is actually called. + require_valid_utf8_bytes = False + + # check max secret size + uh.validate_secret(secret) + + # check for truncation (during .hash() calls only) + if new: + cls._check_truncate_policy(secret) + + # NOTE: especially important to forbid NULLs for bcrypt, since many + # backends (bcryptor, bcrypt) happily accept them, and then + # silently truncate the password at first NULL they encounter! + if _BNULL in secret: + raise uh.exc.NullPasswordError(cls) + + # TODO: figure out way to skip these tests when not needed... + + # protect from wraparound bug by truncating secret before handing it to the backend. + # bcrypt only uses first 72 bytes anyways. + # NOTE: not needed for 2y/2b, but might use 2a as fallback for them. + if cls._has_2a_wraparound_bug and len(secret) >= 255: + if require_valid_utf8_bytes: + # backend requires valid utf8 bytes, so truncate secret to nearest valid segment. + # want to do this in constant time to not give away info about secret. + # NOTE: this only works because bcrypt will ignore everything past + # secret[71], so padding to include a full utf8 sequence + # won't break anything about the final output. + secret = utf8_truncate(secret, 72) + else: + secret = secret[:72] + + # special case handling for variants (ordered most common first) + if ident == IDENT_2A: + # nothing needs to be done. + pass + + elif ident == IDENT_2B: + if cls._lacks_2b_support: + # handle $2b$ hash format even if backend is too old. + # have it generate a 2A/2Y digest, then return it as a 2B hash. 
+ # 2a-only backend could potentially exhibit wraparound bug -- + # but we work around that issue above. + ident = cls._fallback_ident + + elif ident == IDENT_2Y: + if cls._lacks_2y_support: + # handle $2y$ hash format (not supported by BSDs, being phased out on others) + # have it generate a 2A/2B digest, then return it as a 2Y hash. + ident = cls._fallback_ident + + elif ident == IDENT_2: + if cls._lacks_20_support: + # handle legacy $2$ format (not supported by most backends except BSD os_crypt) + # we can fake $2$ behavior using the 2A/2Y/2B algorithm + # by repeating the password until it's at least 72 chars in length. + if secret: + if require_valid_utf8_bytes: + # NOTE: this only works because bcrypt will ignore everything past + # secret[71], so padding to include a full utf8 sequence + # won't break anything about the final output. + secret = utf8_repeat_string(secret, 72) + else: + secret = repeat_string(secret, 72) + ident = cls._fallback_ident + + elif ident == IDENT_2X: + + # NOTE: shouldn't get here. + # XXX: could check if backend does actually offer 'support' + raise RuntimeError("$2x$ hashes not currently supported by passlib") + + else: + raise AssertionError("unexpected ident value: %r" % ident) + + return secret, ident + +#----------------------------------------------------------------------- +# stub backend +#----------------------------------------------------------------------- +class _NoBackend(_BcryptCommon): + """ + mixin used before any backend has been loaded. + contains stubs that force loading of one of the available backends. + """ + #=================================================================== + # digest calculation + #=================================================================== + def _calc_checksum(self, secret): + self._stub_requires_backend() + # NOTE: have to use super() here so that we don't recursively + # call subclass's wrapped _calc_checksum, e.g. bcrypt_sha256._calc_checksum + return super(bcrypt, self)._calc_checksum(secret) + + #=================================================================== + # eoc + #=================================================================== + +#----------------------------------------------------------------------- +# bcrypt backend +#----------------------------------------------------------------------- +class _BcryptBackend(_BcryptCommon): + """ + backend which uses 'bcrypt' package + """ + + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + # try to import bcrypt + global _bcrypt + if _detect_pybcrypt(): + # pybcrypt was installed instead + return False + try: + import bcrypt as _bcrypt + except ImportError: # pragma: no cover + return False + try: + version = _bcrypt.__about__.__version__ + except: + log.warning("(trapped) error reading bcrypt version", exc_info=True) + version = '' + + log.debug("detected 'bcrypt' backend, version %r", version) + return mixin_cls._finalize_backend_mixin(name, dryrun) + + # # TODO: would like to implementing verify() directly, + # # to skip need for parsing hash strings. + # # below method has a few edge cases where it chokes though. + # @classmethod + # def verify(cls, secret, hash): + # if isinstance(hash, unicode): + # hash = hash.encode("ascii") + # ident = hash[:hash.index(b"$", 1)+1].decode("ascii") + # if ident not in cls.ident_values: + # raise uh.exc.InvalidHashError(cls) + # secret, eff_ident = cls._norm_digest_args(secret, ident) + # if eff_ident != ident: + # # lacks support for original ident, replace w/ new one. 
+ # hash = eff_ident.encode("ascii") + hash[len(ident):] + # result = _bcrypt.hashpw(secret, hash) + # assert result.startswith(eff_ident) + # return consteq(result, hash) + + def _calc_checksum(self, secret): + # bcrypt behavior: + # secret must be bytes + # config must be ascii bytes + # returns ascii bytes + secret, ident = self._prepare_digest_args(secret) + config = self._get_config(ident) + if isinstance(config, unicode): + config = config.encode("ascii") + hash = _bcrypt.hashpw(secret, config) + assert isinstance(hash, bytes) + if not hash.startswith(config) or len(hash) != len(config)+31: + raise uh.exc.CryptBackendError(self, config, hash, source="`bcrypt` package") + return hash[-31:].decode("ascii") + +#----------------------------------------------------------------------- +# bcryptor backend +#----------------------------------------------------------------------- +class _BcryptorBackend(_BcryptCommon): + """ + backend which uses 'bcryptor' package + """ + + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + # try to import bcryptor + global _bcryptor + try: + import bcryptor as _bcryptor + except ImportError: # pragma: no cover + return False + + # deprecated as of 1.7.2 + if not dryrun: + warn("Support for `bcryptor` is deprecated, and will be removed in Passlib 1.8; " + "Please use `pip install bcrypt` instead", DeprecationWarning) + + return mixin_cls._finalize_backend_mixin(name, dryrun) + + def _calc_checksum(self, secret): + # bcryptor behavior: + # py2: unicode secret/hash encoded as ascii bytes before use, + # bytes taken as-is; returns ascii bytes. + # py3: not supported + secret, ident = self._prepare_digest_args(secret) + config = self._get_config(ident) + hash = _bcryptor.engine.Engine(False).hash_key(secret, config) + if not hash.startswith(config) or len(hash) != len(config) + 31: + raise uh.exc.CryptBackendError(self, config, hash, source="bcryptor library") + return str_to_uascii(hash[-31:]) + +#----------------------------------------------------------------------- +# pybcrypt backend +#----------------------------------------------------------------------- +class _PyBcryptBackend(_BcryptCommon): + """ + backend which uses 'pybcrypt' package + """ + + #: classwide thread lock used for pybcrypt < 0.3 + _calc_lock = None + + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + # try to import pybcrypt + global _pybcrypt + if not _detect_pybcrypt(): + # not installed, or bcrypt installed instead + return False + try: + import bcrypt as _pybcrypt + except ImportError: # pragma: no cover + # XXX: should we raise AssertionError here? (if get here, _detect_pybcrypt() is broken) + return False + + # deprecated as of 1.7.2 + if not dryrun: + warn("Support for `py-bcrypt` is deprecated, and will be removed in Passlib 1.8; " + "Please use `pip install bcrypt` instead", DeprecationWarning) + + # determine pybcrypt version + try: + version = _pybcrypt._bcrypt.__version__ + except: + log.warning("(trapped) error reading pybcrypt version", exc_info=True) + version = "" + log.debug("detected 'pybcrypt' backend, version %r", version) + + # return calc function based on version + vinfo = parse_version(version) or (0, 0) + if vinfo < (0, 3): + warn("py-bcrypt %s has a major security vulnerability, " + "you should upgrade to py-bcrypt 0.3 immediately." 
+ % version, uh.exc.PasslibSecurityWarning) + if mixin_cls._calc_lock is None: + import threading + mixin_cls._calc_lock = threading.Lock() + mixin_cls._calc_checksum = get_unbound_method_function(mixin_cls._calc_checksum_threadsafe) + + return mixin_cls._finalize_backend_mixin(name, dryrun) + + def _calc_checksum_threadsafe(self, secret): + # as workaround for pybcrypt < 0.3's concurrency issue, + # we wrap everything in a thread lock. as long as bcrypt is only + # used through passlib, this should be safe. + with self._calc_lock: + return self._calc_checksum_raw(secret) + + def _calc_checksum_raw(self, secret): + # py-bcrypt behavior: + # py2: unicode secret/hash encoded as ascii bytes before use, + # bytes taken as-is; returns ascii bytes. + # py3: unicode secret encoded as utf-8 bytes, + # hash encoded as ascii bytes, returns ascii unicode. + secret, ident = self._prepare_digest_args(secret) + config = self._get_config(ident) + hash = _pybcrypt.hashpw(secret, config) + if not hash.startswith(config) or len(hash) != len(config) + 31: + raise uh.exc.CryptBackendError(self, config, hash, source="pybcrypt library") + return str_to_uascii(hash[-31:]) + + _calc_checksum = _calc_checksum_raw + +#----------------------------------------------------------------------- +# os crypt backend +#----------------------------------------------------------------------- +class _OsCryptBackend(_BcryptCommon): + """ + backend which uses :func:`crypt.crypt` + """ + + #: set flag to ensure _prepare_digest_args() doesn't create invalid utf8 string + #: when truncating bytes. + _require_valid_utf8_bytes = not crypt_accepts_bytes + + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + if not test_crypt("test", TEST_HASH_2A): + return False + return mixin_cls._finalize_backend_mixin(name, dryrun) + + def _calc_checksum(self, secret): + # + # run secret through crypt.crypt(). + # if everything goes right, we'll get back a properly formed bcrypt hash. + # + secret, ident = self._prepare_digest_args(secret) + config = self._get_config(ident) + hash = safe_crypt(secret, config) + if hash is not None: + if not hash.startswith(config) or len(hash) != len(config) + 31: + raise uh.exc.CryptBackendError(self, config, hash) + return hash[-31:] + + # + # Check if this failed due to non-UTF8 bytes + # In detail: under py3, crypt.crypt() requires unicode inputs, which are then encoded to + # utf8 before passing them to os crypt() call. this is done according to the "s" format + # specifier for PyArg_ParseTuple (https://docs.python.org/3/c-api/arg.html). + # There appears no way to get around that to pass raw bytes; so we just throw error here + # to let user know they need to use another backend if they want raw bytes support. + # + # XXX: maybe just let safe_crypt() throw UnicodeDecodeError under passlib 2.0, + # and then catch it above? maybe have safe_crypt ALWAYS throw error + # instead of returning None? (would save re-detecting what went wrong) + # XXX: isn't secret ALWAYS bytes at this point? + # + if PY3 and isinstance(secret, bytes): + try: + secret.decode("utf-8") + except UnicodeDecodeError: + raise error_from(uh.exc.PasswordValueError( + "python3 crypt.crypt() ony supports bytes passwords using UTF8; " + "passlib recommends running `pip install bcrypt` for general bcrypt support.", + ), None) + + # + # else crypt() call failed for unknown reason. 
+ # + # NOTE: getting here should be considered a bug in passlib -- + # if os_crypt backend detection said there's support, + # and we've already checked all known reasons above; + # want them to file bug so we can figure out what happened. + # in the meantime, users can avoid this by installing bcrypt-cffi backend; + # which won't have this (or utf8) edgecases. + # + # XXX: throw something more specific, like an "InternalBackendError"? + # NOTE: if do change this error, need to update test_81_crypt_fallback() expectations + # about what will be thrown; as well as safe_verify() above. + # + debug_only_repr = uh.exc.debug_only_repr + raise uh.exc.InternalBackendError( + "crypt.crypt() failed for unknown reason; " + "passlib recommends running `pip install bcrypt` for general bcrypt support." + # for debugging UTs -- + "(config=%s, secret=%s)" % (debug_only_repr(config), debug_only_repr(secret)), + ) + +#----------------------------------------------------------------------- +# builtin backend +#----------------------------------------------------------------------- +class _BuiltinBackend(_BcryptCommon): + """ + backend which uses passlib's pure-python implementation + """ + @classmethod + def _load_backend_mixin(mixin_cls, name, dryrun): + from passlib.utils import as_bool + if not as_bool(os.environ.get("PASSLIB_BUILTIN_BCRYPT")): + log.debug("bcrypt 'builtin' backend not enabled via $PASSLIB_BUILTIN_BCRYPT") + return False + global _builtin_bcrypt + from passlib.crypto._blowfish import raw_bcrypt as _builtin_bcrypt + return mixin_cls._finalize_backend_mixin(name, dryrun) + + def _calc_checksum(self, secret): + secret, ident = self._prepare_digest_args(secret) + chk = _builtin_bcrypt(secret, ident[1:-1], + self.salt.encode("ascii"), self.rounds) + return chk.decode("ascii") + +#============================================================================= +# handler +#============================================================================= +class bcrypt(_NoBackend, _BcryptCommon): + """This class implements the BCrypt password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 22 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 12, must be between 4 and 31, inclusive. + This value is logarithmic, the actual number of iterations used will be :samp:`2**{rounds}` + -- increasing the rounds by +1 will double the amount of time taken. + + :type ident: str + :param ident: + Specifies which version of the BCrypt algorithm will be used when creating a new hash. + Typically this option is not needed, as the default (``"2b"``) is usually the correct choice. + If specified, it must be one of the following: + + * ``"2"`` - the first revision of BCrypt, which suffers from a minor security flaw and is generally not used anymore. + * ``"2a"`` - some implementations suffered from rare security flaws, replaced by 2b. + * ``"2y"`` - format specific to the *crypt_blowfish* BCrypt implementation, + identical to ``"2b"`` in all but name. + * ``"2b"`` - latest revision of the official BCrypt algorithm, current default. 
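+ + A minimal usage sketch (illustrative only; assumes one of the backends listed below is installed):: + + >>> from passlib.hash import bcrypt + >>> h = bcrypt.using(rounds=13).hash("password") + >>> h[:7] + '$2b$13$' + >>> bcrypt.verify("password", h) + True +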
+ + :param bool truncate_error: + By default, BCrypt will silently truncate passwords larger than 72 bytes. + Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash` + to raise a :exc:`~passlib.exc.PasswordTruncateError` instead. + + .. versionadded:: 1.7 + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + .. versionchanged:: 1.6 + This class now supports ``"2y"`` hashes, and recognizes + (but does not support) the broken ``"2x"`` hashes. + (see the :ref:`crypt_blowfish bug ` + for details). + + .. versionchanged:: 1.6 + Added a pure-python backend. + + .. versionchanged:: 1.6.3 + + Added support for ``"2b"`` variant. + + .. versionchanged:: 1.7 + + Now defaults to ``"2b"`` variant. + """ + #============================================================================= + # backend + #============================================================================= + + # NOTE: the brunt of the bcrypt class is implemented in _BcryptCommon. + # there are then subclass for each backend (e.g. _PyBcryptBackend), + # these are dynamically prepended to this class's bases + # in order to load the appropriate backend. + + #: list of potential backends + backends = ("bcrypt", "pybcrypt", "bcryptor", "os_crypt", "builtin") + + #: flag that this class's bases should be modified by SubclassBackendMixin + _backend_mixin_target = True + + #: map of backend -> mixin class, used by _get_backend_loader() + _backend_mixin_map = { + None: _NoBackend, + "bcrypt": _BcryptBackend, + "pybcrypt": _PyBcryptBackend, + "bcryptor": _BcryptorBackend, + "os_crypt": _OsCryptBackend, + "builtin": _BuiltinBackend, + } + + #============================================================================= + # eoc + #============================================================================= + +#============================================================================= +# variants +#============================================================================= +_UDOLLAR = u("$") + +# XXX: it might be better to have all the bcrypt variants share a common base class, +# and have the (django_)bcrypt_sha256 wrappers just proxy bcrypt instead of subclassing it. +class _wrapped_bcrypt(bcrypt): + """ + abstracts out some bits bcrypt_sha256 & django_bcrypt_sha256 share. + - bypass backend-loading wrappers for hash() etc + - disable truncation support, sha256 wrappers don't need it. + """ + setting_kwds = tuple(elem for elem in bcrypt.setting_kwds if elem not in ["truncate_error"]) + truncate_size = None + + # XXX: these will be needed if any bcrypt backends directly implement this... + # @classmethod + # def hash(cls, secret, **kwds): + # # bypass bcrypt backend overriding this method + # # XXX: would wrapping bcrypt make this easier than subclassing it? 
+ # return super(_BcryptCommon, cls).hash(secret, **kwds) + # + # @classmethod + # def verify(cls, secret, hash): + # # bypass bcrypt backend overriding this method + # return super(_BcryptCommon, cls).verify(secret, hash) + # + # @classmethod + # def genhash(cls, secret, hash): + # # bypass bcrypt backend overriding this method + # return super(_BcryptCommon, cls).genhash(secret, hash) + + @classmethod + def _check_truncate_policy(cls, secret): + # disable check performed by bcrypt(), since this doesn't truncate passwords. + pass + +#============================================================================= +# bcrypt sha256 wrapper +#============================================================================= + +class bcrypt_sha256(_wrapped_bcrypt): + """ + This class implements a composition of BCrypt + HMAC_SHA256, + and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept + all the same optional keywords as the base :class:`bcrypt` hash. + + .. versionadded:: 1.6.2 + + .. versionchanged:: 1.7 + + Now defaults to ``"2b"`` bcrypt variant; though supports older hashes + generated using the ``"2a"`` bcrypt variant. + + .. versionchanged:: 1.7.3 + + For increased security, updated to use HMAC-SHA256 instead of plain SHA256. + Now only supports the ``"2b"`` bcrypt variant. Hash format updated to "v=2". + """ + #=================================================================== + # class attrs + #=================================================================== + + #-------------------- + # PasswordHash + #-------------------- + name = "bcrypt_sha256" + + #-------------------- + # GenericHandler + #-------------------- + # this is locked at 2b for now (with 2a allowed only for legacy v1 format) + ident_values = (IDENT_2A, IDENT_2B) + + # clone bcrypt's ident aliases so they can be used here as well... + ident_aliases = (lambda ident_values: dict(item for item in bcrypt.ident_aliases.items() + if item[1] in ident_values))(ident_values) + default_ident = IDENT_2B + + #-------------------- + # class specific + #-------------------- + + _supported_versions = set([1, 2]) + + #=================================================================== + # instance attrs + #=================================================================== + + #: wrapper version. 
+ #: v1 -- used prior to passlib 1.7.3; performs ``bcrypt(sha256(secret), salt, cost)`` + #: v2 -- new in passlib 1.7.3; performs ``bcrypt(sha256_hmac(salt, secret), salt, cost)`` + version = 2 + + #=================================================================== + # configuration + #=================================================================== + + @classmethod + def using(cls, version=None, **kwds): + subcls = super(bcrypt_sha256, cls).using(**kwds) + if version is not None: + subcls.version = subcls._norm_version(version) + ident = subcls.default_ident + if subcls.version > 1 and ident != IDENT_2B: + raise ValueError("bcrypt %r hashes not allowed for version %r" % + (ident, subcls.version)) + return subcls + + #=================================================================== + # formatting + #=================================================================== + + # sample hash: + # $bcrypt-sha256$2a,6$/3OeRpbOf8/l6nPPRdZPp.$nRiyYqPobEZGdNRBWihQhiFDh1ws1tu + # $bcrypt-sha256$ -- prefix/identifier + # 2a -- bcrypt variant + # , -- field separator + # 6 -- bcrypt work factor + # $ -- section separator + # /3OeRpbOf8/l6nPPRdZPp. -- salt + # $ -- section separator + # nRiyYqPobEZGdNRBWihQhiFDh1ws1tu -- digest + + # XXX: we can't use .ident attr due to bcrypt code using it. + # working around that via prefix. + prefix = u('$bcrypt-sha256$') + + #: current version 2 hash format + _v2_hash_re = re.compile(r"""(?x) + ^ + [$]bcrypt-sha256[$] + v=(?P<version>\d+), + t=(?P<type>2b), + r=(?P<rounds>\d{1,2}) + [$](?P<salt>[^$]{22}) + (?:[$](?P<digest>[^$]{31}))? + $ + """) + + #: old version 1 hash format + _v1_hash_re = re.compile(r"""(?x) + ^ + [$]bcrypt-sha256[$] + (?P<type>2[ab]), + (?P<rounds>\d{1,2}) + [$](?P<salt>[^$]{22}) + (?:[$](?P<digest>[^$]{31}))? + $ + """) + + @classmethod + def identify(cls, hash): + hash = uh.to_unicode_for_identify(hash) + if not hash: + return False + return hash.startswith(cls.prefix) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + if not hash.startswith(cls.prefix): + raise uh.exc.InvalidHashError(cls) + m = cls._v2_hash_re.match(hash) + if m: + version = int(m.group("version")) + if version < 2: + raise uh.exc.MalformedHashError(cls) + else: + m = cls._v1_hash_re.match(hash) + if m: + version = 1 + else: + raise uh.exc.MalformedHashError(cls) + rounds = m.group("rounds") + if rounds.startswith(uh._UZERO) and rounds != uh._UZERO: + raise uh.exc.ZeroPaddedRoundsError(cls) + return cls( + version=version, + ident=m.group("type"), + rounds=int(rounds), + salt=m.group("salt"), + checksum=m.group("digest"), + ) + + _v2_template = u("$bcrypt-sha256$v=2,t=%s,r=%d$%s$%s") + _v1_template = u("$bcrypt-sha256$%s,%d$%s$%s") + + def to_string(self): + if self.version == 1: + template = self._v1_template + else: + template = self._v2_template + hash = template % (self.ident.strip(_UDOLLAR), self.rounds, self.salt, self.checksum) + return uascii_to_str(hash) + + #=================================================================== + # init + #=================================================================== + + def __init__(self, version=None, **kwds): + if version is not None: + self.version = self._norm_version(version) + super(bcrypt_sha256, self).__init__(**kwds) + + #=================================================================== + # version + #=================================================================== + + @classmethod + def _norm_version(cls, version): + if version not in cls._supported_versions: + raise ValueError("%s: unknown or unsupported version: %r" % (cls.name,
version)) + return version + + #=================================================================== + # checksum + #=================================================================== + + def _calc_checksum(self, secret): + # NOTE: can't use digest directly, since bcrypt stops at first NULL. + # NOTE: bcrypt doesn't fully mix entropy for bytes 55-72 of password + # (XXX: citation needed), so we don't want key to be > 55 bytes. + # thus, have to use base64 (44 bytes) rather than hex (64 bytes). + # XXX: it's later come out that 55-72 may be ok, so later revision of bcrypt_sha256 + # may switch to hex encoding, since it's simpler to implement elsewhere. + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + + if self.version == 1: + # version 1 -- old version just ran secret through sha256(), + # though this could be vulnerable to a breach attach + # (c.f. issue 114); which is why v2 switched to hmac wrapper. + digest = sha256(secret).digest() + else: + # version 2 -- running secret through HMAC keyed off salt. + # this prevents known secret -> sha256 password tables from being + # used to test against a bcrypt_sha256 hash. + # keying off salt (instead of constant string) should minimize chances of this + # colliding with existing table of hmac digest lookups as well. + # NOTE: salt in this case is the "bcrypt64"-encoded value, not the raw salt bytes, + # to make things easier for parallel implementations of this hash -- + # saving them the trouble of implementing a "bcrypt64" decoder. + salt = self.salt + if salt[-1] not in self.final_salt_chars: + # forbidding salts with padding bits set, because bcrypt implementations + # won't consistently hash them the same. since we control this format, + # just prevent these from even getting used. + raise ValueError("invalid salt string") + digest = compile_hmac("sha256", salt.encode("ascii"))(secret) + + # NOTE: output of b64encode() uses "+/" altchars, "=" padding chars, + # and no leading/trailing whitespace. 
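# Illustrative standalone sketch (not part of the handler above) of the
# pre-hash step described in the preceding comments: v1 ran the secret
# through plain sha256(); v2 keys an HMAC-SHA256 off the encoded salt
# string; either way the digest is base64-encoded so bcrypt receives a
# 44-byte, NUL-free key. The sample secret is made up; the salt is the
# one from the sample hash shown earlier.
import base64, hashlib, hmac

def _demo_bcrypt_sha256_prehash(secret, salt, version=2):
    if version == 1:
        digest = hashlib.sha256(secret).digest()
    else:
        digest = hmac.new(salt.encode("ascii"), secret, hashlib.sha256).digest()
    return base64.b64encode(digest)  # 44 bytes, "+/" altchars, "=" padding

assert len(_demo_bcrypt_sha256_prehash(b"password", "/3OeRpbOf8/l6nPPRdZPp.")) == 44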
+ key = b64encode(digest) + + # hand result off to normal bcrypt algorithm + return super(bcrypt_sha256, self)._calc_checksum(key) + + #=================================================================== + # other + #=================================================================== + + def _calc_needs_update(self, **kwds): + if self.version < type(self).version: + return True + return super(bcrypt_sha256, self)._calc_needs_update(**kwds) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/cisco.py b/ansible/lib/python3.11/site-packages/passlib/handlers/cisco.py new file mode 100644 index 000000000..e715e1ab5 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/cisco.py @@ -0,0 +1,440 @@ +""" +passlib.handlers.cisco -- Cisco password hashes +""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify, unhexlify +from hashlib import md5 +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import right_pad_string, to_unicode, repeat_string, to_bytes +from passlib.utils.binary import h64 +from passlib.utils.compat import unicode, u, join_byte_values, \ + join_byte_elems, iter_byte_values, uascii_to_str +import passlib.utils.handlers as uh +# local +__all__ = [ + "cisco_pix", + "cisco_asa", + "cisco_type7", +] + +#============================================================================= +# utils +#============================================================================= + +#: dummy bytes used by spoil_digest var in cisco_pix._calc_checksum() +_DUMMY_BYTES = b'\xFF' * 32 + +#============================================================================= +# cisco pix firewall hash +#============================================================================= +class cisco_pix(uh.HasUserContext, uh.StaticHandler): + """ + This class implements the password hash used by older Cisco PIX firewalls, + and follows the :ref:`password-hash-api`. + It does a single round of hashing, and relies on the username + as the salt. + + This class only allows passwords <= 16 bytes, anything larger + will result in a :exc:`~passlib.exc.PasswordSizeError` if passed to :meth:`~cisco_pix.hash`, + and be silently rejected if passed to :meth:`~cisco_pix.verify`. + + The :meth:`~passlib.ifc.PasswordHash.hash`, + :meth:`~passlib.ifc.PasswordHash.genhash`, and + :meth:`~passlib.ifc.PasswordHash.verify` methods + all support the following extra keyword: + + :param str user: + String containing name of user account this password is associated with. + + This is *required* in order to correctly hash passwords associated + with a user account on the Cisco device, as it is used to salt + the hash. + + Conversely, this *must* be omitted or set to ``""`` in order to correctly + hash passwords which don't have an associated user account + (such as the "enable" password). + + .. versionadded:: 1.6 + + .. versionchanged:: 1.7.1 + + Passwords > 16 bytes are now rejected / throw error instead of being silently truncated, + to match Cisco behavior. 
A number of :ref:`bugs ` were fixed + which caused prior releases to generate unverifiable hashes in certain cases. + """ + #=================================================================== + # class attrs + #=================================================================== + + #-------------------- + # PasswordHash + #-------------------- + name = "cisco_pix" + + truncate_size = 16 + + # NOTE: these are the default policy for PasswordHash, + # but want to set them explicitly for now. + truncate_error = True + truncate_verify_reject = True + + #-------------------- + # GenericHandler + #-------------------- + checksum_size = 16 + checksum_chars = uh.HASH64_CHARS + + #-------------------- + # custom + #-------------------- + + #: control flag signalling "cisco_asa" mode, set by cisco_asa class + _is_asa = False + + #=================================================================== + # methods + #=================================================================== + def _calc_checksum(self, secret): + """ + This function implements the "encrypted" hash format used by Cisco + PIX & ASA. It's behavior has been confirmed for ASA 9.6, + but is presumed correct for PIX & other ASA releases, + as it fits with known test vectors, and existing literature. + + While nearly the same, the PIX & ASA hashes have slight differences, + so this function performs differently based on the _is_asa class flag. + Noteable changes from PIX to ASA include password size limit + increased from 16 -> 32, and other internal changes. + """ + # select PIX vs or ASA mode + asa = self._is_asa + + # + # encode secret + # + # per ASA 8.4 documentation, + # http://www.cisco.com/c/en/us/td/docs/security/asa/asa84/configuration/guide/asa_84_cli_config/ref_cli.html#Supported_Character_Sets, + # it supposedly uses UTF-8 -- though some double-encoding issues have + # been observed when trying to actually *set* a non-ascii password + # via ASDM, and access via SSH seems to strip 8-bit chars. + # + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + + # + # check if password too large + # + # Per ASA 9.6 changes listed in + # http://www.cisco.com/c/en/us/td/docs/security/asa/roadmap/asa_new_features.html, + # prior releases had a maximum limit of 32 characters. + # Testing with an ASA 9.6 system bears this out -- + # setting 32-char password for a user account, + # and logins will fail if any chars are appended. + # (ASA 9.6 added new PBKDF2-based hash algorithm, + # which supports larger passwords). + # + # Per PIX documentation + # http://www.cisco.com/en/US/docs/security/pix/pix50/configuration/guide/commands.html, + # it would not allow passwords > 16 chars. + # + # Thus, we unconditionally throw a password size error here, + # as nothing valid can come from a larger password. + # NOTE: assuming PIX has same behavior, but at 16 char limit. + # + spoil_digest = None + if len(secret) > self.truncate_size: + if self.use_defaults: + # called from hash() + msg = "Password too long (%s allows at most %d bytes)" % \ + (self.name, self.truncate_size) + raise uh.exc.PasswordSizeError(self.truncate_size, msg=msg) + else: + # called from verify() -- + # We don't want to throw error, or return early, + # as that would let attacker know too much. Instead, we set a + # flag to add some dummy data into the md5 digest, so that + # output won't match truncated version of secret, or anything + # else that's fixed and predictable. 
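# Hedged, simplified sketch of the "spoil the digest" trick described just
# above (the helper name and values are hypothetical, not the passlib API):
# when an over-long secret reaches verify(), extra fixed bytes are mixed
# into the MD5 input so the result can never collide with the hash of a
# legitimate, in-range password -- and no early return leaks the limit.
from hashlib import md5 as _demo_md5

def _demo_pix_style_digest(secret, limit=16):
    data = secret
    if len(secret) > limit:
        data += secret + b"\xFF" * 32   # spoil: cannot match any truncated secret
    return _demo_md5(data).digest()

assert _demo_pix_style_digest(b"x" * 20) != _demo_pix_style_digest(b"x" * 16)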
+ spoil_digest = secret + _DUMMY_BYTES + + # + # append user to secret + # + # Policy appears to be: + # + # * Nothing appended for enable password (user = "") + # + # * ASA: If user present, but secret is >= 28 chars, nothing appended. + # + # * 1-2 byte users not allowed. + # DEVIATION: we're letting them through, and repeating their + # chars ala 3-char user, to simplify testing. + # Could issue warning in the future though. + # + # * 3 byte user has first char repeated, to pad to 4. + # (observed under ASA 9.6, assuming true elsewhere) + # + # * 4 byte users are used directly. + # + # * 5+ byte users are truncated to 4 bytes. + # + user = self.user + if user: + if isinstance(user, unicode): + user = user.encode("utf-8") + if not asa or len(secret) < 28: + secret += repeat_string(user, 4) + + # + # pad / truncate result to limit + # + # While PIX always pads to 16 bytes, ASA increases to 32 bytes IFF + # secret+user > 16 bytes. This makes PIX & ASA have different results + # where secret size in range(13,16), and user is present -- + # PIX will truncate to 16, ASA will truncate to 32. + # + if asa and len(secret) > 16: + pad_size = 32 + else: + pad_size = 16 + secret = right_pad_string(secret, pad_size) + + # + # md5 digest + # + if spoil_digest: + # make sure digest won't match truncated version of secret + secret += spoil_digest + digest = md5(secret).digest() + + # + # drop every 4th byte + # NOTE: guessing this was done because it makes output exactly + # 16 bytes, which may have been a general 'char password[]' + # size limit under PIX + # + digest = join_byte_elems(c for i, c in enumerate(digest) if (i + 1) & 3) + + # + # encode using Hash64 + # + return h64.encode_bytes(digest).decode("ascii") + + # NOTE: works, but needs UTs. + # @classmethod + # def same_as_pix(cls, secret, user=""): + # """ + # test whether (secret + user) combination should + # have the same hash under PIX and ASA. + # + # mainly present to help unittests. + # """ + # # see _calc_checksum() above for details of this logic. + # size = len(to_bytes(secret, "utf-8")) + # if user and size < 28: + # size += 4 + # return size < 17 + + #=================================================================== + # eoc + #=================================================================== + + +class cisco_asa(cisco_pix): + """ + This class implements the password hash used by Cisco ASA/PIX 7.0 and newer (2005). + Aside from a different internal algorithm, it's use and format is identical + to the older :class:`cisco_pix` class. + + For passwords less than 13 characters, this should be identical to :class:`!cisco_pix`, + but will generate a different hash for most larger inputs + (See the `Format & Algorithm`_ section for the details). + + This class only allows passwords <= 32 bytes, anything larger + will result in a :exc:`~passlib.exc.PasswordSizeError` if passed to :meth:`~cisco_asa.hash`, + and be silently rejected if passed to :meth:`~cisco_asa.verify`. + + .. versionadded:: 1.7 + + .. versionchanged:: 1.7.1 + + Passwords > 32 bytes are now rejected / throw error instead of being silently truncated, + to match Cisco behavior. A number of :ref:`bugs ` were fixed + which caused prior releases to generate unverifiable hashes in certain cases. 
+ """ + #=================================================================== + # class attrs + #=================================================================== + + #-------------------- + # PasswordHash + #-------------------- + name = "cisco_asa" + + #-------------------- + # TruncateMixin + #-------------------- + truncate_size = 32 + + #-------------------- + # cisco_pix + #-------------------- + _is_asa = True + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# type 7 +#============================================================================= +class cisco_type7(uh.GenericHandler): + """ + This class implements the "Type 7" password encoding used by Cisco IOS, + and follows the :ref:`password-hash-api`. + It has a simple 4-5 bit salt, but is nonetheless a reversible encoding + instead of a real hash. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: int + :param salt: + This may be an optional salt integer drawn from ``range(0,16)``. + If omitted, one will be chosen at random. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` values that are out of range. + + Note that while this class outputs digests in upper-case hexadecimal, + it will accept lower-case as well. + + This class also provides the following additional method: + + .. automethod:: decode + """ + #=================================================================== + # class attrs + #=================================================================== + + #-------------------- + # PasswordHash + #-------------------- + name = "cisco_type7" + setting_kwds = ("salt",) + + #-------------------- + # GenericHandler + #-------------------- + checksum_chars = uh.UPPER_HEX_CHARS + + #-------------------- + # HasSalt + #-------------------- + + # NOTE: encoding could handle max_salt_value=99, but since key is only 52 + # chars in size, not sure what appropriate behavior is for that edge case. + min_salt_value = 0 + max_salt_value = 52 + + #=================================================================== + # methods + #=================================================================== + @classmethod + def using(cls, salt=None, **kwds): + subcls = super(cisco_type7, cls).using(**kwds) + if salt is not None: + salt = subcls._norm_salt(salt, relaxed=kwds.get("relaxed")) + subcls._generate_salt = staticmethod(lambda: salt) + return subcls + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + if len(hash) < 2: + raise uh.exc.InvalidHashError(cls) + salt = int(hash[:2]) # may throw ValueError + return cls(salt=salt, checksum=hash[2:].upper()) + + def __init__(self, salt=None, **kwds): + super(cisco_type7, self).__init__(**kwds) + if salt is not None: + salt = self._norm_salt(salt) + elif self.use_defaults: + salt = self._generate_salt() + assert self._norm_salt(salt) == salt, "generated invalid salt: %r" % (salt,) + else: + raise TypeError("no salt specified") + self.salt = salt + + @classmethod + def _norm_salt(cls, salt, relaxed=False): + """ + validate & normalize salt value. + .. 
note:: + the salt for this algorithm is an integer 0-52, not a string + """ + if not isinstance(salt, int): + raise uh.exc.ExpectedTypeError(salt, "integer", "salt") + if 0 <= salt <= cls.max_salt_value: + return salt + msg = "salt/offset must be in 0..52 range" + if relaxed: + warn(msg, uh.PasslibHashWarning) + return 0 if salt < 0 else cls.max_salt_value + else: + raise ValueError(msg) + + @staticmethod + def _generate_salt(): + return uh.rng.randint(0, 15) + + def to_string(self): + return "%02d%s" % (self.salt, uascii_to_str(self.checksum)) + + def _calc_checksum(self, secret): + # XXX: no idea what unicode policy is, but all examples are + # 7-bit ascii compatible, so using UTF-8 + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return hexlify(self._cipher(secret, self.salt)).decode("ascii").upper() + + @classmethod + def decode(cls, hash, encoding="utf-8"): + """decode hash, returning original password. + + :arg hash: encoded password + :param encoding: optional encoding to use (defaults to ``UTF-8``). + :returns: password as unicode + """ + self = cls.from_string(hash) + tmp = unhexlify(self.checksum.encode("ascii")) + raw = self._cipher(tmp, self.salt) + return raw.decode(encoding) if encoding else raw + + # type7 uses a xor-based vingere variant, using the following secret key: + _key = u("dsfd;kfoA,.iyewrkldJKDHSUBsgvca69834ncxv9873254k;fg87") + + @classmethod + def _cipher(cls, data, salt): + """xor static key against data - encrypts & decrypts""" + key = cls._key + key_size = len(key) + return join_byte_values( + value ^ ord(key[(salt + idx) % key_size]) + for idx, value in enumerate(iter_byte_values(data)) + ) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/des_crypt.py b/ansible/lib/python3.11/site-packages/passlib/handlers/des_crypt.py new file mode 100644 index 000000000..68a4ca7ee --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/des_crypt.py @@ -0,0 +1,607 @@ +"""passlib.handlers.des_crypt - traditional unix (DES) crypt and variants""" +#============================================================================= +# imports +#============================================================================= +# core +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import safe_crypt, test_crypt, to_unicode +from passlib.utils.binary import h64, h64big +from passlib.utils.compat import byte_elem_value, u, uascii_to_str, unicode, suppress_cause +from passlib.crypto.des import des_encrypt_int_block +import passlib.utils.handlers as uh +# local +__all__ = [ + "des_crypt", + "bsdi_crypt", + "bigcrypt", + "crypt16", +] + +#============================================================================= +# pure-python backend for des_crypt family +#============================================================================= +_BNULL = b'\x00' + +def _crypt_secret_to_key(secret): + """convert secret to 64-bit DES key. + + this only uses the first 8 bytes of the secret, + and discards the high 8th bit of each byte at that. + a null parity bit is inserted after every 7th bit of the output. + """ + # NOTE: this would set the parity bits correctly, + # but des_encrypt_int_block() would just ignore them... 
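    # Worked example of the packing implemented below: each of the first 8
    # bytes contributes its low 7 bits, shifted so a (null) parity position
    # follows every 7 data bits.  For the two-byte secret b"ab":
    #     (0x61 & 0x7f) << 57  |  (0x62 & 0x7f) << 49  ==  0xC2C4000000000000
    # i.e. 'a' occupies bits 57-63 and 'b' bits 49-55, with bits 56 and 48
    # left clear as parity placeholders.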
+ ##return sum(expand_7bit(byte_elem_value(c) & 0x7f) << (56-i*8) + ## for i, c in enumerate(secret[:8])) + return sum((byte_elem_value(c) & 0x7f) << (57-i*8) + for i, c in enumerate(secret[:8])) + +def _raw_des_crypt(secret, salt): + """pure-python backed for des_crypt""" + assert len(salt) == 2 + + # NOTE: some OSes will accept non-HASH64 characters in the salt, + # but what value they assign these characters varies wildy, + # so just rejecting them outright. + # the same goes for single-character salts... + # some OSes duplicate the char, some insert a '.' char, + # and openbsd does (something) which creates an invalid hash. + salt_value = h64.decode_int12(salt) + + # gotta do something - no official policy since this predates unicode + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + assert isinstance(secret, bytes) + + # forbidding NULL char because underlying crypt() rejects them too. + if _BNULL in secret: + raise uh.exc.NullPasswordError(des_crypt) + + # convert first 8 bytes of secret string into an integer + key_value = _crypt_secret_to_key(secret) + + # run data through des using input of 0 + result = des_encrypt_int_block(key_value, 0, salt_value, 25) + + # run h64 encode on result + return h64big.encode_int64(result) + +def _bsdi_secret_to_key(secret): + """convert secret to DES key used by bsdi_crypt""" + key_value = _crypt_secret_to_key(secret) + idx = 8 + end = len(secret) + while idx < end: + next = idx + 8 + tmp_value = _crypt_secret_to_key(secret[idx:next]) + key_value = des_encrypt_int_block(key_value, key_value) ^ tmp_value + idx = next + return key_value + +def _raw_bsdi_crypt(secret, rounds, salt): + """pure-python backend for bsdi_crypt""" + + # decode salt + salt_value = h64.decode_int24(salt) + + # gotta do something - no official policy since this predates unicode + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + assert isinstance(secret, bytes) + + # forbidding NULL char because underlying crypt() rejects them too. + if _BNULL in secret: + raise uh.exc.NullPasswordError(bsdi_crypt) + + # convert secret string into an integer + key_value = _bsdi_secret_to_key(secret) + + # run data through des using input of 0 + result = des_encrypt_int_block(key_value, 0, salt_value, rounds) + + # run h64 encode on result + return h64big.encode_int64(result) + +#============================================================================= +# handlers +#============================================================================= +class des_crypt(uh.TruncateMixin, uh.HasManyBackends, uh.HasSalt, uh.GenericHandler): + """This class implements the des-crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 2 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :param bool truncate_error: + By default, des_crypt will silently truncate passwords larger than 8 bytes. + Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash` + to raise a :exc:`~passlib.exc.PasswordTruncateError` instead. + + .. versionadded:: 1.7 + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. 
If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + + #-------------------- + # PasswordHash + #-------------------- + name = "des_crypt" + setting_kwds = ("salt", "truncate_error") + + #-------------------- + # GenericHandler + #-------------------- + checksum_chars = uh.HASH64_CHARS + checksum_size = 11 + + #-------------------- + # HasSalt + #-------------------- + min_salt_size = max_salt_size = 2 + salt_chars = uh.HASH64_CHARS + + #-------------------- + # TruncateMixin + #-------------------- + truncate_size = 8 + + #=================================================================== + # formatting + #=================================================================== + # FORMAT: 2 chars of H64-encoded salt + 11 chars of H64-encoded checksum + + _hash_regex = re.compile(u(r""" + ^ + (?P[./a-z0-9]{2}) + (?P[./a-z0-9]{11})? + $"""), re.X|re.I) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + salt, chk = hash[:2], hash[2:] + return cls(salt=salt, checksum=chk or None) + + def to_string(self): + hash = u("%s%s") % (self.salt, self.checksum) + return uascii_to_str(hash) + + #=================================================================== + # digest calculation + #=================================================================== + def _calc_checksum(self, secret): + # check for truncation (during .hash() calls only) + if self.use_defaults: + self._check_truncate_policy(secret) + + return self._calc_checksum_backend(secret) + + #=================================================================== + # backend + #=================================================================== + backends = ("os_crypt", "builtin") + + #--------------------------------------------------------------- + # os_crypt backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_os_crypt(cls): + if test_crypt("test", 'abgOeLfPimXQo'): + cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) + return True + else: + return False + + def _calc_checksum_os_crypt(self, secret): + # NOTE: we let safe_crypt() encode unicode secret -> utf8; + # no official policy since des-crypt predates unicode + hash = safe_crypt(secret, self.salt) + if hash is None: + # py3's crypt.crypt() can't handle non-utf8 bytes. + # fallback to builtin alg, which is always available. 
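# Standalone illustration of the os_crypt probe this backend relies on: on a
# platform whose crypt() still supports DES, the reference vector used by
# _load_backend_os_crypt() above should be reproducible.  The stdlib "crypt"
# module is Unix-only and deprecated/removed in newer Pythons, hence the guard.
def _demo_probe_des_crypt():
    try:
        import crypt
    except ImportError:
        return None                      # no OS-level crypt() available at all
    result = crypt.crypt("test", "ab")   # salt = first 2 chars of the vector
    return result == "abgOeLfPimXQo"     # True only if DES crypt is supported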
+ return self._calc_checksum_builtin(secret) + if not hash.startswith(self.salt) or len(hash) != 13: + raise uh.exc.CryptBackendError(self, self.salt, hash) + return hash[2:] + + #--------------------------------------------------------------- + # builtin backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_builtin(cls): + cls._set_calc_checksum_backend(cls._calc_checksum_builtin) + return True + + def _calc_checksum_builtin(self, secret): + return _raw_des_crypt(secret, self.salt.encode("ascii")).decode("ascii") + + #=================================================================== + # eoc + #=================================================================== + +class bsdi_crypt(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements the BSDi-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 4 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 5001, must be between 1 and 16777215, inclusive. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + .. versionchanged:: 1.6 + :meth:`hash` will now issue a warning if an even number of rounds is used + (see :ref:`bsdi-crypt-security-issues` regarding weak DES keys). + """ + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "bsdi_crypt" + setting_kwds = ("salt", "rounds") + checksum_size = 11 + checksum_chars = uh.HASH64_CHARS + + #--HasSalt-- + min_salt_size = max_salt_size = 4 + salt_chars = uh.HASH64_CHARS + + #--HasRounds-- + default_rounds = 5001 + min_rounds = 1 + max_rounds = 16777215 # (1<<24)-1 + rounds_cost = "linear" + + # NOTE: OpenBSD login.conf reports 7250 as minimum allowed rounds, + # but that seems to be an OS policy, not a algorithm limitation. + + #=================================================================== + # parsing + #=================================================================== + _hash_regex = re.compile(u(r""" + ^ + _ + (?P[./a-z0-9]{4}) + (?P[./a-z0-9]{4}) + (?P[./a-z0-9]{11})? 
+ $"""), re.X|re.I) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + rounds, salt, chk = m.group("rounds", "salt", "chk") + return cls( + rounds=h64.decode_int24(rounds.encode("ascii")), + salt=salt, + checksum=chk, + ) + + def to_string(self): + hash = u("_%s%s%s") % (h64.encode_int24(self.rounds).decode("ascii"), + self.salt, self.checksum) + return uascii_to_str(hash) + + #=================================================================== + # validation + #=================================================================== + + # NOTE: keeping this flag for admin/choose_rounds.py script. + # want to eventually expose rounds logic to that script in better way. + _avoid_even_rounds = True + + @classmethod + def using(cls, **kwds): + subcls = super(bsdi_crypt, cls).using(**kwds) + if not subcls.default_rounds & 1: + # issue warning if caller set an even 'rounds' value. + warn("bsdi_crypt rounds should be odd, as even rounds may reveal weak DES keys", + uh.exc.PasslibSecurityWarning) + return subcls + + @classmethod + def _generate_rounds(cls): + rounds = super(bsdi_crypt, cls)._generate_rounds() + # ensure autogenerated rounds are always odd + # NOTE: doing this even for default_rounds so needs_update() doesn't get + # caught in a loop. + # FIXME: this technically might generate a rounds value 1 larger + # than the requested upper bound - but better to err on side of safety. + return rounds|1 + + #=================================================================== + # migration + #=================================================================== + + def _calc_needs_update(self, **kwds): + # mark bsdi_crypt hashes as deprecated if they have even rounds. + if not self.rounds & 1: + return True + # hand off to base implementation + return super(bsdi_crypt, self)._calc_needs_update(**kwds) + + #=================================================================== + # backends + #=================================================================== + backends = ("os_crypt", "builtin") + + #--------------------------------------------------------------- + # os_crypt backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_os_crypt(cls): + if test_crypt("test", '_/...lLDAxARksGCHin.'): + cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) + return True + else: + return False + + def _calc_checksum_os_crypt(self, secret): + config = self.to_string() + hash = safe_crypt(secret, config) + if hash is None: + # py3's crypt.crypt() can't handle non-utf8 bytes. + # fallback to builtin alg, which is always available. 
+ return self._calc_checksum_builtin(secret) + if not hash.startswith(config[:9]) or len(hash) != 20: + raise uh.exc.CryptBackendError(self, config, hash) + return hash[-11:] + + #--------------------------------------------------------------- + # builtin backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_builtin(cls): + cls._set_calc_checksum_backend(cls._calc_checksum_builtin) + return True + + def _calc_checksum_builtin(self, secret): + return _raw_bsdi_crypt(secret, self.rounds, self.salt.encode("ascii")).decode("ascii") + + #=================================================================== + # eoc + #=================================================================== + +class bigcrypt(uh.HasSalt, uh.GenericHandler): + """This class implements the BigCrypt password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 22 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "bigcrypt" + setting_kwds = ("salt",) + checksum_chars = uh.HASH64_CHARS + # NOTE: checksum chars must be multiple of 11 + + #--HasSalt-- + min_salt_size = max_salt_size = 2 + salt_chars = uh.HASH64_CHARS + + #=================================================================== + # internal helpers + #=================================================================== + _hash_regex = re.compile(u(r""" + ^ + (?P[./a-z0-9]{2}) + (?P([./a-z0-9]{11})+)? 
+ $"""), re.X|re.I) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + salt, chk = m.group("salt", "chk") + return cls(salt=salt, checksum=chk) + + def to_string(self): + hash = u("%s%s") % (self.salt, self.checksum) + return uascii_to_str(hash) + + def _norm_checksum(self, checksum, relaxed=False): + checksum = super(bigcrypt, self)._norm_checksum(checksum, relaxed=relaxed) + if len(checksum) % 11: + raise uh.exc.InvalidHashError(self) + return checksum + + #=================================================================== + # backend + #=================================================================== + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + chk = _raw_des_crypt(secret, self.salt.encode("ascii")) + idx = 8 + end = len(secret) + while idx < end: + next = idx + 8 + chk += _raw_des_crypt(secret[idx:next], chk[-11:-9]) + idx = next + return chk.decode("ascii") + + #=================================================================== + # eoc + #=================================================================== + +class crypt16(uh.TruncateMixin, uh.HasSalt, uh.GenericHandler): + """This class implements the crypt16 password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 2 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :param bool truncate_error: + By default, crypt16 will silently truncate passwords larger than 16 bytes. + Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash` + to raise a :exc:`~passlib.exc.PasswordTruncateError` instead. + + .. versionadded:: 1.7 + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + + #-------------------- + # PasswordHash + #-------------------- + name = "crypt16" + setting_kwds = ("salt", "truncate_error") + + #-------------------- + # GenericHandler + #-------------------- + checksum_size = 22 + checksum_chars = uh.HASH64_CHARS + + #-------------------- + # HasSalt + #-------------------- + min_salt_size = max_salt_size = 2 + salt_chars = uh.HASH64_CHARS + + #-------------------- + # TruncateMixin + #-------------------- + truncate_size = 16 + + #=================================================================== + # internal helpers + #=================================================================== + _hash_regex = re.compile(u(r""" + ^ + (?P[./a-z0-9]{2}) + (?P[./a-z0-9]{22})? 
+ $"""), re.X|re.I) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + salt, chk = m.group("salt", "chk") + return cls(salt=salt, checksum=chk) + + def to_string(self): + hash = u("%s%s") % (self.salt, self.checksum) + return uascii_to_str(hash) + + #=================================================================== + # backend + #=================================================================== + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + + # check for truncation (during .hash() calls only) + if self.use_defaults: + self._check_truncate_policy(secret) + + # parse salt value + try: + salt_value = h64.decode_int12(self.salt.encode("ascii")) + except ValueError: # pragma: no cover - caught by class + raise suppress_cause(ValueError("invalid chars in salt")) + + # convert first 8 byts of secret string into an integer, + key1 = _crypt_secret_to_key(secret) + + # run data through des using input of 0 + result1 = des_encrypt_int_block(key1, 0, salt_value, 20) + + # convert next 8 bytes of secret string into integer (key=0 if secret < 8 chars) + key2 = _crypt_secret_to_key(secret[8:16]) + + # run data through des using input of 0 + result2 = des_encrypt_int_block(key2, 0, salt_value, 5) + + # done + chk = h64big.encode_int64(result1) + h64big.encode_int64(result2) + return chk.decode("ascii") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/digests.py b/ansible/lib/python3.11/site-packages/passlib/handlers/digests.py new file mode 100644 index 000000000..982155c91 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/digests.py @@ -0,0 +1,168 @@ +"""passlib.handlers.digests - plain hash digests +""" +#============================================================================= +# imports +#============================================================================= +# core +import hashlib +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils import to_native_str, to_bytes, render_bytes, consteq +from passlib.utils.compat import unicode, str_to_uascii +import passlib.utils.handlers as uh +from passlib.crypto.digest import lookup_hash +# local +__all__ = [ + "create_hex_hash", + "hex_md4", + "hex_md5", + "hex_sha1", + "hex_sha256", + "hex_sha512", +] + +#============================================================================= +# helpers for hexadecimal hashes +#============================================================================= +class HexDigestHash(uh.StaticHandler): + """this provides a template for supporting passwords stored as plain hexadecimal hashes""" + #=================================================================== + # class attrs + #=================================================================== + _hash_func = None # hash function to use - filled in by create_hex_hash() + checksum_size = None # filled in by create_hex_hash() + checksum_chars = uh.HEX_CHARS + + #: special for detecting if _hash_func is just a stub method. 
+ supported = True + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(self._hash_func(secret).hexdigest()) + + #=================================================================== + # eoc + #=================================================================== + +def create_hex_hash(digest, module=__name__, django_name=None, required=True): + """ + create hex-encoded unsalted hasher for specified digest algorithm. + + .. versionchanged:: 1.7.3 + If called with unknown/supported digest, won't throw error immediately, + but instead return a dummy hasher that will throw error when called. + + set ``required=True`` to restore old behavior. + """ + info = lookup_hash(digest, required=required) + name = "hex_" + info.name + if not info.supported: + info.digest_size = 0 + hasher = type(name, (HexDigestHash,), dict( + name=name, + __module__=module, # so ABCMeta won't clobber it + _hash_func=staticmethod(info.const), # sometimes it's a function, sometimes not. so wrap it. + checksum_size=info.digest_size*2, + __doc__="""This class implements a plain hexadecimal %s hash, and follows the :ref:`password-hash-api`. + +It supports no optional or contextual keywords. +""" % (info.name,) + )) + if not info.supported: + hasher.supported = False + if django_name: + hasher.django_name = django_name + return hasher + +#============================================================================= +# predefined handlers +#============================================================================= + +# NOTE: some digests below are marked as "required=False", because these may not be present on +# FIPS systems (see issue 116). if missing, will return stub hasher that throws error +# if an attempt is made to actually use hash/verify with them. + +hex_md4 = create_hex_hash("md4", required=False) +hex_md5 = create_hex_hash("md5", django_name="unsalted_md5", required=False) +hex_sha1 = create_hex_hash("sha1", required=False) +hex_sha256 = create_hex_hash("sha256") +hex_sha512 = create_hex_hash("sha512") + +#============================================================================= +# htdigest +#============================================================================= +class htdigest(uh.MinimalHandler): + """htdigest hash function. + + .. todo:: + document this hash + """ + name = "htdigest" + setting_kwds = () + context_kwds = ("user", "realm", "encoding") + default_encoding = "utf-8" + + @classmethod + def hash(cls, secret, user, realm, encoding=None): + # NOTE: this was deliberately written so that raw bytes are passed through + # unchanged, the encoding kwd is only used to handle unicode values. 
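# Minimal standalone sketch of the digest computed by this method: the hex
# MD5 of "user:realm:password" (the sample account values below are made up).
import hashlib as _demo_md5lib
_demo_htdigest = _demo_md5lib.md5(b"alice:example.com:opensesame").hexdigest()
assert len(_demo_htdigest) == 32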
+ if not encoding: + encoding = cls.default_encoding + uh.validate_secret(secret) + if isinstance(secret, unicode): + secret = secret.encode(encoding) + user = to_bytes(user, encoding, "user") + realm = to_bytes(realm, encoding, "realm") + data = render_bytes("%s:%s:%s", user, realm, secret) + return hashlib.md5(data).hexdigest() + + @classmethod + def _norm_hash(cls, hash): + """normalize hash to native string, and validate it""" + hash = to_native_str(hash, param="hash") + if len(hash) != 32: + raise uh.exc.MalformedHashError(cls, "wrong size") + for char in hash: + if char not in uh.LC_HEX_CHARS: + raise uh.exc.MalformedHashError(cls, "invalid chars in hash") + return hash + + @classmethod + def verify(cls, secret, hash, user, realm, encoding="utf-8"): + hash = cls._norm_hash(hash) + other = cls.hash(secret, user, realm, encoding) + return consteq(hash, other) + + @classmethod + def identify(cls, hash): + try: + cls._norm_hash(hash) + except ValueError: + return False + return True + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genconfig(cls): + return cls.hash("", "", "") + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genhash(cls, secret, config, user, realm, encoding=None): + # NOTE: 'config' is ignored, as this hash has no salting / other configuration. + # just have to make sure it's valid. + cls._norm_hash(config) + return cls.hash(secret, user, realm, encoding) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/django.py b/ansible/lib/python3.11/site-packages/passlib/handlers/django.py new file mode 100644 index 000000000..6dd499ac2 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/django.py @@ -0,0 +1,512 @@ +"""passlib.handlers.django- Django password hash support""" +#============================================================================= +# imports +#============================================================================= +# core +from base64 import b64encode +from binascii import hexlify +from hashlib import md5, sha1, sha256 +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.handlers.bcrypt import _wrapped_bcrypt +from passlib.hash import argon2, bcrypt, pbkdf2_sha1, pbkdf2_sha256 +from passlib.utils import to_unicode, rng, getrandstr +from passlib.utils.binary import BASE64_CHARS +from passlib.utils.compat import str_to_uascii, uascii_to_str, unicode, u +from passlib.crypto.digest import pbkdf2_hmac +import passlib.utils.handlers as uh +# local +__all__ = [ + "django_salted_sha1", + "django_salted_md5", + "django_bcrypt", + "django_pbkdf2_sha1", + "django_pbkdf2_sha256", + "django_argon2", + "django_des_crypt", + "django_disabled", +] + +#============================================================================= +# lazy imports & constants +#============================================================================= + +# imported by django_des_crypt._calc_checksum() +des_crypt = None + +def _import_des_crypt(): + global des_crypt + if des_crypt is None: + from passlib.hash import des_crypt + return des_crypt + +# django 1.4's salt charset +SALT_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' + +#============================================================================= +# salted hashes 
+#============================================================================= +class DjangoSaltedHash(uh.HasSalt, uh.GenericHandler): + """base class providing common code for django hashes""" + # name, ident, checksum_size must be set by subclass. + # ident must include "$" suffix. + setting_kwds = ("salt", "salt_size") + + # NOTE: django 1.0-1.3 would accept empty salt strings. + # django 1.4 won't, but this appears to be regression + # (https://code.djangoproject.com/ticket/18144) + # so presumably it will be fixed in a later release. + default_salt_size = 12 + max_salt_size = None + salt_chars = SALT_CHARS + + checksum_chars = uh.LOWER_HEX_CHARS + + @classmethod + def from_string(cls, hash): + salt, chk = uh.parse_mc2(hash, cls.ident, handler=cls) + return cls(salt=salt, checksum=chk) + + def to_string(self): + return uh.render_mc2(self.ident, self.salt, self.checksum) + +# NOTE: only used by PBKDF2 +class DjangoVariableHash(uh.HasRounds, DjangoSaltedHash): + """base class providing common code for django hashes w/ variable rounds""" + setting_kwds = DjangoSaltedHash.setting_kwds + ("rounds",) + + min_rounds = 1 + + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self): + return uh.render_mc3(self.ident, self.rounds, self.salt, self.checksum) + +class django_salted_sha1(DjangoSaltedHash): + """This class implements Django's Salted SHA1 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and uses a single round of SHA1. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, a 12 character one will be autogenerated (this is recommended). + If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. + + :type salt_size: int + :param salt_size: + Optional number of characters to use when autogenerating new salts. + Defaults to 12, but can be any positive value. + + This should be compatible with Django 1.4's :class:`!SHA1PasswordHasher` class. + + .. versionchanged: 1.6 + This class now generates 12-character salts instead of 5, + and generated salts uses the character range ``[0-9a-zA-Z]`` instead of + the ``[0-9a-f]``. This is to be compatible with how Django >= 1.4 + generates these hashes; but hashes generated in this manner will still be + correctly interpreted by earlier versions of Django. + """ + name = "django_salted_sha1" + django_name = "sha1" + ident = u("sha1$") + checksum_size = 40 + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(sha1(self.salt.encode("ascii") + secret).hexdigest()) + +class django_salted_md5(DjangoSaltedHash): + """This class implements Django's Salted MD5 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and uses a single round of MD5. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, a 12 character one will be autogenerated (this is recommended). + If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. 
+ + :type salt_size: int + :param salt_size: + Optional number of characters to use when autogenerating new salts. + Defaults to 12, but can be any positive value. + + This should be compatible with the hashes generated by + Django 1.4's :class:`!MD5PasswordHasher` class. + + .. versionchanged: 1.6 + This class now generates 12-character salts instead of 5, + and generated salts uses the character range ``[0-9a-zA-Z]`` instead of + the ``[0-9a-f]``. This is to be compatible with how Django >= 1.4 + generates these hashes; but hashes generated in this manner will still be + correctly interpreted by earlier versions of Django. + """ + name = "django_salted_md5" + django_name = "md5" + ident = u("md5$") + checksum_size = 32 + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(md5(self.salt.encode("ascii") + secret).hexdigest()) + +#============================================================================= +# BCrypt +#============================================================================= + +django_bcrypt = uh.PrefixWrapper("django_bcrypt", bcrypt, + prefix=u('bcrypt$'), ident=u("bcrypt$"), + # NOTE: this docstring is duplicated in the docs, since sphinx + # seems to be having trouble reading it via autodata:: + doc="""This class implements Django 1.4's BCrypt wrapper, and follows the :ref:`password-hash-api`. + + This is identical to :class:`!bcrypt` itself, but with + the Django-specific prefix ``"bcrypt$"`` prepended. + + See :doc:`/lib/passlib.hash.bcrypt` for more details, + the usage and behavior is identical. + + This should be compatible with the hashes generated by + Django 1.4's :class:`!BCryptPasswordHasher` class. + + .. versionadded:: 1.6 + """) +django_bcrypt.django_name = "bcrypt" +django_bcrypt._using_clone_attrs += ("django_name",) + +#============================================================================= +# BCRYPT + SHA256 +#============================================================================= + +class django_bcrypt_sha256(_wrapped_bcrypt): + """This class implements Django 1.6's Bcrypt+SHA256 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + While the algorithm and format is somewhat different, + the api and options for this hash are identical to :class:`!bcrypt` itself, + see :doc:`bcrypt ` for more details. + + .. versionadded:: 1.6.2 + """ + name = "django_bcrypt_sha256" + django_name = "bcrypt_sha256" + _digest = sha256 + + # sample hash: + # bcrypt_sha256$$2a$06$/3OeRpbOf8/l6nPPRdZPp.nRiyYqPobEZGdNRBWihQhiFDh1ws1tu + + # XXX: we can't use .ident attr due to bcrypt code using it. 
+ # working around that via django_prefix + django_prefix = u('bcrypt_sha256$') + + @classmethod + def identify(cls, hash): + hash = uh.to_unicode_for_identify(hash) + if not hash: + return False + return hash.startswith(cls.django_prefix) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + if not hash.startswith(cls.django_prefix): + raise uh.exc.InvalidHashError(cls) + bhash = hash[len(cls.django_prefix):] + if not bhash.startswith("$2"): + raise uh.exc.MalformedHashError(cls) + return super(django_bcrypt_sha256, cls).from_string(bhash) + + def to_string(self): + bhash = super(django_bcrypt_sha256, self).to_string() + return uascii_to_str(self.django_prefix) + bhash + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + secret = hexlify(self._digest(secret).digest()) + return super(django_bcrypt_sha256, self)._calc_checksum(secret) + +#============================================================================= +# PBKDF2 variants +#============================================================================= + +class django_pbkdf2_sha256(DjangoVariableHash): + """This class implements Django's PBKDF2-HMAC-SHA256 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, a 12 character one will be autogenerated (this is recommended). + If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. + + :type salt_size: int + :param salt_size: + Optional number of characters to use when autogenerating new salts. + Defaults to 12, but can be any positive value. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 29000, but must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + This should be compatible with the hashes generated by + Django 1.4's :class:`!PBKDF2PasswordHasher` class. + + .. versionadded:: 1.6 + """ + name = "django_pbkdf2_sha256" + django_name = "pbkdf2_sha256" + ident = u('pbkdf2_sha256$') + min_salt_size = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + checksum_chars = uh.PADDED_BASE64_CHARS + checksum_size = 44 # 32 bytes -> base64 + default_rounds = pbkdf2_sha256.default_rounds # NOTE: django 1.6 uses 12000 + _digest = "sha256" + + def _calc_checksum(self, secret): + # NOTE: secret & salt will be encoded using UTF-8 by pbkdf2_hmac() + hash = pbkdf2_hmac(self._digest, secret, self.salt, self.rounds) + return b64encode(hash).rstrip().decode("ascii") + +class django_pbkdf2_sha1(django_pbkdf2_sha256): + """This class implements Django's PBKDF2-HMAC-SHA1 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, a 12 character one will be autogenerated (this is recommended). 
+ If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. + + :type salt_size: int + :param salt_size: + Optional number of characters to use when autogenerating new salts. + Defaults to 12, but can be any positive value. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 131000, but must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + This should be compatible with the hashes generated by + Django 1.4's :class:`!PBKDF2SHA1PasswordHasher` class. + + .. versionadded:: 1.6 + """ + name = "django_pbkdf2_sha1" + django_name = "pbkdf2_sha1" + ident = u('pbkdf2_sha1$') + checksum_size = 28 # 20 bytes -> base64 + default_rounds = pbkdf2_sha1.default_rounds # NOTE: django 1.6 uses 12000 + _digest = "sha1" + +#============================================================================= +# Argon2 +#============================================================================= + +# NOTE: as of 2019-11-11, Django's Argon2PasswordHasher only supports Type I; +# so limiting this to ensure that as well. + +django_argon2 = uh.PrefixWrapper( + name="django_argon2", + wrapped=argon2.using(type="I"), + prefix=u('argon2'), + ident=u('argon2$argon2i$'), + # NOTE: this docstring is duplicated in the docs, since sphinx + # seems to be having trouble reading it via autodata:: + doc="""This class implements Django 1.10's Argon2 wrapper, and follows the :ref:`password-hash-api`. + + This is identical to :class:`!argon2` itself, but with + the Django-specific prefix ``"argon2$"`` prepended. + + See :doc:`argon2 ` for more details, + the usage and behavior is identical. + + This should be compatible with the hashes generated by + Django 1.10's :class:`!Argon2PasswordHasher` class. + + .. versionadded:: 1.7 + """) +django_argon2.django_name = "argon2" +django_argon2._using_clone_attrs += ("django_name",) + +#============================================================================= +# DES +#============================================================================= +class django_des_crypt(uh.TruncateMixin, uh.HasSalt, uh.GenericHandler): + """This class implements Django's :class:`des_crypt` wrapper, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 2 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :param bool truncate_error: + By default, django_des_crypt will silently truncate passwords larger than 8 bytes. + Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash` + to raise a :exc:`~passlib.exc.PasswordTruncateError` instead. + + .. versionadded:: 1.7 + + This should be compatible with the hashes generated by + Django 1.4's :class:`!CryptPasswordHasher` class. + Note that Django only supports this hash on Unix systems + (though :class:`!django_des_crypt` is available cross-platform + under Passlib). + + .. 
versionchanged:: 1.6 + This class will now accept hashes with empty salt strings, + since Django 1.4 generates them this way. + """ + name = "django_des_crypt" + django_name = "crypt" + setting_kwds = ("salt", "salt_size", "truncate_error") + ident = u("crypt$") + checksum_chars = salt_chars = uh.HASH64_CHARS + checksum_size = 11 + min_salt_size = default_salt_size = 2 + truncate_size = 8 + + # NOTE: regarding duplicate salt field: + # + # django 1.0 had a "crypt$$" hash format, + # used [a-z0-9] to generate a 5 char salt, stored it in salt1, + # duplicated the first two chars of salt1 as salt2. + # it would throw an error if salt1 was empty. + # + # django 1.4 started generating 2 char salt using the full alphabet, + # left salt1 empty, and only paid attention to salt2. + # + # in order to be compatible with django 1.0, the hashes generated + # by this function will always include salt1, unless the following + # class-level field is disabled (mainly used for testing) + use_duplicate_salt = True + + @classmethod + def from_string(cls, hash): + salt, chk = uh.parse_mc2(hash, cls.ident, handler=cls) + if chk: + # chk should be full des_crypt hash + if not salt: + # django 1.4 always uses empty salt field, + # so extract salt from des_crypt hash + salt = chk[:2] + elif salt[:2] != chk[:2]: + # django 1.0 stored 5 chars in salt field, and duplicated + # the first two chars in . we keep the full salt, + # but make sure the first two chars match as sanity check. + raise uh.exc.MalformedHashError(cls, + "first two digits of salt and checksum must match") + # in all cases, strip salt chars from + chk = chk[2:] + return cls(salt=salt, checksum=chk) + + def to_string(self): + salt = self.salt + chk = salt[:2] + self.checksum + if self.use_duplicate_salt: + # filling in salt field, so that we're compatible with django 1.0 + return uh.render_mc2(self.ident, salt, chk) + else: + # django 1.4+ style hash + return uh.render_mc2(self.ident, "", chk) + + def _calc_checksum(self, secret): + # NOTE: we lazily import des_crypt, + # since most django deploys won't use django_des_crypt + global des_crypt + if des_crypt is None: + _import_des_crypt() + # check for truncation (during .hash() calls only) + if self.use_defaults: + self._check_truncate_policy(secret) + return des_crypt(salt=self.salt[:2])._calc_checksum(secret) + +class django_disabled(uh.ifc.DisabledHash, uh.StaticHandler): + """This class provides disabled password behavior for Django, and follows the :ref:`password-hash-api`. + + This class does not implement a hash, but instead + claims the special hash string ``"!"`` which Django uses + to indicate an account's password has been disabled. + + * newly encrypted passwords will hash to ``"!"``. + * it rejects all passwords. + + .. note:: + + Django 1.6 prepends a randomly generated 40-char alphanumeric string + to each unusuable password. This class recognizes such strings, + but for backwards compatibility, still returns ``"!"``. + + See ``_ for why + Django appends an alphanumeric string. + + .. versionchanged:: 1.6.2 added Django 1.6 support + + .. versionchanged:: 1.7 started appending an alphanumeric string. + """ + name = "django_disabled" + _hash_prefix = u("!") + suffix_length = 40 + + # XXX: move this to StaticHandler, or wherever _hash_prefix is being used? 
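For the django_des_crypt wrapper defined above, a minimal sketch of the duplicate-salt layout it emits (relying on passlib's builtin des_crypt backend; the password is illustrative):

from passlib.hash import django_des_crypt

h = django_des_crypt.hash("hunter2")            # -> "crypt$<salt>$<salt + checksum>"
salt_field, chk_field = h[len("crypt$"):].split("$")
assert chk_field[:2] == salt_field[:2]          # salt repeated for Django 1.0 compatibility
assert django_des_crypt.verify("hunter2", h)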
+ @classmethod + def identify(cls, hash): + hash = uh.to_unicode_for_identify(hash) + return hash.startswith(cls._hash_prefix) + + def _calc_checksum(self, secret): + # generate random suffix to match django's behavior + return getrandstr(rng, BASE64_CHARS[:-2], self.suffix_length) + + @classmethod + def verify(cls, secret, hash): + uh.validate_secret(secret) + if not cls.identify(hash): + raise uh.exc.InvalidHashError(cls) + return False + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/fshp.py b/ansible/lib/python3.11/site-packages/passlib/handlers/fshp.py new file mode 100644 index 000000000..db13e745b --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/fshp.py @@ -0,0 +1,214 @@ +"""passlib.handlers.fshp +""" + +#============================================================================= +# imports +#============================================================================= +# core +from base64 import b64encode, b64decode +import re +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils import to_unicode +import passlib.utils.handlers as uh +from passlib.utils.compat import bascii_to_str, iteritems, u,\ + unicode +from passlib.crypto.digest import pbkdf1 +# local +__all__ = [ + 'fshp', +] +#============================================================================= +# sha1-crypt +#============================================================================= +class fshp(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements the FSHP password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :param salt: + Optional raw salt string. + If not specified, one will be autogenerated (this is recommended). + + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 16 bytes, but can be any non-negative value. + + :param rounds: + Optional number of rounds to use. + Defaults to 480000, must be between 1 and 4294967295, inclusive. + + :param variant: + Optionally specifies variant of FSHP to use. + + * ``0`` - uses SHA-1 digest (deprecated). + * ``1`` - uses SHA-2/256 digest (default). + * ``2`` - uses SHA-2/384 digest. + * ``3`` - uses SHA-2/512 digest. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "fshp" + setting_kwds = ("salt", "salt_size", "rounds", "variant") + checksum_chars = uh.PADDED_BASE64_CHARS + ident = u("{FSHP") + # checksum_size is property() that depends on variant + + #--HasRawSalt-- + default_salt_size = 16 # current passlib default, FSHP uses 8 + max_salt_size = None + + #--HasRounds-- + # FIXME: should probably use different default rounds + # based on the variant. setting for default variant (sha256) for now. + default_rounds = 480000 # current passlib default, FSHP uses 4096 + min_rounds = 1 # set by FSHP + max_rounds = 4294967295 # 32-bit integer limit - not set by FSHP + rounds_cost = "linear" + + #--variants-- + default_variant = 1 + _variant_info = { + # variant: (hash name, digest size) + 0: ("sha1", 20), + 1: ("sha256", 32), + 2: ("sha384", 48), + 3: ("sha512", 64), + } + _variant_aliases = dict( + [(unicode(k),k) for k in _variant_info] + + [(v[0],k) for k,v in iteritems(_variant_info)] + ) + + #=================================================================== + # configuration + #=================================================================== + @classmethod + def using(cls, variant=None, **kwds): + subcls = super(fshp, cls).using(**kwds) + if variant is not None: + subcls.default_variant = cls._norm_variant(variant) + return subcls + + #=================================================================== + # instance attrs + #=================================================================== + variant = None + + #=================================================================== + # init + #=================================================================== + def __init__(self, variant=None, **kwds): + # NOTE: variant must be set first, since it controls checksum size, etc. 
+ self.use_defaults = kwds.get("use_defaults") # load this early + if variant is not None: + variant = self._norm_variant(variant) + elif self.use_defaults: + variant = self.default_variant + assert self._norm_variant(variant) == variant, "invalid default variant: %r" % (variant,) + else: + raise TypeError("no variant specified") + self.variant = variant + super(fshp, self).__init__(**kwds) + + @classmethod + def _norm_variant(cls, variant): + if isinstance(variant, bytes): + variant = variant.decode("ascii") + if isinstance(variant, unicode): + try: + variant = cls._variant_aliases[variant] + except KeyError: + raise ValueError("invalid fshp variant") + if not isinstance(variant, int): + raise TypeError("fshp variant must be int or known alias") + if variant not in cls._variant_info: + raise ValueError("invalid fshp variant") + return variant + + @property + def checksum_alg(self): + return self._variant_info[self.variant][0] + + @property + def checksum_size(self): + return self._variant_info[self.variant][1] + + #=================================================================== + # formatting + #=================================================================== + + _hash_regex = re.compile(u(r""" + ^ + \{FSHP + (\d+)\| # variant + (\d+)\| # salt size + (\d+)\} # rounds + ([a-zA-Z0-9+/]+={0,3}) # digest + $"""), re.X) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + variant, salt_size, rounds, data = m.group(1,2,3,4) + variant = int(variant) + salt_size = int(salt_size) + rounds = int(rounds) + try: + data = b64decode(data.encode("ascii")) + except TypeError: + raise uh.exc.MalformedHashError(cls) + salt = data[:salt_size] + chk = data[salt_size:] + return cls(salt=salt, checksum=chk, rounds=rounds, variant=variant) + + def to_string(self): + chk = self.checksum + salt = self.salt + data = bascii_to_str(b64encode(salt+chk)) + return "{FSHP%d|%d|%d}%s" % (self.variant, len(salt), self.rounds, data) + + #=================================================================== + # backend + #=================================================================== + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + # NOTE: for some reason, FSHP uses pbkdf1 with password & salt reversed. + # this has only a minimal impact on security, + # but it is worth noting this deviation. 
+ return pbkdf1( + digest=self.checksum_alg, + secret=self.salt, + salt=secret, + rounds=self.rounds, + keylen=self.checksum_size, + ) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/ldap_digests.py b/ansible/lib/python3.11/site-packages/passlib/handlers/ldap_digests.py new file mode 100644 index 000000000..30254f0b5 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/ldap_digests.py @@ -0,0 +1,359 @@ +"""passlib.handlers.digests - plain hash digests +""" +#============================================================================= +# imports +#============================================================================= +# core +from base64 import b64encode, b64decode +from hashlib import md5, sha1, sha256, sha512 +import logging; log = logging.getLogger(__name__) +import re +# site +# pkg +from passlib.handlers.misc import plaintext +from passlib.utils import unix_crypt_schemes, to_unicode +from passlib.utils.compat import uascii_to_str, unicode, u +from passlib.utils.decor import classproperty +import passlib.utils.handlers as uh +# local +__all__ = [ + "ldap_plaintext", + "ldap_md5", + "ldap_sha1", + "ldap_salted_md5", + "ldap_salted_sha1", + "ldap_salted_sha256", + "ldap_salted_sha512", + + ##"get_active_ldap_crypt_schemes", + "ldap_des_crypt", + "ldap_bsdi_crypt", + "ldap_md5_crypt", + "ldap_sha1_crypt", + "ldap_bcrypt", + "ldap_sha256_crypt", + "ldap_sha512_crypt", +] + +#============================================================================= +# ldap helpers +#============================================================================= +class _Base64DigestHelper(uh.StaticHandler): + """helper for ldap_md5 / ldap_sha1""" + # XXX: could combine this with hex digests in digests.py + + ident = None # required - prefix identifier + _hash_func = None # required - hash function + _hash_regex = None # required - regexp to recognize hash + checksum_chars = uh.PADDED_BASE64_CHARS + + @classproperty + def _hash_prefix(cls): + """tell StaticHandler to strip ident from checksum""" + return cls.ident + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + chk = self._hash_func(secret).digest() + return b64encode(chk).decode("ascii") + +class _SaltedBase64DigestHelper(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """helper for ldap_salted_md5 / ldap_salted_sha1""" + setting_kwds = ("salt", "salt_size") + checksum_chars = uh.PADDED_BASE64_CHARS + + ident = None # required - prefix identifier + _hash_func = None # required - hash function + _hash_regex = None # required - regexp to recognize hash + min_salt_size = max_salt_size = 4 + + # NOTE: openldap implementation uses 4 byte salt, + # but it's been reported (issue 30) that some servers use larger salts. + # the semi-related rfc3112 recommends support for up to 16 byte salts. 
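Putting the fshp pieces together, a small usage sketch (the variant, salt size, and rounds below simply restate the defaults; the password is made up):

from passlib.hash import fshp

h = fshp.using(variant=1, salt_size=16, rounds=480000).hash("hunter2")
assert h.startswith("{FSHP1|16|480000}")        # self-describing prefix: variant|salt length|rounds
assert fshp.verify("hunter2", h)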
+ min_salt_size = 4 + default_salt_size = 4 + max_salt_size = 16 + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + try: + data = b64decode(m.group("tmp").encode("ascii")) + except TypeError: + raise uh.exc.MalformedHashError(cls) + cs = cls.checksum_size + assert cs + return cls(checksum=data[:cs], salt=data[cs:]) + + def to_string(self): + data = self.checksum + self.salt + hash = self.ident + b64encode(data).decode("ascii") + return uascii_to_str(hash) + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return self._hash_func(secret + self.salt).digest() + +#============================================================================= +# implementations +#============================================================================= +class ldap_md5(_Base64DigestHelper): + """This class stores passwords using LDAP's plain MD5 format, and follows the :ref:`password-hash-api`. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods have no optional keywords. + """ + name = "ldap_md5" + ident = u("{MD5}") + _hash_func = md5 + _hash_regex = re.compile(u(r"^\{MD5\}(?P[+/a-zA-Z0-9]{22}==)$")) + +class ldap_sha1(_Base64DigestHelper): + """This class stores passwords using LDAP's plain SHA1 format, and follows the :ref:`password-hash-api`. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods have no optional keywords. + """ + name = "ldap_sha1" + ident = u("{SHA}") + _hash_func = sha1 + _hash_regex = re.compile(u(r"^\{SHA\}(?P[+/a-zA-Z0-9]{27}=)$")) + +class ldap_salted_md5(_SaltedBase64DigestHelper): + """This class stores passwords using LDAP's salted MD5 format, and follows the :ref:`password-hash-api`. + + It supports a 4-16 byte salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it may be any 4-16 byte string. + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 4 bytes for compatibility with the LDAP spec, + but some systems use larger salts, and Passlib supports + any value between 4-16. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + .. versionchanged:: 1.6 + This format now supports variable length salts, instead of a fix 4 bytes. + """ + name = "ldap_salted_md5" + ident = u("{SMD5}") + checksum_size = 16 + _hash_func = md5 + _hash_regex = re.compile(u(r"^\{SMD5\}(?P[+/a-zA-Z0-9]{27,}={0,2})$")) + +class ldap_salted_sha1(_SaltedBase64DigestHelper): + """ + This class stores passwords using LDAP's "Salted SHA1" format, + and follows the :ref:`password-hash-api`. + + It supports a 4-16 byte salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). 
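To make the {SMD5}/{SSHA} layout concrete, a short sketch of the base64 payload produced by the salted helpers above (password and salt size are illustrative):

from base64 import b64decode
from passlib.hash import ldap_md5, ldap_salted_md5

h = ldap_salted_md5.using(salt_size=4).hash("hunter2")
assert h.startswith("{SMD5}")
blob = b64decode(h[len("{SMD5}"):])
assert len(blob) == 16 + 4                      # 16-byte MD5 digest followed by the 4-byte salt
assert ldap_salted_md5.verify("hunter2", h)
assert ldap_md5.verify("hunter2", ldap_md5.hash("hunter2"))   # unsalted variant for comparison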
+ If specified, it may be any 4-16 byte string. + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 4 bytes for compatibility with the LDAP spec, + but some systems use larger salts, and Passlib supports + any value between 4-16. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + .. versionchanged:: 1.6 + This format now supports variable length salts, instead of a fix 4 bytes. + """ + name = "ldap_salted_sha1" + ident = u("{SSHA}") + checksum_size = 20 + _hash_func = sha1 + # NOTE: 32 = ceil((20 + 4) * 4/3) + _hash_regex = re.compile(u(r"^\{SSHA\}(?P[+/a-zA-Z0-9]{32,}={0,2})$")) + + + +class ldap_salted_sha256(_SaltedBase64DigestHelper): + """ + This class stores passwords using LDAP's "Salted SHA2-256" format, + and follows the :ref:`password-hash-api`. + + It supports a 4-16 byte salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it may be any 4-16 byte string. + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 8 bytes for compatibility with the LDAP spec, + but Passlib supports any value between 4-16. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.7.3 + """ + name = "ldap_salted_sha256" + ident = u("{SSHA256}") + checksum_size = 32 + default_salt_size = 8 + _hash_func = sha256 + # NOTE: 48 = ceil((32 + 4) * 4/3) + _hash_regex = re.compile(u(r"^\{SSHA256\}(?P[+/a-zA-Z0-9]{48,}={0,2})$")) + + +class ldap_salted_sha512(_SaltedBase64DigestHelper): + """ + This class stores passwords using LDAP's "Salted SHA2-512" format, + and follows the :ref:`password-hash-api`. + + It supports a 4-16 byte salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it may be any 4-16 byte string. + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 8 bytes for compatibility with the LDAP spec, + but Passlib supports any value between 4-16. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. 
versionadded:: 1.7.3 + """ + name = "ldap_salted_sha512" + ident = u("{SSHA512}") + checksum_size = 64 + default_salt_size = 8 + _hash_func = sha512 + # NOTE: 91 = ceil((64 + 4) * 4/3) + _hash_regex = re.compile(u(r"^\{SSHA512\}(?P[+/a-zA-Z0-9]{91,}={0,2})$")) + + +class ldap_plaintext(plaintext): + """This class stores passwords in plaintext, and follows the :ref:`password-hash-api`. + + This class acts much like the generic :class:`!passlib.hash.plaintext` handler, + except that it will identify a hash only if it does NOT begin with the ``{XXX}`` identifier prefix + used by RFC2307 passwords. + + The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods all require the + following additional contextual keyword: + + :type encoding: str + :param encoding: + This controls the character encoding to use (defaults to ``utf-8``). + + This encoding will be used to encode :class:`!unicode` passwords + under Python 2, and decode :class:`!bytes` hashes under Python 3. + + .. versionchanged:: 1.6 + The ``encoding`` keyword was added. + """ + # NOTE: this subclasses plaintext, since all it does differently + # is override identify() + + name = "ldap_plaintext" + _2307_pat = re.compile(u(r"^\{\w+\}.*$")) + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genconfig(cls): + # Overridding plaintext.genconfig() since it returns "", + # but have to return non-empty value due to identify() below + return "!" + + @classmethod + def identify(cls, hash): + # NOTE: identifies all strings EXCEPT those with {XXX} prefix + hash = uh.to_unicode_for_identify(hash) + return bool(hash) and cls._2307_pat.match(hash) is None + +#============================================================================= +# {CRYPT} wrappers +# the following are wrappers around the base crypt algorithms, +# which add the ldap required {CRYPT} prefix +#============================================================================= +ldap_crypt_schemes = [ 'ldap_' + name for name in unix_crypt_schemes ] + +def _init_ldap_crypt_handlers(): + # NOTE: I don't like to implicitly modify globals() like this, + # but don't want to write out all these handlers out either :) + g = globals() + for wname in unix_crypt_schemes: + name = 'ldap_' + wname + g[name] = uh.PrefixWrapper(name, wname, prefix=u("{CRYPT}"), lazy=True) + del g +_init_ldap_crypt_handlers() + +##_lcn_host = None +##def get_host_ldap_crypt_schemes(): +## global _lcn_host +## if _lcn_host is None: +## from passlib.hosts import host_context +## schemes = host_context.schemes() +## _lcn_host = [ +## "ldap_" + name +## for name in unix_crypt_names +## if name in schemes +## ] +## return _lcn_host + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/md5_crypt.py b/ansible/lib/python3.11/site-packages/passlib/handlers/md5_crypt.py new file mode 100644 index 000000000..e3a2dfa49 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/md5_crypt.py @@ -0,0 +1,346 @@ +"""passlib.handlers.md5_crypt - md5-crypt algorithm""" +#============================================================================= +# imports +#============================================================================= +# core +from hashlib import md5 +import logging; log = logging.getLogger(__name__) +# site +# pkg 
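The generated ldap_* crypt schemes are plain PrefixWrappers, so stripping the {CRYPT} tag leaves an ordinary crypt(3) string; a brief sketch (password illustrative, md5_crypt chosen because its builtin backend is always available):

from passlib.hash import ldap_md5_crypt, md5_crypt

h = ldap_md5_crypt.hash("hunter2")
assert h.startswith("{CRYPT}$1$")
assert md5_crypt.verify("hunter2", h[len("{CRYPT}"):])
assert ldap_md5_crypt.verify("hunter2", h)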
+from passlib.utils import safe_crypt, test_crypt, repeat_string +from passlib.utils.binary import h64 +from passlib.utils.compat import unicode, u +import passlib.utils.handlers as uh +# local +__all__ = [ + "md5_crypt", + "apr_md5_crypt", +] + +#============================================================================= +# pure-python backend +#============================================================================= +_BNULL = b"\x00" +_MD5_MAGIC = b"$1$" +_APR_MAGIC = b"$apr1$" + +# pre-calculated offsets used to speed up C digest stage (see notes below). +# sequence generated using the following: + ##perms_order = "p,pp,ps,psp,sp,spp".split(",") + ##def offset(i): + ## key = (("p" if i % 2 else "") + ("s" if i % 3 else "") + + ## ("p" if i % 7 else "") + ("" if i % 2 else "p")) + ## return perms_order.index(key) + ##_c_digest_offsets = [(offset(i), offset(i+1)) for i in range(0,42,2)] +_c_digest_offsets = ( + (0, 3), (5, 1), (5, 3), (1, 2), (5, 1), (5, 3), (1, 3), + (4, 1), (5, 3), (1, 3), (5, 0), (5, 3), (1, 3), (5, 1), + (4, 3), (1, 3), (5, 1), (5, 2), (1, 3), (5, 1), (5, 3), + ) + +# map used to transpose bytes when encoding final digest +_transpose_map = (12, 6, 0, 13, 7, 1, 14, 8, 2, 15, 9, 3, 5, 10, 4, 11) + +def _raw_md5_crypt(pwd, salt, use_apr=False): + """perform raw md5-crypt calculation + + this function provides a pure-python implementation of the internals + for the MD5-Crypt algorithms; it doesn't handle any of the + parsing/validation of the hash strings themselves. + + :arg pwd: password chars/bytes to hash + :arg salt: salt chars to use + :arg use_apr: use apache variant + + :returns: + encoded checksum chars + """ + # NOTE: regarding 'apr' format: + # really, apache? you had to invent a whole new "$apr1$" format, + # when all you did was change the ident incorporated into the hash? + # would love to find webpage explaining why just using a portable + # implementation of $1$ wasn't sufficient. *nothing else* was changed. + + #=================================================================== + # init & validate inputs + #=================================================================== + + # validate secret + # XXX: not sure what official unicode policy is, using this as default + if isinstance(pwd, unicode): + pwd = pwd.encode("utf-8") + assert isinstance(pwd, bytes), "pwd not unicode or bytes" + if _BNULL in pwd: + raise uh.exc.NullPasswordError(md5_crypt) + pwd_len = len(pwd) + + # validate salt - should have been taken care of by caller + assert isinstance(salt, unicode), "salt not unicode" + salt = salt.encode("ascii") + assert len(salt) < 9, "salt too large" + # NOTE: spec says salts larger than 8 bytes should be truncated, + # instead of causing an error. this function assumes that's been + # taken care of by the handler class. + + # load APR specific constants + if use_apr: + magic = _APR_MAGIC + else: + magic = _MD5_MAGIC + + #=================================================================== + # digest B - used as subinput to digest A + #=================================================================== + db = md5(pwd + salt + pwd).digest() + + #=================================================================== + # digest A - used to initialize first round of digest C + #=================================================================== + # start out with pwd + magic + salt + a_ctx = md5(pwd + magic + salt) + a_ctx_update = a_ctx.update + + # add pwd_len bytes of b, repeating b as many times as needed. 
+ a_ctx_update(repeat_string(db, pwd_len)) + + # add null chars & first char of password + # NOTE: this may have historically been a bug, + # where they meant to use db[0] instead of B_NULL, + # but the original code memclear'ed db, + # and now all implementations have to use this. + i = pwd_len + evenchar = pwd[:1] + while i: + a_ctx_update(_BNULL if i & 1 else evenchar) + i >>= 1 + + # finish A + da = a_ctx.digest() + + #=================================================================== + # digest C - for a 1000 rounds, combine A, S, and P + # digests in various ways; in order to burn CPU time. + #=================================================================== + + # NOTE: the original MD5-Crypt implementation performs the C digest + # calculation using the following loop: + # + ##dc = da + ##i = 0 + ##while i < rounds: + ## tmp_ctx = md5(pwd if i & 1 else dc) + ## if i % 3: + ## tmp_ctx.update(salt) + ## if i % 7: + ## tmp_ctx.update(pwd) + ## tmp_ctx.update(dc if i & 1 else pwd) + ## dc = tmp_ctx.digest() + ## i += 1 + # + # The code Passlib uses (below) implements an equivalent algorithm, + # it's just been heavily optimized to pre-calculate a large number + # of things beforehand. It works off of a couple of observations + # about the original algorithm: + # + # 1. each round is a combination of 'dc', 'salt', and 'pwd'; and the exact + # combination is determined by whether 'i' a multiple of 2,3, and/or 7. + # 2. since lcm(2,3,7)==42, the series of combinations will repeat + # every 42 rounds. + # 3. even rounds 0-40 consist of 'hash(dc + round-specific-constant)'; + # while odd rounds 1-41 consist of hash(round-specific-constant + dc) + # + # Using these observations, the following code... + # * calculates the round-specific combination of salt & pwd for each round 0-41 + # * runs through as many 42-round blocks as possible (23) + # * runs through as many pairs of rounds as needed for remaining rounds (17) + # * this results in the required 42*23+2*17=1000 rounds required by md5_crypt. + # + # this cuts out a lot of the control overhead incurred when running the + # original loop 1000 times in python, resulting in ~20% increase in + # speed under CPython (though still 2x slower than glibc crypt) + + # prepare the 6 combinations of pwd & salt which are needed + # (order of 'perms' must match how _c_digest_offsets was generated) + pwd_pwd = pwd+pwd + pwd_salt = pwd+salt + perms = [pwd, pwd_pwd, pwd_salt, pwd_salt+pwd, salt+pwd, salt+pwd_pwd] + + # build up list of even-round & odd-round constants, + # and store in 21-element list as (even,odd) pairs. 
+ data = [ (perms[even], perms[odd]) for even, odd in _c_digest_offsets] + + # perform 23 blocks of 42 rounds each (for a total of 966 rounds) + dc = da + blocks = 23 + while blocks: + for even, odd in data: + dc = md5(odd + md5(dc + even).digest()).digest() + blocks -= 1 + + # perform 17 more pairs of rounds (34 more rounds, for a total of 1000) + for even, odd in data[:17]: + dc = md5(odd + md5(dc + even).digest()).digest() + + #=================================================================== + # encode digest using appropriate transpose map + #=================================================================== + return h64.encode_transposed_bytes(dc, _transpose_map).decode("ascii") + +#============================================================================= +# handler +#============================================================================= +class _MD5_Common(uh.HasSalt, uh.GenericHandler): + """common code for md5_crypt and apr_md5_crypt""" + #=================================================================== + # class attrs + #=================================================================== + # name - set in subclass + setting_kwds = ("salt", "salt_size") + # ident - set in subclass + checksum_size = 22 + checksum_chars = uh.HASH64_CHARS + + max_salt_size = 8 + salt_chars = uh.HASH64_CHARS + + #=================================================================== + # methods + #=================================================================== + + @classmethod + def from_string(cls, hash): + salt, chk = uh.parse_mc2(hash, cls.ident, handler=cls) + return cls(salt=salt, checksum=chk) + + def to_string(self): + return uh.render_mc2(self.ident, self.salt, self.checksum) + + # _calc_checksum() - provided by subclass + + #=================================================================== + # eoc + #=================================================================== + +class md5_crypt(uh.HasManyBackends, _MD5_Common): + """This class implements the MD5-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 0-8 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type salt_size: int + :param salt_size: + Optional number of characters to use when autogenerating new salts. + Defaults to 8, but can be any value between 0 and 8. + (This is mainly needed when generating Cisco-compatible hashes, + which require ``salt_size=4``). + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + name = "md5_crypt" + ident = u("$1$") + + #=================================================================== + # methods + #=================================================================== + # FIXME: can't find definitive policy on how md5-crypt handles non-ascii. 
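For reference, md5_crypt's external format, checked here against the same vector the os_crypt backend probe below uses (the 4-character salt in the second call is only needed for Cisco-style hashes):

from passlib.hash import md5_crypt

# "$1$" + up to 8 salt chars + "$" + 22-char hash64 digest
assert md5_crypt.verify("test", "$1$test$pi/xDtU5WFVRqYS6BMU8X/")
h = md5_crypt.using(salt_size=4).hash("hunter2")
assert md5_crypt.verify("hunter2", h)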
+ # all backends currently coerce -> utf-8 + + backends = ("os_crypt", "builtin") + + #--------------------------------------------------------------- + # os_crypt backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_os_crypt(cls): + if test_crypt("test", '$1$test$pi/xDtU5WFVRqYS6BMU8X/'): + cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) + return True + else: + return False + + def _calc_checksum_os_crypt(self, secret): + config = self.ident + self.salt + hash = safe_crypt(secret, config) + if hash is None: + # py3's crypt.crypt() can't handle non-utf8 bytes. + # fallback to builtin alg, which is always available. + return self._calc_checksum_builtin(secret) + if not hash.startswith(config) or len(hash) != len(config) + 23: + raise uh.exc.CryptBackendError(self, config, hash) + return hash[-22:] + + #--------------------------------------------------------------- + # builtin backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_builtin(cls): + cls._set_calc_checksum_backend(cls._calc_checksum_builtin) + return True + + def _calc_checksum_builtin(self, secret): + return _raw_md5_crypt(secret, self.salt) + + #=================================================================== + # eoc + #=================================================================== + +class apr_md5_crypt(_MD5_Common): + """This class implements the Apr-MD5-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 0-8 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + name = "apr_md5_crypt" + ident = u("$apr1$") + + #=================================================================== + # methods + #=================================================================== + def _calc_checksum(self, secret): + return _raw_md5_crypt(secret, self.salt, use_apr=True) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/misc.py b/ansible/lib/python3.11/site-packages/passlib/handlers/misc.py new file mode 100644 index 000000000..44abc3438 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/misc.py @@ -0,0 +1,269 @@ +"""passlib.handlers.misc - misc generic handlers +""" +#============================================================================= +# imports +#============================================================================= +# core +import sys +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_native_str, str_consteq +from passlib.utils.compat import unicode, u, unicode_or_bytes_types +import passlib.utils.handlers as uh +# local +__all__ = [ + "unix_disabled", + "unix_fallback", + "plaintext", +] + +#============================================================================= +# handler +#============================================================================= +class unix_fallback(uh.ifc.DisabledHash, uh.StaticHandler): + """This class provides the fallback behavior for unix shadow files, and follows the :ref:`password-hash-api`. + + This class does not implement a hash, but instead provides fallback + behavior as found in /etc/shadow on most unix variants. + If used, should be the last scheme in the context. + + * this class will positively identify all hash strings. + * for security, passwords will always hash to ``!``. + * it rejects all passwords if the hash is NOT an empty string (``!`` or ``*`` are frequently used). + * by default it rejects all passwords if the hash is an empty string, + but if ``enable_wildcard=True`` is passed to verify(), + all passwords will be allowed through if the hash is an empty string. + + .. deprecated:: 1.6 + This has been deprecated due to its "wildcard" feature, + and will be removed in Passlib 1.8. Use :class:`unix_disabled` instead. + """ + name = "unix_fallback" + context_kwds = ("enable_wildcard",) + + @classmethod + def identify(cls, hash): + if isinstance(hash, unicode_or_bytes_types): + return True + else: + raise uh.exc.ExpectedStringError(hash, "hash") + + def __init__(self, enable_wildcard=False, **kwds): + warn("'unix_fallback' is deprecated, " + "and will be removed in Passlib 1.8; " + "please use 'unix_disabled' instead.", + DeprecationWarning) + super(unix_fallback, self).__init__(**kwds) + self.enable_wildcard = enable_wildcard + + def _calc_checksum(self, secret): + if self.checksum: + # NOTE: hash will generally be "!", but we want to preserve + # it in case it's something else, like "*". 
+ return self.checksum + else: + return u("!") + + @classmethod + def verify(cls, secret, hash, enable_wildcard=False): + uh.validate_secret(secret) + if not isinstance(hash, unicode_or_bytes_types): + raise uh.exc.ExpectedStringError(hash, "hash") + elif hash: + return False + else: + return enable_wildcard + +_MARKER_CHARS = u("*!") +_MARKER_BYTES = b"*!" + +class unix_disabled(uh.ifc.DisabledHash, uh.MinimalHandler): + """This class provides disabled password behavior for unix shadow files, + and follows the :ref:`password-hash-api`. + + This class does not implement a hash, but instead matches the "disabled account" + strings found in ``/etc/shadow`` on most Unix variants. "encrypting" a password + will simply return the disabled account marker. It will reject all passwords, + no matter the hash string. The :meth:`~passlib.ifc.PasswordHash.hash` + method supports one optional keyword: + + :type marker: str + :param marker: + Optional marker string which overrides the platform default + used to indicate a disabled account. + + If not specified, this will default to ``"*"`` on BSD systems, + and use the Linux default ``"!"`` for all other platforms. + (:attr:`!unix_disabled.default_marker` will contain the default value) + + .. versionadded:: 1.6 + This class was added as a replacement for the now-deprecated + :class:`unix_fallback` class, which had some undesirable features. + """ + name = "unix_disabled" + setting_kwds = ("marker",) + context_kwds = () + + _disable_prefixes = tuple(str(_MARKER_CHARS)) + + # TODO: rename attr to 'marker'... + if 'bsd' in sys.platform: # pragma: no cover -- runtime detection + default_marker = u("*") + else: + # use the linux default for other systems + # (glibc also supports adding old hash after the marker + # so it can be restored later). + default_marker = u("!") + + @classmethod + def using(cls, marker=None, **kwds): + subcls = super(unix_disabled, cls).using(**kwds) + if marker is not None: + if not cls.identify(marker): + raise ValueError("invalid marker: %r" % marker) + subcls.default_marker = marker + return subcls + + @classmethod + def identify(cls, hash): + # NOTE: technically, anything in the /etc/shadow password field + # which isn't valid crypt() output counts as "disabled". + # but that's rather ambiguous, and it's hard to predict what + # valid output is for unknown crypt() implementations. + # so to be on the safe side, we only match things *known* + # to be disabled field indicators, and will add others + # as they are found. things beginning w/ "$" should *never* match. + # + # things currently matched: + # * linux uses "!" + # * bsd uses "*" + # * linux may use "!" + hash to disable but preserve original hash + # * linux counts empty string as "any password"; + # this code recognizes it, but treats it the same as "!" 
+ if isinstance(hash, unicode): + start = _MARKER_CHARS + elif isinstance(hash, bytes): + start = _MARKER_BYTES + else: + raise uh.exc.ExpectedStringError(hash, "hash") + return not hash or hash[0] in start + + @classmethod + def verify(cls, secret, hash): + uh.validate_secret(secret) + if not cls.identify(hash): # handles typecheck + raise uh.exc.InvalidHashError(cls) + return False + + @classmethod + def hash(cls, secret, **kwds): + if kwds: + uh.warn_hash_settings_deprecation(cls, kwds) + return cls.using(**kwds).hash(secret) + uh.validate_secret(secret) + marker = cls.default_marker + assert marker and cls.identify(marker) + return to_native_str(marker, param="marker") + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genhash(cls, secret, config, marker=None): + if not cls.identify(config): + raise uh.exc.InvalidHashError(cls) + elif config: + # preserve the existing str,since it might contain a disabled password hash ("!" + hash) + uh.validate_secret(secret) + return to_native_str(config, param="config") + else: + if marker is not None: + cls = cls.using(marker=marker) + return cls.hash(secret) + + @classmethod + def disable(cls, hash=None): + out = cls.hash("") + if hash is not None: + hash = to_native_str(hash, param="hash") + if cls.identify(hash): + # extract original hash, so that we normalize marker + hash = cls.enable(hash) + if hash: + out += hash + return out + + @classmethod + def enable(cls, hash): + hash = to_native_str(hash, param="hash") + for prefix in cls._disable_prefixes: + if hash.startswith(prefix): + orig = hash[len(prefix):] + if orig: + return orig + else: + raise ValueError("cannot restore original hash") + raise uh.exc.InvalidHashError(cls) + +class plaintext(uh.MinimalHandler): + """This class stores passwords in plaintext, and follows the :ref:`password-hash-api`. + + The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods all require the + following additional contextual keyword: + + :type encoding: str + :param encoding: + This controls the character encoding to use (defaults to ``utf-8``). + + This encoding will be used to encode :class:`!unicode` passwords + under Python 2, and decode :class:`!bytes` hashes under Python 3. + + .. versionchanged:: 1.6 + The ``encoding`` keyword was added. 
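A short sketch of the unix_disabled disable()/enable() round trip described above (the stored sha512-crypt string is a made-up placeholder):

from passlib.hash import unix_disabled

stored = "$6$examplesalt$examplechecksum"        # hypothetical existing hash
disabled = unix_disabled.disable(stored)
assert disabled.startswith(("!", "*"))           # platform-dependent marker, original hash kept after it
assert unix_disabled.enable(disabled) == stored
assert not unix_disabled.verify("hunter2", disabled)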
+ """ + # NOTE: this is subclassed by ldap_plaintext + + name = "plaintext" + setting_kwds = () + context_kwds = ("encoding",) + default_encoding = "utf-8" + + @classmethod + def identify(cls, hash): + if isinstance(hash, unicode_or_bytes_types): + return True + else: + raise uh.exc.ExpectedStringError(hash, "hash") + + @classmethod + def hash(cls, secret, encoding=None): + uh.validate_secret(secret) + if not encoding: + encoding = cls.default_encoding + return to_native_str(secret, encoding, "secret") + + @classmethod + def verify(cls, secret, hash, encoding=None): + if not encoding: + encoding = cls.default_encoding + hash = to_native_str(hash, encoding, "hash") + if not cls.identify(hash): + raise uh.exc.InvalidHashError(cls) + return str_consteq(cls.hash(secret, encoding), hash) + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genconfig(cls): + return cls.hash("") + + @uh.deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genhash(cls, secret, config, encoding=None): + # NOTE: 'config' is ignored, as this hash has no salting / etc + if not cls.identify(config): + raise uh.exc.InvalidHashError(cls) + return cls.hash(secret, encoding=encoding) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/mssql.py b/ansible/lib/python3.11/site-packages/passlib/handlers/mssql.py new file mode 100644 index 000000000..b060b365c --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/mssql.py @@ -0,0 +1,244 @@ +"""passlib.handlers.mssql - MS-SQL Password Hash + +Notes +===== +MS-SQL has used a number of hash algs over the years, +most of which were exposed through the undocumented +'pwdencrypt' and 'pwdcompare' sql functions. + +Known formats +------------- +6.5 + snefru hash, ascii encoded password + no examples found + +7.0 + snefru hash, unicode (what encoding?) + saw ref that these blobs were 16 bytes in size + no examples found + +2000 + byte string using displayed as 0x hex, using 0x0100 prefix. + contains hashes of password and upper-case password. + +2007 + same as 2000, but without the upper-case hash. 
+ +refs +---------- +https://blogs.msdn.com/b/lcris/archive/2007/04/30/sql-server-2005-about-login-password-hashes.aspx?Redirected=true +http://us.generation-nt.com/securing-passwords-hash-help-35429432.html +http://forum.md5decrypter.co.uk/topic230-mysql-and-mssql-get-password-hashes.aspx +http://www.theregister.co.uk/2002/07/08/cracking_ms_sql_server_passwords/ +""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify, unhexlify +from hashlib import sha1 +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import consteq +from passlib.utils.compat import bascii_to_str, unicode, u +import passlib.utils.handlers as uh +# local +__all__ = [ + "mssql2000", + "mssql2005", +] + +#============================================================================= +# mssql 2000 +#============================================================================= +def _raw_mssql(secret, salt): + assert isinstance(secret, unicode) + assert isinstance(salt, bytes) + return sha1(secret.encode("utf-16-le") + salt).digest() + +BIDENT = b"0x0100" +##BIDENT2 = b("\x01\x00") +UIDENT = u("0x0100") + +def _ident_mssql(hash, csize, bsize): + """common identify for mssql 2000/2005""" + if isinstance(hash, unicode): + if len(hash) == csize and hash.startswith(UIDENT): + return True + elif isinstance(hash, bytes): + if len(hash) == csize and hash.startswith(BIDENT): + return True + ##elif len(hash) == bsize and hash.startswith(BIDENT2): # raw bytes + ## return True + else: + raise uh.exc.ExpectedStringError(hash, "hash") + return False + +def _parse_mssql(hash, csize, bsize, handler): + """common parser for mssql 2000/2005; returns 4 byte salt + checksum""" + if isinstance(hash, unicode): + if len(hash) == csize and hash.startswith(UIDENT): + try: + return unhexlify(hash[6:].encode("utf-8")) + except TypeError: # throw when bad char found + pass + elif isinstance(hash, bytes): + # assumes ascii-compat encoding + assert isinstance(hash, bytes) + if len(hash) == csize and hash.startswith(BIDENT): + try: + return unhexlify(hash[6:]) + except TypeError: # throw when bad char found + pass + ##elif len(hash) == bsize and hash.startswith(BIDENT2): # raw bytes + ## return hash[2:] + else: + raise uh.exc.ExpectedStringError(hash, "hash") + raise uh.exc.InvalidHashError(handler) + +class mssql2000(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements the password hash used by MS-SQL 2000, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 4 bytes in length. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. 
+ """ + #=================================================================== + # algorithm information + #=================================================================== + name = "mssql2000" + setting_kwds = ("salt",) + checksum_size = 40 + min_salt_size = max_salt_size = 4 + + #=================================================================== + # formatting + #=================================================================== + + # 0100 - 2 byte identifier + # 4 byte salt + # 20 byte checksum + # 20 byte checksum + # = 46 bytes + # encoded '0x' + 92 chars = 94 + + @classmethod + def identify(cls, hash): + return _ident_mssql(hash, 94, 46) + + @classmethod + def from_string(cls, hash): + data = _parse_mssql(hash, 94, 46, cls) + return cls(salt=data[:4], checksum=data[4:]) + + def to_string(self): + raw = self.salt + self.checksum + # raw bytes format - BIDENT2 + raw + return "0x0100" + bascii_to_str(hexlify(raw).upper()) + + def _calc_checksum(self, secret): + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + salt = self.salt + return _raw_mssql(secret, salt) + _raw_mssql(secret.upper(), salt) + + @classmethod + def verify(cls, secret, hash): + # NOTE: we only compare against the upper-case hash + # XXX: add 'full' just to verify both checksums? + uh.validate_secret(secret) + self = cls.from_string(hash) + chk = self.checksum + if chk is None: + raise uh.exc.MissingDigestError(cls) + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + result = _raw_mssql(secret.upper(), self.salt) + return consteq(result, chk[20:]) + +#============================================================================= +# handler +#============================================================================= +class mssql2005(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements the password hash used by MS-SQL 2005, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 4 bytes in length. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. 
+ """ + #=================================================================== + # algorithm information + #=================================================================== + name = "mssql2005" + setting_kwds = ("salt",) + + checksum_size = 20 + min_salt_size = max_salt_size = 4 + + #=================================================================== + # formatting + #=================================================================== + + # 0x0100 - 2 byte identifier + # 4 byte salt + # 20 byte checksum + # = 26 bytes + # encoded '0x' + 52 chars = 54 + + @classmethod + def identify(cls, hash): + return _ident_mssql(hash, 54, 26) + + @classmethod + def from_string(cls, hash): + data = _parse_mssql(hash, 54, 26, cls) + return cls(salt=data[:4], checksum=data[4:]) + + def to_string(self): + raw = self.salt + self.checksum + # raw bytes format - BIDENT2 + raw + return "0x0100" + bascii_to_str(hexlify(raw)).upper() + + def _calc_checksum(self, secret): + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + return _raw_mssql(secret, self.salt) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/mysql.py b/ansible/lib/python3.11/site-packages/passlib/handlers/mysql.py new file mode 100644 index 000000000..4a7125350 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/mysql.py @@ -0,0 +1,128 @@ +"""passlib.handlers.mysql + +MySQL 3.2.3 / OLD_PASSWORD() + + This implements Mysql's OLD_PASSWORD algorithm, introduced in version 3.2.3, deprecated in version 4.1. + + See :mod:`passlib.handlers.mysql_41` for the new algorithm was put in place in version 4.1 + + This algorithm is known to be very insecure, and should only be used to verify existing password hashes. + + http://djangosnippets.org/snippets/1508/ + +MySQL 4.1.1 / NEW PASSWORD + This implements Mysql new PASSWORD algorithm, introduced in version 4.1. + + This function is unsalted, and therefore not very secure against rainbow attacks. + It should only be used when dealing with mysql passwords, + for all other purposes, you should use a salted hash function. + + Description taken from http://dev.mysql.com/doc/refman/6.0/en/password-hashing.html +""" +#============================================================================= +# imports +#============================================================================= +# core +from hashlib import sha1 +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_native_str +from passlib.utils.compat import bascii_to_str, unicode, u, \ + byte_elem_value, str_to_uascii +import passlib.utils.handlers as uh +# local +__all__ = [ + 'mysql323', + 'mysq41', +] + +#============================================================================= +# backend +#============================================================================= +class mysql323(uh.StaticHandler): + """This class implements the MySQL 3.2.3 password hash, and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. 
+ """ + #=================================================================== + # class attrs + #=================================================================== + name = "mysql323" + checksum_size = 16 + checksum_chars = uh.HEX_CHARS + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + # FIXME: no idea if mysql has a policy about handling unicode passwords + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + + MASK_32 = 0xffffffff + MASK_31 = 0x7fffffff + WHITE = b' \t' + + nr1 = 0x50305735 + nr2 = 0x12345671 + add = 7 + for c in secret: + if c in WHITE: + continue + tmp = byte_elem_value(c) + nr1 ^= ((((nr1 & 63)+add)*tmp) + (nr1 << 8)) & MASK_32 + nr2 = (nr2+((nr2 << 8) ^ nr1)) & MASK_32 + add = (add+tmp) & MASK_32 + return u("%08x%08x") % (nr1 & MASK_31, nr2 & MASK_31) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# handler +#============================================================================= +class mysql41(uh.StaticHandler): + """This class implements the MySQL 4.1 password hash, and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. + """ + #=================================================================== + # class attrs + #=================================================================== + name = "mysql41" + _hash_prefix = u("*") + checksum_chars = uh.HEX_CHARS + checksum_size = 40 + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.upper() + + def _calc_checksum(self, secret): + # FIXME: no idea if mysql has a policy about handling unicode passwords + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(sha1(sha1(secret).digest()).hexdigest()).upper() + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/oracle.py b/ansible/lib/python3.11/site-packages/passlib/handlers/oracle.py new file mode 100644 index 000000000..a094f3721 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/oracle.py @@ -0,0 +1,172 @@ +"""passlib.handlers.oracle - Oracle DB Password Hashes""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify, unhexlify +from hashlib import sha1 +import re +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils import to_unicode, xor_bytes +from passlib.utils.compat import irange, u, \ + uascii_to_str, unicode, str_to_uascii +from passlib.crypto.des import 
des_encrypt_block +import passlib.utils.handlers as uh +# local +__all__ = [ + "oracle10g", + "oracle11g" +] + +#============================================================================= +# oracle10 +#============================================================================= +def des_cbc_encrypt(key, value, iv=b'\x00' * 8, pad=b'\x00'): + """performs des-cbc encryption, returns only last block. + + this performs a specific DES-CBC encryption implementation + as needed by the Oracle10 hash. it probably won't be useful for + other purposes as-is. + + input value is null-padded to multiple of 8 bytes. + + :arg key: des key as bytes + :arg value: value to encrypt, as bytes. + :param iv: optional IV + :param pad: optional pad byte + + :returns: last block of DES-CBC encryption of all ``value``'s byte blocks. + """ + value += pad * (-len(value) % 8) # null pad to multiple of 8 + hash = iv # start things off + for offset in irange(0,len(value),8): + chunk = xor_bytes(hash, value[offset:offset+8]) + hash = des_encrypt_block(key, chunk) + return hash + +# magic string used as initial des key by oracle10 +ORACLE10_MAGIC = b"\x01\x23\x45\x67\x89\xAB\xCD\xEF" + +class oracle10(uh.HasUserContext, uh.StaticHandler): + """This class implements the password hash used by Oracle up to version 10g, and follows the :ref:`password-hash-api`. + + It does a single round of hashing, and relies on the username as the salt. + + The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods all require the + following additional contextual keywords: + + :type user: str + :param user: name of oracle user account this password is associated with. + """ + #=================================================================== + # algorithm information + #=================================================================== + name = "oracle10" + checksum_chars = uh.HEX_CHARS + checksum_size = 16 + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.upper() + + def _calc_checksum(self, secret): + # FIXME: not sure how oracle handles unicode. + # online docs about 10g hash indicate it puts ascii chars + # in a 2-byte encoding w/ the high byte set to null. + # they don't say how it handles other chars, or what encoding. + # + # so for now, encoding secret & user to utf-16-be, + # since that fits, and if secret/user is bytes, + # we assume utf-8, and decode first. + # + # this whole mess really needs someone w/ an oracle system, + # and some answers :) + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + user = to_unicode(self.user, "utf-8", param="user") + input = (user+secret).upper().encode("utf-16-be") + hash = des_cbc_encrypt(ORACLE10_MAGIC, input) + hash = des_cbc_encrypt(hash, input) + return hexlify(hash).decode("ascii").upper() + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# oracle11 +#============================================================================= +class oracle11(uh.HasSalt, uh.GenericHandler): + """This class implements the Oracle11g password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. 
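A minimal usage sketch of the two Oracle handlers defined in this file, assuming passlib is importable; the account name "scott" and the password are placeholders.

from passlib.hash import oracle10, oracle11

# oracle10 keys the hash on the username (it acts as the salt), so the
# same password yields different digests for different accounts.
legacy = oracle10.hash("example-password", user="scott")
assert oracle10.verify("example-password", legacy, user="scott")
assert not oracle10.verify("example-password", legacy, user="someone_else")

# oracle11 uses a random 20-hex-char salt and the "S:<40 hex chk><20 hex salt>" layout.
modern = oracle11.hash("example-password")
assert modern.startswith("S:") and len(modern) == 62
assert oracle11.verify("example-password", modern)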
+ + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 20 hexadecimal characters. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "oracle11" + setting_kwds = ("salt",) + checksum_size = 40 + checksum_chars = uh.UPPER_HEX_CHARS + + #--HasSalt-- + min_salt_size = max_salt_size = 20 + salt_chars = uh.UPPER_HEX_CHARS + + + #=================================================================== + # methods + #=================================================================== + _hash_regex = re.compile(u("^S:(?P<chk>[0-9a-f]{40})(?P<salt>[0-9a-f]{20})$"), re.I) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + salt, chk = m.group("salt", "chk") + return cls(salt=salt, checksum=chk.upper()) + + def to_string(self): + chk = self.checksum + hash = u("S:%s%s") % (chk.upper(), self.salt.upper()) + return uascii_to_str(hash) + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + chk = sha1(secret + unhexlify(self.salt.encode("ascii"))).hexdigest() + return str_to_uascii(chk).upper() + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/pbkdf2.py b/ansible/lib/python3.11/site-packages/passlib/handlers/pbkdf2.py new file mode 100644 index 000000000..274278d86 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/pbkdf2.py @@ -0,0 +1,475 @@ +"""passlib.handlers.pbkdf - PBKDF2 based hashes""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify, unhexlify +from base64 import b64encode, b64decode +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils import to_unicode +from passlib.utils.binary import ab64_decode, ab64_encode +from passlib.utils.compat import str_to_bascii, u, uascii_to_str, unicode +from passlib.crypto.digest import pbkdf2_hmac +import passlib.utils.handlers as uh +# local +__all__ = [ + "pbkdf2_sha1", + "pbkdf2_sha256", + "pbkdf2_sha512", + "cta_pbkdf2_sha1", + "dlitz_pbkdf2_sha1", + "grub_pbkdf2_sha512", +] + +#============================================================================= +# +#============================================================================= +class Pbkdf2DigestHandler(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """base class for various
pbkdf2_{digest} algorithms""" + #=================================================================== + # class attrs + #=================================================================== + + #--GenericHandler-- + setting_kwds = ("salt", "salt_size", "rounds") + checksum_chars = uh.HASH64_CHARS + + #--HasSalt-- + default_salt_size = 16 + max_salt_size = 1024 + + #--HasRounds-- + default_rounds = None # set by subclass + min_rounds = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + rounds_cost = "linear" + + #--this class-- + _digest = None # name of subclass-specified hash + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide sanity check. + # the underlying pbkdf2 specifies no bounds for either. + + # NOTE: defaults chosen to be at least as large as pbkdf2 rfc recommends... + # >8 bytes of entropy in salt, >1000 rounds + # increased due to time since rfc established + + #=================================================================== + # methods + #=================================================================== + + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls) + salt = ab64_decode(salt.encode("ascii")) + if chk: + chk = ab64_decode(chk.encode("ascii")) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self): + salt = ab64_encode(self.salt).decode("ascii") + chk = ab64_encode(self.checksum).decode("ascii") + return uh.render_mc3(self.ident, self.rounds, salt, chk) + + def _calc_checksum(self, secret): + # NOTE: pbkdf2_hmac() will encode secret & salt using UTF8 + return pbkdf2_hmac(self._digest, secret, self.salt, self.rounds, self.checksum_size) + +def create_pbkdf2_hash(hash_name, digest_size, rounds=12000, ident=None, module=__name__): + """create new Pbkdf2DigestHandler subclass for a specific hash""" + name = 'pbkdf2_' + hash_name + if ident is None: + ident = u("$pbkdf2-%s$") % (hash_name,) + base = Pbkdf2DigestHandler + return type(name, (base,), dict( + __module__=module, # so ABCMeta won't clobber it. + name=name, + ident=ident, + _digest = hash_name, + default_rounds=rounds, + checksum_size=digest_size, + encoded_checksum_size=(digest_size*4+2)//3, + __doc__="""This class implements a generic ``PBKDF2-HMAC-%(digest)s``-based password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt bytes. + If specified, the length must be between 0-1024 bytes. + If not specified, a %(dsc)d byte salt will be autogenerated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to %(dsc)d bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to %(dr)d, but must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ % dict(digest=hash_name.upper(), dsc=base.default_salt_size, dr=rounds) + )) + +#------------------------------------------------------------------------ +# derived handlers +#------------------------------------------------------------------------ +pbkdf2_sha1 = create_pbkdf2_hash("sha1", 20, 131000, ident=u("$pbkdf2$")) +pbkdf2_sha256 = create_pbkdf2_hash("sha256", 32, 29000) +pbkdf2_sha512 = create_pbkdf2_hash("sha512", 64, 25000) + +ldap_pbkdf2_sha1 = uh.PrefixWrapper("ldap_pbkdf2_sha1", pbkdf2_sha1, "{PBKDF2}", "$pbkdf2$", ident=True) +ldap_pbkdf2_sha256 = uh.PrefixWrapper("ldap_pbkdf2_sha256", pbkdf2_sha256, "{PBKDF2-SHA256}", "$pbkdf2-sha256$", ident=True) +ldap_pbkdf2_sha512 = uh.PrefixWrapper("ldap_pbkdf2_sha512", pbkdf2_sha512, "{PBKDF2-SHA512}", "$pbkdf2-sha512$", ident=True) + +#============================================================================= +# cryptacular's pbkdf2 hash +#============================================================================= + +# bytes used by cta hash for base64 values 63 & 64 +CTA_ALTCHARS = b"-_" + +class cta_pbkdf2_sha1(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements Cryptacular's PBKDF2-based crypt algorithm, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt bytes. + If specified, it may be any length. + If not specified, a one will be autogenerated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 16 bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 60000, must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "cta_pbkdf2_sha1" + setting_kwds = ("salt", "salt_size", "rounds") + ident = u("$p5k2$") + checksum_size = 20 + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a + # sanity check. underlying algorithm (and reference implementation) + # allows effectively unbounded values for both of these parameters. 
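The derived handlers created above can be exercised as in the sketch below (passlib import assumed; the rounds value and password are illustrative only, not recommendations).

from passlib.hash import pbkdf2_sha256, ldap_pbkdf2_sha256

# derived handlers share the "$pbkdf2-<digest>$<rounds>$<salt>$<checksum>" layout,
# with salt and checksum encoded in passlib's adapted base64 (ab64) alphabet.
h = pbkdf2_sha256.hash("example-password")
assert h.startswith("$pbkdf2-sha256$")
assert pbkdf2_sha256.verify("example-password", h)

# the default rounds can be raised per deployment via using()
stronger = pbkdf2_sha256.using(rounds=100000).hash("example-password")
assert stronger.startswith("$pbkdf2-sha256$100000$")

# the ldap_* wrappers emit the same hash behind an LDAP-style prefix
assert ldap_pbkdf2_sha256.hash("example-password").startswith("{PBKDF2-SHA256}")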
+ + #--HasSalt-- + default_salt_size = 16 + max_salt_size = 1024 + + #--HasRounds-- + default_rounds = pbkdf2_sha1.default_rounds + min_rounds = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + rounds_cost = "linear" + + #=================================================================== + # formatting + #=================================================================== + + # hash $p5k2$1000$ZxK4ZBJCfQg=$jJZVscWtO--p1-xIZl6jhO2LKR0= + # ident $p5k2$ + # rounds 1000 + # salt ZxK4ZBJCfQg= + # chk jJZVscWtO--p1-xIZl6jhO2LKR0= + # NOTE: rounds in hex + + @classmethod + def from_string(cls, hash): + # NOTE: passlib deviation - forbidding zero-padded rounds + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, rounds_base=16, handler=cls) + salt = b64decode(salt.encode("ascii"), CTA_ALTCHARS) + if chk: + chk = b64decode(chk.encode("ascii"), CTA_ALTCHARS) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self): + salt = b64encode(self.salt, CTA_ALTCHARS).decode("ascii") + chk = b64encode(self.checksum, CTA_ALTCHARS).decode("ascii") + return uh.render_mc3(self.ident, self.rounds, salt, chk, rounds_base=16) + + #=================================================================== + # backend + #=================================================================== + def _calc_checksum(self, secret): + # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8 + return pbkdf2_hmac("sha1", secret, self.salt, self.rounds, 20) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# dlitz's pbkdf2 hash +#============================================================================= +class dlitz_pbkdf2_sha1(uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements Dwayne Litzenberger's PBKDF2-based crypt algorithm, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If specified, it may be any length, but must use the characters in the regexp range ``[./0-9A-Za-z]``. + If not specified, a 16 character salt will be autogenerated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 16 bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 60000, must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "dlitz_pbkdf2_sha1" + setting_kwds = ("salt", "salt_size", "rounds") + ident = u("$p5k2$") + _stub_checksum = u("0" * 48 + "=") + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a + # sanity check. underlying algorithm (and reference implementation) + # allows effectively unbounded values for both of these parameters. + + #--HasSalt-- + default_salt_size = 16 + max_salt_size = 1024 + salt_chars = uh.HASH64_CHARS + + #--HasRounds-- + # NOTE: for security, the default here is set to match pbkdf2_sha1, + # even though this hash's extra block makes it twice as slow. + default_rounds = pbkdf2_sha1.default_rounds + min_rounds = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + rounds_cost = "linear" + + #=================================================================== + # formatting + #=================================================================== + + # hash $p5k2$c$u9HvcT4d$Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g + # ident $p5k2$ + # rounds c + # salt u9HvcT4d + # chk Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g + # rounds in lowercase hex, no zero padding + + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, rounds_base=16, + default_rounds=400, handler=cls) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self): + rounds = self.rounds + if rounds == 400: + rounds = None # omit rounds measurement if == 400 + return uh.render_mc3(self.ident, rounds, self.salt, self.checksum, rounds_base=16) + + def _get_config(self): + rounds = self.rounds + if rounds == 400: + rounds = None # omit rounds measurement if == 400 + return uh.render_mc3(self.ident, rounds, self.salt, None, rounds_base=16) + + #=================================================================== + # backend + #=================================================================== + def _calc_checksum(self, secret): + # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8 + salt = self._get_config() + result = pbkdf2_hmac("sha1", secret, salt, self.rounds, 24) + return ab64_encode(result).decode("ascii") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# crowd +#============================================================================= +class atlassian_pbkdf2_sha1(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements the PBKDF2 hash used by Atlassian. + + It supports a fixed-length salt, and a fixed number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt bytes. + If specified, the length must be exactly 16 bytes. + If not specified, a salt will be autogenerated (this is recommended). + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + #--GenericHandler-- + name = "atlassian_pbkdf2_sha1" + setting_kwds =("salt",) + ident = u("{PKCS5S2}") + checksum_size = 32 + + #--HasRawSalt-- + min_salt_size = max_salt_size = 16 + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + ident = cls.ident + if not hash.startswith(ident): + raise uh.exc.InvalidHashError(cls) + data = b64decode(hash[len(ident):].encode("ascii")) + salt, chk = data[:16], data[16:] + return cls(salt=salt, checksum=chk) + + def to_string(self): + data = self.salt + self.checksum + hash = self.ident + b64encode(data).decode("ascii") + return uascii_to_str(hash) + + def _calc_checksum(self, secret): + # TODO: find out what crowd's policy is re: unicode + # crowd seems to use a fixed number of rounds. + # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8 + return pbkdf2_hmac("sha1", secret, self.salt, 10000, 32) + +#============================================================================= +# grub +#============================================================================= +class grub_pbkdf2_sha512(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements Grub's pbkdf2-hmac-sha512 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt bytes. + If specified, the length must be between 0-1024 bytes. + If not specified, a 64 byte salt will be autogenerated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 64 bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 19000, but must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + name = "grub_pbkdf2_sha512" + setting_kwds = ("salt", "salt_size", "rounds") + + ident = u("grub.pbkdf2.sha512.") + checksum_size = 64 + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a + # sanity check. the underlying pbkdf2 specifies no bounds for either, + # and it's not clear what grub specifies. 
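A short sketch of the two fixed-format wrappers in this part of the file (passlib import assumed; passwords are placeholders).

from passlib.hash import atlassian_pbkdf2_sha1, grub_pbkdf2_sha512

# Atlassian/Crowd: fixed 16-byte salt, 10000 rounds; everything after the
# "{PKCS5S2}" prefix is base64(salt + 32-byte derived key).
crowd = atlassian_pbkdf2_sha1.hash("example-password")
assert crowd.startswith("{PKCS5S2}")
assert atlassian_pbkdf2_sha1.verify("example-password", crowd)

# GRUB: dot-separated rounds/salt/checksum with hex-encoded salt and digest,
# matching the output of the grub-mkpasswd-pbkdf2 tool.
boot = grub_pbkdf2_sha512.hash("example-password")
assert boot.startswith("grub.pbkdf2.sha512.")
assert grub_pbkdf2_sha512.verify("example-password", boot)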
+ + default_salt_size = 64 + max_salt_size = 1024 + + default_rounds = pbkdf2_sha512.default_rounds + min_rounds = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + rounds_cost = "linear" + + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, sep=u("."), + handler=cls) + salt = unhexlify(salt.encode("ascii")) + if chk: + chk = unhexlify(chk.encode("ascii")) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self): + salt = hexlify(self.salt).decode("ascii").upper() + chk = hexlify(self.checksum).decode("ascii").upper() + return uh.render_mc3(self.ident, self.rounds, salt, chk, sep=u(".")) + + def _calc_checksum(self, secret): + # TODO: find out what grub's policy is re: unicode + # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8 + return pbkdf2_hmac("sha512", secret, self.salt, self.rounds, 64) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/phpass.py b/ansible/lib/python3.11/site-packages/passlib/handlers/phpass.py new file mode 100644 index 000000000..6736f0f2d --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/phpass.py @@ -0,0 +1,135 @@ +"""passlib.handlers.phpass - PHPass Portable Crypt + +phppass located - http://www.openwall.com/phpass/ +algorithm described - http://www.openwall.com/articles/PHP-Users-Passwords + +phpass context - blowfish, bsdi_crypt, phpass +""" +#============================================================================= +# imports +#============================================================================= +# core +from hashlib import md5 +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils.binary import h64 +from passlib.utils.compat import u, uascii_to_str, unicode +import passlib.utils.handlers as uh +# local +__all__ = [ + "phpass", +] + +#============================================================================= +# phpass +#============================================================================= +class phpass(uh.HasManyIdents, uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements the PHPass Portable Hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 8 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 19, must be between 7 and 30, inclusive. + This value is logarithmic, the actual number of iterations used will be :samp:`2**{rounds}`. + + :type ident: str + :param ident: + phpBB3 uses ``H`` instead of ``P`` for its identifier, + this may be set to ``H`` in order to generate phpBB3 compatible hashes. + it defaults to ``P``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. 
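A minimal sketch of how the phpass handler described above is typically used, assuming passlib is importable; the password is a placeholder.

from passlib.hash import phpass

h = phpass.hash("example-password")
# "$P$" + 1 rounds char + 8-char salt + 22-char checksum = 34 chars total
assert h.startswith("$P$") and len(h) == 34
assert phpass.verify("example-password", h)

# phpBB3 stores the same construction under the "$H$" identifier
h_bb3 = phpass.using(ident="H").hash("example-password")
assert h_bb3.startswith("$H$")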
Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "phpass" + setting_kwds = ("salt", "rounds", "ident") + checksum_chars = uh.HASH64_CHARS + + #--HasSalt-- + min_salt_size = max_salt_size = 8 + salt_chars = uh.HASH64_CHARS + + #--HasRounds-- + default_rounds = 19 + min_rounds = 7 + max_rounds = 30 + rounds_cost = "log2" + + #--HasManyIdents-- + default_ident = u("$P$") + ident_values = (u("$P$"), u("$H$")) + ident_aliases = {u("P"):u("$P$"), u("H"):u("$H$")} + + #=================================================================== + # formatting + #=================================================================== + + #$P$9IQRaTwmfeRo7ud9Fh4E2PdI0S3r.L0 + # $P$ + # 9 + # IQRaTwmf + # eRo7ud9Fh4E2PdI0S3r.L0 + + @classmethod + def from_string(cls, hash): + ident, data = cls._parse_ident(hash) + rounds, salt, chk = data[0], data[1:9], data[9:] + return cls( + ident=ident, + rounds=h64.decode_int6(rounds.encode("ascii")), + salt=salt, + checksum=chk or None, + ) + + def to_string(self): + hash = u("%s%s%s%s") % (self.ident, + h64.encode_int6(self.rounds).decode("ascii"), + self.salt, + self.checksum or u('')) + return uascii_to_str(hash) + + #=================================================================== + # backend + #=================================================================== + def _calc_checksum(self, secret): + # FIXME: can't find definitive policy on how phpass handles non-ascii. + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + real_rounds = 1<`_ + hash names. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + In addition to the standard :ref:`password-hash-api` methods, + this class also provides the following methods for manipulating Passlib + scram hashes in ways useful for pluging into a SCRAM protocol stack: + + .. automethod:: extract_digest_info + .. automethod:: extract_digest_algs + .. automethod:: derive_digest + """ + #=================================================================== + # class attrs + #=================================================================== + + # NOTE: unlike most GenericHandler classes, the 'checksum' attr of + # ScramHandler is actually a map from digest_name -> digest, so + # many of the standard methods have been overridden. + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide + # a sanity check; the underlying pbkdf2 specifies no bounds for either. + + #--GenericHandler-- + name = "scram" + setting_kwds = ("salt", "salt_size", "rounds", "algs") + ident = u("$scram$") + + #--HasSalt-- + default_salt_size = 12 + max_salt_size = 1024 + + #--HasRounds-- + default_rounds = 100000 + min_rounds = 1 + max_rounds = 2**32-1 + rounds_cost = "linear" + + #--custom-- + + # default algorithms when creating new hashes. + default_algs = ["sha-1", "sha-256", "sha-512"] + + # list of algs verify prefers to use, in order. 
+ _verify_algs = ["sha-256", "sha-512", "sha-224", "sha-384", "sha-1"] + + #=================================================================== + # instance attrs + #=================================================================== + + # 'checksum' is different from most GenericHandler subclasses, + # in that it contains a dict mapping from alg -> digest, + # or None if no checksum present. + + # list of algorithms to create/compare digests for. + algs = None + + #=================================================================== + # scram frontend helpers + #=================================================================== + @classmethod + def extract_digest_info(cls, hash, alg): + """return (salt, rounds, digest) for specific hash algorithm. + + :type hash: str + :arg hash: + :class:`!scram` hash stored for desired user + + :type alg: str + :arg alg: + Name of digest algorithm (e.g. ``"sha-1"``) requested by client. + + This value is run through :func:`~passlib.crypto.digest.norm_hash_name`, + so it is case-insensitive, and can be the raw SCRAM + mechanism name (e.g. ``"SCRAM-SHA-1"``), the IANA name, + or the hashlib name. + + :raises KeyError: + If the hash does not contain an entry for the requested digest + algorithm. + + :returns: + A tuple containing ``(salt, rounds, digest)``, + where *digest* matches the raw bytes returned by + SCRAM's :func:`Hi` function for the stored password, + the provided *salt*, and the iteration count (*rounds*). + *salt* and *digest* are both raw (unencoded) bytes. + """ + # XXX: this could be sped up by writing custom parsing routine + # that just picks out relevant digest, and doesn't bother + # with full structure validation each time it's called. + alg = norm_hash_name(alg, 'iana') + self = cls.from_string(hash) + chkmap = self.checksum + if not chkmap: + raise ValueError("scram hash contains no digests") + return self.salt, self.rounds, chkmap[alg] + + @classmethod + def extract_digest_algs(cls, hash, format="iana"): + """Return names of all algorithms stored in a given hash. + + :type hash: str + :arg hash: + The :class:`!scram` hash to parse + + :type format: str + :param format: + This changes the naming convention used by the + returned algorithm names. By default the names + are IANA-compatible; possible values are ``"iana"`` or ``"hashlib"``. + + :returns: + Returns a list of digest algorithms; e.g. ``["sha-1"]`` + """ + # XXX: this could be sped up by writing custom parsing routine + # that just picks out relevant names, and doesn't bother + # with full structure validation each time it's called. + algs = cls.from_string(hash).algs + if format == "iana": + return algs + else: + return [norm_hash_name(alg, format) for alg in algs] + + @classmethod + def derive_digest(cls, password, salt, rounds, alg): + """helper to create SaltedPassword digest for SCRAM. + + This performs the step in the SCRAM protocol described as:: + + SaltedPassword := Hi(Normalize(password), salt, i) + + :type password: unicode or utf-8 bytes + :arg password: password to run through digest + + :type salt: bytes + :arg salt: raw salt data + + :type rounds: int + :arg rounds: number of iterations. + + :type alg: str + :arg alg: name of digest to use (e.g. ``"sha-1"``). + + :returns: + raw bytes of ``SaltedPassword`` + """ + if isinstance(password, bytes): + password = password.decode("utf-8") + # NOTE: pbkdf2_hmac() will encode secret & salt using utf-8, + # and handle normalizing alg name. 
+ return pbkdf2_hmac(alg, saslprep(password), salt, rounds) + + #=================================================================== + # serialization + #=================================================================== + + @classmethod + def from_string(cls, hash): + hash = to_native_str(hash, "ascii", "hash") + if not hash.startswith("$scram$"): + raise uh.exc.InvalidHashError(cls) + parts = hash[7:].split("$") + if len(parts) != 3: + raise uh.exc.MalformedHashError(cls) + rounds_str, salt_str, chk_str = parts + + # decode rounds + rounds = int(rounds_str) + if rounds_str != str(rounds): # forbid zero padding, etc. + raise uh.exc.MalformedHashError(cls) + + # decode salt + try: + salt = ab64_decode(salt_str.encode("ascii")) + except TypeError: + raise uh.exc.MalformedHashError(cls) + + # decode algs/digest list + if not chk_str: + # scram hashes MUST have something here. + raise uh.exc.MalformedHashError(cls) + elif "=" in chk_str: + # comma-separated list of 'alg=digest' pairs + algs = None + chkmap = {} + for pair in chk_str.split(","): + alg, digest = pair.split("=") + try: + chkmap[alg] = ab64_decode(digest.encode("ascii")) + except TypeError: + raise uh.exc.MalformedHashError(cls) + else: + # comma-separated list of alg names, no digests + algs = chk_str + chkmap = None + + # return new object + return cls( + rounds=rounds, + salt=salt, + checksum=chkmap, + algs=algs, + ) + + def to_string(self): + salt = bascii_to_str(ab64_encode(self.salt)) + chkmap = self.checksum + chk_str = ",".join( + "%s=%s" % (alg, bascii_to_str(ab64_encode(chkmap[alg]))) + for alg in self.algs + ) + return '$scram$%d$%s$%s' % (self.rounds, salt, chk_str) + + #=================================================================== + # variant constructor + #=================================================================== + @classmethod + def using(cls, default_algs=None, algs=None, **kwds): + # parse aliases + if algs is not None: + assert default_algs is None + default_algs = algs + + # create subclass + subcls = super(scram, cls).using(**kwds) + + # fill in algs + if default_algs is not None: + subcls.default_algs = cls._norm_algs(default_algs) + return subcls + + #=================================================================== + # init + #=================================================================== + def __init__(self, algs=None, **kwds): + super(scram, self).__init__(**kwds) + + # init algs + digest_map = self.checksum + if algs is not None: + if digest_map is not None: + raise RuntimeError("checksum & algs kwds are mutually exclusive") + algs = self._norm_algs(algs) + elif digest_map is not None: + # derive algs list from digest map (if present). 
+ algs = self._norm_algs(digest_map.keys()) + elif self.use_defaults: + algs = list(self.default_algs) + assert self._norm_algs(algs) == algs, "invalid default algs: %r" % (algs,) + else: + raise TypeError("no algs list specified") + self.algs = algs + + def _norm_checksum(self, checksum, relaxed=False): + if not isinstance(checksum, dict): + raise uh.exc.ExpectedTypeError(checksum, "dict", "checksum") + for alg, digest in iteritems(checksum): + if alg != norm_hash_name(alg, 'iana'): + raise ValueError("malformed algorithm name in scram hash: %r" % + (alg,)) + if len(alg) > 9: + raise ValueError("SCRAM limits algorithm names to " + "9 characters: %r" % (alg,)) + if not isinstance(digest, bytes): + raise uh.exc.ExpectedTypeError(digest, "raw bytes", "digests") + # TODO: verify digest size (if digest is known) + if 'sha-1' not in checksum: + # NOTE: required because of SCRAM spec. + raise ValueError("sha-1 must be in algorithm list of scram hash") + return checksum + + @classmethod + def _norm_algs(cls, algs): + """normalize algs parameter""" + if isinstance(algs, native_string_types): + algs = splitcomma(algs) + algs = sorted(norm_hash_name(alg, 'iana') for alg in algs) + if any(len(alg)>9 for alg in algs): + raise ValueError("SCRAM limits alg names to max of 9 characters") + if 'sha-1' not in algs: + # NOTE: required because of SCRAM spec (rfc 5802) + raise ValueError("sha-1 must be in algorithm list of scram hash") + return algs + + #=================================================================== + # migration + #=================================================================== + def _calc_needs_update(self, **kwds): + # marks hashes as deprecated if they don't include at least all default_algs. + # XXX: should we deprecate if they aren't exactly the same, + # to permit removing legacy hashes? + if not set(self.algs).issuperset(self.default_algs): + return True + + # hand off to base implementation + return super(scram, self)._calc_needs_update(**kwds) + + #=================================================================== + # digest methods + #=================================================================== + def _calc_checksum(self, secret, alg=None): + rounds = self.rounds + salt = self.salt + hash = self.derive_digest + if alg: + # if requested, generate digest for specific alg + return hash(secret, salt, rounds, alg) + else: + # by default, return dict containing digests for all algs + return dict( + (alg, hash(secret, salt, rounds, alg)) + for alg in self.algs + ) + + @classmethod + def verify(cls, secret, hash, full=False): + uh.validate_secret(secret) + self = cls.from_string(hash) + chkmap = self.checksum + if not chkmap: + raise ValueError("expected %s hash, got %s config string instead" % + (cls.name, cls.name)) + + # NOTE: to make the verify method efficient, we just calculate hash + # of shortest digest by default. apps can pass in "full=True" to + # check entire hash for consistency. + if full: + correct = failed = False + for alg, digest in iteritems(chkmap): + other = self._calc_checksum(secret, alg) + # NOTE: could do this length check in norm_algs(), + # but don't need to be that strict, and want to be able + # to parse hashes containing algs not supported by platform. + # it's fine if we fail here though. 
+ if len(digest) != len(other): + raise ValueError("mis-sized %s digest in scram hash: %r != %r" + % (alg, len(digest), len(other))) + if consteq(other, digest): + correct = True + else: + failed = True + if correct and failed: + raise ValueError("scram hash verified inconsistently, " + "may be corrupted") + else: + return correct + else: + # XXX: should this just always use sha1 hash? would be faster. + # otherwise only verify against one hash, pick one w/ best security. + for alg in self._verify_algs: + if alg in chkmap: + other = self._calc_checksum(secret, alg) + return consteq(other, chkmap[alg]) + # there should always be sha-1 at the very least, + # or something went wrong inside _norm_algs() + raise AssertionError("sha-1 digest not found!") + + #=================================================================== + # + #=================================================================== + +#============================================================================= +# code used for testing scram against protocol examples during development. +#============================================================================= +##def _test_reference_scram(): +## "quick hack testing scram reference vectors" +## # NOTE: "n,," is GS2 header - see https://tools.ietf.org/html/rfc5801 +## from passlib.utils.compat import print_ +## +## engine = _scram_engine( +## alg="sha-1", +## salt='QSXCR+Q6sek8bf92'.decode("base64"), +## rounds=4096, +## password=u("pencil"), +## ) +## print_(engine.digest.encode("base64").rstrip()) +## +## msg = engine.format_auth_msg( +## username="user", +## client_nonce = "fyko+d2lbbFgONRv9qkxdawL", +## server_nonce = "3rfcNHYJY1ZVvWVs7j", +## header='c=biws', +## ) +## +## cp = engine.get_encoded_client_proof(msg) +## assert cp == "v0X8v3Bz2T0CJGbJQyF0X+HI4Ts=", cp +## +## ss = engine.get_encoded_server_sig(msg) +## assert ss == "rmF9pqV8S7suAoZWja4dJRkFsKQ=", ss +## +##class _scram_engine(object): +## """helper class for verifying scram hash behavior +## against SCRAM protocol examples. not officially part of Passlib. +## +## takes in alg, salt, rounds, and a digest or password. +## +## can calculate the various keys & messages of the scram protocol. 
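For reference, a usage sketch of the public scram helpers defined earlier in this file (passlib import assumed; the password and rounds are placeholders, and the default algorithm list of sha-1/sha-256/sha-512 is assumed).

from passlib.hash import scram

# create a scram hash carrying one digest per configured algorithm
h = scram.using(rounds=100000).hash("example-password")
assert scram.verify("example-password", h)
assert scram.extract_digest_algs(h) == ["sha-1", "sha-256", "sha-512"]

# a SCRAM server pulls out the per-mechanism material it needs...
salt, rounds, digest = scram.extract_digest_info(h, "SCRAM-SHA-256")

# ...and the same SaltedPassword can be recomputed from the cleartext
assert scram.derive_digest("example-password", salt, rounds, "sha-256") == digest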
+## +## """ +## #========================================================= +## # init +## #========================================================= +## +## @classmethod +## def from_string(cls, hash, alg): +## "create record from scram hash, for given alg" +## return cls(alg, *scram.extract_digest_info(hash, alg)) +## +## def __init__(self, alg, salt, rounds, digest=None, password=None): +## self.alg = norm_hash_name(alg) +## self.salt = salt +## self.rounds = rounds +## self.password = password +## if password: +## data = scram.derive_digest(password, salt, rounds, alg) +## if digest and data != digest: +## raise ValueError("password doesn't match digest") +## else: +## digest = data +## elif not digest: +## raise TypeError("must provide password or digest") +## self.digest = digest +## +## #========================================================= +## # frontend methods +## #========================================================= +## def get_hash(self, data): +## "return hash of raw data" +## return hashlib.new(iana_to_hashlib(self.alg), data).digest() +## +## def get_client_proof(self, msg): +## "return client proof of specified auth msg text" +## return xor_bytes(self.client_key, self.get_client_sig(msg)) +## +## def get_encoded_client_proof(self, msg): +## return self.get_client_proof(msg).encode("base64").rstrip() +## +## def get_client_sig(self, msg): +## "return client signature of specified auth msg text" +## return self.get_hmac(self.stored_key, msg) +## +## def get_server_sig(self, msg): +## "return server signature of specified auth msg text" +## return self.get_hmac(self.server_key, msg) +## +## def get_encoded_server_sig(self, msg): +## return self.get_server_sig(msg).encode("base64").rstrip() +## +## def format_server_response(self, client_nonce, server_nonce): +## return 'r={client_nonce}{server_nonce},s={salt},i={rounds}'.format( +## client_nonce=client_nonce, +## server_nonce=server_nonce, +## rounds=self.rounds, +## salt=self.encoded_salt, +## ) +## +## def format_auth_msg(self, username, client_nonce, server_nonce, +## header='c=biws'): +## return ( +## 'n={username},r={client_nonce}' +## ',' +## 'r={client_nonce}{server_nonce},s={salt},i={rounds}' +## ',' +## '{header},r={client_nonce}{server_nonce}' +## ).format( +## username=username, +## client_nonce=client_nonce, +## server_nonce=server_nonce, +## salt=self.encoded_salt, +## rounds=self.rounds, +## header=header, +## ) +## +## #========================================================= +## # helpers to calculate & cache constant data +## #========================================================= +## def _calc_get_hmac(self): +## return get_prf("hmac-" + iana_to_hashlib(self.alg))[0] +## +## def _calc_client_key(self): +## return self.get_hmac(self.digest, b("Client Key")) +## +## def _calc_stored_key(self): +## return self.get_hash(self.client_key) +## +## def _calc_server_key(self): +## return self.get_hmac(self.digest, b("Server Key")) +## +## def _calc_encoded_salt(self): +## return self.salt.encode("base64").rstrip() +## +## #========================================================= +## # hacks for calculated attributes +## #========================================================= +## +## def __getattr__(self, attr): +## if not attr.startswith("_"): +## f = getattr(self, "_calc_" + attr, None) +## if f: +## value = f() +## setattr(self, attr, value) +## return value +## raise AttributeError("attribute not found") +## +## def __dir__(self): +## cdir = dir(self.__class__) +## attrs = set(cdir) +## 
attrs.update(self.__dict__) +## attrs.update(attr[6:] for attr in cdir +## if attr.startswith("_calc_")) +## return sorted(attrs) +## #========================================================= +## # eoc +## #========================================================= + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/scrypt.py b/ansible/lib/python3.11/site-packages/passlib/handlers/scrypt.py new file mode 100644 index 000000000..1686fda50 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/scrypt.py @@ -0,0 +1,383 @@ +"""passlib.handlers.scrypt -- scrypt password hash""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement, absolute_import +# core +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.crypto import scrypt as _scrypt +from passlib.utils import h64, to_bytes +from passlib.utils.binary import h64, b64s_decode, b64s_encode +from passlib.utils.compat import u, bascii_to_str, suppress_cause +from passlib.utils.decor import classproperty +import passlib.utils.handlers as uh +# local +__all__ = [ + "scrypt", +] + +#============================================================================= +# scrypt format identifiers +#============================================================================= + +IDENT_SCRYPT = u("$scrypt$") # identifier used by passlib +IDENT_7 = u("$7$") # used by official scrypt spec + +_UDOLLAR = u("$") + +#============================================================================= +# handler +#============================================================================= +class scrypt(uh.ParallelismMixin, uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.HasManyIdents, + uh.GenericHandler): + """This class implements an SCrypt-based password [#scrypt-home]_ hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, a variable number of rounds, + as well as some custom tuning parameters unique to scrypt (see below). + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If specified, the length must be between 0-1024 bytes. + If not specified, one will be auto-generated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 16 bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 16, but must be within ``range(1,32)``. + + .. warning:: + + Unlike many hash algorithms, increasing the rounds value + will increase both the time *and memory* required to hash a password. + + :type block_size: int + :param block_size: + Optional block size to pass to scrypt hash function (the ``r`` parameter). + Useful for tuning scrypt to optimal performance for your CPU architecture. + Defaults to 8. + + :type parallelism: int + :param parallelism: + Optional parallelism to pass to scrypt hash function (the ``p`` parameter). + Defaults to 1. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. 
If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. note:: + + The underlying scrypt hash function has a number of limitations + on it's parameter values, which forbids certain combinations of settings. + The requirements are: + + * ``linear_rounds = 2**`` + * ``linear_rounds < 2**(16 * block_size)`` + * ``block_size * parallelism <= 2**30-1`` + + .. todo:: + + This class currently does not support configuring default values + for ``block_size`` or ``parallelism`` via a :class:`~passlib.context.CryptContext` + configuration. + """ + + #=================================================================== + # class attrs + #=================================================================== + + #------------------------ + # PasswordHash + #------------------------ + name = "scrypt" + setting_kwds = ("ident", "salt", "salt_size", "rounds", "block_size", "parallelism") + + #------------------------ + # GenericHandler + #------------------------ + # NOTE: scrypt supports arbitrary output sizes. since it's output runs through + # pbkdf2-hmac-sha256 before returning, and this could be raised eventually... + # but a 256-bit digest is more than sufficient for password hashing. + # XXX: make checksum size configurable? could merge w/ argon2 code that does this. + checksum_size = 32 + + #------------------------ + # HasManyIdents + #------------------------ + default_ident = IDENT_SCRYPT + ident_values = (IDENT_SCRYPT, IDENT_7) + + #------------------------ + # HasRawSalt + #------------------------ + default_salt_size = 16 + max_salt_size = 1024 + + #------------------------ + # HasRounds + #------------------------ + # TODO: would like to dynamically pick this based on system + default_rounds = 16 + min_rounds = 1 + max_rounds = 31 # limited by scrypt alg + rounds_cost = "log2" + + # TODO: make default block size configurable via using(), and deprecatable via .needs_update() + + #=================================================================== + # instance attrs + #=================================================================== + + #: default parallelism setting (min=1 currently hardcoded in mixin) + parallelism = 1 + + #: default block size setting + block_size = 8 + + #=================================================================== + # variant constructor + #=================================================================== + + @classmethod + def using(cls, block_size=None, **kwds): + subcls = super(scrypt, cls).using(**kwds) + if block_size is not None: + if isinstance(block_size, uh.native_string_types): + block_size = int(block_size) + subcls.block_size = subcls._norm_block_size(block_size, relaxed=kwds.get("relaxed")) + + # make sure param combination is valid for scrypt() + try: + _scrypt.validate(1 << cls.default_rounds, cls.block_size, cls.parallelism) + except ValueError as err: + raise suppress_cause(ValueError("scrypt: invalid settings combination: " + str(err))) + + return subcls + + #=================================================================== + # parsing + #=================================================================== + + @classmethod + def from_string(cls, hash): + return cls(**cls.parse(hash)) + + @classmethod + def parse(cls, hash): + ident, suffix = cls._parse_ident(hash) + func = getattr(cls, "_parse_%s_string" % ident.strip(_UDOLLAR), None) + if func: + return 
func(suffix) + else: + raise uh.exc.InvalidHashError(cls) + + # + # passlib's format: + # $scrypt$ln=<logN>,r=<r>,p=<p>

$[$] + # where: + # logN, r, p -- decimal-encoded positive integer, no zero-padding + # logN -- log cost setting + # r -- block size setting (usually 8) + # p -- parallelism setting (usually 1) + # salt, digest -- b64-nopad encoded bytes + # + + @classmethod + def _parse_scrypt_string(cls, suffix): + # break params, salt, and digest sections + parts = suffix.split("$") + if len(parts) == 3: + params, salt, digest = parts + elif len(parts) == 2: + params, salt = parts + digest = None + else: + raise uh.exc.MalformedHashError(cls, "malformed hash") + + # break params apart + parts = params.split(",") + if len(parts) == 3: + nstr, bstr, pstr = parts + assert nstr.startswith("ln=") + assert bstr.startswith("r=") + assert pstr.startswith("p=") + else: + raise uh.exc.MalformedHashError(cls, "malformed settings field") + + return dict( + ident=IDENT_SCRYPT, + rounds=int(nstr[3:]), + block_size=int(bstr[2:]), + parallelism=int(pstr[2:]), + salt=b64s_decode(salt.encode("ascii")), + checksum=b64s_decode(digest.encode("ascii")) if digest else None, + ) + + # + # official format specification defined at + # https://gitlab.com/jas/scrypt-unix-crypt/blob/master/unix-scrypt.txt + # format: + # $7$[$] + # 0 12345 67890 1 + # where: + # All bytes use h64-little-endian encoding + # N: 6-bit log cost setting + # r: 30-bit block size setting + # p: 30-bit parallelism setting + # salt: variable length salt bytes + # digest: fixed 32-byte digest + # + + @classmethod + def _parse_7_string(cls, suffix): + # XXX: annoyingly, official spec embeds salt *raw*, yet doesn't specify a hash encoding. + # so assuming only h64 chars are valid for salt, and are ASCII encoded. + + # split into params & digest + parts = suffix.encode("ascii").split(b"$") + if len(parts) == 2: + params, digest = parts + elif len(parts) == 1: + params, = parts + digest = None + else: + raise uh.exc.MalformedHashError() + + # parse params & return + if len(params) < 11: + raise uh.exc.MalformedHashError(cls, "params field too short") + return dict( + ident=IDENT_7, + rounds=h64.decode_int6(params[:1]), + block_size=h64.decode_int30(params[1:6]), + parallelism=h64.decode_int30(params[6:11]), + salt=params[11:], + checksum=h64.decode_bytes(digest) if digest else None, + ) + + #=================================================================== + # formatting + #=================================================================== + def to_string(self): + ident = self.ident + if ident == IDENT_SCRYPT: + return "$scrypt$ln=%d,r=%d,p=%d$%s$%s" % ( + self.rounds, + self.block_size, + self.parallelism, + bascii_to_str(b64s_encode(self.salt)), + bascii_to_str(b64s_encode(self.checksum)), + ) + else: + assert ident == IDENT_7 + salt = self.salt + try: + salt.decode("ascii") + except UnicodeDecodeError: + raise suppress_cause(NotImplementedError("scrypt $7$ hashes dont support non-ascii salts")) + return bascii_to_str(b"".join([ + b"$7$", + h64.encode_int6(self.rounds), + h64.encode_int30(self.block_size), + h64.encode_int30(self.parallelism), + self.salt, + b"$", + h64.encode_bytes(self.checksum) + ])) + + #=================================================================== + # init + #=================================================================== + def __init__(self, block_size=None, **kwds): + super(scrypt, self).__init__(**kwds) + + # init block size + if block_size is None: + assert uh.validate_default_value(self, self.block_size, self._norm_block_size, + param="block_size") + else: + self.block_size = self._norm_block_size(block_size) + + # NOTE: 
if hash contains invalid complex constraint, relying on error + # being raised by scrypt call in _calc_checksum() + + @classmethod + def _norm_block_size(cls, block_size, relaxed=False): + return uh.norm_integer(cls, block_size, min=1, param="block_size", relaxed=relaxed) + + def _generate_salt(self): + salt = super(scrypt, self)._generate_salt() + if self.ident == IDENT_7: + # this format doesn't support non-ascii salts. + # as workaround, we take raw bytes, encoded to base64 + salt = b64s_encode(salt) + return salt + + #=================================================================== + # backend configuration + # NOTE: this following HasManyBackends' API, but provides it's own implementation, + # which actually switches the backend that 'passlib.crypto.scrypt.scrypt()' uses. + #=================================================================== + + @classproperty + def backends(cls): + return _scrypt.backend_values + + @classmethod + def get_backend(cls): + return _scrypt.backend + + @classmethod + def has_backend(cls, name="any"): + try: + cls.set_backend(name, dryrun=True) + return True + except uh.exc.MissingBackendError: + return False + + @classmethod + def set_backend(cls, name="any", dryrun=False): + _scrypt._set_backend(name, dryrun=dryrun) + + #=================================================================== + # digest calculation + #=================================================================== + def _calc_checksum(self, secret): + secret = to_bytes(secret, param="secret") + return _scrypt.scrypt(secret, self.salt, n=(1 << self.rounds), r=self.block_size, + p=self.parallelism, keylen=self.checksum_size) + + #=================================================================== + # hash migration + #=================================================================== + + def _calc_needs_update(self, **kwds): + """ + mark hash as needing update if rounds is outside desired bounds. 
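To tie the pieces above together, here is a small usage sketch of this handler (passlib import assumed; the cost settings are illustrative only). Note that ``rounds`` is logarithmic: ``n = 2**rounds`` is what actually gets passed to the scrypt KDF.

from passlib.hash import scrypt

custom = scrypt.using(rounds=15, block_size=8, parallelism=1)
h = custom.hash("example-password")
assert h.startswith("$scrypt$ln=15,r=8,p=1$")
assert scrypt.verify("example-password", h)

# per the migration hook above, hashes made with a different block_size
# are reported as needing an update
assert scrypt.using(block_size=16).needs_update(h)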
+ """ + # XXX: for now, marking all hashes which don't have matching block_size setting + if self.block_size != type(self).block_size: + return True + return super(scrypt, self)._calc_needs_update(**kwds) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/sha1_crypt.py b/ansible/lib/python3.11/site-packages/passlib/handlers/sha1_crypt.py new file mode 100644 index 000000000..8f9aa7173 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/sha1_crypt.py @@ -0,0 +1,158 @@ +"""passlib.handlers.sha1_crypt +""" + +#============================================================================= +# imports +#============================================================================= + +# core +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils import safe_crypt, test_crypt +from passlib.utils.binary import h64 +from passlib.utils.compat import u, unicode, irange +from passlib.crypto.digest import compile_hmac +import passlib.utils.handlers as uh +# local +__all__ = [ +] +#============================================================================= +# sha1-crypt +#============================================================================= +_BNULL = b'\x00' + +class sha1_crypt(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements the SHA1-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, an 8 character one will be autogenerated (this is recommended). + If specified, it must be 0-64 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 8 bytes, but can be any value between 0 and 64. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 480000, must be between 1 and 4294967295, inclusive. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "sha1_crypt" + setting_kwds = ("salt", "salt_size", "rounds") + ident = u("$sha1$") + checksum_size = 28 + checksum_chars = uh.HASH64_CHARS + + #--HasSalt-- + default_salt_size = 8 + max_salt_size = 64 + salt_chars = uh.HASH64_CHARS + + #--HasRounds-- + default_rounds = 480000 # current passlib default + min_rounds = 1 # really, this should be higher. 
+ max_rounds = 4294967295 # 32-bit integer limit + rounds_cost = "linear" + + #=================================================================== + # formatting + #=================================================================== + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self, config=False): + chk = None if config else self.checksum + return uh.render_mc3(self.ident, self.rounds, self.salt, chk) + + #=================================================================== + # backend + #=================================================================== + backends = ("os_crypt", "builtin") + + #--------------------------------------------------------------- + # os_crypt backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_os_crypt(cls): + if test_crypt("test", '$sha1$1$Wq3GL2Vp$C8U25GvfHS8qGHim' + 'ExLaiSFlGkAe'): + cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) + return True + else: + return False + + def _calc_checksum_os_crypt(self, secret): + config = self.to_string(config=True) + hash = safe_crypt(secret, config) + if hash is None: + # py3's crypt.crypt() can't handle non-utf8 bytes. + # fallback to builtin alg, which is always available. + return self._calc_checksum_builtin(secret) + if not hash.startswith(config) or len(hash) != len(config) + 29: + raise uh.exc.CryptBackendError(self, config, hash) + return hash[-28:] + + #--------------------------------------------------------------- + # builtin backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_builtin(cls): + cls._set_calc_checksum_backend(cls._calc_checksum_builtin) + return True + + def _calc_checksum_builtin(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + if _BNULL in secret: + raise uh.exc.NullPasswordError(self) + rounds = self.rounds + # NOTE: this seed value is NOT the same as the config string + result = (u("%s$sha1$%s") % (self.salt, rounds)).encode("ascii") + # NOTE: this algorithm is essentially PBKDF1, modified to use HMAC. 
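# ---- editor's aside (illustrative sketch, not part of the patched file) ----
# A stdlib-only restatement of the loop that follows, assuming that
# compile_hmac("sha1", secret) returns a callable equivalent to
# hmac.new(secret, msg, "sha1").digest(); the handler then transposes and
# hash64-encodes the resulting 20-byte digest.
import hmac

def _sha1_crypt_core_sketch(secret: bytes, salt: str, rounds: int) -> bytes:
    # seed matches the 'result' string built above: "<salt>$sha1$<rounds>"
    result = ("%s$sha1$%d" % (salt, rounds)).encode("ascii")
    for _ in range(rounds):
        # the password stays the HMAC key; each digest becomes the next message
        result = hmac.new(secret, result, "sha1").digest()
    return result
# -----------------------------------------------------------------------------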
+ keyed_hmac = compile_hmac("sha1", secret) + for _ in irange(rounds): + result = keyed_hmac(result) + return h64.encode_transposed_bytes(result, self._chk_offsets).decode("ascii") + + _chk_offsets = [ + 2,1,0, + 5,4,3, + 8,7,6, + 11,10,9, + 14,13,12, + 17,16,15, + 0,19,18, + ] + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/sha2_crypt.py b/ansible/lib/python3.11/site-packages/passlib/handlers/sha2_crypt.py new file mode 100644 index 000000000..e6060c5e9 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/sha2_crypt.py @@ -0,0 +1,534 @@ +"""passlib.handlers.sha2_crypt - SHA256-Crypt / SHA512-Crypt""" +#============================================================================= +# imports +#============================================================================= +# core +import hashlib +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils import safe_crypt, test_crypt, \ + repeat_string, to_unicode +from passlib.utils.binary import h64 +from passlib.utils.compat import byte_elem_value, u, \ + uascii_to_str, unicode +import passlib.utils.handlers as uh +# local +__all__ = [ + "sha512_crypt", + "sha256_crypt", +] + +#============================================================================= +# pure-python backend, used by both sha256_crypt & sha512_crypt +# when crypt.crypt() backend is not available. +#============================================================================= +_BNULL = b'\x00' + +# pre-calculated offsets used to speed up C digest stage (see notes below). +# sequence generated using the following: + ##perms_order = "p,pp,ps,psp,sp,spp".split(",") + ##def offset(i): + ## key = (("p" if i % 2 else "") + ("s" if i % 3 else "") + + ## ("p" if i % 7 else "") + ("" if i % 2 else "p")) + ## return perms_order.index(key) + ##_c_digest_offsets = [(offset(i), offset(i+1)) for i in range(0,42,2)] +_c_digest_offsets = ( + (0, 3), (5, 1), (5, 3), (1, 2), (5, 1), (5, 3), (1, 3), + (4, 1), (5, 3), (1, 3), (5, 0), (5, 3), (1, 3), (5, 1), + (4, 3), (1, 3), (5, 1), (5, 2), (1, 3), (5, 1), (5, 3), + ) + +# map used to transpose bytes when encoding final sha256_crypt digest +_256_transpose_map = ( + 20, 10, 0, 11, 1, 21, 2, 22, 12, 23, 13, 3, 14, 4, 24, 5, + 25, 15, 26, 16, 6, 17, 7, 27, 8, 28, 18, 29, 19, 9, 30, 31, +) + +# map used to transpose bytes when encoding final sha512_crypt digest +_512_transpose_map = ( + 42, 21, 0, 1, 43, 22, 23, 2, 44, 45, 24, 3, 4, 46, 25, 26, + 5, 47, 48, 27, 6, 7, 49, 28, 29, 8, 50, 51, 30, 9, 10, 52, + 31, 32, 11, 53, 54, 33, 12, 13, 55, 34, 35, 14, 56, 57, 36, 15, + 16, 58, 37, 38, 17, 59, 60, 39, 18, 19, 61, 40, 41, 20, 62, 63, +) + +def _raw_sha2_crypt(pwd, salt, rounds, use_512=False): + """perform raw sha256-crypt / sha512-crypt + + this function provides a pure-python implementation of the internals + for the SHA256-Crypt and SHA512-Crypt algorithms; it doesn't + handle any of the parsing/validation of the hash strings themselves. 
+ + :arg pwd: password chars/bytes to hash + :arg salt: salt chars to use + :arg rounds: linear rounds cost + :arg use_512: use sha512-crypt instead of sha256-crypt mode + + :returns: + encoded checksum chars + """ + #=================================================================== + # init & validate inputs + #=================================================================== + + # NOTE: the setup portion of this algorithm scales ~linearly in time + # with the size of the password, making it vulnerable to a DOS from + # unreasonably large inputs. the following code has some optimizations + # which would make things even worse, using O(pwd_len**2) memory + # when calculating digest P. + # + # to mitigate these two issues: 1) this code switches to a + # O(pwd_len)-memory algorithm for passwords that are much larger + # than average, and 2) Passlib enforces a library-wide max limit on + # the size of passwords it will allow, to prevent this algorithm and + # others from being DOSed in this way (see passlib.exc.PasswordSizeError + # for details). + + # validate secret + if isinstance(pwd, unicode): + # XXX: not sure what official unicode policy is, using this as default + pwd = pwd.encode("utf-8") + assert isinstance(pwd, bytes) + if _BNULL in pwd: + raise uh.exc.NullPasswordError(sha512_crypt if use_512 else sha256_crypt) + pwd_len = len(pwd) + + # validate rounds + assert 1000 <= rounds <= 999999999, "invalid rounds" + # NOTE: spec says out-of-range rounds should be clipped, instead of + # causing an error. this function assumes that's been taken care of + # by the handler class. + + # validate salt + assert isinstance(salt, unicode), "salt not unicode" + salt = salt.encode("ascii") + salt_len = len(salt) + assert salt_len < 17, "salt too large" + # NOTE: spec says salts larger than 16 bytes should be truncated, + # instead of causing an error. this function assumes that's been + # taken care of by the handler class. + + # load sha256/512 specific constants + if use_512: + hash_const = hashlib.sha512 + transpose_map = _512_transpose_map + else: + hash_const = hashlib.sha256 + transpose_map = _256_transpose_map + + #=================================================================== + # digest B - used as subinput to digest A + #=================================================================== + db = hash_const(pwd + salt + pwd).digest() + + #=================================================================== + # digest A - used to initialize first round of digest C + #=================================================================== + # start out with pwd + salt + a_ctx = hash_const(pwd + salt) + a_ctx_update = a_ctx.update + + # add pwd_len bytes of b, repeating b as many times as needed. + a_ctx_update(repeat_string(db, pwd_len)) + + # for each bit in pwd_len: add b if it's 1, or pwd if it's 0 + i = pwd_len + while i: + a_ctx_update(db if i & 1 else pwd) + i >>= 1 + + # finish A + da = a_ctx.digest() + + #=================================================================== + # digest P from password - used instead of password itself + # when calculating digest C. + #=================================================================== + if pwd_len < 96: + # this method is faster under python, but uses O(pwd_len**2) memory; + # so we don't use it for larger passwords to avoid a potential DOS. + dp = repeat_string(hash_const(pwd * pwd_len).digest(), pwd_len) + else: + # this method is slower under python, but uses a fixed amount of memory. 
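# ---- editor's aside (illustrative sketch, not part of the patched file) ----
# Both branches of this "digest P" step compute the same value: the digest of
# the password repeated pwd_len times, cycled/truncated to exactly pwd_len
# bytes.  A compact stdlib restatement for the sha256 case (sha512 is
# analogous); the helper name is hypothetical.
import hashlib

def _digest_p_sketch(pwd: bytes) -> bytes:
    n = len(pwd)
    full = hashlib.sha256(pwd * n).digest()
    # cycle the 32-byte digest out to n bytes, as repeat_string() does above
    return (full * (n // len(full) + 1))[:n]
# -----------------------------------------------------------------------------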
+ tmp_ctx = hash_const(pwd) + tmp_ctx_update = tmp_ctx.update + i = pwd_len-1 + while i: + tmp_ctx_update(pwd) + i -= 1 + dp = repeat_string(tmp_ctx.digest(), pwd_len) + assert len(dp) == pwd_len + + #=================================================================== + # digest S - used instead of salt itself when calculating digest C + #=================================================================== + ds = hash_const(salt * (16 + byte_elem_value(da[0]))).digest()[:salt_len] + assert len(ds) == salt_len, "salt_len somehow > hash_len!" + + #=================================================================== + # digest C - for a variable number of rounds, combine A, S, and P + # digests in various ways; in order to burn CPU time. + #=================================================================== + + # NOTE: the original SHA256/512-Crypt specification performs the C digest + # calculation using the following loop: + # + ##dc = da + ##i = 0 + ##while i < rounds: + ## tmp_ctx = hash_const(dp if i & 1 else dc) + ## if i % 3: + ## tmp_ctx.update(ds) + ## if i % 7: + ## tmp_ctx.update(dp) + ## tmp_ctx.update(dc if i & 1 else dp) + ## dc = tmp_ctx.digest() + ## i += 1 + # + # The code Passlib uses (below) implements an equivalent algorithm, + # it's just been heavily optimized to pre-calculate a large number + # of things beforehand. It works off of a couple of observations + # about the original algorithm: + # + # 1. each round is a combination of 'dc', 'ds', and 'dp'; determined + # by the whether 'i' a multiple of 2,3, and/or 7. + # 2. since lcm(2,3,7)==42, the series of combinations will repeat + # every 42 rounds. + # 3. even rounds 0-40 consist of 'hash(dc + round-specific-constant)'; + # while odd rounds 1-41 consist of hash(round-specific-constant + dc) + # + # Using these observations, the following code... + # * calculates the round-specific combination of ds & dp for each round 0-41 + # * runs through as many 42-round blocks as possible + # * runs through as many pairs of rounds as possible for remaining rounds + # * performs once last round if the total rounds should be odd. + # + # this cuts out a lot of the control overhead incurred when running the + # original loop 40,000+ times in python, resulting in ~20% increase in + # speed under CPython (though still 2x slower than glibc crypt) + + # prepare the 6 combinations of ds & dp which are needed + # (order of 'perms' must match how _c_digest_offsets was generated) + dp_dp = dp+dp + dp_ds = dp+ds + perms = [dp, dp_dp, dp_ds, dp_ds+dp, ds+dp, ds+dp_dp] + + # build up list of even-round & odd-round constants, + # and store in 21-element list as (even,odd) pairs. 
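# ---- editor's aside (illustrative sketch, not part of the patched file) ----
# A stand-alone check of observation (2) in the notes above: which of dc/ds/dp
# get mixed into a round depends only on (i % 2, i % 3, i % 7), so the pattern
# of combinations repeats every lcm(2, 3, 7) == 42 rounds -- which is what lets
# the loop below be unrolled into 42-round blocks.
def _round_shape_sketch(i):
    return (i % 2, bool(i % 3), bool(i % 7))

_pattern_sketch = [_round_shape_sketch(i) for i in range(42)]
assert all(_round_shape_sketch(i) == _pattern_sketch[i % 42] for i in range(4200))
# -----------------------------------------------------------------------------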
+ data = [ (perms[even], perms[odd]) for even, odd in _c_digest_offsets] + + # perform as many full 42-round blocks as possible + dc = da + blocks, tail = divmod(rounds, 42) + while blocks: + for even, odd in data: + dc = hash_const(odd + hash_const(dc + even).digest()).digest() + blocks -= 1 + + # perform any leftover rounds + if tail: + # perform any pairs of rounds + pairs = tail>>1 + for even, odd in data[:pairs]: + dc = hash_const(odd + hash_const(dc + even).digest()).digest() + + # if rounds was odd, do one last round (since we started at 0, + # last round will be an even-numbered round) + if tail & 1: + dc = hash_const(dc + data[pairs][0]).digest() + + #=================================================================== + # encode digest using appropriate transpose map + #=================================================================== + return h64.encode_transposed_bytes(dc, transpose_map).decode("ascii") + +#============================================================================= +# handlers +#============================================================================= +_UROUNDS = u("rounds=") +_UDOLLAR = u("$") +_UZERO = u("0") + +class _SHA2_Common(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, + uh.GenericHandler): + """class containing common code shared by sha256_crypt & sha512_crypt""" + #=================================================================== + # class attrs + #=================================================================== + # name - set by subclass + setting_kwds = ("salt", "rounds", "implicit_rounds", "salt_size") + # ident - set by subclass + checksum_chars = uh.HASH64_CHARS + # checksum_size - set by subclass + + max_salt_size = 16 + salt_chars = uh.HASH64_CHARS + + min_rounds = 1000 # bounds set by spec + max_rounds = 999999999 # bounds set by spec + rounds_cost = "linear" + + _cdb_use_512 = False # flag for _calc_digest_builtin() + _rounds_prefix = None # ident + _UROUNDS + + #=================================================================== + # methods + #=================================================================== + implicit_rounds = False + + def __init__(self, implicit_rounds=None, **kwds): + super(_SHA2_Common, self).__init__(**kwds) + # if user calls hash() w/ 5000 rounds, default to compact form. + if implicit_rounds is None: + implicit_rounds = (self.use_defaults and self.rounds == 5000) + self.implicit_rounds = implicit_rounds + + def _parse_salt(self, salt): + # required per SHA2-crypt spec -- truncate config salts rather than throwing error + return self._norm_salt(salt, relaxed=self.checksum is None) + + def _parse_rounds(self, rounds): + # required per SHA2-crypt spec -- clip config rounds rather than throwing error + return self._norm_rounds(rounds, relaxed=self.checksum is None) + + @classmethod + def from_string(cls, hash): + # basic format this parses - + # $5$[rounds=$][$] + + # TODO: this *could* use uh.parse_mc3(), except that the rounds + # portion has a slightly different grammar. + + # convert to unicode, check for ident prefix, split on dollar signs. 
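# ---- editor's aside (illustrative sketch, not part of the patched file) ----
# The two accepted layouts, shown on made-up example strings:
#   "$5$rounds=80000$wnsT7Yr92oJoP28r$<chk>"  -> rounds=80000 (explicit)
#   "$5$wnsT7Yr92oJoP28r$<chk>"               -> rounds=5000  (implicit default)
# A minimal stand-alone parse of just the rounds field, mirroring the code below:
def _peek_rounds_sketch(hash_str):
    body = hash_str[3:]                        # strip the 3-char "$5$"/"$6$" ident
    head = body.split("$", 1)[0]
    return int(head[len("rounds="):]) if head.startswith("rounds=") else 5000

assert _peek_rounds_sketch("$5$rounds=80000$wnsT7Yr92oJoP28r$abc") == 80000
assert _peek_rounds_sketch("$6$wnsT7Yr92oJoP28r$abc") == 5000
# -----------------------------------------------------------------------------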
+ hash = to_unicode(hash, "ascii", "hash") + ident = cls.ident + if not hash.startswith(ident): + raise uh.exc.InvalidHashError(cls) + assert len(ident) == 3 + parts = hash[3:].split(_UDOLLAR) + + # extract rounds value + if parts[0].startswith(_UROUNDS): + assert len(_UROUNDS) == 7 + rounds = parts.pop(0)[7:] + if rounds.startswith(_UZERO) and rounds != _UZERO: + raise uh.exc.ZeroPaddedRoundsError(cls) + rounds = int(rounds) + implicit_rounds = False + else: + rounds = 5000 + implicit_rounds = True + + # rest should be salt and checksum + if len(parts) == 2: + salt, chk = parts + elif len(parts) == 1: + salt = parts[0] + chk = None + else: + raise uh.exc.MalformedHashError(cls) + + # return new object + return cls( + rounds=rounds, + salt=salt, + checksum=chk or None, + implicit_rounds=implicit_rounds, + ) + + def to_string(self): + if self.rounds == 5000 and self.implicit_rounds: + hash = u("%s%s$%s") % (self.ident, self.salt, + self.checksum or u('')) + else: + hash = u("%srounds=%d$%s$%s") % (self.ident, self.rounds, + self.salt, self.checksum or u('')) + return uascii_to_str(hash) + + #=================================================================== + # backends + #=================================================================== + backends = ("os_crypt", "builtin") + + #--------------------------------------------------------------- + # os_crypt backend + #--------------------------------------------------------------- + + #: test hash for OS detection -- provided by subclass + _test_hash = None + + @classmethod + def _load_backend_os_crypt(cls): + if test_crypt(*cls._test_hash): + cls._set_calc_checksum_backend(cls._calc_checksum_os_crypt) + return True + else: + return False + + def _calc_checksum_os_crypt(self, secret): + config = self.to_string() + hash = safe_crypt(secret, config) + if hash is None: + # py3's crypt.crypt() can't handle non-utf8 bytes. + # fallback to builtin alg, which is always available. + return self._calc_checksum_builtin(secret) + # NOTE: avoiding full parsing routine via from_string().checksum, + # and just extracting the bit we need. + cs = self.checksum_size + if not hash.startswith(self.ident) or hash[-cs-1] != _UDOLLAR: + raise uh.exc.CryptBackendError(self, config, hash) + return hash[-cs:] + + #--------------------------------------------------------------- + # builtin backend + #--------------------------------------------------------------- + @classmethod + def _load_backend_builtin(cls): + cls._set_calc_checksum_backend(cls._calc_checksum_builtin) + return True + + def _calc_checksum_builtin(self, secret): + return _raw_sha2_crypt(secret, self.salt, self.rounds, + self._cdb_use_512) + + #=================================================================== + # eoc + #=================================================================== + +class sha256_crypt(_SHA2_Common): + """This class implements the SHA256-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 0-16 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 535000, must be between 1000 and 999999999, inclusive. + + .. 
note:: + per the official specification, when the rounds parameter is set to 5000, + it may be omitted from the hash string. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + .. + commented out, currently only supported by :meth:`hash`, and not via :meth:`using`: + + :type implicit_rounds: bool + :param implicit_rounds: + this is an internal option which generally doesn't need to be touched. + + this flag determines whether the hash should omit the rounds parameter + when encoding it to a string; this is only permitted by the spec for rounds=5000, + and the flag is ignored otherwise. the spec requires the two different + encodings be preserved as they are, instead of normalizing them. + """ + #=================================================================== + # class attrs + #=================================================================== + name = "sha256_crypt" + ident = u("$5$") + checksum_size = 43 + # NOTE: using 25/75 weighting of builtin & os_crypt backends + default_rounds = 535000 + + #=================================================================== + # backends + #=================================================================== + _test_hash = ("test", "$5$rounds=1000$test$QmQADEXMG8POI5W" + "Dsaeho0P36yK3Tcrgboabng6bkb/") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# sha 512 crypt +#============================================================================= +class sha512_crypt(_SHA2_Common): + """This class implements the SHA512-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 0-16 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 656000, must be between 1000 and 999999999, inclusive. + + .. note:: + per the official specification, when the rounds parameter is set to 5000, + it may be omitted from the hash string. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + .. + commented out, currently only supported by :meth:`hash`, and not via :meth:`using`: + + :type implicit_rounds: bool + :param implicit_rounds: + this is an internal option which generally doesn't need to be touched. 
+ + this flag determines whether the hash should omit the rounds parameter + when encoding it to a string; this is only permitted by the spec for rounds=5000, + and the flag is ignored otherwise. the spec requires the two different + encodings be preserved as they are, instead of normalizing them. + """ + + #=================================================================== + # class attrs + #=================================================================== + name = "sha512_crypt" + ident = u("$6$") + checksum_size = 86 + _cdb_use_512 = True + # NOTE: using 25/75 weighting of builtin & os_crypt backends + default_rounds = 656000 + + #=================================================================== + # backend + #=================================================================== + _test_hash = ("test", "$6$rounds=1000$test$2M/Lx6Mtobqj" + "Ljobw0Wmo4Q5OFx5nVLJvmgseatA6oMn" + "yWeBdRDx4DU.1H3eGmse6pgsOgDisWBG" + "I5c7TZauS0") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/sun_md5_crypt.py b/ansible/lib/python3.11/site-packages/passlib/handlers/sun_md5_crypt.py new file mode 100644 index 000000000..0eeb4e744 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/sun_md5_crypt.py @@ -0,0 +1,363 @@ +"""passlib.handlers.sun_md5_crypt - Sun's Md5 Crypt, used on Solaris + +.. warning:: + + This implementation may not reproduce + the original Solaris behavior in some border cases. + See documentation for details. +""" + +#============================================================================= +# imports +#============================================================================= +# core +from hashlib import md5 +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_unicode +from passlib.utils.binary import h64 +from passlib.utils.compat import byte_elem_value, irange, u, \ + uascii_to_str, unicode, str_to_bascii +import passlib.utils.handlers as uh +# local +__all__ = [ + "sun_md5_crypt", +] + +#============================================================================= +# backend +#============================================================================= +# constant data used by alg - Hamlet act 3 scene 1 + null char +# exact bytes as in http://www.ibiblio.org/pub/docs/books/gutenberg/etext98/2ws2610.txt +# from Project Gutenberg. + +MAGIC_HAMLET = ( + b"To be, or not to be,--that is the question:--\n" + b"Whether 'tis nobler in the mind to suffer\n" + b"The slings and arrows of outrageous fortune\n" + b"Or to take arms against a sea of troubles,\n" + b"And by opposing end them?--To die,--to sleep,--\n" + b"No more; and by a sleep to say we end\n" + b"The heartache, and the thousand natural shocks\n" + b"That flesh is heir to,--'tis a consummation\n" + b"Devoutly to be wish'd. To die,--to sleep;--\n" + b"To sleep! 
perchance to dream:--ay, there's the rub;\n" + b"For in that sleep of death what dreams may come,\n" + b"When we have shuffled off this mortal coil,\n" + b"Must give us pause: there's the respect\n" + b"That makes calamity of so long life;\n" + b"For who would bear the whips and scorns of time,\n" + b"The oppressor's wrong, the proud man's contumely,\n" + b"The pangs of despis'd love, the law's delay,\n" + b"The insolence of office, and the spurns\n" + b"That patient merit of the unworthy takes,\n" + b"When he himself might his quietus make\n" + b"With a bare bodkin? who would these fardels bear,\n" + b"To grunt and sweat under a weary life,\n" + b"But that the dread of something after death,--\n" + b"The undiscover'd country, from whose bourn\n" + b"No traveller returns,--puzzles the will,\n" + b"And makes us rather bear those ills we have\n" + b"Than fly to others that we know not of?\n" + b"Thus conscience does make cowards of us all;\n" + b"And thus the native hue of resolution\n" + b"Is sicklied o'er with the pale cast of thought;\n" + b"And enterprises of great pith and moment,\n" + b"With this regard, their currents turn awry,\n" + b"And lose the name of action.--Soft you now!\n" + b"The fair Ophelia!--Nymph, in thy orisons\n" + b"Be all my sins remember'd.\n\x00" #<- apparently null at end of C string is included (test vector won't pass otherwise) +) + +# NOTE: these sequences are pre-calculated iteration ranges used by X & Y loops w/in rounds function below +xr = irange(7) +_XY_ROUNDS = [ + tuple((i,i,i+3) for i in xr), # xrounds 0 + tuple((i,i+1,i+4) for i in xr), # xrounds 1 + tuple((i,i+8,(i+11)&15) for i in xr), # yrounds 0 + tuple((i,(i+9)&15, (i+12)&15) for i in xr), # yrounds 1 +] +del xr + +def raw_sun_md5_crypt(secret, rounds, salt): + """given secret & salt, return encoded sun-md5-crypt checksum""" + global MAGIC_HAMLET + assert isinstance(secret, bytes) + assert isinstance(salt, bytes) + + # validate rounds + if rounds <= 0: + rounds = 0 + real_rounds = 4096 + rounds + # NOTE: spec seems to imply max 'rounds' is 2**32-1 + + # generate initial digest to start off round 0. + # NOTE: algorithm 'salt' includes full config string w/ trailing "$" + result = md5(secret + salt).digest() + assert len(result) == 16 + + # NOTE: many things in this function have been inlined (to speed up the loop + # as much as possible), to the point that this code barely resembles + # the algorithm as described in the docs. in particular: + # + # * all accesses to a given bit have been inlined using the formula + # rbitval(bit) = (rval((bit>>3) & 15) >> (bit & 7)) & 1 + # + # * the calculation of coinflip value R has been inlined + # + # * the conditional division of coinflip value V has been inlined as + # a shift right of 0 or 1. + # + # * the i, i+3, etc iterations are precalculated in lists. + # + # * the round-based conditional division of x & y is now performed + # by choosing an appropriate precalculated list, so that it only + # calculates the 7 bits which will actually be used. 
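# ---- editor's aside (illustrative sketch, not part of the patched file) ----
# The inlined bit-access formula mentioned above, pulled out on its own (using
# py3 byte indexing): bit b of the 16-byte digest is bit (b & 7) of byte
# ((b >> 3) & 15).
def _rbitval_sketch(digest, bit):
    return (digest[(bit >> 3) & 15] >> (bit & 7)) & 1

assert _rbitval_sketch(bytes([0x04] + [0x00] * 15), 2) == 1      # bit 2 of byte 0
assert _rbitval_sketch(bytes([0x00] * 15 + [0x80]), 127) == 1    # bit 7 of byte 15
# -----------------------------------------------------------------------------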
+ # + X_ROUNDS_0, X_ROUNDS_1, Y_ROUNDS_0, Y_ROUNDS_1 = _XY_ROUNDS + + # NOTE: % appears to be *slightly* slower than &, so we prefer & if possible + + round = 0 + while round < real_rounds: + # convert last result byte string to list of byte-ints for easy access + rval = [ byte_elem_value(c) for c in result ].__getitem__ + + # build up X bit by bit + x = 0 + xrounds = X_ROUNDS_1 if (rval((round>>3) & 15)>>(round & 7)) & 1 else X_ROUNDS_0 + for i, ia, ib in xrounds: + a = rval(ia) + b = rval(ib) + v = rval((a >> (b % 5)) & 15) >> ((b>>(a&7)) & 1) + x |= ((rval((v>>3)&15)>>(v&7))&1) << i + + # build up Y bit by bit + y = 0 + yrounds = Y_ROUNDS_1 if (rval(((round+64)>>3) & 15)>>(round & 7)) & 1 else Y_ROUNDS_0 + for i, ia, ib in yrounds: + a = rval(ia) + b = rval(ib) + v = rval((a >> (b % 5)) & 15) >> ((b>>(a&7)) & 1) + y |= ((rval((v>>3)&15)>>(v&7))&1) << i + + # extract x'th and y'th bit, xoring them together to yeild "coin flip" + coin = ((rval(x>>3) >> (x&7)) ^ (rval(y>>3) >> (y&7))) & 1 + + # construct hash for this round + h = md5(result) + if coin: + h.update(MAGIC_HAMLET) + h.update(unicode(round).encode("ascii")) + result = h.digest() + + round += 1 + + # encode output + return h64.encode_transposed_bytes(result, _chk_offsets) + +# NOTE: same offsets as md5_crypt +_chk_offsets = ( + 12,6,0, + 13,7,1, + 14,8,2, + 15,9,3, + 5,10,4, + 11, +) + +#============================================================================= +# handler +#============================================================================= +class sun_md5_crypt(uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements the Sun-MD5-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.using` method accepts the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, a salt will be autogenerated (this is recommended). + If specified, it must be drawn from the regexp range ``[./0-9A-Za-z]``. + + :type salt_size: int + :param salt_size: + If no salt is specified, this parameter can be used to specify + the size (in characters) of the autogenerated salt. + It currently defaults to 8. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 34000, must be between 0 and 4294963199, inclusive. + + :type bare_salt: bool + :param bare_salt: + Optional flag used to enable an alternate salt digest behavior + used by some hash strings in this scheme. + This flag can be ignored by most users. + Defaults to ``False``. + (see :ref:`smc-bare-salt` for details). + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + name = "sun_md5_crypt" + setting_kwds = ("salt", "rounds", "bare_salt", "salt_size") + checksum_chars = uh.HASH64_CHARS + checksum_size = 22 + + # NOTE: docs say max password length is 255. 
+ # release 9u2 + + # NOTE: not sure if original crypt has a salt size limit, + # all instances that have been seen use 8 chars. + default_salt_size = 8 + max_salt_size = None + salt_chars = uh.HASH64_CHARS + + default_rounds = 34000 # current passlib default + min_rounds = 0 + max_rounds = 4294963199 ##2**32-1-4096 + # XXX: ^ not sure what it does if past this bound... does 32 int roll over? + rounds_cost = "linear" + + ident_values = (u("$md5$"), u("$md5,")) + + #=================================================================== + # instance attrs + #=================================================================== + bare_salt = False # flag to indicate legacy hashes that lack "$$" suffix + + #=================================================================== + # constructor + #=================================================================== + def __init__(self, bare_salt=False, **kwds): + self.bare_salt = bare_salt + super(sun_md5_crypt, self).__init__(**kwds) + + #=================================================================== + # internal helpers + #=================================================================== + @classmethod + def identify(cls, hash): + hash = uh.to_unicode_for_identify(hash) + return hash.startswith(cls.ident_values) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + + # + # detect if hash specifies rounds value. + # if so, parse and validate it. + # by end, set 'rounds' to int value, and 'tail' containing salt+chk + # + if hash.startswith(u("$md5$")): + rounds = 0 + salt_idx = 5 + elif hash.startswith(u("$md5,rounds=")): + idx = hash.find(u("$"), 12) + if idx == -1: + raise uh.exc.MalformedHashError(cls, "unexpected end of rounds") + rstr = hash[12:idx] + try: + rounds = int(rstr) + except ValueError: + raise uh.exc.MalformedHashError(cls, "bad rounds") + if rstr != unicode(rounds): + raise uh.exc.ZeroPaddedRoundsError(cls) + if rounds == 0: + # NOTE: not sure if this is forbidden by spec or not; + # but allowing it would complicate things, + # and it should never occur anyways. + raise uh.exc.MalformedHashError(cls, "explicit zero rounds") + salt_idx = idx+1 + else: + raise uh.exc.InvalidHashError(cls) + + # + # salt/checksum separation is kinda weird, + # to deal cleanly with some backward-compatible workarounds + # implemented by original implementation. 
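# ---- editor's aside (illustrative sketch, not part of the patched file) ----
# Made-up strings for the four layouts the branches below distinguish, mapped to
# (salt, checksum, bare_salt); the extra "$" decides whether a trailing "$" is
# treated as part of the config string that gets digested:
_layout_examples_sketch = {
    "$md5$SALT":      ("SALT", None,  True),    # bare config string
    "$md5$SALT$":     ("SALT", None,  False),   # config string incl. trailing "$"
    "$md5$SALT$CHK":  ("SALT", "CHK", True),    # hash, salt digested without "$"
    "$md5$SALT$$CHK": ("SALT", "CHK", False),   # hash, salt digested with "$"
}
# -----------------------------------------------------------------------------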
+ # + chk_idx = hash.rfind(u("$"), salt_idx) + if chk_idx == -1: + # ''-config for $-hash + salt = hash[salt_idx:] + chk = None + bare_salt = True + elif chk_idx == len(hash)-1: + if chk_idx > salt_idx and hash[-2] == u("$"): + raise uh.exc.MalformedHashError(cls, "too many '$' separators") + # $-config for $$-hash + salt = hash[salt_idx:-1] + chk = None + bare_salt = False + elif chk_idx > 0 and hash[chk_idx-1] == u("$"): + # $$-hash + salt = hash[salt_idx:chk_idx-1] + chk = hash[chk_idx+1:] + bare_salt = False + else: + # $-hash + salt = hash[salt_idx:chk_idx] + chk = hash[chk_idx+1:] + bare_salt = True + + return cls( + rounds=rounds, + salt=salt, + checksum=chk, + bare_salt=bare_salt, + ) + + def to_string(self, _withchk=True): + ss = u('') if self.bare_salt else u('$') + rounds = self.rounds + if rounds > 0: + hash = u("$md5,rounds=%d$%s%s") % (rounds, self.salt, ss) + else: + hash = u("$md5$%s%s") % (self.salt, ss) + if _withchk: + chk = self.checksum + hash = u("%s$%s") % (hash, chk) + return uascii_to_str(hash) + + #=================================================================== + # primary interface + #=================================================================== + # TODO: if we're on solaris, check for native crypt() support. + # this will require extra testing, to make sure native crypt + # actually behaves correctly. of particular importance: + # when using ""-config, make sure to append "$x" to string. + + def _calc_checksum(self, secret): + # NOTE: no reference for how sun_md5_crypt handles unicode + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + config = str_to_bascii(self.to_string(_withchk=False)) + return raw_sun_md5_crypt(secret, self.rounds, config).decode("ascii") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/handlers/windows.py b/ansible/lib/python3.11/site-packages/passlib/handlers/windows.py new file mode 100644 index 000000000..e17beba4f --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/handlers/windows.py @@ -0,0 +1,334 @@ +"""passlib.handlers.nthash - Microsoft Windows -related hashes""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_unicode, right_pad_string +from passlib.utils.compat import unicode +from passlib.crypto.digest import lookup_hash +md4 = lookup_hash("md4").const +import passlib.utils.handlers as uh +# local +__all__ = [ + "lmhash", + "nthash", + "bsd_nthash", + "msdcc", + "msdcc2", +] + +#============================================================================= +# lanman hash +#============================================================================= +class lmhash(uh.TruncateMixin, uh.HasEncodingContext, uh.StaticHandler): + """This class implements the Lan Manager Password hash, and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. 
+ + The :meth:`~passlib.ifc.PasswordHash.using` method accepts a single + optional keyword: + + :param bool truncate_error: + By default, this will silently truncate passwords larger than 14 bytes. + Setting ``truncate_error=True`` will cause :meth:`~passlib.ifc.PasswordHash.hash` + to raise a :exc:`~passlib.exc.PasswordTruncateError` instead. + + .. versionadded:: 1.7 + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.verify` methods accept a single + optional keyword: + + :type encoding: str + :param encoding: + + This specifies what character encoding LMHASH should use when + calculating digest. It defaults to ``cp437``, the most + common encoding encountered. + + Note that while this class outputs digests in lower-case hexadecimal, + it will accept upper-case as well. + """ + #=================================================================== + # class attrs + #=================================================================== + + #-------------------- + # PasswordHash + #-------------------- + name = "lmhash" + setting_kwds = ("truncate_error",) + + #-------------------- + # GenericHandler + #-------------------- + checksum_chars = uh.HEX_CHARS + checksum_size = 32 + + #-------------------- + # TruncateMixin + #-------------------- + truncate_size = 14 + + #-------------------- + # custom + #-------------------- + default_encoding = "cp437" + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + # check for truncation (during .hash() calls only) + if self.use_defaults: + self._check_truncate_policy(secret) + + return hexlify(self.raw(secret, self.encoding)).decode("ascii") + + # magic constant used by LMHASH + _magic = b"KGS!@#$%" + + @classmethod + def raw(cls, secret, encoding=None): + """encode password using LANMAN hash algorithm. + + :type secret: unicode or utf-8 encoded bytes + :arg secret: secret to hash + :type encoding: str + :arg encoding: + optional encoding to use for unicode inputs. + this defaults to ``cp437``, which is the + common case for most situations. + + :returns: returns string of raw bytes + """ + if not encoding: + encoding = cls.default_encoding + # some nice empircal data re: different encodings is at... + # http://www.openwall.com/lists/john-dev/2011/08/01/2 + # http://www.freerainbowtables.com/phpBB3/viewtopic.php?t=387&p=12163 + from passlib.crypto.des import des_encrypt_block + MAGIC = cls._magic + if isinstance(secret, unicode): + # perform uppercasing while we're still unicode, + # to give a better shot at getting non-ascii chars right. + # (though some codepages do NOT upper-case the same as unicode). + secret = secret.upper().encode(encoding) + elif isinstance(secret, bytes): + # FIXME: just trusting ascii upper will work? + # and if not, how to do codepage specific case conversion? + # we could decode first using , + # but *that* might not always be right. 
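# ---- editor's aside (illustrative sketch, not part of the patched file) ----
# The complete LM construction in one place, reusing the des_encrypt_block()
# helper imported a few lines above; the password and encoding are illustrative.
from passlib.crypto.des import des_encrypt_block

def _lmhash_sketch(password, encoding="cp437"):
    data = password.upper().encode(encoding)[:14]  # upper-case, cap at 14 bytes
    data = data.ljust(14, b"\x00")                 # NUL-pad the short case
    magic = b"KGS!@#$%"
    # two independent single-DES encryptions of the magic constant,
    # keyed by each 7-byte half of the padded password
    return des_encrypt_block(data[:7], magic) + des_encrypt_block(data[7:], magic)
# -----------------------------------------------------------------------------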
+ secret = secret.upper() + else: + raise TypeError("secret must be unicode or bytes") + secret = right_pad_string(secret, 14) + return des_encrypt_block(secret[0:7], MAGIC) + \ + des_encrypt_block(secret[7:14], MAGIC) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# ntlm hash +#============================================================================= +class nthash(uh.StaticHandler): + """This class implements the NT Password hash, and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. + + Note that while this class outputs lower-case hexadecimal digests, + it will accept upper-case digests as well. + """ + #=================================================================== + # class attrs + #=================================================================== + name = "nthash" + checksum_chars = uh.HEX_CHARS + checksum_size = 32 + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + return hexlify(self.raw(secret)).decode("ascii") + + @classmethod + def raw(cls, secret): + """encode password using MD4-based NTHASH algorithm + + :arg secret: secret as unicode or utf-8 encoded bytes + + :returns: returns string of raw bytes + """ + secret = to_unicode(secret, "utf-8", param="secret") + # XXX: found refs that say only first 128 chars are used. + return md4(secret.encode("utf-16-le")).digest() + + @classmethod + def raw_nthash(cls, secret, hex=False): + warn("nthash.raw_nthash() is deprecated, and will be removed " + "in Passlib 1.8, please use nthash.raw() instead", + DeprecationWarning) + ret = nthash.raw(secret) + return hexlify(ret).decode("ascii") if hex else ret + + #=================================================================== + # eoc + #=================================================================== + +bsd_nthash = uh.PrefixWrapper("bsd_nthash", nthash, prefix="$3$$", ident="$3$$", + doc="""The class support FreeBSD's representation of NTHASH + (which is compatible with the :ref:`modular-crypt-format`), + and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. + + The :meth:`~passlib.ifc.PasswordHash.hash` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. 
+ """) + +##class ntlm_pair(object): +## "combined lmhash & nthash" +## name = "ntlm_pair" +## setting_kwds = () +## _hash_regex = re.compile(u"^(?P[0-9a-f]{32}):(?P[0-9][a-f]{32})$", +## re.I) +## +## @classmethod +## def identify(cls, hash): +## hash = to_unicode(hash, "latin-1", "hash") +## return len(hash) == 65 and cls._hash_regex.match(hash) is not None +## +## @classmethod +## def hash(cls, secret, config=None): +## if config is not None and not cls.identify(config): +## raise uh.exc.InvalidHashError(cls) +## return lmhash.hash(secret) + ":" + nthash.hash(secret) +## +## @classmethod +## def verify(cls, secret, hash): +## hash = to_unicode(hash, "ascii", "hash") +## m = cls._hash_regex.match(hash) +## if not m: +## raise uh.exc.InvalidHashError(cls) +## lm, nt = m.group("lm", "nt") +## # NOTE: verify against both in case encoding issue +## # causes one not to match. +## return lmhash.verify(secret, lm) or nthash.verify(secret, nt) + +#============================================================================= +# msdcc v1 +#============================================================================= +class msdcc(uh.HasUserContext, uh.StaticHandler): + """This class implements Microsoft's Domain Cached Credentials password hash, + and follows the :ref:`password-hash-api`. + + It has a fixed number of rounds, and uses the associated + username as the salt. + + The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods + have the following optional keywords: + + :type user: str + :param user: + String containing name of user account this password is associated with. + This is required to properly calculate the hash. + + This keyword is case-insensitive, and should contain just the username + (e.g. ``Administrator``, not ``SOMEDOMAIN\\Administrator``). + + Note that while this class outputs lower-case hexadecimal digests, + it will accept upper-case digests as well. + """ + name = "msdcc" + checksum_chars = uh.HEX_CHARS + checksum_size = 32 + + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + return hexlify(self.raw(secret, self.user)).decode("ascii") + + @classmethod + def raw(cls, secret, user): + """encode password using mscash v1 algorithm + + :arg secret: secret as unicode or utf-8 encoded bytes + :arg user: username to use as salt + + :returns: returns string of raw bytes + """ + secret = to_unicode(secret, "utf-8", param="secret").encode("utf-16-le") + user = to_unicode(user, "utf-8", param="user").lower().encode("utf-16-le") + return md4(md4(secret).digest() + user).digest() + +#============================================================================= +# msdcc2 aka mscash2 +#============================================================================= +class msdcc2(uh.HasUserContext, uh.StaticHandler): + """This class implements version 2 of Microsoft's Domain Cached Credentials + password hash, and follows the :ref:`password-hash-api`. + + It has a fixed number of rounds, and uses the associated + username as the salt. + + The :meth:`~passlib.ifc.PasswordHash.hash`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods + have the following extra keyword: + + :type user: str + :param user: + String containing name of user account this password is associated with. + This is required to properly calculate the hash. + + This keyword is case-insensitive, and should contain just the username + (e.g. 
``Administrator``, not ``SOMEDOMAIN\\Administrator``). + """ + name = "msdcc2" + checksum_chars = uh.HEX_CHARS + checksum_size = 32 + + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + return hexlify(self.raw(secret, self.user)).decode("ascii") + + @classmethod + def raw(cls, secret, user): + """encode password using msdcc v2 algorithm + + :type secret: unicode or utf-8 bytes + :arg secret: secret + + :type user: str + :arg user: username to use as salt + + :returns: returns string of raw bytes + """ + from passlib.crypto.digest import pbkdf2_hmac + secret = to_unicode(secret, "utf-8", param="secret").encode("utf-16-le") + user = to_unicode(user, "utf-8", param="user").lower().encode("utf-16-le") + tmp = md4(md4(secret).digest() + user).digest() + return pbkdf2_hmac("sha1", tmp, user, 10240, 16) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/hash.py b/ansible/lib/python3.11/site-packages/passlib/hash.py new file mode 100644 index 000000000..2cc0628da --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/hash.py @@ -0,0 +1,68 @@ +""" +passlib.hash - proxy object mapping hash scheme names -> handlers + +================== +***** NOTICE ***** +================== + +This module does not actually contain any hashes. This file +is a stub that replaces itself with a proxy object. + +This proxy object (passlib.registry._PasslibRegistryProxy) +handles lazy-loading hashes as they are requested. + +The actual implementation of the various hashes is store elsewhere, +mainly in the submodules of the ``passlib.handlers`` subpackage. +""" + +#============================================================================= +# import proxy object and replace this module +#============================================================================= + +# XXX: if any platform has problem w/ lazy modules, could support 'non-lazy' +# version which just imports all schemes known to list_crypt_handlers() + +from passlib.registry import _proxy +import sys +sys.modules[__name__] = _proxy + +#============================================================================= +# HACK: the following bit of code is unreachable, but it's presence seems to +# help make autocomplete work for certain IDEs such as PyCharm. 
+# this list is automatically regenerated using $SOURCE/admin/regen.py +#============================================================================= + +#---------------------------------------------------- +# begin autocomplete hack (autogenerated 2016-11-10) +#---------------------------------------------------- +if False: + from passlib.handlers.argon2 import argon2 + from passlib.handlers.bcrypt import bcrypt, bcrypt_sha256 + from passlib.handlers.cisco import cisco_asa, cisco_pix, cisco_type7 + from passlib.handlers.des_crypt import bigcrypt, bsdi_crypt, crypt16, des_crypt + from passlib.handlers.digests import hex_md4, hex_md5, hex_sha1, hex_sha256, hex_sha512, htdigest + from passlib.handlers.django import django_bcrypt, django_bcrypt_sha256, django_des_crypt, django_disabled, django_pbkdf2_sha1, django_pbkdf2_sha256, django_salted_md5, django_salted_sha1 + from passlib.handlers.fshp import fshp + from passlib.handlers.ldap_digests import ldap_bcrypt, ldap_bsdi_crypt, ldap_des_crypt, ldap_md5, ldap_md5_crypt, ldap_plaintext, ldap_salted_md5, ldap_salted_sha1, ldap_salted_sha256, ldap_salted_sha512, ldap_sha1, ldap_sha1_crypt, ldap_sha256_crypt, ldap_sha512_crypt + from passlib.handlers.md5_crypt import apr_md5_crypt, md5_crypt + from passlib.handlers.misc import plaintext, unix_disabled, unix_fallback + from passlib.handlers.mssql import mssql2000, mssql2005 + from passlib.handlers.mysql import mysql323, mysql41 + from passlib.handlers.oracle import oracle10, oracle11 + from passlib.handlers.pbkdf2 import atlassian_pbkdf2_sha1, cta_pbkdf2_sha1, dlitz_pbkdf2_sha1, grub_pbkdf2_sha512, ldap_pbkdf2_sha1, ldap_pbkdf2_sha256, ldap_pbkdf2_sha512, pbkdf2_sha1, pbkdf2_sha256, pbkdf2_sha512 + from passlib.handlers.phpass import phpass + from passlib.handlers.postgres import postgres_md5 + from passlib.handlers.roundup import ldap_hex_md5, ldap_hex_sha1, roundup_plaintext + from passlib.handlers.scram import scram + from passlib.handlers.scrypt import scrypt + from passlib.handlers.sha1_crypt import sha1_crypt + from passlib.handlers.sha2_crypt import sha256_crypt, sha512_crypt + from passlib.handlers.sun_md5_crypt import sun_md5_crypt + from passlib.handlers.windows import bsd_nthash, lmhash, msdcc, msdcc2, nthash +#---------------------------------------------------- +# end autocomplete hack +#---------------------------------------------------- + +#============================================================================= +# eoc +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/hosts.py b/ansible/lib/python3.11/site-packages/passlib/hosts.py new file mode 100644 index 000000000..1f137a260 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/hosts.py @@ -0,0 +1,106 @@ +"""passlib.hosts""" +#============================================================================= +# imports +#============================================================================= +# core +from warnings import warn +# pkg +from passlib.context import LazyCryptContext +from passlib.exc import PasslibRuntimeWarning +from passlib import registry +from passlib.utils import has_crypt, unix_crypt_schemes +# local +__all__ = [ + "linux_context", "linux2_context", + "openbsd_context", + "netbsd_context", + "freebsd_context", + "host_context", +] + +#============================================================================= +# linux support +#============================================================================= 
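# ---- editor's aside (illustrative sketch, not part of the patched file) ----
# How these lazily-built contexts are typically consumed; it uses the
# linux_context defined just below, and the password is made up.  With the
# default settings the first listed scheme (sha512_crypt) is used for new hashes.
from passlib.hosts import linux_context

h = linux_context.hash("hunter2")
assert linux_context.verify("hunter2", h)
assert linux_context.identify(h) == "sha512_crypt"
# -----------------------------------------------------------------------------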
+ +# known platform names - linux2 + +linux_context = linux2_context = LazyCryptContext( + schemes = [ "sha512_crypt", "sha256_crypt", "md5_crypt", + "des_crypt", "unix_disabled" ], + deprecated = [ "des_crypt" ], + ) + +#============================================================================= +# bsd support +#============================================================================= + +# known platform names - +# freebsd2 +# freebsd3 +# freebsd4 +# freebsd5 +# freebsd6 +# freebsd7 +# +# netbsd1 + +# referencing source via -http://fxr.googlebit.com +# freebsd 6,7,8 - des, md5, bcrypt, bsd_nthash +# netbsd - des, ext, md5, bcrypt, sha1 +# openbsd - des, ext, md5, bcrypt + +freebsd_context = LazyCryptContext(["bcrypt", "md5_crypt", "bsd_nthash", + "des_crypt", "unix_disabled"]) + +openbsd_context = LazyCryptContext(["bcrypt", "md5_crypt", "bsdi_crypt", + "des_crypt", "unix_disabled"]) + +netbsd_context = LazyCryptContext(["bcrypt", "sha1_crypt", "md5_crypt", + "bsdi_crypt", "des_crypt", "unix_disabled"]) + +# XXX: include darwin in this list? it's got a BSD crypt variant, +# but that's not what it uses for user passwords. + +#============================================================================= +# current host +#============================================================================= +if registry.os_crypt_present: + # NOTE: this is basically mimicing the output of os crypt(), + # except that it uses passlib's (usually stronger) defaults settings, + # and can be inspected and used much more flexibly. + + def _iter_os_crypt_schemes(): + """helper which iterates over supported os_crypt schemes""" + out = registry.get_supported_os_crypt_schemes() + if out: + # only offer disabled handler if there's another scheme in front, + # as this can't actually hash any passwords + out += ("unix_disabled",) + return out + + host_context = LazyCryptContext(_iter_os_crypt_schemes()) + +#============================================================================= +# other platforms +#============================================================================= + +# known platform strings - +# aix3 +# aix4 +# atheos +# beos5 +# darwin +# generic +# hp-ux11 +# irix5 +# irix6 +# mac +# next3 +# os2emx +# riscos +# sunos5 +# unixware7 + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/ifc.py b/ansible/lib/python3.11/site-packages/passlib/ifc.py new file mode 100644 index 000000000..559d256e3 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/ifc.py @@ -0,0 +1,353 @@ +"""passlib.ifc - abstract interfaces used by Passlib""" +#============================================================================= +# imports +#============================================================================= +# core +import logging; log = logging.getLogger(__name__) +import sys +# site +# pkg +from passlib.utils.decor import deprecated_method +# local +__all__ = [ + "PasswordHash", +] + +#============================================================================= +# 2/3 compatibility helpers +#============================================================================= +def recreate_with_metaclass(meta): + """class decorator that re-creates class using metaclass""" + def builder(cls): + if meta is type(cls): + return cls + return meta(cls.__name__, cls.__bases__, cls.__dict__.copy()) + return builder + 
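# ---- editor's aside (illustrative sketch, not part of the patched file) ----
# What the helper above is for: rebuilding an already-defined class under another
# metaclass (ABCMeta, below), which is how PasswordHash becomes an abstract base
# class without py2/py3-specific metaclass syntax.  Minimal stand-alone check:
from abc import ABCMeta
from passlib.ifc import recreate_with_metaclass

@recreate_with_metaclass(ABCMeta)
class _PlainSketch(object):
    pass

assert type(_PlainSketch) is ABCMeta
# -----------------------------------------------------------------------------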
+#============================================================================= +# PasswordHash interface +#============================================================================= +from abc import ABCMeta, abstractmethod, abstractproperty + +# TODO: make this actually use abstractproperty(), +# now that we dropped py25, 'abc' is always available. + +# XXX: rename to PasswordHasher? + +@recreate_with_metaclass(ABCMeta) +class PasswordHash(object): + """This class describes an abstract interface which all password hashes + in Passlib adhere to. Under Python 2.6 and up, this is an actual + Abstract Base Class built using the :mod:`!abc` module. + + See the Passlib docs for full documentation. + """ + #=================================================================== + # class attributes + #=================================================================== + + #--------------------------------------------------------------- + # general information + #--------------------------------------------------------------- + ##name + ##setting_kwds + ##context_kwds + + #: flag which indicates this hasher matches a "disabled" hash + #: (e.g. unix_disabled, or django_disabled); and doesn't actually + #: depend on the provided password. + is_disabled = False + + #: Should be None, or a positive integer indicating hash + #: doesn't support secrets larger than this value. + #: Whether hash throws error or silently truncates secret + #: depends on .truncate_error and .truncate_verify_reject flags below. + #: NOTE: calls may treat as boolean, since value will never be 0. + #: .. versionadded:: 1.7 + #: .. TODO: passlib 1.8: deprecate/rename this attr to "max_secret_size"? + truncate_size = None + + # NOTE: these next two default to the optimistic "ideal", + # most hashes in passlib have to default to False + # for backward compat and/or expected behavior with existing hashes. + + #: If True, .hash() should throw a :exc:`~passlib.exc.PasswordSizeError` for + #: any secrets larger than .truncate_size. Many hashers default to False + #: for historical / compatibility purposes, indicating they will silently + #: truncate instead. All such hashers SHOULD support changing + #: the policy via ``.using(truncate_error=True)``. + #: .. versionadded:: 1.7 + #: .. TODO: passlib 1.8: deprecate/rename this attr to "truncate_hash_error"? + truncate_error = True + + #: If True, .verify() should reject secrets larger than max_password_size. + #: Many hashers default to False for historical / compatibility purposes, + #: indicating they will match on the truncated portion instead. + #: .. 
versionadded:: 1.7.1 + truncate_verify_reject = True + + #--------------------------------------------------------------- + # salt information -- if 'salt' in setting_kwds + #--------------------------------------------------------------- + ##min_salt_size + ##max_salt_size + ##default_salt_size + ##salt_chars + ##default_salt_chars + + #--------------------------------------------------------------- + # rounds information -- if 'rounds' in setting_kwds + #--------------------------------------------------------------- + ##min_rounds + ##max_rounds + ##default_rounds + ##rounds_cost + + #--------------------------------------------------------------- + # encoding info -- if 'encoding' in context_kwds + #--------------------------------------------------------------- + ##default_encoding + + #=================================================================== + # primary methods + #=================================================================== + @classmethod + @abstractmethod + def hash(cls, secret, # * + **setting_and_context_kwds): # pragma: no cover -- abstract method + r""" + Hash secret, returning result. + Should handle generating salt, etc, and should return string + containing identifier, salt & other configuration, as well as digest. + + :param \\*\\*settings_kwds: + + Pass in settings to customize configuration of resulting hash. + + .. deprecated:: 1.7 + + Starting with Passlib 1.7, callers should no longer pass settings keywords + (e.g. ``rounds`` or ``salt`` directly to :meth:`!hash`); should use + ``.using(**settings).hash(secret)`` construction instead. + + Support will be removed in Passlib 2.0. + + :param \\*\\*context_kwds: + + Specific algorithms may require context-specific information (such as the user login). + """ + # FIXME: need stub for classes that define .encrypt() instead ... + # this should call .encrypt(), and check for recursion back to here. + raise NotImplementedError("must be implemented by subclass") + + @deprecated_method(deprecated="1.7", removed="2.0", replacement=".hash()") + @classmethod + def encrypt(cls, *args, **kwds): + """ + Legacy alias for :meth:`hash`. + + .. deprecated:: 1.7 + This method was renamed to :meth:`!hash` in version 1.7. + This alias will be removed in version 2.0, and should only + be used for compatibility with Passlib 1.3 - 1.6. + """ + return cls.hash(*args, **kwds) + + # XXX: could provide default implementation which hands value to + # hash(), and then does constant-time comparision on the result + # (after making both are same string type) + @classmethod + @abstractmethod + def verify(cls, secret, hash, **context_kwds): # pragma: no cover -- abstract method + """verify secret against hash, returns True/False""" + raise NotImplementedError("must be implemented by subclass") + + #=================================================================== + # configuration + #=================================================================== + @classmethod + @abstractmethod + def using(cls, relaxed=False, **kwds): + """ + Return another hasher object (typically a subclass of the current one), + which integrates the configuration options specified by ``kwds``. + This should *always* return a new object, even if no configuration options are changed. + + .. todo:: + + document which options are accepted. + + :returns: + typically returns a subclass for most hasher implementations. + + .. todo:: + + add this method to main documentation. 
+ """ + raise NotImplementedError("must be implemented by subclass") + + #=================================================================== + # migration + #=================================================================== + @classmethod + def needs_update(cls, hash, secret=None): + """ + check if hash's configuration is outside desired bounds, + or contains some other internal option which requires + updating the password hash. + + :param hash: + hash string to examine + + :param secret: + optional secret known to have verified against the provided hash. + (this is used by some hashes to detect legacy algorithm mistakes). + + :return: + whether secret needs re-hashing. + + .. versionadded:: 1.7 + """ + # by default, always report that we don't need update + return False + + #=================================================================== + # additional methods + #=================================================================== + @classmethod + @abstractmethod + def identify(cls, hash): # pragma: no cover -- abstract method + """check if hash belongs to this scheme, returns True/False""" + raise NotImplementedError("must be implemented by subclass") + + @deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genconfig(cls, **setting_kwds): # pragma: no cover -- abstract method + """ + compile settings into a configuration string for genhash() + + .. deprecated:: 1.7 + + As of 1.7, this method is deprecated, and slated for complete removal in Passlib 2.0. + + For all known real-world uses, hashing a constant string + should provide equivalent functionality. + + This deprecation may be reversed if a use-case presents itself in the mean time. + """ + # NOTE: this fallback runs full hash alg, w/ whatever cost param is passed along. + # implementations (esp ones w/ variable cost) will want to subclass this + # with a constant-time implementation that just renders a config string. + if cls.context_kwds: + raise NotImplementedError("must be implemented by subclass") + return cls.using(**setting_kwds).hash("") + + @deprecated_method(deprecated="1.7", removed="2.0") + @classmethod + def genhash(cls, secret, config, **context): + """ + generated hash for secret, using settings from config/hash string + + .. deprecated:: 1.7 + + As of 1.7, this method is deprecated, and slated for complete removal in Passlib 2.0. + + This deprecation may be reversed if a use-case presents itself in the mean time. + """ + # XXX: if hashes reliably offered a .parse() method, could make a fallback for this. + raise NotImplementedError("must be implemented by subclass") + + #=================================================================== + # undocumented methods / attributes + #=================================================================== + # the following entry points are used internally by passlib, + # and aren't documented as part of the exposed interface. + # they are subject to change between releases, + # but are documented here so there's a list of them *somewhere*. + + #--------------------------------------------------------------- + # extra metdata + #--------------------------------------------------------------- + + #: this attribute shouldn't be used by hashers themselves, + #: it's reserved for the CryptContext to track which hashers are deprecated. + #: Note the context will only set this on objects it owns (and generated by .using()), + #: and WONT set it on global objects. 
+ #: [added in 1.7] + #: TODO: document this, or at least the use of testing for + #: 'CryptContext().handler().deprecated' + deprecated = False + + #: optionally present if hasher corresponds to format built into Django. + #: this attribute (if not None) should be the Django 'algorithm' name. + #: also indicates to passlib.ext.django that (when installed in django), + #: django's native hasher should be used in preference to this one. + ## django_name + + #--------------------------------------------------------------- + # checksum information - defined for many hashes + #--------------------------------------------------------------- + ## checksum_chars + ## checksum_size + + #--------------------------------------------------------------- + # experimental methods + #--------------------------------------------------------------- + + ##@classmethod + ##def normhash(cls, hash): + ## """helper to clean up non-canonic instances of hash. + ## currently only provided by bcrypt() to fix an historical passlib issue. + ## """ + + # experimental helper to parse hash into components. + ##@classmethod + ##def parsehash(cls, hash, checksum=True, sanitize=False): + ## """helper to parse hash into components, returns dict""" + + # experiment helper to estimate bitsize of different hashes, + # implement for GenericHandler, but may be currently be off for some hashes. + # want to expand this into a way to programmatically compare + # "strengths" of different hashes and hash algorithms. + # still needs to have some factor for estimate relative cost per round, + # ala in the style of the scrypt whitepaper. + ##@classmethod + ##def bitsize(cls, **kwds): + ## """returns dict mapping component -> bits contributed. + ## components currently include checksum, salt, rounds. + ## """ + + #=================================================================== + # eoc + #=================================================================== + +class DisabledHash(PasswordHash): + """ + extended disabled-hash methods; only need be present if .disabled = True + """ + + is_disabled = True + + @classmethod + def disable(cls, hash=None): + """ + return string representing a 'disabled' hash; + optionally including previously enabled hash + (this is up to the individual scheme). 
+ """ + # default behavior: ignore original hash, return standalone marker + return cls.hash("") + + @classmethod + def enable(cls, hash): + """ + given a disabled-hash string, + extract previously-enabled hash if one is present, + otherwise raises ValueError + """ + # default behavior: no way to restore original hash + raise ValueError("cannot restore original hash") + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/pwd.py b/ansible/lib/python3.11/site-packages/passlib/pwd.py new file mode 100644 index 000000000..27ed228bb --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/pwd.py @@ -0,0 +1,809 @@ +"""passlib.pwd -- password generation helpers""" +#============================================================================= +# imports +#============================================================================= +from __future__ import absolute_import, division, print_function, unicode_literals +# core +import codecs +from collections import defaultdict +try: + from collections.abc import MutableMapping +except ImportError: + # py2 compat + from collections import MutableMapping +from math import ceil, log as logf +import logging; log = logging.getLogger(__name__) +import pkg_resources +import os +# site +# pkg +from passlib import exc +from passlib.utils.compat import PY2, irange, itervalues, int_types +from passlib.utils import rng, getrandstr, to_unicode +from passlib.utils.decor import memoized_property +# local +__all__ = [ + "genword", "default_charsets", + "genphrase", "default_wordsets", +] + +#============================================================================= +# constants +#============================================================================= + +# XXX: rename / publically document this map? +entropy_aliases = dict( + # barest protection from throttled online attack + unsafe=12, + + # some protection from unthrottled online attack + weak=24, + + # some protection from offline attacks + fair=36, + + # reasonable protection from offline attacks + strong=48, + + # very good protection from offline attacks + secure=60, +) + +#============================================================================= +# internal helpers +#============================================================================= + +def _superclasses(obj, cls): + """return remaining classes in object's MRO after cls""" + mro = type(obj).__mro__ + return mro[mro.index(cls)+1:] + + +def _self_info_rate(source): + """ + returns 'rate of self-information' -- + i.e. average (per-symbol) entropy of the sequence **source**, + where probability of a given symbol occurring is calculated based on + the number of occurrences within the sequence itself. + + if all elements of the source are unique, this should equal ``log(len(source), 2)``. + + :arg source: + iterable containing 0+ symbols + (e.g. list of strings or ints, string of characters, etc). 
+ + :returns: + float bits of entropy + """ + try: + size = len(source) + except TypeError: + # if len() doesn't work, calculate size by summing counts later + size = None + counts = defaultdict(int) + for char in source: + counts[char] += 1 + if size is None: + values = counts.values() + size = sum(values) + else: + values = itervalues(counts) + if not size: + return 0 + # NOTE: the following performs ``- sum(value / size * logf(value / size, 2) for value in values)``, + # it just does so with as much pulled out of the sum() loop as possible... + return logf(size, 2) - sum(value * logf(value, 2) for value in values) / size + + +# def _total_self_info(source): +# """ +# return total self-entropy of a sequence +# (the average entropy per symbol * size of sequence) +# """ +# return _self_info_rate(source) * len(source) + + +def _open_asset_path(path, encoding=None): + """ + :param asset_path: + string containing absolute path to file, + or package-relative path using format + ``"python.module:relative/file/path"``. + + :returns: + filehandle opened in 'rb' mode + (unless encoding explicitly specified) + """ + if encoding: + return codecs.getreader(encoding)(_open_asset_path(path)) + if os.path.isabs(path): + return open(path, "rb") + package, sep, subpath = path.partition(":") + if not sep: + raise ValueError("asset path must be absolute file path " + "or use 'pkg.name:sub/path' format: %r" % (path,)) + return pkg_resources.resource_stream(package, subpath) + + +#: type aliases +_sequence_types = (list, tuple) +_set_types = (set, frozenset) + +#: set of elements that ensure_unique() has validated already. +_ensure_unique_cache = set() + + +def _ensure_unique(source, param="source"): + """ + helper for generators -- + Throws ValueError if source elements aren't unique. + Error message will display (abbreviated) repr of the duplicates in a string/list + """ + # check cache to speed things up for frozensets / tuples / strings + cache = _ensure_unique_cache + hashable = True + try: + if source in cache: + return True + except TypeError: + hashable = False + + # check if it has dup elements + if isinstance(source, _set_types) or len(set(source)) == len(source): + if hashable: + try: + cache.add(source) + except TypeError: + # XXX: under pypy, "list() in set()" above doesn't throw TypeError, + # but trying to add unhashable it to a set *does*. + pass + return True + + # build list of duplicate values + seen = set() + dups = set() + for elem in source: + (dups if elem in seen else seen).add(elem) + dups = sorted(dups) + trunc = 8 + if len(dups) > trunc: + trunc = 5 + dup_repr = ", ".join(repr(str(word)) for word in dups[:trunc]) + if len(dups) > trunc: + dup_repr += ", ... plus %d others" % (len(dups) - trunc) + + # throw error + raise ValueError("`%s` cannot contain duplicate elements: %s" % + (param, dup_repr)) + +#============================================================================= +# base generator class +#============================================================================= +class SequenceGenerator(object): + """ + Base class used by word & phrase generators. + + These objects take a series of options, corresponding + to those of the :func:`generate` function. + They act as callables which can be used to generate a password + or a list of 1+ passwords. They also expose some read-only + informational attributes. 
+ + Parameters + ---------- + :param entropy: + Optionally specify the amount of entropy the resulting passwords + should contain (as measured with respect to the generator itself). + This will be used to auto-calculate the required password size. + + :param length: + Optionally specify the length of password to generate, + measured as count of whatever symbols the subclass uses (characters or words). + Note if ``entropy`` requires a larger minimum length, + that will be used instead. + + :param rng: + Optionally provide a custom RNG source to use. + Should be an instance of :class:`random.Random`, + defaults to :class:`random.SystemRandom`. + + Attributes + ---------- + .. autoattribute:: length + .. autoattribute:: symbol_count + .. autoattribute:: entropy_per_symbol + .. autoattribute:: entropy + + Subclassing + ----------- + Subclasses must implement the ``.__next__()`` method, + and set ``.symbol_count`` before calling base ``__init__`` method. + """ + #============================================================================= + # instance attrs + #============================================================================= + + #: requested size of final password + length = None + + #: requested entropy of final password + requested_entropy = "strong" + + #: random number source to use + rng = rng + + #: number of potential symbols (must be filled in by subclass) + symbol_count = None + + #============================================================================= + # init + #============================================================================= + def __init__(self, entropy=None, length=None, rng=None, **kwds): + + # make sure subclass set things up correctly + assert self.symbol_count is not None, "subclass must set .symbol_count" + + # init length & requested entropy + if entropy is not None or length is None: + if entropy is None: + entropy = self.requested_entropy + entropy = entropy_aliases.get(entropy, entropy) + if entropy <= 0: + raise ValueError("`entropy` must be positive number") + min_length = int(ceil(entropy / self.entropy_per_symbol)) + if length is None or length < min_length: + length = min_length + + self.requested_entropy = entropy + + if length < 1: + raise ValueError("`length` must be positive integer") + self.length = length + + # init other common options + if rng is not None: + self.rng = rng + + # hand off to parent + if kwds and _superclasses(self, SequenceGenerator) == (object,): + raise TypeError("Unexpected keyword(s): %s" % ", ".join(kwds.keys())) + super(SequenceGenerator, self).__init__(**kwds) + + #============================================================================= + # informational helpers + #============================================================================= + + @memoized_property + def entropy_per_symbol(self): + """ + Average entropy per symbol (assuming all symbols have equal probability) + """ + return logf(self.symbol_count, 2) + + @memoized_property + def entropy(self): + """ + Effective entropy of generated passwords. + + This value will always be a multiple of :attr:`entropy_per_symbol`. + If entropy is specified in constructor, :attr:`length` will be chosen so + so that this value is the smallest multiple >= :attr:`requested_entropy`. 
+ """ + return self.length * self.entropy_per_symbol + + #============================================================================= + # generation + #============================================================================= + def __next__(self): + """main generation function, should create one password/phrase""" + raise NotImplementedError("implement in subclass") + + def __call__(self, returns=None): + """ + frontend used by genword() / genphrase() to create passwords + """ + if returns is None: + return next(self) + elif isinstance(returns, int_types): + return [next(self) for _ in irange(returns)] + elif returns is iter: + return self + else: + raise exc.ExpectedTypeError(returns, ", int, or ", "returns") + + def __iter__(self): + return self + + if PY2: + def next(self): + return self.__next__() + + #============================================================================= + # eoc + #============================================================================= + +#============================================================================= +# default charsets +#============================================================================= + +#: global dict of predefined characters sets +default_charsets = dict( + # ascii letters, digits, and some punctuation + ascii_72='0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%^&*?/', + + # ascii letters and digits + ascii_62='0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ', + + # ascii_50, without visually similar '1IiLl', '0Oo', '5S', '8B' + ascii_50='234679abcdefghjkmnpqrstuvwxyzACDEFGHJKMNPQRTUVWXYZ', + + # lower case hexadecimal + hex='0123456789abcdef', +) + +#============================================================================= +# password generator +#============================================================================= + +class WordGenerator(SequenceGenerator): + """ + Class which generates passwords by randomly choosing from a string of unique characters. + + Parameters + ---------- + :param chars: + custom character string to draw from. + + :param charset: + predefined charset to draw from. + + :param \\*\\*kwds: + all other keywords passed to the :class:`SequenceGenerator` parent class. + + Attributes + ---------- + .. autoattribute:: chars + .. autoattribute:: charset + .. 
autoattribute:: default_charsets + """ + #============================================================================= + # instance attrs + #============================================================================= + + #: Predefined character set in use (set to None for instances using custom 'chars') + charset = "ascii_62" + + #: string of chars to draw from -- usually filled in from charset + chars = None + + #============================================================================= + # init + #============================================================================= + def __init__(self, chars=None, charset=None, **kwds): + + # init chars and charset + if chars: + if charset: + raise TypeError("`chars` and `charset` are mutually exclusive") + else: + if not charset: + charset = self.charset + assert charset + chars = default_charsets[charset] + self.charset = charset + chars = to_unicode(chars, param="chars") + _ensure_unique(chars, param="chars") + self.chars = chars + + # hand off to parent + super(WordGenerator, self).__init__(**kwds) + # log.debug("WordGenerator(): entropy/char=%r", self.entropy_per_symbol) + + #============================================================================= + # informational helpers + #============================================================================= + + @memoized_property + def symbol_count(self): + return len(self.chars) + + #============================================================================= + # generation + #============================================================================= + + def __next__(self): + # XXX: could do things like optionally ensure certain character groups + # (e.g. letters & punctuation) are included + return getrandstr(self.rng, self.chars, self.length) + + #============================================================================= + # eoc + #============================================================================= + + +def genword(entropy=None, length=None, returns=None, **kwds): + """Generate one or more random passwords. + + This function uses :mod:`random.SystemRandom` to generate + one or more passwords using various character sets. + The complexity of the password can be specified + by size, or by the desired amount of entropy. + + Usage Example:: + + >>> # generate a random alphanumeric string with 48 bits of entropy (the default) + >>> from passlib import pwd + >>> pwd.genword() + 'DnBHvDjMK6' + + >>> # generate a random hexadecimal string with 52 bits of entropy + >>> pwd.genword(entropy=52, charset="hex") + '310f1a7ac793f' + + :param entropy: + Strength of resulting password, measured in 'guessing entropy' bits. + An appropriate **length** value will be calculated + based on the requested entropy amount, and the size of the character set. + + This can be a positive integer, or one of the following preset + strings: ``"weak"`` (24), ``"fair"`` (36), + ``"strong"`` (48), and ``"secure"`` (56). + + If neither this or **length** is specified, **entropy** will default + to ``"strong"`` (48). + + :param length: + Size of resulting password, measured in characters. + If omitted, the size is auto-calculated based on the **entropy** parameter. + + If both **entropy** and **length** are specified, + the stronger value will be used. + + :param returns: + Controls what this function returns: + + * If ``None`` (the default), this function will generate a single password. + * If an integer, this function will return a list containing that many passwords. 
+ * If the ``iter`` constant, will return an iterator that yields passwords. + + :param chars: + + Optionally specify custom string of characters to use when randomly + generating a password. This option cannot be combined with **charset**. + + :param charset: + + The predefined character set to draw from (if not specified by **chars**). + There are currently four presets available: + + * ``"ascii_62"`` (the default) -- all digits and ascii upper & lowercase letters. + Provides ~5.95 entropy per character. + + * ``"ascii_50"`` -- subset which excludes visually similar characters + (``1IiLl0Oo5S8B``). Provides ~5.64 entropy per character. + + * ``"ascii_72"`` -- all digits and ascii upper & lowercase letters, + as well as some punctuation. Provides ~6.17 entropy per character. + + * ``"hex"`` -- Lower case hexadecimal. Providers 4 bits of entropy per character. + + :returns: + :class:`!unicode` string containing randomly generated password; + or list of 1+ passwords if :samp:`returns={int}` is specified. + """ + gen = WordGenerator(length=length, entropy=entropy, **kwds) + return gen(returns) + +#============================================================================= +# default wordsets +#============================================================================= + +def _load_wordset(asset_path): + """ + load wordset from compressed datafile within package data. + file should be utf-8 encoded + + :param asset_path: + string containing absolute path to wordset file, + or "python.module:relative/file/path". + + :returns: + tuple of words, as loaded from specified words file. + """ + # open resource file, convert to tuple of words (strip blank lines & ws) + with _open_asset_path(asset_path, "utf-8") as fh: + gen = (word.strip() for word in fh) + words = tuple(word for word in gen if word) + + # NOTE: works but not used + # # detect if file uses " " format, and strip numeric prefix + # def extract(row): + # idx, word = row.replace("\t", " ").split(" ", 1) + # if not idx.isdigit(): + # raise ValueError("row is not dice index + word") + # return word + # try: + # extract(words[-1]) + # except ValueError: + # pass + # else: + # words = tuple(extract(word) for word in words) + + log.debug("loaded %d-element wordset from %r", len(words), asset_path) + return words + + +class WordsetDict(MutableMapping): + """ + Special mapping used to store dictionary of wordsets. + Different from a regular dict in that some wordsets + may be lazy-loaded from an asset path. + """ + + #: dict of key -> asset path + paths = None + + #: dict of key -> value + _loaded = None + + def __init__(self, *args, **kwds): + self.paths = {} + self._loaded = {} + super(WordsetDict, self).__init__(*args, **kwds) + + def __getitem__(self, key): + try: + return self._loaded[key] + except KeyError: + pass + path = self.paths[key] + value = self._loaded[key] = _load_wordset(path) + return value + + def set_path(self, key, path): + """ + set asset path to lazy-load wordset from. 
+ """ + self.paths[key] = path + + def __setitem__(self, key, value): + self._loaded[key] = value + + def __delitem__(self, key): + if key in self: + del self._loaded[key] + self.paths.pop(key, None) + else: + del self.paths[key] + + @property + def _keyset(self): + keys = set(self._loaded) + keys.update(self.paths) + return keys + + def __iter__(self): + return iter(self._keyset) + + def __len__(self): + return len(self._keyset) + + # NOTE: speeds things up, and prevents contains from lazy-loading + def __contains__(self, key): + return key in self._loaded or key in self.paths + + +#: dict of predefined word sets. +#: key is name of wordset, value should be sequence of words. +default_wordsets = WordsetDict() + +# register the wordsets built into passlib +for name in "eff_long eff_short eff_prefixed bip39".split(): + default_wordsets.set_path(name, "passlib:_data/wordsets/%s.txt" % name) + +#============================================================================= +# passphrase generator +#============================================================================= +class PhraseGenerator(SequenceGenerator): + """class which generates passphrases by randomly choosing + from a list of unique words. + + :param wordset: + wordset to draw from. + :param preset: + name of preset wordlist to use instead of ``wordset``. + :param spaces: + whether to insert spaces between words in output (defaults to ``True``). + :param \\*\\*kwds: + all other keywords passed to the :class:`SequenceGenerator` parent class. + + .. autoattribute:: wordset + """ + #============================================================================= + # instance attrs + #============================================================================= + + #: predefined wordset to use + wordset = "eff_long" + + #: list of words to draw from + words = None + + #: separator to use when joining words + sep = " " + + #============================================================================= + # init + #============================================================================= + def __init__(self, wordset=None, words=None, sep=None, **kwds): + + # load wordset + if words is not None: + if wordset is not None: + raise TypeError("`words` and `wordset` are mutually exclusive") + else: + if wordset is None: + wordset = self.wordset + assert wordset + words = default_wordsets[wordset] + self.wordset = wordset + + # init words + if not isinstance(words, _sequence_types): + words = tuple(words) + _ensure_unique(words, param="words") + self.words = words + + # init separator + if sep is None: + sep = self.sep + sep = to_unicode(sep, param="sep") + self.sep = sep + + # hand off to parent + super(PhraseGenerator, self).__init__(**kwds) + ##log.debug("PhraseGenerator(): entropy/word=%r entropy/char=%r min_chars=%r", + ## self.entropy_per_symbol, self.entropy_per_char, self.min_chars) + + #============================================================================= + # informational helpers + #============================================================================= + + @memoized_property + def symbol_count(self): + return len(self.words) + + #============================================================================= + # generation + #============================================================================= + + def __next__(self): + words = (self.rng.choice(self.words) for _ in irange(self.length)) + return self.sep.join(words) + + #============================================================================= + # eoc + 
#============================================================================= + + +def genphrase(entropy=None, length=None, returns=None, **kwds): + """Generate one or more random password / passphrases. + + This function uses :mod:`random.SystemRandom` to generate + one or more passwords; it can be configured to generate + alphanumeric passwords, or full english phrases. + The complexity of the password can be specified + by size, or by the desired amount of entropy. + + Usage Example:: + + >>> # generate random phrase with 48 bits of entropy + >>> from passlib import pwd + >>> pwd.genphrase() + 'gangly robbing salt shove' + + >>> # generate a random phrase with 52 bits of entropy + >>> # using a particular wordset + >>> pwd.genword(entropy=52, wordset="bip39") + 'wheat dilemma reward rescue diary' + + :param entropy: + Strength of resulting password, measured in 'guessing entropy' bits. + An appropriate **length** value will be calculated + based on the requested entropy amount, and the size of the word set. + + This can be a positive integer, or one of the following preset + strings: ``"weak"`` (24), ``"fair"`` (36), + ``"strong"`` (48), and ``"secure"`` (56). + + If neither this or **length** is specified, **entropy** will default + to ``"strong"`` (48). + + :param length: + Length of resulting password, measured in words. + If omitted, the size is auto-calculated based on the **entropy** parameter. + + If both **entropy** and **length** are specified, + the stronger value will be used. + + :param returns: + Controls what this function returns: + + * If ``None`` (the default), this function will generate a single password. + * If an integer, this function will return a list containing that many passwords. + * If the ``iter`` builtin, will return an iterator that yields passwords. + + :param words: + + Optionally specifies a list/set of words to use when randomly generating a passphrase. + This option cannot be combined with **wordset**. + + :param wordset: + + The predefined word set to draw from (if not specified by **words**). + There are currently four presets available: + + ``"eff_long"`` (the default) + + Wordset containing 7776 english words of ~7 letters. + Constructed by the EFF, it offers ~12.9 bits of entropy per word. + + This wordset (and the other ``"eff_"`` wordsets) + were `created by the EFF `_ + to aid in generating passwords. See their announcement page + for more details about the design & properties of these wordsets. + + ``"eff_short"`` + + Wordset containing 1296 english words of ~4.5 letters. + Constructed by the EFF, it offers ~10.3 bits of entropy per word. + + ``"eff_prefixed"`` + + Wordset containing 1296 english words of ~8 letters, + selected so that they each have a unique 3-character prefix. + Constructed by the EFF, it offers ~10.3 bits of entropy per word. + + ``"bip39"`` + + Wordset of 2048 english words of ~5 letters, + selected so that they each have a unique 4-character prefix. + Published as part of Bitcoin's `BIP 39 `_, + this wordset has exactly 11 bits of entropy per word. + + This list offers words that are typically shorter than ``"eff_long"`` + (at the cost of slightly less entropy); and much shorter than + ``"eff_prefixed"`` (at the cost of a longer unique prefix). + + :param sep: + Optional separator to use when joining words. + Defaults to ``" "`` (a space), but can be an empty string, a hyphen, etc. 
+ + :returns: + :class:`!unicode` string containing randomly generated passphrase; + or list of 1+ passphrases if :samp:`returns={int}` is specified. + """ + gen = PhraseGenerator(entropy=entropy, length=length, **kwds) + return gen(returns) + +#============================================================================= +# strength measurement +# +# NOTE: +# for a little while, had rough draft of password strength measurement alg here. +# but not sure if there's value in yet another measurement algorithm, +# that's not just duplicating the effort of libraries like zxcbn. +# may revive it later, but for now, leaving some refs to others out there: +# * NIST 800-63 has simple alg +# * zxcvbn (https://tech.dropbox.com/2012/04/zxcvbn-realistic-password-strength-estimation/) +# might also be good, and has approach similar to composite approach i was already thinking about, +# but much more well thought out. +# * passfault (https://github.com/c-a-m/passfault) looks thorough, +# but may have licensing issues, plus porting to python looks like very big job :( +# * give a look at running things through zlib - might be able to cheaply +# catch extra redundancies. +#============================================================================= + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/registry.py b/ansible/lib/python3.11/site-packages/passlib/registry.py new file mode 100644 index 000000000..9964b257e --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/registry.py @@ -0,0 +1,547 @@ +"""passlib.registry - registry for password hash handlers""" +#============================================================================= +# imports +#============================================================================= +# core +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# pkg +from passlib import exc +from passlib.exc import ExpectedTypeError, PasslibWarning +from passlib.ifc import PasswordHash +from passlib.utils import ( + is_crypt_handler, has_crypt as os_crypt_present, + unix_crypt_schemes as os_crypt_schemes, +) +from passlib.utils.compat import unicode_or_str +from passlib.utils.decor import memoize_single_value +# local +__all__ = [ + "register_crypt_handler_path", + "register_crypt_handler", + "get_crypt_handler", + "list_crypt_handlers", +] + +#============================================================================= +# proxy object used in place of 'passlib.hash' module +#============================================================================= +class _PasslibRegistryProxy(object): + """proxy module passlib.hash + + this module is in fact an object which lazy-loads + the requested password hash algorithm from wherever it has been stored. + it acts as a thin wrapper around :func:`passlib.registry.get_crypt_handler`. + """ + __name__ = "passlib.hash" + __package__ = None + + def __getattr__(self, attr): + if attr.startswith("_"): + raise AttributeError("missing attribute: %r" % (attr,)) + handler = get_crypt_handler(attr, None) + if handler: + return handler + else: + raise AttributeError("unknown password hash: %r" % (attr,)) + + def __setattr__(self, attr, value): + if attr.startswith("_"): + # writing to private attributes should behave normally. + # (required so GAE can write to the __loader__ attribute). 
+ object.__setattr__(self, attr, value) + else: + # writing to public attributes should be treated + # as attempting to register a handler. + register_crypt_handler(value, _attr=attr) + + def __repr__(self): + return "" + + def __dir__(self): + # this adds in lazy-loaded handler names, + # otherwise this is the standard dir() implementation. + attrs = set(dir(self.__class__)) + attrs.update(self.__dict__) + attrs.update(_locations) + return sorted(attrs) + +# create single instance - available publically as 'passlib.hash' +_proxy = _PasslibRegistryProxy() + +#============================================================================= +# internal registry state +#============================================================================= + +# singleton uses to detect omitted keywords +_UNSET = object() + +# dict mapping name -> loaded handlers (just uses proxy object's internal dict) +_handlers = _proxy.__dict__ + +# dict mapping names -> import path for lazy loading. +# * import path should be "module.path" or "module.path:attr" +# * if attr omitted, "name" used as default. +_locations = dict( + # NOTE: this is a hardcoded list of the handlers built into passlib, + # applications should call register_crypt_handler_path() + apr_md5_crypt = "passlib.handlers.md5_crypt", + argon2 = "passlib.handlers.argon2", + atlassian_pbkdf2_sha1 = "passlib.handlers.pbkdf2", + bcrypt = "passlib.handlers.bcrypt", + bcrypt_sha256 = "passlib.handlers.bcrypt", + bigcrypt = "passlib.handlers.des_crypt", + bsd_nthash = "passlib.handlers.windows", + bsdi_crypt = "passlib.handlers.des_crypt", + cisco_pix = "passlib.handlers.cisco", + cisco_asa = "passlib.handlers.cisco", + cisco_type7 = "passlib.handlers.cisco", + cta_pbkdf2_sha1 = "passlib.handlers.pbkdf2", + crypt16 = "passlib.handlers.des_crypt", + des_crypt = "passlib.handlers.des_crypt", + django_argon2 = "passlib.handlers.django", + django_bcrypt = "passlib.handlers.django", + django_bcrypt_sha256 = "passlib.handlers.django", + django_pbkdf2_sha256 = "passlib.handlers.django", + django_pbkdf2_sha1 = "passlib.handlers.django", + django_salted_sha1 = "passlib.handlers.django", + django_salted_md5 = "passlib.handlers.django", + django_des_crypt = "passlib.handlers.django", + django_disabled = "passlib.handlers.django", + dlitz_pbkdf2_sha1 = "passlib.handlers.pbkdf2", + fshp = "passlib.handlers.fshp", + grub_pbkdf2_sha512 = "passlib.handlers.pbkdf2", + hex_md4 = "passlib.handlers.digests", + hex_md5 = "passlib.handlers.digests", + hex_sha1 = "passlib.handlers.digests", + hex_sha256 = "passlib.handlers.digests", + hex_sha512 = "passlib.handlers.digests", + htdigest = "passlib.handlers.digests", + ldap_plaintext = "passlib.handlers.ldap_digests", + ldap_md5 = "passlib.handlers.ldap_digests", + ldap_sha1 = "passlib.handlers.ldap_digests", + ldap_hex_md5 = "passlib.handlers.roundup", + ldap_hex_sha1 = "passlib.handlers.roundup", + ldap_salted_md5 = "passlib.handlers.ldap_digests", + ldap_salted_sha1 = "passlib.handlers.ldap_digests", + ldap_salted_sha256 = "passlib.handlers.ldap_digests", + ldap_salted_sha512 = "passlib.handlers.ldap_digests", + ldap_des_crypt = "passlib.handlers.ldap_digests", + ldap_bsdi_crypt = "passlib.handlers.ldap_digests", + ldap_md5_crypt = "passlib.handlers.ldap_digests", + ldap_bcrypt = "passlib.handlers.ldap_digests", + ldap_sha1_crypt = "passlib.handlers.ldap_digests", + ldap_sha256_crypt = "passlib.handlers.ldap_digests", + ldap_sha512_crypt = "passlib.handlers.ldap_digests", + ldap_pbkdf2_sha1 = "passlib.handlers.pbkdf2", + 
ldap_pbkdf2_sha256 = "passlib.handlers.pbkdf2", + ldap_pbkdf2_sha512 = "passlib.handlers.pbkdf2", + lmhash = "passlib.handlers.windows", + md5_crypt = "passlib.handlers.md5_crypt", + msdcc = "passlib.handlers.windows", + msdcc2 = "passlib.handlers.windows", + mssql2000 = "passlib.handlers.mssql", + mssql2005 = "passlib.handlers.mssql", + mysql323 = "passlib.handlers.mysql", + mysql41 = "passlib.handlers.mysql", + nthash = "passlib.handlers.windows", + oracle10 = "passlib.handlers.oracle", + oracle11 = "passlib.handlers.oracle", + pbkdf2_sha1 = "passlib.handlers.pbkdf2", + pbkdf2_sha256 = "passlib.handlers.pbkdf2", + pbkdf2_sha512 = "passlib.handlers.pbkdf2", + phpass = "passlib.handlers.phpass", + plaintext = "passlib.handlers.misc", + postgres_md5 = "passlib.handlers.postgres", + roundup_plaintext = "passlib.handlers.roundup", + scram = "passlib.handlers.scram", + scrypt = "passlib.handlers.scrypt", + sha1_crypt = "passlib.handlers.sha1_crypt", + sha256_crypt = "passlib.handlers.sha2_crypt", + sha512_crypt = "passlib.handlers.sha2_crypt", + sun_md5_crypt = "passlib.handlers.sun_md5_crypt", + unix_disabled = "passlib.handlers.misc", + unix_fallback = "passlib.handlers.misc", +) + +# master regexp for detecting valid handler names +_name_re = re.compile("^[a-z][a-z0-9_]+[a-z0-9]$") + +# names which aren't allowed for various reasons +# (mainly keyword conflicts in CryptContext) +_forbidden_names = frozenset(["onload", "policy", "context", "all", + "default", "none", "auto"]) + +#============================================================================= +# registry frontend functions +#============================================================================= +def _validate_handler_name(name): + """helper to validate handler name + + :raises ValueError: + * if empty name + * if name not lower case + * if name contains double underscores + * if name is reserved (e.g. ``context``, ``all``). + """ + if not name: + raise ValueError("handler name cannot be empty: %r" % (name,)) + if name.lower() != name: + raise ValueError("name must be lower-case: %r" % (name,)) + if not _name_re.match(name): + raise ValueError("invalid name (must be 3+ characters, " + " begin with a-z, and contain only underscore, a-z, " + "0-9): %r" % (name,)) + if '__' in name: + raise ValueError("name may not contain double-underscores: %r" % + (name,)) + if name in _forbidden_names: + raise ValueError("that name is not allowed: %r" % (name,)) + return True + +def register_crypt_handler_path(name, path): + """register location to lazy-load handler when requested. + + custom hashes may be registered via :func:`register_crypt_handler`, + or they may be registered by this function, + which will delay actually importing and loading the handler + until a call to :func:`get_crypt_handler` is made for the specified name. + + :arg name: name of handler + :arg path: module import path + + the specified module path should contain a password hash handler + called :samp:`{name}`, or the path may contain a colon, + specifying the module and module attribute to use. 
+ for example, the following would cause ``get_handler("myhash")`` to look + for a class named ``myhash`` within the ``myapp.helpers`` module:: + + >>> from passlib.registry import registry_crypt_handler_path + >>> registry_crypt_handler_path("myhash", "myapp.helpers") + + ...while this form would cause ``get_handler("myhash")`` to look + for a class name ``MyHash`` within the ``myapp.helpers`` module:: + + >>> from passlib.registry import registry_crypt_handler_path + >>> registry_crypt_handler_path("myhash", "myapp.helpers:MyHash") + """ + # validate name + _validate_handler_name(name) + + # validate path + if path.startswith("."): + raise ValueError("path cannot start with '.'") + if ':' in path: + if path.count(':') > 1: + raise ValueError("path cannot have more than one ':'") + if path.find('.', path.index(':')) > -1: + raise ValueError("path cannot have '.' to right of ':'") + + # store location + _locations[name] = path + log.debug("registered path to %r handler: %r", name, path) + +def register_crypt_handler(handler, force=False, _attr=None): + """register password hash handler. + + this method immediately registers a handler with the internal passlib registry, + so that it will be returned by :func:`get_crypt_handler` when requested. + + :arg handler: the password hash handler to register + :param force: force override of existing handler (defaults to False) + :param _attr: + [internal kwd] if specified, ensures ``handler.name`` + matches this value, or raises :exc:`ValueError`. + + :raises TypeError: + if the specified object does not appear to be a valid handler. + + :raises ValueError: + if the specified object's name (or other required attributes) + contain invalid values. + + :raises KeyError: + if a (different) handler was already registered with + the same name, and ``force=True`` was not specified. + """ + # validate handler + if not is_crypt_handler(handler): + raise ExpectedTypeError(handler, "password hash handler", "handler") + if not handler: + raise AssertionError("``bool(handler)`` must be True") + + # validate name + name = handler.name + _validate_handler_name(name) + if _attr and _attr != name: + raise ValueError("handlers must be stored only under their own name (%r != %r)" % + (_attr, name)) + + # check for existing handler + other = _handlers.get(name) + if other: + if other is handler: + log.debug("same %r handler already registered: %r", name, handler) + return + elif force: + log.warning("overriding previously registered %r handler: %r", + name, other) + else: + raise KeyError("another %r handler has already been registered: %r" % + (name, other)) + + # register handler + _handlers[name] = handler + log.debug("registered %r handler: %r", name, handler) + +def get_crypt_handler(name, default=_UNSET): + """return handler for specified password hash scheme. + + this method looks up a handler for the specified scheme. + if the handler is not already loaded, + it checks if the location is known, and loads it first. + + :arg name: name of handler to return + :param default: optional default value to return if no handler with specified name is found. + + :raises KeyError: if no handler matching that name is found, and no default specified, a KeyError will be raised. + + :returns: handler attached to name, or default value (if specified). + """ + # catch invalid names before we check _handlers, + # since it's a module dict, and exposes things like __package__, etc. 
+ if name.startswith("_"): + if default is _UNSET: + raise KeyError("invalid handler name: %r" % (name,)) + else: + return default + + # check if handler is already loaded + try: + return _handlers[name] + except KeyError: + pass + + # normalize name (and if changed, check dict again) + assert isinstance(name, unicode_or_str), "name must be string instance" + alt = name.replace("-","_").lower() + if alt != name: + warn("handler names should be lower-case, and use underscores instead " + "of hyphens: %r => %r" % (name, alt), PasslibWarning, + stacklevel=2) + name = alt + + # try to load using new name + try: + return _handlers[name] + except KeyError: + pass + + # check if lazy load mapping has been specified for this driver + path = _locations.get(name) + if path: + if ':' in path: + modname, modattr = path.split(":") + else: + modname, modattr = path, name + ##log.debug("loading %r handler from path: '%s:%s'", name, modname, modattr) + + # try to load the module - any import errors indicate runtime config, usually + # either missing package, or bad path provided to register_crypt_handler_path() + mod = __import__(modname, fromlist=[modattr], level=0) + + # first check if importing module triggered register_crypt_handler(), + # (this is discouraged due to its magical implicitness) + handler = _handlers.get(name) + if handler: + # XXX: issue deprecation warning here? + assert is_crypt_handler(handler), "unexpected object: name=%r object=%r" % (name, handler) + return handler + + # then get real handler & register it + handler = getattr(mod, modattr) + register_crypt_handler(handler, _attr=name) + return handler + + # fail! + if default is _UNSET: + raise KeyError("no crypt handler found for algorithm: %r" % (name,)) + else: + return default + +def list_crypt_handlers(loaded_only=False): + """return sorted list of all known crypt handler names. + + :param loaded_only: if ``True``, only returns names of handlers which have actually been loaded. + + :returns: list of names of all known handlers + """ + names = set(_handlers) + if not loaded_only: + names.update(_locations) + # strip private attrs out of namespace and sort. + # TODO: make _handlers a separate list, so we don't have module namespace mixed in. + return sorted(name for name in names if not name.startswith("_")) + +# NOTE: these two functions mainly exist just for the unittests... + +def _has_crypt_handler(name, loaded_only=False): + """check if handler name is known. + + this is only useful for two cases: + + * quickly checking if handler has already been loaded + * checking if handler exists, without actually loading it + + :arg name: name of handler + :param loaded_only: if ``True``, returns False if handler exists but hasn't been loaded + """ + return (name in _handlers) or (not loaded_only and name in _locations) + +def _unload_handler_name(name, locations=True): + """unloads a handler from the registry. + + .. warning:: + + this is an internal function, + used only by the unittests. + + if loaded handler is found with specified name, it's removed. + if path to lazy load handler is found, it's removed. + + missing names are a noop. 
+ + :arg name: name of handler to unload + :param locations: if False, won't purge registered handler locations (default True) + """ + if name in _handlers: + del _handlers[name] + if locations and name in _locations: + del _locations[name] + +#============================================================================= +# inspection helpers +#============================================================================= + +#------------------------------------------------------------------ +# general +#------------------------------------------------------------------ + +# TODO: needs UTs +def _resolve(hasher, param="value"): + """ + internal helper to resolve argument to hasher object + """ + if is_crypt_handler(hasher): + return hasher + elif isinstance(hasher, unicode_or_str): + return get_crypt_handler(hasher) + else: + raise exc.ExpectedTypeError(hasher, unicode_or_str, param) + + +#: backend aliases +ANY = "any" +BUILTIN = "builtin" +OS_CRYPT = "os_crypt" + +# TODO: needs UTs +def has_backend(hasher, backend=ANY, safe=False): + """ + Test if specified backend is available for hasher. + + :param hasher: + Hasher name or object. + + :param backend: + Name of backend, or ``"any"`` if any backend will do. + For hashers without multiple backends, will pretend + they have a single backend named ``"builtin"``. + + :param safe: + By default, throws error if backend is unknown. + If ``safe=True``, will just return false value. + + :raises ValueError: + * if hasher name is unknown. + * if backend is unknown to hasher, and safe=False. + + :return: + True if backend available, False if not available, + and None if unknown + safe=True. + """ + hasher = _resolve(hasher) + + if backend == ANY: + if not hasattr(hasher, "get_backend"): + # single backend, assume it's loaded + return True + + # multiple backends, check at least one is loadable + try: + hasher.get_backend() + return True + except exc.MissingBackendError: + return False + + # test for specific backend + if hasattr(hasher, "has_backend"): + # multiple backends + if safe and backend not in hasher.backends: + return None + return hasher.has_backend(backend) + + # single builtin backend + if backend == BUILTIN: + return True + elif safe: + return None + else: + raise exc.UnknownBackendError(hasher, backend) + +#------------------------------------------------------------------ +# os crypt +#------------------------------------------------------------------ + +# TODO: move unix_crypt_schemes list to here. +# os_crypt_schemes -- alias for unix_crypt_schemes above + + +# TODO: needs UTs +@memoize_single_value +def get_supported_os_crypt_schemes(): + """ + return tuple of schemes which :func:`crypt.crypt` natively supports. + """ + if not os_crypt_present: + return () + cache = tuple(name for name in os_crypt_schemes + if get_crypt_handler(name).has_backend(OS_CRYPT)) + if not cache: # pragma: no cover -- sanity check + # no idea what OS this could happen on... + import platform + warn("crypt.crypt() function is present, but doesn't support any " + "formats known to passlib! (system=%r release=%r)" % + (platform.system(), platform.release()), + exc.PasslibRuntimeWarning) + return cache + + +# TODO: needs UTs +def has_os_crypt_support(hasher): + """ + check if hash is supported by native :func:`crypt.crypt` function. + if :func:`crypt.crypt` is not present, will always return False. + + :param hasher: + name or hasher object. + + :returns bool: + True if hash format is supported by OS, else False. 
+ """ + return os_crypt_present and has_backend(hasher, OS_CRYPT, safe=True) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__init__.py b/ansible/lib/python3.11/site-packages/passlib/tests/__init__.py new file mode 100644 index 000000000..389da76e1 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/__init__.py @@ -0,0 +1 @@ +"""passlib tests""" diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__main__.py b/ansible/lib/python3.11/site-packages/passlib/tests/__main__.py new file mode 100644 index 000000000..242457684 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/__main__.py @@ -0,0 +1,6 @@ +import os +from nose import run +run( + defaultTest=os.path.dirname(__file__), +) + diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/__init__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 000000000..02345292e Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/__init__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/__main__.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/__main__.cpython-311.pyc new file mode 100644 index 000000000..8394ec12e Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/__main__.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/_test_bad_register.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/_test_bad_register.cpython-311.pyc new file mode 100644 index 000000000..78aaca68a Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/_test_bad_register.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/backports.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/backports.cpython-311.pyc new file mode 100644 index 000000000..ad7638d02 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/backports.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_apache.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_apache.cpython-311.pyc new file mode 100644 index 000000000..524f64d63 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_apache.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_apps.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_apps.cpython-311.pyc new file mode 100644 index 000000000..3c30bd508 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_apps.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_context.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_context.cpython-311.pyc new file mode 100644 index 000000000..8b469d152 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_context.cpython-311.pyc differ diff --git 
a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_context_deprecated.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_context_deprecated.cpython-311.pyc new file mode 100644 index 000000000..05d420bda Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_context_deprecated.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_crypto_builtin_md4.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_crypto_builtin_md4.cpython-311.pyc new file mode 100644 index 000000000..eb9d20d8a Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_crypto_builtin_md4.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_crypto_des.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_crypto_des.cpython-311.pyc new file mode 100644 index 000000000..d252bfaf9 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_crypto_des.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_crypto_digest.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_crypto_digest.cpython-311.pyc new file mode 100644 index 000000000..045fb9e14 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_crypto_digest.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_crypto_scrypt.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_crypto_scrypt.cpython-311.pyc new file mode 100644 index 000000000..0a2290ac1 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_crypto_scrypt.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_ext_django.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_ext_django.cpython-311.pyc new file mode 100644 index 000000000..20b1e9ae4 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_ext_django.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_ext_django_source.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_ext_django_source.cpython-311.pyc new file mode 100644 index 000000000..6da1f05be Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_ext_django_source.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers.cpython-311.pyc new file mode 100644 index 000000000..d02f1e166 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_argon2.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_argon2.cpython-311.pyc new file mode 100644 index 000000000..d8e1bafb5 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_argon2.cpython-311.pyc differ diff --git 
a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_bcrypt.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_bcrypt.cpython-311.pyc new file mode 100644 index 000000000..d55995f2f Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_bcrypt.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_cisco.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_cisco.cpython-311.pyc new file mode 100644 index 000000000..dbbf28a84 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_cisco.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_django.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_django.cpython-311.pyc new file mode 100644 index 000000000..8a4c68079 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_django.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_pbkdf2.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_pbkdf2.cpython-311.pyc new file mode 100644 index 000000000..3eacccaad Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_pbkdf2.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_scrypt.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_scrypt.cpython-311.pyc new file mode 100644 index 000000000..ab0a19767 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_handlers_scrypt.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_hosts.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_hosts.cpython-311.pyc new file mode 100644 index 000000000..4b195b85e Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_hosts.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_pwd.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_pwd.cpython-311.pyc new file mode 100644 index 000000000..f1d0a74a9 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_pwd.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_registry.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_registry.cpython-311.pyc new file mode 100644 index 000000000..9994a41fe Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_registry.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_totp.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_totp.cpython-311.pyc new file mode 100644 index 000000000..35abf1788 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_totp.cpython-311.pyc differ diff --git 
a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_utils.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_utils.cpython-311.pyc new file mode 100644 index 000000000..d1450038f Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_utils.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_utils_handlers.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_utils_handlers.cpython-311.pyc new file mode 100644 index 000000000..999f3fe1a Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_utils_handlers.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_utils_md4.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_utils_md4.cpython-311.pyc new file mode 100644 index 000000000..80d34976b Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_utils_md4.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_utils_pbkdf2.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_utils_pbkdf2.cpython-311.pyc new file mode 100644 index 000000000..b08e8ee8a Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_utils_pbkdf2.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_win32.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_win32.cpython-311.pyc new file mode 100644 index 000000000..4b8a95bc8 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/test_win32.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/tox_support.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/tox_support.cpython-311.pyc new file mode 100644 index 000000000..90322983e Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/tox_support.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/utils.cpython-311.pyc b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/utils.cpython-311.pyc new file mode 100644 index 000000000..5e091209d Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/__pycache__/utils.cpython-311.pyc differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/_test_bad_register.py b/ansible/lib/python3.11/site-packages/passlib/tests/_test_bad_register.py new file mode 100644 index 000000000..f0683fcc3 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/_test_bad_register.py @@ -0,0 +1,15 @@ +"""helper for method in test_registry.py""" +from passlib.registry import register_crypt_handler +import passlib.utils.handlers as uh + +class dummy_bad(uh.StaticHandler): + name = "dummy_bad" + +class alt_dummy_bad(uh.StaticHandler): + name = "dummy_bad" + +# NOTE: if passlib.tests is being run from symlink (e.g. via gaeunit), +# this module may be imported a second time as test._test_bad_registry. +# we don't want it to do anything in that case. 
+if __name__.startswith("passlib.tests"): + register_crypt_handler(alt_dummy_bad) diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/backports.py b/ansible/lib/python3.11/site-packages/passlib/tests/backports.py new file mode 100644 index 000000000..5058cec6c --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/backports.py @@ -0,0 +1,67 @@ +"""backports of needed unittest2 features""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import re +import sys +##from warnings import warn +# site +# pkg +from passlib.utils.compat import PY26 +# local +__all__ = [ + "TestCase", + "unittest", + # TODO: deprecate these exports in favor of "unittest.XXX" + "skip", "skipIf", "skipUnless", +] + +#============================================================================= +# import latest unittest module available +#============================================================================= +try: + import unittest2 as unittest +except ImportError: + if PY26: + raise ImportError("Passlib's tests require 'unittest2' under Python 2.6 (as of Passlib 1.7)") + # python 2.7 and python 3.2 both have unittest2 features (at least, the ones we use) + import unittest + +#============================================================================= +# unittest aliases +#============================================================================= +skip = unittest.skip +skipIf = unittest.skipIf +skipUnless = unittest.skipUnless +SkipTest = unittest.SkipTest + +#============================================================================= +# custom test harness +#============================================================================= +class TestCase(unittest.TestCase): + """backports a number of unittest2 features in TestCase""" + + #=================================================================== + # backport some unittest2 names + #=================================================================== + + #--------------------------------------------------------------- + # backport assertRegex() alias from 3.2 to 2.7 + # was present in 2.7 under an alternate name + #--------------------------------------------------------------- + if not hasattr(unittest.TestCase, "assertRegex"): + assertRegex = unittest.TestCase.assertRegexpMatches + + if not hasattr(unittest.TestCase, "assertRaisesRegex"): + assertRaisesRegex = unittest.TestCase.assertRaisesRegexp + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/sample1.cfg b/ansible/lib/python3.11/site-packages/passlib/tests/sample1.cfg new file mode 100644 index 000000000..56e3ae8e7 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/sample1.cfg @@ -0,0 +1,9 @@ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +all__vary_rounds = 0.1 +bsdi_crypt__default_rounds = 25001 +bsdi_crypt__max_rounds = 30001 +sha512_crypt__max_rounds = 50000 +sha512_crypt__min_rounds = 40000 + diff --git 
a/ansible/lib/python3.11/site-packages/passlib/tests/sample1b.cfg b/ansible/lib/python3.11/site-packages/passlib/tests/sample1b.cfg new file mode 100644 index 000000000..542a6036b --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/sample1b.cfg @@ -0,0 +1,9 @@ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +all__vary_rounds = 0.1 +bsdi_crypt__default_rounds = 25001 +bsdi_crypt__max_rounds = 30001 +sha512_crypt__max_rounds = 50000 +sha512_crypt__min_rounds = 40000 + diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/sample1c.cfg b/ansible/lib/python3.11/site-packages/passlib/tests/sample1c.cfg new file mode 100644 index 000000000..a5033eb90 Binary files /dev/null and b/ansible/lib/python3.11/site-packages/passlib/tests/sample1c.cfg differ diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/sample_config_1s.cfg b/ansible/lib/python3.11/site-packages/passlib/tests/sample_config_1s.cfg new file mode 100644 index 000000000..495a13eae --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/sample_config_1s.cfg @@ -0,0 +1,8 @@ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +all.vary_rounds = 10%% +bsdi_crypt.max_rounds = 30000 +bsdi_crypt.default_rounds = 25000 +sha512_crypt.max_rounds = 50000 +sha512_crypt.min_rounds = 40000 diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_apache.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_apache.py new file mode 100644 index 000000000..198b4250d --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_apache.py @@ -0,0 +1,769 @@ +"""tests for passlib.apache -- (c) Assurance Technologies 2008-2011""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +from logging import getLogger +import os +import subprocess +# site +# pkg +from passlib import apache, registry +from passlib.exc import MissingBackendError +from passlib.utils.compat import irange +from passlib.tests.backports import unittest +from passlib.tests.utils import TestCase, get_file, set_file, ensure_mtime_changed +from passlib.utils.compat import u +from passlib.utils import to_bytes +from passlib.utils.handlers import to_unicode_for_identify +# module +log = getLogger(__name__) + +#============================================================================= +# helpers +#============================================================================= + +def backdate_file_mtime(path, offset=10): + """backdate file's mtime by specified amount""" + # NOTE: this is used so we can test code which detects mtime changes, + # without having to actually *pause* for that long. 
+ atime = os.path.getatime(path) + mtime = os.path.getmtime(path)-offset + os.utime(path, (atime, mtime)) + +#============================================================================= +# detect external HTPASSWD tool +#============================================================================= + + +htpasswd_path = os.environ.get("PASSLIB_TEST_HTPASSWD_PATH") or "htpasswd" + + +def _call_htpasswd(args, stdin=None): + """ + helper to run htpasswd cmd + """ + if stdin is not None: + stdin = stdin.encode("utf-8") + proc = subprocess.Popen([htpasswd_path] + args, stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, stdin=subprocess.PIPE if stdin else None) + out, err = proc.communicate(stdin) + rc = proc.wait() + out = to_unicode_for_identify(out or "") + return out, rc + + +def _call_htpasswd_verify(path, user, password): + """ + wrapper for htpasswd verify + """ + out, rc = _call_htpasswd(["-vi", path, user], password) + return not rc + + +def _detect_htpasswd(): + """ + helper to check if htpasswd is present + """ + try: + out, rc = _call_htpasswd([]) + except OSError: + # TODO: under py3, could trap the more specific FileNotFoundError + # cmd not found + return False, False + # when called w/o args, it should print usage to stderr & return rc=2 + if not rc: + log.warning("htpasswd test returned with rc=0") + have_bcrypt = " -B " in out + return True, have_bcrypt + + +HAVE_HTPASSWD, HAVE_HTPASSWD_BCRYPT = _detect_htpasswd() + +requires_htpasswd_cmd = unittest.skipUnless(HAVE_HTPASSWD, "requires `htpasswd` cmdline tool") + + +#============================================================================= +# htpasswd +#============================================================================= +class HtpasswdFileTest(TestCase): + """test HtpasswdFile class""" + descriptionPrefix = "HtpasswdFile" + + # sample with 4 users + sample_01 = (b'user2:2CHkkwa2AtqGs\n' + b'user3:{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=\n' + b'user4:pass4\n' + b'user1:$apr1$t4tc7jTh$GPIWVUo8sQKJlUdV8V5vu0\n') + + # sample 1 with user 1, 2 deleted; 4 changed + sample_02 = b'user3:{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=\nuser4:pass4\n' + + # sample 1 with user2 updated, user 1 first entry removed, and user 5 added + sample_03 = (b'user2:pass2x\n' + b'user3:{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=\n' + b'user4:pass4\n' + b'user1:$apr1$t4tc7jTh$GPIWVUo8sQKJlUdV8V5vu0\n' + b'user5:pass5\n') + + # standalone sample with 8-bit username + sample_04_utf8 = b'user\xc3\xa6:2CHkkwa2AtqGs\n' + sample_04_latin1 = b'user\xe6:2CHkkwa2AtqGs\n' + + sample_dup = b'user1:pass1\nuser1:pass2\n' + + # sample with bcrypt & sha256_crypt hashes + sample_05 = (b'user2:2CHkkwa2AtqGs\n' + b'user3:{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=\n' + b'user4:pass4\n' + b'user1:$apr1$t4tc7jTh$GPIWVUo8sQKJlUdV8V5vu0\n' + b'user5:$2a$12$yktDxraxijBZ360orOyCOePFGhuis/umyPNJoL5EbsLk.s6SWdrRO\n' + b'user6:$5$rounds=110000$cCRp/xUUGVgwR4aP$' + b'p0.QKFS5qLNRqw1/47lXYiAcgIjJK.WjCO8nrEKuUK.\n') + + def test_00_constructor_autoload(self): + """test constructor autoload""" + # check with existing file + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtpasswdFile(path) + self.assertEqual(ht.to_string(), self.sample_01) + self.assertEqual(ht.path, path) + self.assertTrue(ht.mtime) + + # check changing path + ht.path = path + "x" + self.assertEqual(ht.path, path + "x") + self.assertFalse(ht.mtime) + + # check new=True + ht = apache.HtpasswdFile(path, new=True) + self.assertEqual(ht.to_string(), b"") + self.assertEqual(ht.path, path) + self.assertFalse(ht.mtime) + + # 
check autoload=False (deprecated alias for new=True) + with self.assertWarningList("``autoload=False`` is deprecated"): + ht = apache.HtpasswdFile(path, autoload=False) + self.assertEqual(ht.to_string(), b"") + self.assertEqual(ht.path, path) + self.assertFalse(ht.mtime) + + # check missing file + os.remove(path) + self.assertRaises(IOError, apache.HtpasswdFile, path) + + # NOTE: "default_scheme" option checked via set_password() test, among others + + def test_00_from_path(self): + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtpasswdFile.from_path(path) + self.assertEqual(ht.to_string(), self.sample_01) + self.assertEqual(ht.path, None) + self.assertFalse(ht.mtime) + + def test_01_delete(self): + """test delete()""" + ht = apache.HtpasswdFile.from_string(self.sample_01) + self.assertTrue(ht.delete("user1")) # should delete both entries + self.assertTrue(ht.delete("user2")) + self.assertFalse(ht.delete("user5")) # user not present + self.assertEqual(ht.to_string(), self.sample_02) + + # invalid user + self.assertRaises(ValueError, ht.delete, "user:") + + def test_01_delete_autosave(self): + path = self.mktemp() + sample = b'user1:pass1\nuser2:pass2\n' + set_file(path, sample) + + ht = apache.HtpasswdFile(path) + ht.delete("user1") + self.assertEqual(get_file(path), sample) + + ht = apache.HtpasswdFile(path, autosave=True) + ht.delete("user1") + self.assertEqual(get_file(path), b"user2:pass2\n") + + def test_02_set_password(self): + """test set_password()""" + ht = apache.HtpasswdFile.from_string( + self.sample_01, default_scheme="plaintext") + self.assertTrue(ht.set_password("user2", "pass2x")) + self.assertFalse(ht.set_password("user5", "pass5")) + self.assertEqual(ht.to_string(), self.sample_03) + + # test legacy default kwd + with self.assertWarningList("``default`` is deprecated"): + ht = apache.HtpasswdFile.from_string(self.sample_01, default="plaintext") + self.assertTrue(ht.set_password("user2", "pass2x")) + self.assertFalse(ht.set_password("user5", "pass5")) + self.assertEqual(ht.to_string(), self.sample_03) + + # invalid user + self.assertRaises(ValueError, ht.set_password, "user:", "pass") + + # test that legacy update() still works + with self.assertWarningList("update\(\) is deprecated"): + ht.update("user2", "test") + self.assertTrue(ht.check_password("user2", "test")) + + def test_02_set_password_autosave(self): + path = self.mktemp() + sample = b'user1:pass1\n' + set_file(path, sample) + + ht = apache.HtpasswdFile(path) + ht.set_password("user1", "pass2") + self.assertEqual(get_file(path), sample) + + ht = apache.HtpasswdFile(path, default_scheme="plaintext", autosave=True) + ht.set_password("user1", "pass2") + self.assertEqual(get_file(path), b"user1:pass2\n") + + def test_02_set_password_default_scheme(self): + """test set_password() -- default_scheme""" + + def check(scheme): + ht = apache.HtpasswdFile(default_scheme=scheme) + ht.set_password("user1", "pass1") + return ht.context.identify(ht.get_hash("user1")) + + # explicit scheme + self.assertEqual(check("sha256_crypt"), "sha256_crypt") + self.assertEqual(check("des_crypt"), "des_crypt") + + # unknown scheme + self.assertRaises(KeyError, check, "xxx") + + # alias resolution + self.assertEqual(check("portable"), apache.htpasswd_defaults["portable"]) + self.assertEqual(check("portable_apache_22"), apache.htpasswd_defaults["portable_apache_22"]) + self.assertEqual(check("host_apache_22"), apache.htpasswd_defaults["host_apache_22"]) + + # default + self.assertEqual(check(None), 
apache.htpasswd_defaults["portable_apache_22"]) + + def test_03_users(self): + """test users()""" + ht = apache.HtpasswdFile.from_string(self.sample_01) + ht.set_password("user5", "pass5") + ht.delete("user3") + ht.set_password("user3", "pass3") + self.assertEqual(sorted(ht.users()), ["user1", "user2", "user3", "user4", "user5"]) + + def test_04_check_password(self): + """test check_password()""" + ht = apache.HtpasswdFile.from_string(self.sample_05) + self.assertRaises(TypeError, ht.check_password, 1, 'pass9') + self.assertTrue(ht.check_password("user9","pass9") is None) + + # users 1..6 of sample_01 run through all the main hash formats, + # to make sure they're recognized. + for i in irange(1, 7): + i = str(i) + try: + self.assertTrue(ht.check_password("user"+i, "pass"+i)) + self.assertTrue(ht.check_password("user"+i, "pass9") is False) + except MissingBackendError: + if i == "5": + # user5 uses bcrypt, which is apparently not available right now + continue + raise + + self.assertRaises(ValueError, ht.check_password, "user:", "pass") + + # test that legacy verify() still works + with self.assertWarningList(["verify\(\) is deprecated"]*2): + self.assertTrue(ht.verify("user1", "pass1")) + self.assertFalse(ht.verify("user1", "pass2")) + + def test_05_load(self): + """test load()""" + # setup empty file + path = self.mktemp() + set_file(path, "") + backdate_file_mtime(path, 5) + ha = apache.HtpasswdFile(path, default_scheme="plaintext") + self.assertEqual(ha.to_string(), b"") + + # make changes, check load_if_changed() does nothing + ha.set_password("user1", "pass1") + ha.load_if_changed() + self.assertEqual(ha.to_string(), b"user1:pass1\n") + + # change file + set_file(path, self.sample_01) + ha.load_if_changed() + self.assertEqual(ha.to_string(), self.sample_01) + + # make changes, check load() overwrites them + ha.set_password("user5", "pass5") + ha.load() + self.assertEqual(ha.to_string(), self.sample_01) + + # test load w/ no path + hb = apache.HtpasswdFile() + self.assertRaises(RuntimeError, hb.load) + self.assertRaises(RuntimeError, hb.load_if_changed) + + # test load w/ dups and explicit path + set_file(path, self.sample_dup) + hc = apache.HtpasswdFile() + hc.load(path) + self.assertTrue(hc.check_password('user1','pass1')) + + # NOTE: load_string() tested via from_string(), which is used all over this file + + def test_06_save(self): + """test save()""" + # load from file + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtpasswdFile(path) + + # make changes, check they saved + ht.delete("user1") + ht.delete("user2") + ht.save() + self.assertEqual(get_file(path), self.sample_02) + + # test save w/ no path + hb = apache.HtpasswdFile(default_scheme="plaintext") + hb.set_password("user1", "pass1") + self.assertRaises(RuntimeError, hb.save) + + # test save w/ explicit path + hb.save(path) + self.assertEqual(get_file(path), b"user1:pass1\n") + + def test_07_encodings(self): + """test 'encoding' kwd""" + # test bad encodings cause failure in constructor + self.assertRaises(ValueError, apache.HtpasswdFile, encoding="utf-16") + + # check sample utf-8 + ht = apache.HtpasswdFile.from_string(self.sample_04_utf8, encoding="utf-8", + return_unicode=True) + self.assertEqual(ht.users(), [ u("user\u00e6") ]) + + # test deprecated encoding=None + with self.assertWarningList("``encoding=None`` is deprecated"): + ht = apache.HtpasswdFile.from_string(self.sample_04_utf8, encoding=None) + self.assertEqual(ht.users(), [ b'user\xc3\xa6' ]) + + # check sample latin-1 + ht = 
apache.HtpasswdFile.from_string(self.sample_04_latin1, + encoding="latin-1", return_unicode=True) + self.assertEqual(ht.users(), [ u("user\u00e6") ]) + + def test_08_get_hash(self): + """test get_hash()""" + ht = apache.HtpasswdFile.from_string(self.sample_01) + self.assertEqual(ht.get_hash("user3"), b"{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=") + self.assertEqual(ht.get_hash("user4"), b"pass4") + self.assertEqual(ht.get_hash("user5"), None) + + with self.assertWarningList("find\(\) is deprecated"): + self.assertEqual(ht.find("user4"), b"pass4") + + def test_09_to_string(self): + """test to_string""" + + # check with known sample + ht = apache.HtpasswdFile.from_string(self.sample_01) + self.assertEqual(ht.to_string(), self.sample_01) + + # test blank + ht = apache.HtpasswdFile() + self.assertEqual(ht.to_string(), b"") + + def test_10_repr(self): + ht = apache.HtpasswdFile("fakepath", autosave=True, new=True, encoding="latin-1") + repr(ht) + + def test_11_malformed(self): + self.assertRaises(ValueError, apache.HtpasswdFile.from_string, + b'realm:user1:pass1\n') + self.assertRaises(ValueError, apache.HtpasswdFile.from_string, + b'pass1\n') + + def test_12_from_string(self): + # forbid path kwd + self.assertRaises(TypeError, apache.HtpasswdFile.from_string, + b'', path=None) + + def test_13_whitespace(self): + """whitespace & comment handling""" + + # per htpasswd source (https://github.com/apache/httpd/blob/trunk/support/htpasswd.c), + # lines that match "^\s*(#.*)?$" should be ignored + source = to_bytes( + '\n' + 'user2:pass2\n' + 'user4:pass4\n' + 'user7:pass7\r\n' + ' \t \n' + 'user1:pass1\n' + ' # legacy users\n' + '#user6:pass6\n' + 'user5:pass5\n\n' + ) + + # loading should see all users (except user6, who was commented out) + ht = apache.HtpasswdFile.from_string(source) + self.assertEqual(sorted(ht.users()), ["user1", "user2", "user4", "user5", "user7"]) + + # update existing user + ht.set_hash("user4", "althash4") + self.assertEqual(sorted(ht.users()), ["user1", "user2", "user4", "user5", "user7"]) + + # add a new user + ht.set_hash("user6", "althash6") + self.assertEqual(sorted(ht.users()), ["user1", "user2", "user4", "user5", "user6", "user7"]) + + # delete existing user + ht.delete("user7") + self.assertEqual(sorted(ht.users()), ["user1", "user2", "user4", "user5", "user6"]) + + # re-serialization should preserve whitespace + target = to_bytes( + '\n' + 'user2:pass2\n' + 'user4:althash4\n' + ' \t \n' + 'user1:pass1\n' + ' # legacy users\n' + '#user6:pass6\n' + 'user5:pass5\n' + 'user6:althash6\n' + ) + self.assertEqual(ht.to_string(), target) + + @requires_htpasswd_cmd + def test_htpasswd_cmd_verify(self): + """ + verify "htpasswd" command can read output + """ + path = self.mktemp() + ht = apache.HtpasswdFile(path=path, new=True) + + def hash_scheme(pwd, scheme): + return ht.context.handler(scheme).hash(pwd) + + # base scheme + ht.set_hash("user1", hash_scheme("password","apr_md5_crypt")) + + # 2.2-compat scheme + host_no_bcrypt = apache.htpasswd_defaults["host_apache_22"] + ht.set_hash("user2", hash_scheme("password", host_no_bcrypt)) + + # 2.4-compat scheme + host_best = apache.htpasswd_defaults["host"] + ht.set_hash("user3", hash_scheme("password", host_best)) + + # unsupported scheme -- should always fail to verify + ht.set_hash("user4", "$xxx$foo$bar$baz") + + # make sure htpasswd properly recognizes hashes + ht.save() + + self.assertFalse(_call_htpasswd_verify(path, "user1", "wrong")) + self.assertFalse(_call_htpasswd_verify(path, "user2", "wrong")) + 
self.assertFalse(_call_htpasswd_verify(path, "user3", "wrong")) + self.assertFalse(_call_htpasswd_verify(path, "user4", "wrong")) + + self.assertTrue(_call_htpasswd_verify(path, "user1", "password")) + self.assertTrue(_call_htpasswd_verify(path, "user2", "password")) + self.assertTrue(_call_htpasswd_verify(path, "user3", "password")) + + @requires_htpasswd_cmd + @unittest.skipUnless(registry.has_backend("bcrypt"), "bcrypt support required") + def test_htpasswd_cmd_verify_bcrypt(self): + """ + verify "htpasswd" command can read bcrypt format + + this tests for regression of issue 95, where we output "$2b$" instead of "$2y$"; + fixed in v1.7.2. + """ + path = self.mktemp() + ht = apache.HtpasswdFile(path=path, new=True) + def hash_scheme(pwd, scheme): + return ht.context.handler(scheme).hash(pwd) + ht.set_hash("user1", hash_scheme("password", "bcrypt")) + ht.save() + self.assertFalse(_call_htpasswd_verify(path, "user1", "wrong")) + if HAVE_HTPASSWD_BCRYPT: + self.assertTrue(_call_htpasswd_verify(path, "user1", "password")) + else: + # apache2.2 should fail, acting like it's an unknown hash format + self.assertFalse(_call_htpasswd_verify(path, "user1", "password")) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# htdigest +#============================================================================= +class HtdigestFileTest(TestCase): + """test HtdigestFile class""" + descriptionPrefix = "HtdigestFile" + + # sample with 4 users + sample_01 = (b'user2:realm:549d2a5f4659ab39a80dac99e159ab19\n' + b'user3:realm:a500bb8c02f6a9170ae46af10c898744\n' + b'user4:realm:ab7b5d5f28ccc7666315f508c7358519\n' + b'user1:realm:2a6cf53e7d8f8cf39d946dc880b14128\n') + + # sample 1 with user 1, 2 deleted; 4 changed + sample_02 = (b'user3:realm:a500bb8c02f6a9170ae46af10c898744\n' + b'user4:realm:ab7b5d5f28ccc7666315f508c7358519\n') + + # sample 1 with user2 updated, user 1 first entry removed, and user 5 added + sample_03 = (b'user2:realm:5ba6d8328943c23c64b50f8b29566059\n' + b'user3:realm:a500bb8c02f6a9170ae46af10c898744\n' + b'user4:realm:ab7b5d5f28ccc7666315f508c7358519\n' + b'user1:realm:2a6cf53e7d8f8cf39d946dc880b14128\n' + b'user5:realm:03c55fdc6bf71552356ad401bdb9af19\n') + + # standalone sample with 8-bit username & realm + sample_04_utf8 = b'user\xc3\xa6:realm\xc3\xa6:549d2a5f4659ab39a80dac99e159ab19\n' + sample_04_latin1 = b'user\xe6:realm\xe6:549d2a5f4659ab39a80dac99e159ab19\n' + + def test_00_constructor_autoload(self): + """test constructor autoload""" + # check with existing file + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtdigestFile(path) + self.assertEqual(ht.to_string(), self.sample_01) + + # check without autoload + ht = apache.HtdigestFile(path, new=True) + self.assertEqual(ht.to_string(), b"") + + # check missing file + os.remove(path) + self.assertRaises(IOError, apache.HtdigestFile, path) + + # NOTE: default_realm option checked via other tests. 
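A note on the fixture data: the htdigest entries in sample_01 are HTTP Digest "HA1" values, i.e. the lowercase hex MD5 of "user:realm:password". A minimal sketch of that derivation using only hashlib; the helper name is illustrative and is not part of the patched module, and the expected hex string is the same literal asserted for user1 in test_05_load below.

from hashlib import md5

def htdigest_hash(user, realm, password):
    # HA1 = MD5("user:realm:password"), stored as lowercase hex
    return md5(("%s:%s:%s" % (user, realm, password)).encode("utf-8")).hexdigest()

# reproduces the user1 entry in sample_01 (and the literal checked in test_05_load)
assert htdigest_hash("user1", "realm", "pass1") == "2a6cf53e7d8f8cf39d946dc880b14128"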
+ + def test_01_delete(self): + """test delete()""" + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertTrue(ht.delete("user1", "realm")) + self.assertTrue(ht.delete("user2", "realm")) + self.assertFalse(ht.delete("user5", "realm")) + self.assertFalse(ht.delete("user3", "realm5")) + self.assertEqual(ht.to_string(), self.sample_02) + + # invalid user + self.assertRaises(ValueError, ht.delete, "user:", "realm") + + # invalid realm + self.assertRaises(ValueError, ht.delete, "user", "realm:") + + def test_01_delete_autosave(self): + path = self.mktemp() + set_file(path, self.sample_01) + + ht = apache.HtdigestFile(path) + self.assertTrue(ht.delete("user1", "realm")) + self.assertFalse(ht.delete("user3", "realm5")) + self.assertFalse(ht.delete("user5", "realm")) + self.assertEqual(get_file(path), self.sample_01) + + ht.autosave = True + self.assertTrue(ht.delete("user2", "realm")) + self.assertEqual(get_file(path), self.sample_02) + + def test_02_set_password(self): + """test update()""" + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertTrue(ht.set_password("user2", "realm", "pass2x")) + self.assertFalse(ht.set_password("user5", "realm", "pass5")) + self.assertEqual(ht.to_string(), self.sample_03) + + # default realm + self.assertRaises(TypeError, ht.set_password, "user2", "pass3") + ht.default_realm = "realm2" + ht.set_password("user2", "pass3") + ht.check_password("user2", "realm2", "pass3") + + # invalid user + self.assertRaises(ValueError, ht.set_password, "user:", "realm", "pass") + self.assertRaises(ValueError, ht.set_password, "u"*256, "realm", "pass") + + # invalid realm + self.assertRaises(ValueError, ht.set_password, "user", "realm:", "pass") + self.assertRaises(ValueError, ht.set_password, "user", "r"*256, "pass") + + # test that legacy update() still works + with self.assertWarningList("update\(\) is deprecated"): + ht.update("user2", "realm2", "test") + self.assertTrue(ht.check_password("user2", "test")) + + # TODO: test set_password autosave + + def test_03_users(self): + """test users()""" + ht = apache.HtdigestFile.from_string(self.sample_01) + ht.set_password("user5", "realm", "pass5") + ht.delete("user3", "realm") + ht.set_password("user3", "realm", "pass3") + self.assertEqual(sorted(ht.users("realm")), ["user1", "user2", "user3", "user4", "user5"]) + + self.assertRaises(TypeError, ht.users, 1) + + def test_04_check_password(self): + """test check_password()""" + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertRaises(TypeError, ht.check_password, 1, 'realm', 'pass5') + self.assertRaises(TypeError, ht.check_password, 'user', 1, 'pass5') + self.assertIs(ht.check_password("user5", "realm","pass5"), None) + for i in irange(1,5): + i = str(i) + self.assertTrue(ht.check_password("user"+i, "realm", "pass"+i)) + self.assertIs(ht.check_password("user"+i, "realm", "pass5"), False) + + # default realm + self.assertRaises(TypeError, ht.check_password, "user5", "pass5") + ht.default_realm = "realm" + self.assertTrue(ht.check_password("user1", "pass1")) + self.assertIs(ht.check_password("user5", "pass5"), None) + + # test that legacy verify() still works + with self.assertWarningList(["verify\(\) is deprecated"]*2): + self.assertTrue(ht.verify("user1", "realm", "pass1")) + self.assertFalse(ht.verify("user1", "realm", "pass2")) + + # invalid user + self.assertRaises(ValueError, ht.check_password, "user:", "realm", "pass") + + def test_05_load(self): + """test load()""" + # setup empty file + path = self.mktemp() + set_file(path, "") + 
backdate_file_mtime(path, 5) + ha = apache.HtdigestFile(path) + self.assertEqual(ha.to_string(), b"") + + # make changes, check load_if_changed() does nothing + ha.set_password("user1", "realm", "pass1") + ha.load_if_changed() + self.assertEqual(ha.to_string(), b'user1:realm:2a6cf53e7d8f8cf39d946dc880b14128\n') + + # change file + set_file(path, self.sample_01) + ha.load_if_changed() + self.assertEqual(ha.to_string(), self.sample_01) + + # make changes, check load_if_changed overwrites them + ha.set_password("user5", "realm", "pass5") + ha.load() + self.assertEqual(ha.to_string(), self.sample_01) + + # test load w/ no path + hb = apache.HtdigestFile() + self.assertRaises(RuntimeError, hb.load) + self.assertRaises(RuntimeError, hb.load_if_changed) + + # test load w/ explicit path + hc = apache.HtdigestFile() + hc.load(path) + self.assertEqual(hc.to_string(), self.sample_01) + + # change file, test deprecated force=False kwd + ensure_mtime_changed(path) + set_file(path, "") + with self.assertWarningList(r"load\(force=False\) is deprecated"): + ha.load(force=False) + self.assertEqual(ha.to_string(), b"") + + def test_06_save(self): + """test save()""" + # load from file + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtdigestFile(path) + + # make changes, check they saved + ht.delete("user1", "realm") + ht.delete("user2", "realm") + ht.save() + self.assertEqual(get_file(path), self.sample_02) + + # test save w/ no path + hb = apache.HtdigestFile() + hb.set_password("user1", "realm", "pass1") + self.assertRaises(RuntimeError, hb.save) + + # test save w/ explicit path + hb.save(path) + self.assertEqual(get_file(path), hb.to_string()) + + def test_07_realms(self): + """test realms() & delete_realm()""" + ht = apache.HtdigestFile.from_string(self.sample_01) + + self.assertEqual(ht.delete_realm("x"), 0) + self.assertEqual(ht.realms(), ['realm']) + + self.assertEqual(ht.delete_realm("realm"), 4) + self.assertEqual(ht.realms(), []) + self.assertEqual(ht.to_string(), b"") + + def test_08_get_hash(self): + """test get_hash()""" + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertEqual(ht.get_hash("user3", "realm"), "a500bb8c02f6a9170ae46af10c898744") + self.assertEqual(ht.get_hash("user4", "realm"), "ab7b5d5f28ccc7666315f508c7358519") + self.assertEqual(ht.get_hash("user5", "realm"), None) + + with self.assertWarningList("find\(\) is deprecated"): + self.assertEqual(ht.find("user4", "realm"), "ab7b5d5f28ccc7666315f508c7358519") + + def test_09_encodings(self): + """test encoding parameter""" + # test bad encodings cause failure in constructor + self.assertRaises(ValueError, apache.HtdigestFile, encoding="utf-16") + + # check sample utf-8 + ht = apache.HtdigestFile.from_string(self.sample_04_utf8, encoding="utf-8", return_unicode=True) + self.assertEqual(ht.realms(), [ u("realm\u00e6") ]) + self.assertEqual(ht.users(u("realm\u00e6")), [ u("user\u00e6") ]) + + # check sample latin-1 + ht = apache.HtdigestFile.from_string(self.sample_04_latin1, encoding="latin-1", return_unicode=True) + self.assertEqual(ht.realms(), [ u("realm\u00e6") ]) + self.assertEqual(ht.users(u("realm\u00e6")), [ u("user\u00e6") ]) + + def test_10_to_string(self): + """test to_string()""" + + # check sample + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertEqual(ht.to_string(), self.sample_01) + + # check blank + ht = apache.HtdigestFile() + self.assertEqual(ht.to_string(), b"") + + def test_11_malformed(self): + self.assertRaises(ValueError, apache.HtdigestFile.from_string, + 
b'realm:user1:pass1:other\n') + self.assertRaises(ValueError, apache.HtdigestFile.from_string, + b'user1:pass1\n') + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_apps.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_apps.py new file mode 100644 index 000000000..167437f5d --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_apps.py @@ -0,0 +1,139 @@ +"""test passlib.apps""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib import apps, hash as hashmod +from passlib.tests.utils import TestCase +# module + +#============================================================================= +# test predefined app contexts +#============================================================================= +class AppsTest(TestCase): + """perform general tests to make sure contexts work""" + # NOTE: these tests are not really comprehensive, + # since they would do little but duplicate + # the presets in apps.py + # + # they mainly try to ensure no typos + # or dynamic behavior foul-ups. + + def test_master_context(self): + ctx = apps.master_context + self.assertGreater(len(ctx.schemes()), 50) + + def test_custom_app_context(self): + ctx = apps.custom_app_context + self.assertEqual(ctx.schemes(), ("sha512_crypt", "sha256_crypt")) + for hash in [ + ('$6$rounds=41128$VoQLvDjkaZ6L6BIE$4pt.1Ll1XdDYduEwEYPCMOBiR6W6' + 'znsyUEoNlcVXpv2gKKIbQolgmTGe6uEEVJ7azUxuc8Tf7zV9SD2z7Ij751'), + ('$5$rounds=31817$iZGmlyBQ99JSB5n6$p4E.pdPBWx19OajgjLRiOW0itGny' + 'xDGgMlDcOsfaI17'), + ]: + self.assertTrue(ctx.verify("test", hash)) + + def test_django16_context(self): + ctx = apps.django16_context + for hash in [ + 'pbkdf2_sha256$29000$ZsgquwnCyBs2$fBxRQpfKd2PIeMxtkKPy0h7SrnrN+EU/cm67aitoZ2s=', + 'sha1$0d082$cdb462ae8b6be8784ef24b20778c4d0c82d5957f', + 'md5$b887a$37767f8a745af10612ad44c80ff52e92', + 'crypt$95a6d$95x74hLDQKXI2', + '098f6bcd4621d373cade4e832627b4f6', + ]: + self.assertTrue(ctx.verify("test", hash)) + + self.assertEqual(ctx.identify("!"), "django_disabled") + self.assertFalse(ctx.verify("test", "!")) + + def test_django_context(self): + ctx = apps.django_context + for hash in [ + 'pbkdf2_sha256$29000$ZsgquwnCyBs2$fBxRQpfKd2PIeMxtkKPy0h7SrnrN+EU/cm67aitoZ2s=', + ]: + self.assertTrue(ctx.verify("test", hash)) + + self.assertEqual(ctx.identify("!"), "django_disabled") + self.assertFalse(ctx.verify("test", "!")) + + def test_ldap_nocrypt_context(self): + ctx = apps.ldap_nocrypt_context + for hash in [ + '{SSHA}cPusOzd6d5n3OjSVK3R329ZGCNyFcC7F', + 'test', + ]: + self.assertTrue(ctx.verify("test", hash)) + + self.assertIs(ctx.identify('{CRYPT}$5$rounds=31817$iZGmlyBQ99JSB5' + 'n6$p4E.pdPBWx19OajgjLRiOW0itGnyxDGgMlDcOsfaI17'), None) + + def test_ldap_context(self): + ctx = apps.ldap_context + for hash in [ + ('{CRYPT}$5$rounds=31817$iZGmlyBQ99JSB5n6$p4E.pdPBWx19OajgjLRiOW0' + 'itGnyxDGgMlDcOsfaI17'), + '{SSHA}cPusOzd6d5n3OjSVK3R329ZGCNyFcC7F', + 'test', + ]: + self.assertTrue(ctx.verify("test", hash)) + + 
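The preset objects exercised by these tests are ordinary CryptContext instances, so applications drive them through the same hash()/verify() calls the assertions above rely on. A minimal usage sketch, assuming only that passlib.apps is importable and the passlib 1.7-era method names apply (as they do throughout this test module):

from passlib.apps import custom_app_context

# hash() uses the context's default scheme (one of the schemes listed in test_custom_app_context)
stored = custom_app_context.hash("test")

# verify() identifies which configured scheme produced the hash, then checks the password
assert custom_app_context.verify("test", stored)
assert not custom_app_context.verify("wrong guess", stored)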
def test_ldap_mysql_context(self): + ctx = apps.mysql_context + for hash in [ + '*94BDCEBE19083CE2A1F959FD02F964C7AF4CFC29', + '378b243e220ca493', + ]: + self.assertTrue(ctx.verify("test", hash)) + + def test_postgres_context(self): + ctx = apps.postgres_context + hash = 'md55d9c68c6c50ed3d02a2fcf54f63993b6' + self.assertTrue(ctx.verify("test", hash, user='user')) + + def test_phppass_context(self): + ctx = apps.phpass_context + for hash in [ + '$P$8Ja1vJsKa5qyy/b3mCJGXM7GyBnt6..', + '$H$8b95CoYQnQ9Y6fSTsACyphNh5yoM02.', + '_cD..aBxeRhYFJvtUvsI', + ]: + self.assertTrue(ctx.verify("test", hash)) + + h1 = "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS" + if hashmod.bcrypt.has_backend(): + self.assertTrue(ctx.verify("test", h1)) + self.assertEqual(ctx.default_scheme(), "bcrypt") + self.assertEqual(ctx.handler().name, "bcrypt") + else: + self.assertEqual(ctx.identify(h1), "bcrypt") + self.assertEqual(ctx.default_scheme(), "phpass") + self.assertEqual(ctx.handler().name, "phpass") + + def test_phpbb3_context(self): + ctx = apps.phpbb3_context + for hash in [ + '$P$8Ja1vJsKa5qyy/b3mCJGXM7GyBnt6..', + '$H$8b95CoYQnQ9Y6fSTsACyphNh5yoM02.', + ]: + self.assertTrue(ctx.verify("test", hash)) + self.assertTrue(ctx.hash("test").startswith("$H$")) + + def test_roundup_context(self): + ctx = apps.roundup_context + for hash in [ + '{PBKDF2}9849$JMTYu3eOUSoFYExprVVqbQ$N5.gV.uR1.BTgLSvi0qyPiRlGZ0', + '{SHA}a94a8fe5ccb19ba61c4c0873d391e987982fbbd3', + '{CRYPT}dptOmKDriOGfU', + '{plaintext}test', + ]: + self.assertTrue(ctx.verify("test", hash)) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_context.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_context.py new file mode 100644 index 000000000..09b52c0d7 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_context.py @@ -0,0 +1,1786 @@ +"""tests for passlib.context""" +#============================================================================= +# imports +#============================================================================= +# core +from __future__ import with_statement +from passlib.utils.compat import PY3 +if PY3: + from configparser import NoSectionError +else: + from ConfigParser import NoSectionError +import datetime +from functools import partial +import logging; log = logging.getLogger(__name__) +import os +import warnings +# site +# pkg +from passlib import hash +from passlib.context import CryptContext, LazyCryptContext +from passlib.exc import PasslibConfigWarning, PasslibHashWarning +from passlib.utils import tick, to_unicode +from passlib.utils.compat import irange, u, unicode, str_to_uascii, PY2, PY26 +import passlib.utils.handlers as uh +from passlib.tests.utils import (TestCase, set_file, TICK_RESOLUTION, + quicksleep, time_call, handler_derived_from) +from passlib.registry import (register_crypt_handler_path, + _has_crypt_handler as has_crypt_handler, + _unload_handler_name as unload_handler_name, + get_crypt_handler, + ) +# local +#============================================================================= +# support +#============================================================================= +here = os.path.abspath(os.path.dirname(__file__)) + +def merge_dicts(first, *args, **kwds): + target = first.copy() + for arg in args: + target.update(arg) + if kwds: + target.update(kwds) + return 
target + +#============================================================================= +# +#============================================================================= +class CryptContextTest(TestCase): + descriptionPrefix = "CryptContext" + + # TODO: these unittests could really use a good cleanup + # and reorganizing, to ensure they're getting everything. + + #=================================================================== + # sample configurations used in tests + #=================================================================== + + #--------------------------------------------------------------- + # sample 1 - typical configuration + #--------------------------------------------------------------- + sample_1_schemes = ["des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"] + sample_1_handlers = [get_crypt_handler(name) for name in sample_1_schemes] + + sample_1_dict = dict( + schemes = sample_1_schemes, + default = "md5_crypt", + all__vary_rounds = 0.1, + bsdi_crypt__max_rounds = 30001, + bsdi_crypt__default_rounds = 25001, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds = 40000, + ) + + sample_1_resolved_dict = merge_dicts(sample_1_dict, + schemes = sample_1_handlers) + + sample_1_unnormalized = u("""\ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +; this is using %... +all__vary_rounds = 10%% +bsdi_crypt__default_rounds = 25001 +bsdi_crypt__max_rounds = 30001 +sha512_crypt__max_rounds = 50000 +sha512_crypt__min_rounds = 40000 +""") + + sample_1_unicode = u("""\ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +all__vary_rounds = 0.1 +bsdi_crypt__default_rounds = 25001 +bsdi_crypt__max_rounds = 30001 +sha512_crypt__max_rounds = 50000 +sha512_crypt__min_rounds = 40000 + +""") + + #--------------------------------------------------------------- + # sample 1 external files + #--------------------------------------------------------------- + + # sample 1 string with '\n' linesep + sample_1_path = os.path.join(here, "sample1.cfg") + + # sample 1 with '\r\n' linesep + sample_1b_unicode = sample_1_unicode.replace(u("\n"), u("\r\n")) + sample_1b_path = os.path.join(here, "sample1b.cfg") + + # sample 1 using UTF-16 and alt section + sample_1c_bytes = sample_1_unicode.replace(u("[passlib]"), + u("[mypolicy]")).encode("utf-16") + sample_1c_path = os.path.join(here, "sample1c.cfg") + + # enable to regenerate sample files + if False: + set_file(sample_1_path, sample_1_unicode) + set_file(sample_1b_path, sample_1b_unicode) + set_file(sample_1c_path, sample_1c_bytes) + + #--------------------------------------------------------------- + # sample 2 & 12 - options patch + #--------------------------------------------------------------- + sample_2_dict = dict( + # using this to test full replacement of existing options + bsdi_crypt__min_rounds = 29001, + bsdi_crypt__max_rounds = 35001, + bsdi_crypt__default_rounds = 31001, + # using this to test partial replacement of existing options + sha512_crypt__min_rounds=45000, + ) + + sample_2_unicode = """\ +[passlib] +bsdi_crypt__min_rounds = 29001 +bsdi_crypt__max_rounds = 35001 +bsdi_crypt__default_rounds = 31001 +sha512_crypt__min_rounds = 45000 +""" + + # sample 2 overlayed on top of sample 1 + sample_12_dict = merge_dicts(sample_1_dict, sample_2_dict) + + #--------------------------------------------------------------- + # sample 3 & 123 - just changing default from sample 1 + #--------------------------------------------------------------- + 
sample_3_dict = dict( + default="sha512_crypt", + ) + + # sample 3 overlayed on 2 overlayed on 1 + sample_123_dict = merge_dicts(sample_12_dict, sample_3_dict) + + #--------------------------------------------------------------- + # sample 4 - used by api tests + #--------------------------------------------------------------- + sample_4_dict = dict( + schemes = [ "des_crypt", "md5_crypt", "phpass", "bsdi_crypt", + "sha256_crypt"], + deprecated = [ "des_crypt", ], + default = "sha256_crypt", + bsdi_crypt__max_rounds = 31, + bsdi_crypt__default_rounds = 25, + bsdi_crypt__vary_rounds = 0, + sha256_crypt__max_rounds = 3000, + sha256_crypt__min_rounds = 2000, + sha256_crypt__default_rounds = 3000, + phpass__ident = "H", + phpass__default_rounds = 7, + ) + + #=================================================================== + # setup + #=================================================================== + def setUp(self): + super(CryptContextTest, self).setUp() + warnings.filterwarnings("ignore", "The 'all' scheme is deprecated.*") + warnings.filterwarnings("ignore", ".*'scheme' keyword is deprecated as of Passlib 1.7.*") + + #=================================================================== + # constructors + #=================================================================== + def test_01_constructor(self): + """test class constructor""" + + # test blank constructor works correctly + ctx = CryptContext() + self.assertEqual(ctx.to_dict(), {}) + + # test sample 1 with scheme=names + ctx = CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 with scheme=handlers + ctx = CryptContext(**self.sample_1_resolved_dict) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 2: options w/o schemes + ctx = CryptContext(**self.sample_2_dict) + self.assertEqual(ctx.to_dict(), self.sample_2_dict) + + # test sample 3: default only + ctx = CryptContext(**self.sample_3_dict) + self.assertEqual(ctx.to_dict(), self.sample_3_dict) + + # test unicode scheme names (issue 54) + ctx = CryptContext(schemes=[u("sha256_crypt")]) + self.assertEqual(ctx.schemes(), ("sha256_crypt",)) + + def test_02_from_string(self): + """test from_string() constructor""" + # test sample 1 unicode + ctx = CryptContext.from_string(self.sample_1_unicode) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 with unnormalized inputs + ctx = CryptContext.from_string(self.sample_1_unnormalized) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 utf-8 + ctx = CryptContext.from_string(self.sample_1_unicode.encode("utf-8")) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 w/ '\r\n' linesep + ctx = CryptContext.from_string(self.sample_1b_unicode) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 using UTF-16 and alt section + ctx = CryptContext.from_string(self.sample_1c_bytes, section="mypolicy", + encoding="utf-16") + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test wrong type + self.assertRaises(TypeError, CryptContext.from_string, None) + + # test missing section + self.assertRaises(NoSectionError, CryptContext.from_string, + self.sample_1_unicode, section="fakesection") + + def test_03_from_path(self): + """test from_path() constructor""" + # make sure sample files exist + if not os.path.exists(self.sample_1_path): + raise RuntimeError("can't find data file: %r" % self.sample_1_path) + + # test sample 1 + ctx = CryptContext.from_path(self.sample_1_path) + 
self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 w/ '\r\n' linesep + ctx = CryptContext.from_path(self.sample_1b_path) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 encoding using UTF-16 and alt section + ctx = CryptContext.from_path(self.sample_1c_path, section="mypolicy", + encoding="utf-16") + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test missing file + self.assertRaises(EnvironmentError, CryptContext.from_path, + os.path.join(here, "sample1xxx.cfg")) + + # test missing section + self.assertRaises(NoSectionError, CryptContext.from_path, + self.sample_1_path, section="fakesection") + + def test_04_copy(self): + """test copy() method""" + cc1 = CryptContext(**self.sample_1_dict) + + # overlay sample 2 onto copy + cc2 = cc1.copy(**self.sample_2_dict) + self.assertEqual(cc1.to_dict(), self.sample_1_dict) + self.assertEqual(cc2.to_dict(), self.sample_12_dict) + + # check that repeating overlay makes no change + cc2b = cc2.copy(**self.sample_2_dict) + self.assertEqual(cc1.to_dict(), self.sample_1_dict) + self.assertEqual(cc2b.to_dict(), self.sample_12_dict) + + # overlay sample 3 on copy + cc3 = cc2.copy(**self.sample_3_dict) + self.assertEqual(cc3.to_dict(), self.sample_123_dict) + + # test empty copy creates separate copy + cc4 = cc1.copy() + self.assertIsNot(cc4, cc1) + self.assertEqual(cc1.to_dict(), self.sample_1_dict) + self.assertEqual(cc4.to_dict(), self.sample_1_dict) + + # ... and that modifying copy doesn't affect original + cc4.update(**self.sample_2_dict) + self.assertEqual(cc1.to_dict(), self.sample_1_dict) + self.assertEqual(cc4.to_dict(), self.sample_12_dict) + + def test_09_repr(self): + """test repr()""" + cc1 = CryptContext(**self.sample_1_dict) + # NOTE: "0x-1234" format used by Pyston 0.5.1 (support deprecated 2019-11) + self.assertRegex(repr(cc1), "^<CryptContext at 0x-?[0-9a-f]+>$") + + #=================================================================== + # modifiers + #=================================================================== + def test_10_load(self): + """test load() / load_path() method""" + # NOTE: load() is the workhorse that handles all policy parsing, + # compilation, and validation. most of its features are tested + # elsewhere, since all the constructors and modifiers are just + # wrappers for it.
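# A minimal sketch of what the NOTE above describes -- the constructor, from_string(),
# and update() are thin wrappers around load() / load(..., update=True). It uses only
# the public CryptContext API already exercised by these tests; the specific scheme
# names and policy values are illustrative.
from passlib.context import CryptContext

ctx = CryptContext()
ctx.load({"schemes": ["des_crypt", "md5_crypt"], "default": "md5_crypt"})     # dict policy
assert ctx.default_scheme() == "md5_crypt"

ctx.load("[passlib]\nschemes = des_crypt, md5_crypt\ndefault = md5_crypt\n")  # INI-string policy
assert ctx.schemes() == ("des_crypt", "md5_crypt")

ctx.load({"default": "des_crypt"}, update=True)                               # overlay, like update()
assert ctx.default_scheme() == "des_crypt"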
+ + # source_type 'auto' + ctx = CryptContext() + + # detect dict + ctx.load(self.sample_1_dict) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # detect unicode string + ctx.load(self.sample_1_unicode) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # detect bytes string + ctx.load(self.sample_1_unicode.encode("utf-8")) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # anything else - TypeError + self.assertRaises(TypeError, ctx.load, None) + + # NOTE: load_path() tested by from_path() + # NOTE: additional string tests done by from_string() + + # update flag - tested by update() method tests + # encoding keyword - tested by from_string() & from_path() + # section keyword - tested by from_string() & from_path() + + # test load empty + ctx = CryptContext(**self.sample_1_dict) + ctx.load({}, update=True) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # multiple loads should clear the state + ctx = CryptContext() + ctx.load(self.sample_1_dict) + ctx.load(self.sample_2_dict) + self.assertEqual(ctx.to_dict(), self.sample_2_dict) + + def test_11_load_rollback(self): + """test load() errors restore old state""" + # create initial context + cc = CryptContext(["des_crypt", "sha256_crypt"], + sha256_crypt__default_rounds=5000, + all__vary_rounds=0.1, + ) + result = cc.to_string() + + # do an update operation that should fail during parsing + # XXX: not sure what the right error type is here. + self.assertRaises(TypeError, cc.update, too__many__key__parts=True) + self.assertEqual(cc.to_string(), result) + + # do an update operation that should fail during extraction + # FIXME: this isn't failing even in broken case, need to figure out + # way to ensure some keys come after this one. + self.assertRaises(KeyError, cc.update, fake_context_option=True) + self.assertEqual(cc.to_string(), result) + + # do an update operation that should fail during compilation + self.assertRaises(ValueError, cc.update, sha256_crypt__min_rounds=10000) + self.assertEqual(cc.to_string(), result) + + def test_12_update(self): + """test update() method""" + + # empty overlay + ctx = CryptContext(**self.sample_1_dict) + ctx.update() + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test basic overlay + ctx = CryptContext(**self.sample_1_dict) + ctx.update(**self.sample_2_dict) + self.assertEqual(ctx.to_dict(), self.sample_12_dict) + + # ... 
and again + ctx.update(**self.sample_3_dict) + self.assertEqual(ctx.to_dict(), self.sample_123_dict) + + # overlay w/ dict arg + ctx = CryptContext(**self.sample_1_dict) + ctx.update(self.sample_2_dict) + self.assertEqual(ctx.to_dict(), self.sample_12_dict) + + # overlay w/ string + ctx = CryptContext(**self.sample_1_dict) + ctx.update(self.sample_2_unicode) + self.assertEqual(ctx.to_dict(), self.sample_12_dict) + + # too many args + self.assertRaises(TypeError, ctx.update, {}, {}) + self.assertRaises(TypeError, ctx.update, {}, schemes=['des_crypt']) + + # wrong arg type + self.assertRaises(TypeError, ctx.update, None) + + #=================================================================== + # option parsing + #=================================================================== + def test_20_options(self): + """test basic option parsing""" + def parse(**kwds): + return CryptContext(**kwds).to_dict() + + # + # common option parsing tests + # + + # test keys with blank fields are rejected + # blank option + self.assertRaises(TypeError, CryptContext, __=0.1) + self.assertRaises(TypeError, CryptContext, default__scheme__='x') + + # blank scheme + self.assertRaises(TypeError, CryptContext, __option='x') + self.assertRaises(TypeError, CryptContext, default____option='x') + + # blank category + self.assertRaises(TypeError, CryptContext, __scheme__option='x') + + # test keys with too many field are rejected + self.assertRaises(TypeError, CryptContext, + category__scheme__option__invalid = 30000) + + # keys with mixed separators should be handled correctly. + # (testing actual data, not to_dict(), since re-render hid original bug) + self.assertRaises(KeyError, parse, + **{"admin.context__schemes":"md5_crypt"}) + ctx = CryptContext(**{"schemes":"md5_crypt,des_crypt", + "admin.context__default":"des_crypt"}) + self.assertEqual(ctx.default_scheme("admin"), "des_crypt") + + # + # context option -specific tests + # + + # test context option key parsing + result = dict(default="md5_crypt") + self.assertEqual(parse(default="md5_crypt"), result) + self.assertEqual(parse(context__default="md5_crypt"), result) + self.assertEqual(parse(default__context__default="md5_crypt"), result) + self.assertEqual(parse(**{"context.default":"md5_crypt"}), result) + self.assertEqual(parse(**{"default.context.default":"md5_crypt"}), result) + + # test context option key parsing w/ category + result = dict(admin__context__default="md5_crypt") + self.assertEqual(parse(admin__context__default="md5_crypt"), result) + self.assertEqual(parse(**{"admin.context.default":"md5_crypt"}), result) + + # + # hash option -specific tests + # + + # test hash option key parsing + result = dict(all__vary_rounds=0.1) + self.assertEqual(parse(all__vary_rounds=0.1), result) + self.assertEqual(parse(default__all__vary_rounds=0.1), result) + self.assertEqual(parse(**{"all.vary_rounds":0.1}), result) + self.assertEqual(parse(**{"default.all.vary_rounds":0.1}), result) + + # test hash option key parsing w/ category + result = dict(admin__all__vary_rounds=0.1) + self.assertEqual(parse(admin__all__vary_rounds=0.1), result) + self.assertEqual(parse(**{"admin.all.vary_rounds":0.1}), result) + + # settings not allowed if not in hash.setting_kwds + ctx = CryptContext(["phpass", "md5_crypt"], phpass__ident="P") + self.assertRaises(KeyError, ctx.copy, md5_crypt__ident="P") + + # hash options 'salt' and 'rounds' not allowed + self.assertRaises(KeyError, CryptContext, schemes=["des_crypt"], + des_crypt__salt="xx") + self.assertRaises(KeyError, CryptContext, 
schemes=["des_crypt"], + all__salt="xx") + + def test_21_schemes(self): + """test 'schemes' context option parsing""" + + # schemes can be empty + cc = CryptContext(schemes=None) + self.assertEqual(cc.schemes(), ()) + + # schemes can be list of names + cc = CryptContext(schemes=["des_crypt", "md5_crypt"]) + self.assertEqual(cc.schemes(), ("des_crypt", "md5_crypt")) + + # schemes can be comma-sep string + cc = CryptContext(schemes=" des_crypt, md5_crypt, ") + self.assertEqual(cc.schemes(), ("des_crypt", "md5_crypt")) + + # schemes can be list of handlers + cc = CryptContext(schemes=[hash.des_crypt, hash.md5_crypt]) + self.assertEqual(cc.schemes(), ("des_crypt", "md5_crypt")) + + # scheme must be name or handler + self.assertRaises(TypeError, CryptContext, schemes=[uh.StaticHandler]) + + # handlers must have a name + class nameless(uh.StaticHandler): + name = None + self.assertRaises(ValueError, CryptContext, schemes=[nameless]) + + # names must be unique + class dummy_1(uh.StaticHandler): + name = 'dummy_1' + self.assertRaises(KeyError, CryptContext, schemes=[dummy_1, dummy_1]) + + # schemes not allowed per-category + self.assertRaises(KeyError, CryptContext, + admin__context__schemes=["md5_crypt"]) + + def test_22_deprecated(self): + """test 'deprecated' context option parsing""" + def getdep(ctx, category=None): + return [name for name in ctx.schemes() + if ctx.handler(name, category).deprecated] + + # no schemes - all deprecated values allowed + cc = CryptContext(deprecated=["md5_crypt"]) + cc.update(schemes=["md5_crypt", "des_crypt"]) + self.assertEqual(getdep(cc),["md5_crypt"]) + + # deprecated values allowed if subset of schemes + cc = CryptContext(deprecated=["md5_crypt"], schemes=["md5_crypt", "des_crypt"]) + self.assertEqual(getdep(cc), ["md5_crypt"]) + + # can be handler + # XXX: allow handlers in deprecated list? not for now. + self.assertRaises(TypeError, CryptContext, deprecated=[hash.md5_crypt], + schemes=["md5_crypt", "des_crypt"]) +## cc = CryptContext(deprecated=[hash.md5_crypt], schemes=["md5_crypt", "des_crypt"]) +## self.assertEqual(getdep(cc), ["md5_crypt"]) + + # comma sep list + cc = CryptContext(deprecated="md5_crypt,des_crypt", schemes=["md5_crypt", "des_crypt", "sha256_crypt"]) + self.assertEqual(getdep(cc), ["md5_crypt", "des_crypt"]) + + # values outside of schemes not allowed + self.assertRaises(KeyError, CryptContext, schemes=['des_crypt'], + deprecated=['md5_crypt']) + + # deprecating ALL schemes should cause ValueError + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt'], + deprecated=['des_crypt']) + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + admin__context__deprecated=['des_crypt', 'md5_crypt']) + + # deprecating explicit default scheme should cause ValueError + + # ... default listed as deprecated + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + default="md5_crypt", + deprecated="md5_crypt") + + # ... global default deprecated per-category + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + default="md5_crypt", + admin__context__deprecated="md5_crypt") + + # ... category default deprecated globally + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + admin__context__default="md5_crypt", + deprecated="md5_crypt") + + # ... 
category default deprecated in category + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + admin__context__default="md5_crypt", + admin__context__deprecated="md5_crypt") + + # category deplist should shadow default deplist + CryptContext( + schemes=['des_crypt', 'md5_crypt'], + deprecated="md5_crypt", + admin__context__default="md5_crypt", + admin__context__deprecated=[]) + + # wrong type + self.assertRaises(TypeError, CryptContext, deprecated=123) + + # deprecated per-category + cc = CryptContext(deprecated=["md5_crypt"], + schemes=["md5_crypt", "des_crypt"], + admin__context__deprecated=["des_crypt"], + ) + self.assertEqual(getdep(cc), ["md5_crypt"]) + self.assertEqual(getdep(cc, "user"), ["md5_crypt"]) + self.assertEqual(getdep(cc, "admin"), ["des_crypt"]) + + # blank per-category deprecated list, shadowing default list + cc = CryptContext(deprecated=["md5_crypt"], + schemes=["md5_crypt", "des_crypt"], + admin__context__deprecated=[], + ) + self.assertEqual(getdep(cc), ["md5_crypt"]) + self.assertEqual(getdep(cc, "user"), ["md5_crypt"]) + self.assertEqual(getdep(cc, "admin"), []) + + def test_23_default(self): + """test 'default' context option parsing""" + + # anything allowed if no schemes + self.assertEqual(CryptContext(default="md5_crypt").to_dict(), + dict(default="md5_crypt")) + + # default allowed if in scheme list + ctx = CryptContext(default="md5_crypt", schemes=["des_crypt", "md5_crypt"]) + self.assertEqual(ctx.default_scheme(), "md5_crypt") + + # default can be handler + # XXX: sure we want to allow this ? maybe deprecate in future. + ctx = CryptContext(default=hash.md5_crypt, schemes=["des_crypt", "md5_crypt"]) + self.assertEqual(ctx.default_scheme(), "md5_crypt") + + # implicit default should be first non-deprecated scheme + ctx = CryptContext(schemes=["des_crypt", "md5_crypt"]) + self.assertEqual(ctx.default_scheme(), "des_crypt") + ctx.update(deprecated="des_crypt") + self.assertEqual(ctx.default_scheme(), "md5_crypt") + + # error if not in scheme list + self.assertRaises(KeyError, CryptContext, schemes=['des_crypt'], + default='md5_crypt') + + # wrong type + self.assertRaises(TypeError, CryptContext, default=1) + + # per-category + ctx = CryptContext(default="des_crypt", + schemes=["des_crypt", "md5_crypt"], + admin__context__default="md5_crypt") + self.assertEqual(ctx.default_scheme(), "des_crypt") + self.assertEqual(ctx.default_scheme("user"), "des_crypt") + self.assertEqual(ctx.default_scheme("admin"), "md5_crypt") + + def test_24_vary_rounds(self): + """test 'vary_rounds' hash option parsing""" + def parse(v): + return CryptContext(all__vary_rounds=v).to_dict()['all__vary_rounds'] + + # floats should be preserved + self.assertEqual(parse(0.1), 0.1) + self.assertEqual(parse('0.1'), 0.1) + + # 'xx%' should be converted to float + self.assertEqual(parse('10%'), 0.1) + + # ints should be preserved + self.assertEqual(parse(1000), 1000) + self.assertEqual(parse('1000'), 1000) + + #=================================================================== + # inspection & serialization + #=================================================================== + + def assertHandlerDerivedFrom(self, handler, base, msg=None): + self.assertTrue(handler_derived_from(handler, base), msg=msg) + + def test_30_schemes(self): + """test schemes() method""" + # NOTE: also checked under test_21 + + # test empty + ctx = CryptContext() + self.assertEqual(ctx.schemes(), ()) + self.assertEqual(ctx.schemes(resolve=True), ()) + + # test sample 1 + ctx = 
CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.schemes(), tuple(self.sample_1_schemes)) + self.assertEqual(ctx.schemes(resolve=True, unconfigured=True), tuple(self.sample_1_handlers)) + for result, correct in zip(ctx.schemes(resolve=True), self.sample_1_handlers): + self.assertTrue(handler_derived_from(result, correct)) + + # test sample 2 + ctx = CryptContext(**self.sample_2_dict) + self.assertEqual(ctx.schemes(), ()) + + def test_31_default_scheme(self): + """test default_scheme() method""" + # NOTE: also checked under test_23 + + # test empty + ctx = CryptContext() + self.assertRaises(KeyError, ctx.default_scheme) + + # test sample 1 + ctx = CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.default_scheme(), "md5_crypt") + self.assertEqual(ctx.default_scheme(resolve=True, unconfigured=True), hash.md5_crypt) + self.assertHandlerDerivedFrom(ctx.default_scheme(resolve=True), hash.md5_crypt) + + # test sample 2 + ctx = CryptContext(**self.sample_2_dict) + self.assertRaises(KeyError, ctx.default_scheme) + + # test defaults to first in scheme + ctx = CryptContext(schemes=self.sample_1_schemes) + self.assertEqual(ctx.default_scheme(), "des_crypt") + + # categories tested under test_23 + + def test_32_handler(self): + """test handler() method""" + + # default for empty + ctx = CryptContext() + self.assertRaises(KeyError, ctx.handler) + self.assertRaises(KeyError, ctx.handler, "md5_crypt") + + # default for sample 1 + ctx = CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.handler(unconfigured=True), hash.md5_crypt) + self.assertHandlerDerivedFrom(ctx.handler(), hash.md5_crypt) + + # by name + self.assertEqual(ctx.handler("des_crypt", unconfigured=True), hash.des_crypt) + self.assertHandlerDerivedFrom(ctx.handler("des_crypt"), hash.des_crypt) + + # name not in schemes + self.assertRaises(KeyError, ctx.handler, "mysql323") + + # check handler() honors category default + ctx = CryptContext("sha256_crypt,md5_crypt", admin__context__default="md5_crypt") + self.assertEqual(ctx.handler(unconfigured=True), hash.sha256_crypt) + self.assertHandlerDerivedFrom(ctx.handler(), hash.sha256_crypt) + + self.assertEqual(ctx.handler(category="staff", unconfigured=True), hash.sha256_crypt) + self.assertHandlerDerivedFrom(ctx.handler(category="staff"), hash.sha256_crypt) + + self.assertEqual(ctx.handler(category="admin", unconfigured=True), hash.md5_crypt) + self.assertHandlerDerivedFrom(ctx.handler(category="staff"), hash.sha256_crypt) + + # test unicode category strings are accepted under py2 + if PY2: + self.assertEqual(ctx.handler(category=u("staff"), unconfigured=True), hash.sha256_crypt) + self.assertEqual(ctx.handler(category=u("admin"), unconfigured=True), hash.md5_crypt) + + def test_33_options(self): + """test internal _get_record_options() method""" + + def options(ctx, scheme, category=None): + return ctx._config._get_record_options_with_flag(scheme, category)[0] + + # this checks that (3 schemes, 3 categories) inherit options correctly. + # the 'user' category is not present in the options. 
+ cc4 = CryptContext( + truncate_error=True, + schemes = [ "sha512_crypt", "des_crypt", "bsdi_crypt"], + deprecated = ["sha512_crypt", "des_crypt"], + all__vary_rounds = 0.1, + bsdi_crypt__vary_rounds=0.2, + sha512_crypt__max_rounds = 20000, + admin__context__deprecated = [ "des_crypt", "bsdi_crypt" ], + admin__all__vary_rounds = 0.05, + admin__bsdi_crypt__vary_rounds=0.3, + admin__sha512_crypt__max_rounds = 40000, + ) + self.assertEqual(cc4._config.categories, ("admin",)) + + # + # sha512_crypt + # NOTE: 'truncate_error' shouldn't be passed along... + # + self.assertEqual(options(cc4, "sha512_crypt"), dict( + deprecated=True, + vary_rounds=0.1, # inherited from all__ + max_rounds=20000, + )) + + self.assertEqual(options(cc4, "sha512_crypt", "user"), dict( + deprecated=True, # unconfigured category inherits from default + vary_rounds=0.1, + max_rounds=20000, + )) + + self.assertEqual(options(cc4, "sha512_crypt", "admin"), dict( + # NOT deprecated - context option overridden per-category + vary_rounds=0.05, # global overridden per-category + max_rounds=40000, # overridden per-category + )) + + # + # des_crypt + # NOTE: vary_rounds shouldn't be passed along... + # + self.assertEqual(options(cc4, "des_crypt"), dict( + deprecated=True, + truncate_error=True, + )) + + self.assertEqual(options(cc4, "des_crypt", "user"), dict( + deprecated=True, # unconfigured category inherits from default + truncate_error=True, + )) + + self.assertEqual(options(cc4, "des_crypt", "admin"), dict( + deprecated=True, # unchanged though overridden + truncate_error=True, + )) + + # + # bsdi_crypt + # + self.assertEqual(options(cc4, "bsdi_crypt"), dict( + vary_rounds=0.2, # overridden from all__vary_rounds + )) + + self.assertEqual(options(cc4, "bsdi_crypt", "user"), dict( + vary_rounds=0.2, # unconfigured category inherits from default + )) + + self.assertEqual(options(cc4, "bsdi_crypt", "admin"), dict( + vary_rounds=0.3, + deprecated=True, # deprecation set per-category + )) + + def test_34_to_dict(self): + """test to_dict() method""" + # NOTE: this is tested all throughout this test case. + ctx = CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + self.assertEqual(ctx.to_dict(resolve=True), self.sample_1_resolved_dict) + + def test_35_to_string(self): + """test to_string() method""" + + # create ctx and serialize + ctx = CryptContext(**self.sample_1_dict) + dump = ctx.to_string() + + # check ctx->string returns canonical format. + # NOTE: ConfigParser for PY26 doesn't use OrderedDict, + # making to_string()'s ordering unpredictable... + # so we skip this test under PY26.
+ if not PY26: + self.assertEqual(dump, self.sample_1_unicode) + + # check ctx->string->ctx->dict returns original + ctx2 = CryptContext.from_string(dump) + self.assertEqual(ctx2.to_dict(), self.sample_1_dict) + + # test section kwd is honored + other = ctx.to_string(section="password-security") + self.assertEqual(other, dump.replace("[passlib]","[password-security]")) + + # test unmanaged handler warning + from passlib.tests.test_utils_handlers import UnsaltedHash + ctx3 = CryptContext([UnsaltedHash, "md5_crypt"]) + dump = ctx3.to_string() + self.assertRegex(dump, r"# NOTE: the 'unsalted_test_hash' handler\(s\)" + r" are not registered with Passlib") + + #=================================================================== + # password hash api + #=================================================================== + nonstring_vectors = [ + (None, {}), + (None, {"scheme": "des_crypt"}), + (1, {}), + ((), {}), + ] + + def test_40_basic(self): + """test basic hash/identify/verify functionality""" + handlers = [hash.md5_crypt, hash.des_crypt, hash.bsdi_crypt] + cc = CryptContext(handlers, bsdi_crypt__default_rounds=5) + + # run through handlers + for crypt in handlers: + h = cc.hash("test", scheme=crypt.name) + self.assertEqual(cc.identify(h), crypt.name) + self.assertEqual(cc.identify(h, resolve=True, unconfigured=True), crypt) + self.assertHandlerDerivedFrom(cc.identify(h, resolve=True), crypt) + self.assertTrue(cc.verify('test', h)) + self.assertFalse(cc.verify('notest', h)) + + # test default + h = cc.hash("test") + self.assertEqual(cc.identify(h), "md5_crypt") + + # test genhash + h = cc.genhash('secret', cc.genconfig()) + self.assertEqual(cc.identify(h), 'md5_crypt') + + h = cc.genhash('secret', cc.genconfig(), scheme='md5_crypt') + self.assertEqual(cc.identify(h), 'md5_crypt') + + self.assertRaises(ValueError, cc.genhash, 'secret', cc.genconfig(), scheme="des_crypt") + + def test_41_genconfig(self): + """test genconfig() method""" + cc = CryptContext(schemes=["md5_crypt", "phpass"], + phpass__ident="H", + phpass__default_rounds=7, + admin__phpass__ident="P", + ) + + # uses default scheme + self.assertTrue(cc.genconfig().startswith("$1$")) + + # override scheme + self.assertTrue(cc.genconfig(scheme="phpass").startswith("$H$5")) + + # category override + self.assertTrue(cc.genconfig(scheme="phpass", category="admin").startswith("$P$5")) + self.assertTrue(cc.genconfig(scheme="phpass", category="staff").startswith("$H$5")) + + # override scheme & custom settings + self.assertEqual( + cc.genconfig(scheme="phpass", salt='.'*8, rounds=8, ident='P'), + '$P$6........22zGEuacuPOqEpYPDeR0R/', # NOTE: config string generated w/ rounds=1 + ) + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # test unicode category strings are accepted under py2 + # this tests basic _get_record() used by hash/genhash/verify. + # we have to omit scheme=xxx so codepath is tested fully + if PY2: + c2 = cc.copy(default="phpass") + self.assertTrue(c2.genconfig(category=u("admin")).startswith("$P$5")) + self.assertTrue(c2.genconfig(category=u("staff")).startswith("$H$5")) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().genconfig) + self.assertRaises(KeyError, CryptContext().genconfig, scheme='md5_crypt') + + # bad scheme values + self.assertRaises(KeyError, cc.genconfig, scheme="fake") # XXX: should this be ValueError? 
+ self.assertRaises(TypeError, cc.genconfig, scheme=1, category='staff') + self.assertRaises(TypeError, cc.genconfig, scheme=1) + + # bad category values + self.assertRaises(TypeError, cc.genconfig, category=1) + + + def test_42_genhash(self): + """test genhash() method""" + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # rejects non-string secrets + cc = CryptContext(["des_crypt"]) + hash = cc.hash('stub') + for secret, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.genhash, secret, hash, **kwds) + + # rejects non-string config strings + cc = CryptContext(["des_crypt"]) + for config, kwds in self.nonstring_vectors: + if hash is None: + # NOTE: as of 1.7, genhash is just wrapper for hash(), + # and handles genhash(secret, None) fine. + continue + self.assertRaises(TypeError, cc.genhash, 'secret', config, **kwds) + + # rejects config=None, even if default scheme lacks config string + cc = CryptContext(["mysql323"]) + self.assertRaises(TypeError, cc.genhash, "stub", None) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().genhash, 'secret', 'hash') + + # bad scheme values + self.assertRaises(KeyError, cc.genhash, 'secret', hash, scheme="fake") # XXX: should this be ValueError? + self.assertRaises(TypeError, cc.genhash, 'secret', hash, scheme=1) + + # bad category values + self.assertRaises(TypeError, cc.genconfig, 'secret', hash, category=1) + + def test_43_hash(self,): + """test hash() method""" + # XXX: what more can we test here that isn't deprecated + # or handled under another test (e.g. context kwds?) + + # respects rounds + cc = CryptContext(**self.sample_4_dict) + hash = cc.hash("password") + self.assertTrue(hash.startswith("$5$rounds=3000$")) + self.assertTrue(cc.verify("password", hash)) + self.assertFalse(cc.verify("passwordx", hash)) + + # make default > max throws error if attempted + # XXX: move this to copy() test? + self.assertRaises(ValueError, cc.copy, + sha256_crypt__default_rounds=4000) + + # rejects non-string secrets + cc = CryptContext(["des_crypt"]) + for secret, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.hash, secret, **kwds) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().hash, 'secret') + + # bad category values + self.assertRaises(TypeError, cc.hash, 'secret', category=1) + + def test_43_hash_legacy(self, use_16_legacy=False): + """test hash() method -- legacy 'scheme' and settings keywords""" + cc = CryptContext(**self.sample_4_dict) + + # TODO: should migrate these tests elsewhere, or remove them. + # can be replaced with following equivalent: + # + # def wrapper(secret, scheme=None, category=None, **kwds): + # handler = cc.handler(scheme, category) + # if kwds: + # handler = handler.using(**kwds) + # return handler.hash(secret) + # + # need to make sure bits being tested here are tested + # under the tests for the equivalent methods called above, + # and then discard the rest of these under 2.0. 
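# A hedged, standalone version of the "following equivalent" sketched in the TODO
# above; the helper name hash_with_settings() is our own, but handler(), .using(),
# and .hash() are the same calls exercised elsewhere in this suite.
def hash_with_settings(cc, secret, scheme=None, category=None, **settings):
    # look up the configured handler for this scheme/category,
    # apply any per-call settings via .using(), then hash normally
    handler = cc.handler(scheme, category)
    if settings:
        handler = handler.using(**settings)
    return handler.hash(secret)

# e.g. hash_with_settings(cc, "password", scheme="phpass", salt="." * 8) should
# produce the same digest as the deprecated cc.hash(..., scheme=..., salt=...) calls below.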
+ + # hash specific settings + with self.assertWarningList(["passing settings to.*is deprecated"]): + self.assertEqual( + cc.hash("password", scheme="phpass", salt='.'*8), + '$H$5........De04R5Egz0aq8Tf.1eVhY/', + ) + with self.assertWarningList(["passing settings to.*is deprecated"]): + self.assertEqual( + cc.hash("password", scheme="phpass", salt='.'*8, ident="P"), + '$P$5........De04R5Egz0aq8Tf.1eVhY/', + ) + + # NOTE: more thorough job of rounds limits done below. + + # min rounds + with self.assertWarningList(["passing settings to.*is deprecated"]): + self.assertEqual( + cc.hash("password", rounds=1999, salt="nacl"), + '$5$rounds=1999$nacl$nmfwJIxqj0csloAAvSER0B8LU0ERCAbhmMug4Twl609', + ) + + with self.assertWarningList(["passing settings to.*is deprecated"]): + self.assertEqual( + cc.hash("password", rounds=2001, salt="nacl"), + '$5$rounds=2001$nacl$8PdeoPL4aXQnJ0woHhqgIw/efyfCKC2WHneOpnvF.31' + ) + # NOTE: max rounds, etc tested in genconfig() + + # bad scheme values + self.assertRaises(KeyError, cc.hash, 'secret', scheme="fake") # XXX: should this be ValueError? + self.assertRaises(TypeError, cc.hash, 'secret', scheme=1) + + def test_44_identify(self): + """test identify() border cases""" + handlers = ["md5_crypt", "des_crypt", "bsdi_crypt"] + cc = CryptContext(handlers, bsdi_crypt__default_rounds=5) + + # check unknown hash + self.assertEqual(cc.identify('$9$232323123$1287319827'), None) + self.assertRaises(ValueError, cc.identify, '$9$232323123$1287319827', required=True) + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # rejects non-string hashes + cc = CryptContext(["des_crypt"]) + for hash, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.identify, hash, **kwds) + + # throws error without schemes + cc = CryptContext() + self.assertIs(cc.identify('hash'), None) + self.assertRaises(KeyError, cc.identify, 'hash', required=True) + + # bad category values + self.assertRaises(TypeError, cc.identify, None, category=1) + + def test_45_verify(self): + """test verify() scheme kwd""" + handlers = ["md5_crypt", "des_crypt", "bsdi_crypt"] + cc = CryptContext(handlers, bsdi_crypt__default_rounds=5) + + h = hash.md5_crypt.hash("test") + + # check base verify + self.assertTrue(cc.verify("test", h)) + self.assertTrue(not cc.verify("notest", h)) + + # check verify using right alg + self.assertTrue(cc.verify('test', h, scheme='md5_crypt')) + self.assertTrue(not cc.verify('notest', h, scheme='md5_crypt')) + + # check verify using wrong alg + self.assertRaises(ValueError, cc.verify, 'test', h, scheme='bsdi_crypt') + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # unknown hash should throw error + self.assertRaises(ValueError, cc.verify, 'stub', '$6$232323123$1287319827') + + # rejects non-string secrets + cc = CryptContext(["des_crypt"]) + h = refhash = cc.hash('stub') + for secret, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.verify, secret, h, **kwds) + + # always treat hash=None as False + self.assertFalse(cc.verify(secret, None)) + + # rejects non-string hashes + cc = CryptContext(["des_crypt"]) + for h, kwds in self.nonstring_vectors: + if h is None: + continue + self.assertRaises(TypeError, cc.verify, 'secret', h, **kwds) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().verify, 'secret', 'hash') + + # bad scheme 
values + self.assertRaises(KeyError, cc.verify, 'secret', refhash, scheme="fake") # XXX: should this be ValueError? + self.assertRaises(TypeError, cc.verify, 'secret', refhash, scheme=1) + + # bad category values + self.assertRaises(TypeError, cc.verify, 'secret', refhash, category=1) + + def test_46_needs_update(self): + """test needs_update() method""" + cc = CryptContext(**self.sample_4_dict) + + # check deprecated scheme + self.assertTrue(cc.needs_update('9XXD4trGYeGJA')) + self.assertFalse(cc.needs_update('$1$J8HC2RCr$HcmM.7NxB2weSvlw2FgzU0')) + + # check min rounds + self.assertTrue(cc.needs_update('$5$rounds=1999$jD81UCoo.zI.UETs$Y7qSTQ6mTiU9qZB4fRr43wRgQq4V.5AAf7F97Pzxey/')) + self.assertFalse(cc.needs_update('$5$rounds=2000$228SSRje04cnNCaQ$YGV4RYu.5sNiBvorQDlO0WWQjyJVGKBcJXz3OtyQ2u8')) + + # check max rounds + self.assertFalse(cc.needs_update('$5$rounds=3000$fS9iazEwTKi7QPW4$VasgBC8FqlOvD7x2HhABaMXCTh9jwHclPA9j5YQdns.')) + self.assertTrue(cc.needs_update('$5$rounds=3001$QlFHHifXvpFX4PLs$/0ekt7lSs/lOikSerQ0M/1porEHxYq7W/2hdFpxA3fA')) + + #-------------------------------------------------------------- + # test hash.needs_update() interface + #-------------------------------------------------------------- + check_state = [] + class dummy(uh.StaticHandler): + name = 'dummy' + _hash_prefix = '@' + + @classmethod + def needs_update(cls, hash, secret=None): + check_state.append((hash, secret)) + return secret == "nu" + + def _calc_checksum(self, secret): + from hashlib import md5 + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(md5(secret).hexdigest()) + + # calling needs_update should query callback + ctx = CryptContext([dummy]) + hash = refhash = dummy.hash("test") + self.assertFalse(ctx.needs_update(hash)) + self.assertEqual(check_state, [(hash,None)]) + del check_state[:] + + # now with a password + self.assertFalse(ctx.needs_update(hash, secret='bob')) + self.assertEqual(check_state, [(hash,'bob')]) + del check_state[:] + + # now when it returns True + self.assertTrue(ctx.needs_update(hash, secret='nu')) + self.assertEqual(check_state, [(hash,'nu')]) + del check_state[:] + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # rejects non-string hashes + cc = CryptContext(["des_crypt"]) + for hash, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.needs_update, hash, **kwds) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().needs_update, 'hash') + + # bad scheme values + self.assertRaises(KeyError, cc.needs_update, refhash, scheme="fake") # XXX: should this be ValueError? 
+ self.assertRaises(TypeError, cc.needs_update, refhash, scheme=1) + + # bad category values + self.assertRaises(TypeError, cc.needs_update, refhash, category=1) + + def test_47_verify_and_update(self): + """test verify_and_update()""" + cc = CryptContext(**self.sample_4_dict) + + # create some hashes + h1 = cc.handler("des_crypt").hash("password") + h2 = cc.handler("sha256_crypt").hash("password") + + # check bad password, deprecated hash + ok, new_hash = cc.verify_and_update("wrongpass", h1) + self.assertFalse(ok) + self.assertIs(new_hash, None) + + # check bad password, good hash + ok, new_hash = cc.verify_and_update("wrongpass", h2) + self.assertFalse(ok) + self.assertIs(new_hash, None) + + # check right password, deprecated hash + ok, new_hash = cc.verify_and_update("password", h1) + self.assertTrue(ok) + self.assertTrue(cc.identify(new_hash), "sha256_crypt") + + # check right password, good hash + ok, new_hash = cc.verify_and_update("password", h2) + self.assertTrue(ok) + self.assertIs(new_hash, None) + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # rejects non-string secrets + cc = CryptContext(["des_crypt"]) + hash = refhash = cc.hash('stub') + for secret, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.verify_and_update, secret, hash, **kwds) + + # always treat hash=None as False + self.assertEqual(cc.verify_and_update(secret, None), (False, None)) + + # rejects non-string hashes + cc = CryptContext(["des_crypt"]) + for hash, kwds in self.nonstring_vectors: + if hash is None: + continue + self.assertRaises(TypeError, cc.verify_and_update, 'secret', hash, **kwds) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().verify_and_update, 'secret', 'hash') + + # bad scheme values + self.assertRaises(KeyError, cc.verify_and_update, 'secret', refhash, scheme="fake") # XXX: should this be ValueError? + self.assertRaises(TypeError, cc.verify_and_update, 'secret', refhash, scheme=1) + + # bad category values + self.assertRaises(TypeError, cc.verify_and_update, 'secret', refhash, category=1) + + def test_48_context_kwds(self): + """hash(), verify(), and verify_and_update() -- discard unused context keywords""" + + # setup test case + # NOTE: postgres_md5 hash supports 'user' context kwd, which is used for this test. 
+ from passlib.hash import des_crypt, md5_crypt, postgres_md5 + des_hash = des_crypt.hash("stub") + pg_root_hash = postgres_md5.hash("stub", user="root") + pg_admin_hash = postgres_md5.hash("stub", user="admin") + + #------------------------------------------------------------ + # case 1: contextual kwds not supported by any hash in CryptContext + #------------------------------------------------------------ + cc1 = CryptContext([des_crypt, md5_crypt]) + self.assertEqual(cc1.context_kwds, set()) + + # des_scrypt should work w/o any contextual kwds + self.assertTrue(des_crypt.identify(cc1.hash("stub")), "des_crypt") + self.assertTrue(cc1.verify("stub", des_hash)) + self.assertEqual(cc1.verify_and_update("stub", des_hash), (True, None)) + + # des_crypt should throw error due to unknown context keyword + with self.assertWarningList(["passing settings to.*is deprecated"]): + self.assertRaises(TypeError, cc1.hash, "stub", user="root") + self.assertRaises(TypeError, cc1.verify, "stub", des_hash, user="root") + self.assertRaises(TypeError, cc1.verify_and_update, "stub", des_hash, user="root") + + #------------------------------------------------------------ + # case 2: at least one contextual kwd supported by non-default hash + #------------------------------------------------------------ + cc2 = CryptContext([des_crypt, postgres_md5]) + self.assertEqual(cc2.context_kwds, set(["user"])) + + # verify des_crypt works w/o "user" kwd + self.assertTrue(des_crypt.identify(cc2.hash("stub")), "des_crypt") + self.assertTrue(cc2.verify("stub", des_hash)) + self.assertEqual(cc2.verify_and_update("stub", des_hash), (True, None)) + + # verify des_crypt ignores "user" kwd + self.assertTrue(des_crypt.identify(cc2.hash("stub", user="root")), "des_crypt") + self.assertTrue(cc2.verify("stub", des_hash, user="root")) + self.assertEqual(cc2.verify_and_update("stub", des_hash, user="root"), (True, None)) + + # verify error with unknown kwd + with self.assertWarningList(["passing settings to.*is deprecated"]): + self.assertRaises(TypeError, cc2.hash, "stub", badkwd="root") + self.assertRaises(TypeError, cc2.verify, "stub", des_hash, badkwd="root") + self.assertRaises(TypeError, cc2.verify_and_update, "stub", des_hash, badkwd="root") + + #------------------------------------------------------------ + # case 3: at least one contextual kwd supported by default hash + #------------------------------------------------------------ + cc3 = CryptContext([postgres_md5, des_crypt], deprecated="auto") + self.assertEqual(cc3.context_kwds, set(["user"])) + + # postgres_md5 should have error w/o context kwd + self.assertRaises(TypeError, cc3.hash, "stub") + self.assertRaises(TypeError, cc3.verify, "stub", pg_root_hash) + self.assertRaises(TypeError, cc3.verify_and_update, "stub", pg_root_hash) + + # postgres_md5 should work w/ context kwd + self.assertEqual(cc3.hash("stub", user="root"), pg_root_hash) + self.assertTrue(cc3.verify("stub", pg_root_hash, user="root")) + self.assertEqual(cc3.verify_and_update("stub", pg_root_hash, user="root"), (True, None)) + + # verify_and_update() should fail against wrong user + self.assertEqual(cc3.verify_and_update("stub", pg_root_hash, user="admin"), (False, None)) + + # verify_and_update() should pass all context kwds through when rehashing + self.assertEqual(cc3.verify_and_update("stub", des_hash, user="root"), + (True, pg_root_hash)) + + #=================================================================== + # rounds options + 
#=================================================================== + + # TODO: now that rounds generation has moved out of _CryptRecord to HasRounds, + # this should just test that we're passing right options to handler.using(), + # and that resulting handler has right settings. + # Can then just let HasRounds tests (which are a copy of this) deal with things. + + # NOTE: the follow tests check how _CryptRecord handles + # the min/max/default/vary_rounds options, via the output of + # genconfig(). it's assumed hash() takes the same codepath. + + def test_50_rounds_limits(self): + """test rounds limits""" + cc = CryptContext(schemes=["sha256_crypt"], + sha256_crypt__min_rounds=2000, + sha256_crypt__max_rounds=3000, + sha256_crypt__default_rounds=2500, + ) + + # stub digest returned by sha256_crypt's genconfig calls.. + STUB = '...........................................' + + #-------------------------------------------------- + # settings should have been applied to custom handler, + # it should take care of the rest + #-------------------------------------------------- + custom_handler = cc._get_record("sha256_crypt", None) + self.assertEqual(custom_handler.min_desired_rounds, 2000) + self.assertEqual(custom_handler.max_desired_rounds, 3000) + self.assertEqual(custom_handler.default_rounds, 2500) + + #-------------------------------------------------- + # min_rounds + #-------------------------------------------------- + + # set below handler minimum + with self.assertWarningList([PasslibHashWarning]*2): + c2 = cc.copy(sha256_crypt__min_rounds=500, sha256_crypt__max_rounds=None, + sha256_crypt__default_rounds=500) + self.assertEqual(c2.genconfig(salt="nacl"), "$5$rounds=1000$nacl$" + STUB) + + # below policy minimum + # NOTE: formerly issued a warning in passlib 1.6, now just a wrapper for .replace() + with self.assertWarningList([]): + self.assertEqual( + cc.genconfig(rounds=1999, salt="nacl"), '$5$rounds=1999$nacl$' + STUB) + + # equal to policy minimum + self.assertEqual( + cc.genconfig(rounds=2000, salt="nacl"), '$5$rounds=2000$nacl$' + STUB) + + # above policy minimum + self.assertEqual( + cc.genconfig(rounds=2001, salt="nacl"), '$5$rounds=2001$nacl$' + STUB) + + #-------------------------------------------------- + # max rounds + #-------------------------------------------------- + + # set above handler max + with self.assertWarningList([PasslibHashWarning]*2): + c2 = cc.copy(sha256_crypt__max_rounds=int(1e9)+500, sha256_crypt__min_rounds=None, + sha256_crypt__default_rounds=int(1e9)+500) + + self.assertEqual(c2.genconfig(salt="nacl"), "$5$rounds=999999999$nacl$" + STUB) + + # above policy max + # NOTE: formerly issued a warning in passlib 1.6, now just a wrapper for .using() + with self.assertWarningList([]): + self.assertEqual( + cc.genconfig(rounds=3001, salt="nacl"), '$5$rounds=3001$nacl$' + STUB) + + # equal policy max + self.assertEqual( + cc.genconfig(rounds=3000, salt="nacl"), '$5$rounds=3000$nacl$' + STUB) + + # below policy max + self.assertEqual( + cc.genconfig(rounds=2999, salt="nacl"), '$5$rounds=2999$nacl$' + STUB) + + #-------------------------------------------------- + # default_rounds + #-------------------------------------------------- + + # explicit default rounds + self.assertEqual(cc.genconfig(salt="nacl"), '$5$rounds=2500$nacl$' + STUB) + + # fallback default rounds - use handler's + df = hash.sha256_crypt.default_rounds + c2 = cc.copy(sha256_crypt__default_rounds=None, sha256_crypt__max_rounds=df<<1) + self.assertEqual(c2.genconfig(salt="nacl"), 
'$5$rounds=%d$nacl$%s' % (df, STUB)) + + # fallback default rounds - use handler's, but clipped to max rounds + c2 = cc.copy(sha256_crypt__default_rounds=None, sha256_crypt__max_rounds=3000) + self.assertEqual(c2.genconfig(salt="nacl"), '$5$rounds=3000$nacl$' + STUB) + + # TODO: test default falls back to mx / mn if handler has no default. + + # default rounds - out of bounds + self.assertRaises(ValueError, cc.copy, sha256_crypt__default_rounds=1999) + cc.copy(sha256_crypt__default_rounds=2000) + cc.copy(sha256_crypt__default_rounds=3000) + self.assertRaises(ValueError, cc.copy, sha256_crypt__default_rounds=3001) + + #-------------------------------------------------- + # border cases + #-------------------------------------------------- + + # invalid min/max bounds + c2 = CryptContext(schemes=["sha256_crypt"]) + # NOTE: as of v1.7, these are clipped w/ a warning instead... + # self.assertRaises(ValueError, c2.copy, sha256_crypt__min_rounds=-1) + # self.assertRaises(ValueError, c2.copy, sha256_crypt__max_rounds=-1) + self.assertRaises(ValueError, c2.copy, sha256_crypt__min_rounds=2000, + sha256_crypt__max_rounds=1999) + + # test bad values + self.assertRaises(ValueError, CryptContext, sha256_crypt__min_rounds='x') + self.assertRaises(ValueError, CryptContext, sha256_crypt__max_rounds='x') + self.assertRaises(ValueError, CryptContext, all__vary_rounds='x') + self.assertRaises(ValueError, CryptContext, sha256_crypt__default_rounds='x') + + # test bad types rejected + bad = datetime.datetime.now() # picked cause can't be compared to int + self.assertRaises(TypeError, CryptContext, "sha256_crypt", sha256_crypt__min_rounds=bad) + self.assertRaises(TypeError, CryptContext, "sha256_crypt", sha256_crypt__max_rounds=bad) + self.assertRaises(TypeError, CryptContext, "sha256_crypt", all__vary_rounds=bad) + self.assertRaises(TypeError, CryptContext, "sha256_crypt", sha256_crypt__default_rounds=bad) + + def test_51_linear_vary_rounds(self): + """test linear vary rounds""" + cc = CryptContext(schemes=["sha256_crypt"], + sha256_crypt__min_rounds=1995, + sha256_crypt__max_rounds=2005, + sha256_crypt__default_rounds=2000, + ) + + # test negative + self.assertRaises(ValueError, cc.copy, all__vary_rounds=-1) + self.assertRaises(ValueError, cc.copy, all__vary_rounds="-1%") + self.assertRaises(ValueError, cc.copy, all__vary_rounds="101%") + + # test static + c2 = cc.copy(all__vary_rounds=0) + self.assertEqual(c2._get_record("sha256_crypt", None).vary_rounds, 0) + self.assert_rounds_range(c2, "sha256_crypt", 2000, 2000) + + c2 = cc.copy(all__vary_rounds="0%") + self.assertEqual(c2._get_record("sha256_crypt", None).vary_rounds, 0) + self.assert_rounds_range(c2, "sha256_crypt", 2000, 2000) + + # test absolute + c2 = cc.copy(all__vary_rounds=1) + self.assertEqual(c2._get_record("sha256_crypt", None).vary_rounds, 1) + self.assert_rounds_range(c2, "sha256_crypt", 1999, 2001) + c2 = cc.copy(all__vary_rounds=100) + self.assertEqual(c2._get_record("sha256_crypt", None).vary_rounds, 100) + self.assert_rounds_range(c2, "sha256_crypt", 1995, 2005) + + # test relative + c2 = cc.copy(all__vary_rounds="0.1%") + self.assertEqual(c2._get_record("sha256_crypt", None).vary_rounds, 0.001) + self.assert_rounds_range(c2, "sha256_crypt", 1998, 2002) + c2 = cc.copy(all__vary_rounds="100%") + self.assertEqual(c2._get_record("sha256_crypt", None).vary_rounds, 1.0) + self.assert_rounds_range(c2, "sha256_crypt", 1995, 2005) + + def test_52_log2_vary_rounds(self): + """test log2 vary rounds""" + cc = CryptContext(schemes=["bcrypt"], + 
bcrypt__min_rounds=15, + bcrypt__max_rounds=25, + bcrypt__default_rounds=20, + ) + + # test negative + self.assertRaises(ValueError, cc.copy, all__vary_rounds=-1) + self.assertRaises(ValueError, cc.copy, all__vary_rounds="-1%") + self.assertRaises(ValueError, cc.copy, all__vary_rounds="101%") + + # test static + c2 = cc.copy(all__vary_rounds=0) + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 0) + self.assert_rounds_range(c2, "bcrypt", 20, 20) + + c2 = cc.copy(all__vary_rounds="0%") + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 0) + self.assert_rounds_range(c2, "bcrypt", 20, 20) + + # test absolute + c2 = cc.copy(all__vary_rounds=1) + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 1) + self.assert_rounds_range(c2, "bcrypt", 19, 21) + c2 = cc.copy(all__vary_rounds=100) + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 100) + self.assert_rounds_range(c2, "bcrypt", 15, 25) + + # test relative - should shift over at 50% mark + c2 = cc.copy(all__vary_rounds="1%") + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 0.01) + self.assert_rounds_range(c2, "bcrypt", 20, 20) + + c2 = cc.copy(all__vary_rounds="49%") + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 0.49) + self.assert_rounds_range(c2, "bcrypt", 20, 20) + + c2 = cc.copy(all__vary_rounds="50%") + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 0.5) + self.assert_rounds_range(c2, "bcrypt", 19, 20) + + c2 = cc.copy(all__vary_rounds="100%") + self.assertEqual(c2._get_record("bcrypt", None).vary_rounds, 1.0) + self.assert_rounds_range(c2, "bcrypt", 15, 21) + + def assert_rounds_range(self, context, scheme, lower, upper): + """helper to check vary_rounds covers specified range""" + # NOTE: this runs enough times the min and max *should* be hit, + # though there's a faint chance it will randomly fail. 
+ handler = context.handler(scheme) + salt = handler.default_salt_chars[0:1] * handler.max_salt_size + seen = set() + for i in irange(300): + h = context.genconfig(scheme, salt=salt) + r = handler.from_string(h).rounds + seen.add(r) + self.assertEqual(min(seen), lower, "vary_rounds had wrong lower limit:") + self.assertEqual(max(seen), upper, "vary_rounds had wrong upper limit:") + + #=================================================================== + # harden_verify / min_verify_time + #=================================================================== + def test_harden_verify_parsing(self): + """harden_verify -- parsing""" + warnings.filterwarnings("ignore", ".*harden_verify.*", + category=DeprecationWarning) + + # valid values + ctx = CryptContext(schemes=["sha256_crypt"]) + self.assertEqual(ctx.harden_verify, None) + self.assertEqual(ctx.using(harden_verify="").harden_verify, None) + self.assertEqual(ctx.using(harden_verify="true").harden_verify, None) + self.assertEqual(ctx.using(harden_verify="false").harden_verify, None) + + def test_dummy_verify(self): + """ + dummy_verify() method + """ + # check dummy_verify() takes expected time + expected = 0.05 + accuracy = 0.2 + handler = DelayHash.using() + handler.delay = expected + ctx = CryptContext(schemes=[handler]) + ctx.dummy_verify() # prime the memoized helpers + elapsed, _ = time_call(ctx.dummy_verify) + self.assertAlmostEqual(elapsed, expected, delta=expected * accuracy) + + # TODO: test dummy_verify() invoked by .verify() when hash is None, + # and same for .verify_and_update() + + #=================================================================== + # feature tests + #=================================================================== + def test_61_autodeprecate(self): + """test deprecated='auto' is handled correctly""" + + def getstate(ctx, category=None): + return [ctx.handler(scheme, category).deprecated for scheme in ctx.schemes()] + + # correctly reports default + ctx = CryptContext("sha256_crypt,md5_crypt,des_crypt", deprecated="auto") + self.assertEqual(getstate(ctx, None), [False, True, True]) + self.assertEqual(getstate(ctx, "admin"), [False, True, True]) + + # correctly reports changed default + ctx.update(default="md5_crypt") + self.assertEqual(getstate(ctx, None), [True, False, True]) + self.assertEqual(getstate(ctx, "admin"), [True, False, True]) + + # category default is handled correctly + ctx.update(admin__context__default="des_crypt") + self.assertEqual(getstate(ctx, None), [True, False, True]) + self.assertEqual(getstate(ctx, "admin"), [True, True, False]) + + # handles 1 scheme + ctx = CryptContext(["sha256_crypt"], deprecated="auto") + self.assertEqual(getstate(ctx, None), [False]) + self.assertEqual(getstate(ctx, "admin"), [False]) + + # disallow auto & other deprecated schemes at same time. 
+ self.assertRaises(ValueError, CryptContext, "sha256_crypt,md5_crypt", + deprecated="auto,md5_crypt") + self.assertRaises(ValueError, CryptContext, "sha256_crypt,md5_crypt", + deprecated="md5_crypt,auto") + + def test_disabled_hashes(self): + """disabled hash support""" + # + # init ref info + # + from passlib.exc import UnknownHashError + from passlib.hash import md5_crypt, unix_disabled + + ctx = CryptContext(["des_crypt"]) + ctx2 = CryptContext(["des_crypt", "unix_disabled"]) + h_ref = ctx.hash("foo") + h_other = md5_crypt.hash('foo') + + # + # ctx.disable() + # + + # test w/o disabled hash support + self.assertRaisesRegex(RuntimeError, "no disabled hasher present", + ctx.disable) + self.assertRaisesRegex(RuntimeError, "no disabled hasher present", + ctx.disable, h_ref) + self.assertRaisesRegex(RuntimeError, "no disabled hasher present", + ctx.disable, h_other) + + # test w/ disabled hash support + h_dis = ctx2.disable() + self.assertEqual(h_dis, unix_disabled.default_marker) + h_dis_ref = ctx2.disable(h_ref) + self.assertEqual(h_dis_ref, unix_disabled.default_marker + h_ref) + + h_dis_other = ctx2.disable(h_other) + self.assertEqual(h_dis_other, unix_disabled.default_marker + h_other) + + # don't double-wrap existing disabled hash + self.assertEqual(ctx2.disable(h_dis_ref), h_dis_ref) + + # + # ctx.is_enabled() + # + + # test w/o disabled hash support + self.assertTrue(ctx.is_enabled(h_ref)) + self.assertRaises(UnknownHashError, ctx.is_enabled, h_other) + self.assertRaises(UnknownHashError, ctx.is_enabled, h_dis) + self.assertRaises(UnknownHashError, ctx.is_enabled, h_dis_ref) + + # test w/ disabled hash support + self.assertTrue(ctx2.is_enabled(h_ref)) + self.assertRaises(UnknownHashError, ctx.is_enabled, h_other) + self.assertFalse(ctx2.is_enabled(h_dis)) + self.assertFalse(ctx2.is_enabled(h_dis_ref)) + + # + # ctx.enable() + # + + # test w/o disabled hash support + self.assertRaises(UnknownHashError, ctx.enable, "") + self.assertRaises(TypeError, ctx.enable, None) + self.assertEqual(ctx.enable(h_ref), h_ref) + self.assertRaises(UnknownHashError, ctx.enable, h_other) + self.assertRaises(UnknownHashError, ctx.enable, h_dis) + self.assertRaises(UnknownHashError, ctx.enable, h_dis_ref) + + # test w/ disabled hash support + self.assertRaises(UnknownHashError, ctx.enable, "") + self.assertRaises(TypeError, ctx2.enable, None) + self.assertEqual(ctx2.enable(h_ref), h_ref) + self.assertRaises(UnknownHashError, ctx2.enable, h_other) + self.assertRaisesRegex(ValueError, "cannot restore original hash", + ctx2.enable, h_dis) + self.assertEqual(ctx2.enable(h_dis_ref), h_ref) + + #=================================================================== + # eoc + #=================================================================== + +import hashlib, time + +class DelayHash(uh.StaticHandler): + """dummy hasher which delays by specified amount""" + name = "delay_hash" + checksum_chars = uh.LOWER_HEX_CHARS + checksum_size = 40 + delay = 0 + _hash_prefix = u("$x$") + + def _calc_checksum(self, secret): + time.sleep(self.delay) + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(hashlib.sha1(b"prefix" + secret).hexdigest()) + +#============================================================================= +# LazyCryptContext +#============================================================================= +class dummy_2(uh.StaticHandler): + name = "dummy_2" + +class LazyCryptContextTest(TestCase): + descriptionPrefix = "LazyCryptContext" + + def setUp(self): + # make sure 
this isn't registered before OR after + unload_handler_name("dummy_2") + self.addCleanup(unload_handler_name, "dummy_2") + + def test_kwd_constructor(self): + """test plain kwds""" + self.assertFalse(has_crypt_handler("dummy_2")) + register_crypt_handler_path("dummy_2", "passlib.tests.test_context") + + cc = LazyCryptContext(iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"]) + + self.assertFalse(has_crypt_handler("dummy_2", True)) + + self.assertEqual(cc.schemes(), ("dummy_2", "des_crypt")) + self.assertTrue(cc.handler("des_crypt").deprecated) + + self.assertTrue(has_crypt_handler("dummy_2", True)) + + def test_callable_constructor(self): + self.assertFalse(has_crypt_handler("dummy_2")) + register_crypt_handler_path("dummy_2", "passlib.tests.test_context") + + def onload(flag=False): + self.assertTrue(flag) + return dict(schemes=iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"]) + + cc = LazyCryptContext(onload=onload, flag=True) + + self.assertFalse(has_crypt_handler("dummy_2", True)) + + self.assertEqual(cc.schemes(), ("dummy_2", "des_crypt")) + self.assertTrue(cc.handler("des_crypt").deprecated) + + self.assertTrue(has_crypt_handler("dummy_2", True)) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_context_deprecated.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_context_deprecated.py new file mode 100644 index 000000000..0f76624c7 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_context_deprecated.py @@ -0,0 +1,743 @@ +"""tests for passlib.context + +this file is a clone of the 1.5 test_context.py, +containing the tests using the legacy CryptPolicy api. +it's being preserved here to ensure the old api doesn't break +(until Passlib 1.8, when this and the legacy api will be removed). 
+""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +from logging import getLogger +import os +import warnings +# site +try: + from pkg_resources import resource_filename +except ImportError: + resource_filename = None +# pkg +from passlib import hash +from passlib.context import CryptContext, CryptPolicy, LazyCryptContext +from passlib.utils import to_bytes, to_unicode +import passlib.utils.handlers as uh +from passlib.tests.utils import TestCase, set_file +from passlib.registry import (register_crypt_handler_path, + _has_crypt_handler as has_crypt_handler, + _unload_handler_name as unload_handler_name, + ) +# module +log = getLogger(__name__) + +#============================================================================= +# +#============================================================================= +class CryptPolicyTest(TestCase): + """test CryptPolicy object""" + + # TODO: need to test user categories w/in all this + + descriptionPrefix = "CryptPolicy" + + #=================================================================== + # sample crypt policies used for testing + #=================================================================== + + #--------------------------------------------------------------- + # sample 1 - average config file + #--------------------------------------------------------------- + # NOTE: copy of this is stored in file passlib/tests/sample_config_1s.cfg + sample_config_1s = """\ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +all.vary_rounds = 10%% +bsdi_crypt.max_rounds = 30000 +bsdi_crypt.default_rounds = 25000 +sha512_crypt.max_rounds = 50000 +sha512_crypt.min_rounds = 40000 +""" + sample_config_1s_path = os.path.abspath(os.path.join( + os.path.dirname(__file__), "sample_config_1s.cfg")) + if not os.path.exists(sample_config_1s_path) and resource_filename: + # in case we're zipped up in an egg. + sample_config_1s_path = resource_filename("passlib.tests", + "sample_config_1s.cfg") + + # make sure sample_config_1s uses \n linesep - tests rely on this + assert sample_config_1s.startswith("[passlib]\nschemes") + + sample_config_1pd = dict( + schemes = [ "des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"], + default = "md5_crypt", + # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + bsdi_crypt__max_rounds = 30000, + bsdi_crypt__default_rounds = 25000, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds = 40000, + ) + + sample_config_1pid = { + "schemes": "des_crypt, md5_crypt, bsdi_crypt, sha512_crypt", + "default": "md5_crypt", + # NOTE: not maintaining backwards compat for rendering to "10%" + "all.vary_rounds": 0.1, + "bsdi_crypt.max_rounds": 30000, + "bsdi_crypt.default_rounds": 25000, + "sha512_crypt.max_rounds": 50000, + "sha512_crypt.min_rounds": 40000, + } + + sample_config_1prd = dict( + schemes = [ hash.des_crypt, hash.md5_crypt, hash.bsdi_crypt, hash.sha512_crypt], + default = "md5_crypt", # NOTE: passlib <= 1.5 was handler obj. 
+ # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + bsdi_crypt__max_rounds = 30000, + bsdi_crypt__default_rounds = 25000, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds = 40000, + ) + + #--------------------------------------------------------------- + # sample 2 - partial policy & result of overlay on sample 1 + #--------------------------------------------------------------- + sample_config_2s = """\ +[passlib] +bsdi_crypt.min_rounds = 29000 +bsdi_crypt.max_rounds = 35000 +bsdi_crypt.default_rounds = 31000 +sha512_crypt.min_rounds = 45000 +""" + + sample_config_2pd = dict( + # using this to test full replacement of existing options + bsdi_crypt__min_rounds = 29000, + bsdi_crypt__max_rounds = 35000, + bsdi_crypt__default_rounds = 31000, + # using this to test partial replacement of existing options + sha512_crypt__min_rounds=45000, + ) + + sample_config_12pd = dict( + schemes = [ "des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"], + default = "md5_crypt", + # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + bsdi_crypt__min_rounds = 29000, + bsdi_crypt__max_rounds = 35000, + bsdi_crypt__default_rounds = 31000, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds=45000, + ) + + #--------------------------------------------------------------- + # sample 3 - just changing default + #--------------------------------------------------------------- + sample_config_3pd = dict( + default="sha512_crypt", + ) + + sample_config_123pd = dict( + schemes = [ "des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"], + default = "sha512_crypt", + # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + bsdi_crypt__min_rounds = 29000, + bsdi_crypt__max_rounds = 35000, + bsdi_crypt__default_rounds = 31000, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds=45000, + ) + + #--------------------------------------------------------------- + # sample 4 - category specific + #--------------------------------------------------------------- + sample_config_4s = """ +[passlib] +schemes = sha512_crypt +all.vary_rounds = 10%% +default.sha512_crypt.max_rounds = 20000 +admin.all.vary_rounds = 5%% +admin.sha512_crypt.max_rounds = 40000 +""" + + sample_config_4pd = dict( + schemes = [ "sha512_crypt" ], + # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + sha512_crypt__max_rounds = 20000, + # NOTE: not maintaining backwards compat for rendering to "5%" + admin__all__vary_rounds = 0.05, + admin__sha512_crypt__max_rounds = 40000, + ) + + #--------------------------------------------------------------- + # sample 5 - to_string & deprecation testing + #--------------------------------------------------------------- + sample_config_5s = sample_config_1s + """\ +deprecated = des_crypt +admin__context__deprecated = des_crypt, bsdi_crypt +""" + + sample_config_5pd = sample_config_1pd.copy() + sample_config_5pd.update( + deprecated = [ "des_crypt" ], + admin__context__deprecated = [ "des_crypt", "bsdi_crypt" ], + ) + + sample_config_5pid = sample_config_1pid.copy() + sample_config_5pid.update({ + "deprecated": "des_crypt", + "admin.context.deprecated": "des_crypt, bsdi_crypt", + }) + + sample_config_5prd = sample_config_1prd.copy() + sample_config_5prd.update({ + # XXX: should deprecated return the actual handlers in this case? + # would have to modify how policy stores info, for one. 
+ "deprecated": ["des_crypt"], + "admin__context__deprecated": ["des_crypt", "bsdi_crypt"], + }) + + #=================================================================== + # constructors + #=================================================================== + def setUp(self): + TestCase.setUp(self) + warnings.filterwarnings("ignore", + r"The CryptPolicy class has been deprecated") + warnings.filterwarnings("ignore", + r"the method.*hash_needs_update.*is deprecated") + warnings.filterwarnings("ignore", "The 'all' scheme is deprecated.*") + warnings.filterwarnings("ignore", "bsdi_crypt rounds should be odd") + + def test_00_constructor(self): + """test CryptPolicy() constructor""" + policy = CryptPolicy(**self.sample_config_1pd) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + policy = CryptPolicy(self.sample_config_1pd) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + self.assertRaises(TypeError, CryptPolicy, {}, {}) + self.assertRaises(TypeError, CryptPolicy, {}, dummy=1) + + # check key with too many separators is rejected + self.assertRaises(TypeError, CryptPolicy, + schemes = [ "des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"], + bad__key__bsdi_crypt__max_rounds = 30000, + ) + + # check nameless handler rejected + class nameless(uh.StaticHandler): + name = None + self.assertRaises(ValueError, CryptPolicy, schemes=[nameless]) + + # check scheme must be name or crypt handler + self.assertRaises(TypeError, CryptPolicy, schemes=[uh.StaticHandler]) + + # check name conflicts are rejected + class dummy_1(uh.StaticHandler): + name = 'dummy_1' + self.assertRaises(KeyError, CryptPolicy, schemes=[dummy_1, dummy_1]) + + # with unknown deprecated value + self.assertRaises(KeyError, CryptPolicy, + schemes=['des_crypt'], + deprecated=['md5_crypt']) + + # with unknown default value + self.assertRaises(KeyError, CryptPolicy, + schemes=['des_crypt'], + default='md5_crypt') + + def test_01_from_path_simple(self): + """test CryptPolicy.from_path() constructor""" + # NOTE: this is separate so it can also run under GAE + + # test preset stored in existing file + path = self.sample_config_1s_path + policy = CryptPolicy.from_path(path) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test if path missing + self.assertRaises(EnvironmentError, CryptPolicy.from_path, path + 'xxx') + + def test_01_from_path(self): + """test CryptPolicy.from_path() constructor with encodings""" + path = self.mktemp() + + # test "\n" linesep + set_file(path, self.sample_config_1s) + policy = CryptPolicy.from_path(path) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test "\r\n" linesep + set_file(path, self.sample_config_1s.replace("\n","\r\n")) + policy = CryptPolicy.from_path(path) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test with custom encoding + uc2 = to_bytes(self.sample_config_1s, "utf-16", source_encoding="utf-8") + set_file(path, uc2) + policy = CryptPolicy.from_path(path, encoding="utf-16") + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + def test_02_from_string(self): + """test CryptPolicy.from_string() constructor""" + # test "\n" linesep + policy = CryptPolicy.from_string(self.sample_config_1s) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test "\r\n" linesep + policy = CryptPolicy.from_string( + self.sample_config_1s.replace("\n","\r\n")) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test with unicode + data = to_unicode(self.sample_config_1s) + policy = 
CryptPolicy.from_string(data) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test with non-ascii-compatible encoding + uc2 = to_bytes(self.sample_config_1s, "utf-16", source_encoding="utf-8") + policy = CryptPolicy.from_string(uc2, encoding="utf-16") + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test category specific options + policy = CryptPolicy.from_string(self.sample_config_4s) + self.assertEqual(policy.to_dict(), self.sample_config_4pd) + + def test_03_from_source(self): + """test CryptPolicy.from_source() constructor""" + # pass it a path + policy = CryptPolicy.from_source(self.sample_config_1s_path) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # pass it a string + policy = CryptPolicy.from_source(self.sample_config_1s) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # pass it a dict (NOTE: make a copy to detect in-place modifications) + policy = CryptPolicy.from_source(self.sample_config_1pd.copy()) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # pass it existing policy + p2 = CryptPolicy.from_source(policy) + self.assertIs(policy, p2) + + # pass it something wrong + self.assertRaises(TypeError, CryptPolicy.from_source, 1) + self.assertRaises(TypeError, CryptPolicy.from_source, []) + + def test_04_from_sources(self): + """test CryptPolicy.from_sources() constructor""" + + # pass it empty list + self.assertRaises(ValueError, CryptPolicy.from_sources, []) + + # pass it one-element list + policy = CryptPolicy.from_sources([self.sample_config_1s]) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # pass multiple sources + policy = CryptPolicy.from_sources( + [ + self.sample_config_1s_path, + self.sample_config_2s, + self.sample_config_3pd, + ]) + self.assertEqual(policy.to_dict(), self.sample_config_123pd) + + def test_05_replace(self): + """test CryptPolicy.replace() constructor""" + + p1 = CryptPolicy(**self.sample_config_1pd) + + # check overlaying sample 2 + p2 = p1.replace(**self.sample_config_2pd) + self.assertEqual(p2.to_dict(), self.sample_config_12pd) + + # check repeating overlay makes no change + p2b = p2.replace(**self.sample_config_2pd) + self.assertEqual(p2b.to_dict(), self.sample_config_12pd) + + # check overlaying sample 3 + p3 = p2.replace(self.sample_config_3pd) + self.assertEqual(p3.to_dict(), self.sample_config_123pd) + + def test_06_forbidden(self): + """test CryptPolicy() forbidden kwds""" + + # salt not allowed to be set + self.assertRaises(KeyError, CryptPolicy, + schemes=["des_crypt"], + des_crypt__salt="xx", + ) + self.assertRaises(KeyError, CryptPolicy, + schemes=["des_crypt"], + all__salt="xx", + ) + + # schemes not allowed for category + self.assertRaises(KeyError, CryptPolicy, + schemes=["des_crypt"], + user__context__schemes=["md5_crypt"], + ) + + #=================================================================== + # reading + #=================================================================== + def test_10_has_schemes(self): + """test has_schemes() method""" + + p1 = CryptPolicy(**self.sample_config_1pd) + self.assertTrue(p1.has_schemes()) + + p3 = CryptPolicy(**self.sample_config_3pd) + self.assertTrue(not p3.has_schemes()) + + def test_11_iter_handlers(self): + """test iter_handlers() method""" + + p1 = CryptPolicy(**self.sample_config_1pd) + s = self.sample_config_1prd['schemes'] + self.assertEqual(list(p1.iter_handlers()), s) + + p3 = CryptPolicy(**self.sample_config_3pd) + self.assertEqual(list(p3.iter_handlers()), []) + + def 
test_12_get_handler(self): + """test get_handler() method""" + + p1 = CryptPolicy(**self.sample_config_1pd) + + # check by name + self.assertIs(p1.get_handler("bsdi_crypt"), hash.bsdi_crypt) + + # check by missing name + self.assertIs(p1.get_handler("sha256_crypt"), None) + self.assertRaises(KeyError, p1.get_handler, "sha256_crypt", required=True) + + # check default + self.assertIs(p1.get_handler(), hash.md5_crypt) + + def test_13_get_options(self): + """test get_options() method""" + + p12 = CryptPolicy(**self.sample_config_12pd) + + self.assertEqual(p12.get_options("bsdi_crypt"),dict( + # NOTE: not maintaining backwards compat for rendering to "10%" + vary_rounds = 0.1, + min_rounds = 29000, + max_rounds = 35000, + default_rounds = 31000, + )) + + self.assertEqual(p12.get_options("sha512_crypt"),dict( + # NOTE: not maintaining backwards compat for rendering to "10%" + vary_rounds = 0.1, + min_rounds = 45000, + max_rounds = 50000, + )) + + p4 = CryptPolicy.from_string(self.sample_config_4s) + self.assertEqual(p4.get_options("sha512_crypt"), dict( + # NOTE: not maintaining backwards compat for rendering to "10%" + vary_rounds=0.1, + max_rounds=20000, + )) + + self.assertEqual(p4.get_options("sha512_crypt", "user"), dict( + # NOTE: not maintaining backwards compat for rendering to "10%" + vary_rounds=0.1, + max_rounds=20000, + )) + + self.assertEqual(p4.get_options("sha512_crypt", "admin"), dict( + # NOTE: not maintaining backwards compat for rendering to "5%" + vary_rounds=0.05, + max_rounds=40000, + )) + + def test_14_handler_is_deprecated(self): + """test handler_is_deprecated() method""" + pa = CryptPolicy(**self.sample_config_1pd) + pb = CryptPolicy(**self.sample_config_5pd) + + self.assertFalse(pa.handler_is_deprecated("des_crypt")) + self.assertFalse(pa.handler_is_deprecated(hash.bsdi_crypt)) + self.assertFalse(pa.handler_is_deprecated("sha512_crypt")) + + self.assertTrue(pb.handler_is_deprecated("des_crypt")) + self.assertFalse(pb.handler_is_deprecated(hash.bsdi_crypt)) + self.assertFalse(pb.handler_is_deprecated("sha512_crypt")) + + # check categories as well + self.assertTrue(pb.handler_is_deprecated("des_crypt", "user")) + self.assertFalse(pb.handler_is_deprecated("bsdi_crypt", "user")) + self.assertTrue(pb.handler_is_deprecated("des_crypt", "admin")) + self.assertTrue(pb.handler_is_deprecated("bsdi_crypt", "admin")) + + # check deprecation is overridden per category + pc = CryptPolicy( + schemes=["md5_crypt", "des_crypt"], + deprecated=["md5_crypt"], + user__context__deprecated=["des_crypt"], + ) + self.assertTrue(pc.handler_is_deprecated("md5_crypt")) + self.assertFalse(pc.handler_is_deprecated("des_crypt")) + self.assertFalse(pc.handler_is_deprecated("md5_crypt", "user")) + self.assertTrue(pc.handler_is_deprecated("des_crypt", "user")) + + def test_15_min_verify_time(self): + """test get_min_verify_time() method""" + # silence deprecation warnings for min verify time + warnings.filterwarnings("ignore", category=DeprecationWarning) + + pa = CryptPolicy() + self.assertEqual(pa.get_min_verify_time(), 0) + self.assertEqual(pa.get_min_verify_time('admin'), 0) + + pb = pa.replace(min_verify_time=.1) + self.assertEqual(pb.get_min_verify_time(), 0) + self.assertEqual(pb.get_min_verify_time('admin'), 0) + + #=================================================================== + # serialization + #=================================================================== + def test_20_iter_config(self): + """test iter_config() method""" + p5 = CryptPolicy(**self.sample_config_5pd) + 
self.assertEqual(dict(p5.iter_config()), self.sample_config_5pd) + self.assertEqual(dict(p5.iter_config(resolve=True)), self.sample_config_5prd) + self.assertEqual(dict(p5.iter_config(ini=True)), self.sample_config_5pid) + + def test_21_to_dict(self): + """test to_dict() method""" + p5 = CryptPolicy(**self.sample_config_5pd) + self.assertEqual(p5.to_dict(), self.sample_config_5pd) + self.assertEqual(p5.to_dict(resolve=True), self.sample_config_5prd) + + def test_22_to_string(self): + """test to_string() method""" + pa = CryptPolicy(**self.sample_config_5pd) + s = pa.to_string() # NOTE: can't compare string directly, ordering etc may not match + pb = CryptPolicy.from_string(s) + self.assertEqual(pb.to_dict(), self.sample_config_5pd) + + s = pa.to_string(encoding="latin-1") + self.assertIsInstance(s, bytes) + + #=================================================================== + # + #=================================================================== + +#============================================================================= +# CryptContext +#============================================================================= +class CryptContextTest(TestCase): + """test CryptContext class""" + descriptionPrefix = "CryptContext" + + def setUp(self): + TestCase.setUp(self) + warnings.filterwarnings("ignore", + r"CryptContext\(\)\.replace\(\) has been deprecated.*") + warnings.filterwarnings("ignore", + r"The CryptContext ``policy`` keyword has been deprecated.*") + warnings.filterwarnings("ignore", ".*(CryptPolicy|context\.policy).*(has|have) been deprecated.*") + warnings.filterwarnings("ignore", + r"the method.*hash_needs_update.*is deprecated") + + #=================================================================== + # constructor + #=================================================================== + def test_00_constructor(self): + """test constructor""" + # create crypt context using handlers + cc = CryptContext([hash.md5_crypt, hash.bsdi_crypt, hash.des_crypt]) + c,b,a = cc.policy.iter_handlers() + self.assertIs(a, hash.des_crypt) + self.assertIs(b, hash.bsdi_crypt) + self.assertIs(c, hash.md5_crypt) + + # create context using names + cc = CryptContext(["md5_crypt", "bsdi_crypt", "des_crypt"]) + c,b,a = cc.policy.iter_handlers() + self.assertIs(a, hash.des_crypt) + self.assertIs(b, hash.bsdi_crypt) + self.assertIs(c, hash.md5_crypt) + + # policy kwd + policy = cc.policy + cc = CryptContext(policy=policy) + self.assertEqual(cc.to_dict(), policy.to_dict()) + + cc = CryptContext(policy=policy, default="bsdi_crypt") + self.assertNotEqual(cc.to_dict(), policy.to_dict()) + self.assertEqual(cc.to_dict(), dict(schemes=["md5_crypt","bsdi_crypt","des_crypt"], + default="bsdi_crypt")) + + self.assertRaises(TypeError, setattr, cc, 'policy', None) + self.assertRaises(TypeError, CryptContext, policy='x') + + def test_01_replace(self): + """test replace()""" + + cc = CryptContext(["md5_crypt", "bsdi_crypt", "des_crypt"]) + self.assertIs(cc.policy.get_handler(), hash.md5_crypt) + + cc2 = cc.replace() + self.assertIsNot(cc2, cc) + # NOTE: was not able to maintain backward compatibility with this... + ##self.assertIs(cc2.policy, cc.policy) + + cc3 = cc.replace(default="bsdi_crypt") + self.assertIsNot(cc3, cc) + # NOTE: was not able to maintain backward compatibility with this... + ##self.assertIs(cc3.policy, cc.policy) + self.assertIs(cc3.policy.get_handler(), hash.bsdi_crypt) + + def test_02_no_handlers(self): + """test no handlers""" + + # check constructor... 
+ cc = CryptContext() + self.assertRaises(KeyError, cc.identify, 'hash', required=True) + self.assertRaises(KeyError, cc.hash, 'secret') + self.assertRaises(KeyError, cc.verify, 'secret', 'hash') + + # check updating policy after the fact... + cc = CryptContext(['md5_crypt']) + p = CryptPolicy(schemes=[]) + cc.policy = p + + self.assertRaises(KeyError, cc.identify, 'hash', required=True) + self.assertRaises(KeyError, cc.hash, 'secret') + self.assertRaises(KeyError, cc.verify, 'secret', 'hash') + + #=================================================================== + # policy adaptation + #=================================================================== + sample_policy_1 = dict( + schemes = [ "des_crypt", "md5_crypt", "phpass", "bsdi_crypt", + "sha256_crypt"], + deprecated = [ "des_crypt", ], + default = "sha256_crypt", + bsdi_crypt__max_rounds = 30, + bsdi_crypt__default_rounds = 25, + bsdi_crypt__vary_rounds = 0, + sha256_crypt__max_rounds = 3000, + sha256_crypt__min_rounds = 2000, + sha256_crypt__default_rounds = 3000, + phpass__ident = "H", + phpass__default_rounds = 7, + ) + + def test_12_hash_needs_update(self): + """test hash_needs_update() method""" + cc = CryptContext(**self.sample_policy_1) + + # check deprecated scheme + self.assertTrue(cc.hash_needs_update('9XXD4trGYeGJA')) + self.assertFalse(cc.hash_needs_update('$1$J8HC2RCr$HcmM.7NxB2weSvlw2FgzU0')) + + # check min rounds + self.assertTrue(cc.hash_needs_update('$5$rounds=1999$jD81UCoo.zI.UETs$Y7qSTQ6mTiU9qZB4fRr43wRgQq4V.5AAf7F97Pzxey/')) + self.assertFalse(cc.hash_needs_update('$5$rounds=2000$228SSRje04cnNCaQ$YGV4RYu.5sNiBvorQDlO0WWQjyJVGKBcJXz3OtyQ2u8')) + + # check max rounds + self.assertFalse(cc.hash_needs_update('$5$rounds=3000$fS9iazEwTKi7QPW4$VasgBC8FqlOvD7x2HhABaMXCTh9jwHclPA9j5YQdns.')) + self.assertTrue(cc.hash_needs_update('$5$rounds=3001$QlFHHifXvpFX4PLs$/0ekt7lSs/lOikSerQ0M/1porEHxYq7W/2hdFpxA3fA')) + + #=================================================================== + # border cases + #=================================================================== + def test_30_nonstring_hash(self): + """test non-string hash values cause error""" + warnings.filterwarnings("ignore", ".*needs_update.*'scheme' keyword is deprecated.*") + + # + # test hash=None or some other non-string causes TypeError + # and that explicit-scheme code path behaves the same. + # + cc = CryptContext(["des_crypt"]) + for hash, kwds in [ + (None, {}), + # NOTE: 'scheme' kwd is deprecated... 
+ (None, {"scheme": "des_crypt"}), + (1, {}), + ((), {}), + ]: + + self.assertRaises(TypeError, cc.hash_needs_update, hash, **kwds) + + cc2 = CryptContext(["mysql323"]) + self.assertRaises(TypeError, cc2.hash_needs_update, None) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# LazyCryptContext +#============================================================================= +class dummy_2(uh.StaticHandler): + name = "dummy_2" + +class LazyCryptContextTest(TestCase): + descriptionPrefix = "LazyCryptContext" + + def setUp(self): + TestCase.setUp(self) + + # make sure this isn't registered before OR after + unload_handler_name("dummy_2") + self.addCleanup(unload_handler_name, "dummy_2") + + # silence some warnings + warnings.filterwarnings("ignore", + r"CryptContext\(\)\.replace\(\) has been deprecated") + warnings.filterwarnings("ignore", ".*(CryptPolicy|context\.policy).*(has|have) been deprecated.*") + + def test_kwd_constructor(self): + """test plain kwds""" + self.assertFalse(has_crypt_handler("dummy_2")) + register_crypt_handler_path("dummy_2", "passlib.tests.test_context") + + cc = LazyCryptContext(iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"]) + + self.assertFalse(has_crypt_handler("dummy_2", True)) + + self.assertTrue(cc.policy.handler_is_deprecated("des_crypt")) + self.assertEqual(cc.policy.schemes(), ["dummy_2", "des_crypt"]) + + self.assertTrue(has_crypt_handler("dummy_2", True)) + + def test_callable_constructor(self): + """test create_policy() hook, returning CryptPolicy""" + self.assertFalse(has_crypt_handler("dummy_2")) + register_crypt_handler_path("dummy_2", "passlib.tests.test_context") + + def create_policy(flag=False): + self.assertTrue(flag) + return CryptPolicy(schemes=iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"]) + + cc = LazyCryptContext(create_policy=create_policy, flag=True) + + self.assertFalse(has_crypt_handler("dummy_2", True)) + + self.assertTrue(cc.policy.handler_is_deprecated("des_crypt")) + self.assertEqual(cc.policy.schemes(), ["dummy_2", "des_crypt"]) + + self.assertTrue(has_crypt_handler("dummy_2", True)) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_crypto_builtin_md4.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_crypto_builtin_md4.py new file mode 100644 index 000000000..0aca1eb03 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_crypto_builtin_md4.py @@ -0,0 +1,160 @@ +"""passlib.tests -- unittests for passlib.crypto._md4""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement, division +# core +from binascii import hexlify +import hashlib +# site +# pkg +# module +from passlib.utils.compat import bascii_to_str, PY3, u +from passlib.crypto.digest import lookup_hash +from passlib.tests.utils import TestCase, skipUnless +# local +__all__ = [ + "_Common_MD4_Test", + "MD4_Builtin_Test", + "MD4_SSL_Test", +] +#============================================================================= +# test pure-python MD4 implementation 
+#============================================================================= +class _Common_MD4_Test(TestCase): + """common code for testing md4 backends""" + + vectors = [ + # input -> hex digest + # test vectors from http://www.faqs.org/rfcs/rfc1320.html - A.5 + (b"", "31d6cfe0d16ae931b73c59d7e0c089c0"), + (b"a", "bde52cb31de33e46245e05fbdbd6fb24"), + (b"abc", "a448017aaf21d8525fc10ae87aa6729d"), + (b"message digest", "d9130a8164549fe818874806e1c7014b"), + (b"abcdefghijklmnopqrstuvwxyz", "d79e1c308aa5bbcdeea8ed63df412da9"), + (b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", "043f8582f241db351ce627e153e7f0e4"), + (b"12345678901234567890123456789012345678901234567890123456789012345678901234567890", "e33b4ddc9c38f2199c3e7b164fcc0536"), + ] + + def get_md4_const(self): + """ + get md4 constructor -- + overridden by subclasses to use alternate backends. + """ + return lookup_hash("md4").const + + def test_attrs(self): + """informational attributes""" + h = self.get_md4_const()() + self.assertEqual(h.name, "md4") + self.assertEqual(h.digest_size, 16) + self.assertEqual(h.block_size, 64) + + def test_md4_update(self): + """update() method""" + md4 = self.get_md4_const() + h = md4(b'') + self.assertEqual(h.hexdigest(), "31d6cfe0d16ae931b73c59d7e0c089c0") + + h.update(b'a') + self.assertEqual(h.hexdigest(), "bde52cb31de33e46245e05fbdbd6fb24") + + h.update(b'bcdefghijklmnopqrstuvwxyz') + self.assertEqual(h.hexdigest(), "d79e1c308aa5bbcdeea8ed63df412da9") + + if PY3: + # reject unicode, hash should return digest of b'' + h = md4() + self.assertRaises(TypeError, h.update, u('a')) + self.assertEqual(h.hexdigest(), "31d6cfe0d16ae931b73c59d7e0c089c0") + else: + # coerce unicode to ascii, hash should return digest of b'a' + h = md4() + h.update(u('a')) + self.assertEqual(h.hexdigest(), "bde52cb31de33e46245e05fbdbd6fb24") + + def test_md4_hexdigest(self): + """hexdigest() method""" + md4 = self.get_md4_const() + for input, hex in self.vectors: + out = md4(input).hexdigest() + self.assertEqual(out, hex) + + def test_md4_digest(self): + """digest() method""" + md4 = self.get_md4_const() + for input, hex in self.vectors: + out = bascii_to_str(hexlify(md4(input).digest())) + self.assertEqual(out, hex) + + def test_md4_copy(self): + """copy() method""" + md4 = self.get_md4_const() + h = md4(b'abc') + + h2 = h.copy() + h2.update(b'def') + self.assertEqual(h2.hexdigest(), '804e7f1c2586e50b49ac65db5b645131') + + h.update(b'ghi') + self.assertEqual(h.hexdigest(), 'c5225580bfe176f6deeee33dee98732c') + + +#------------------------------------------------------------------------ +# create subclasses to test various backends +#------------------------------------------------------------------------ + +def has_native_md4(): # pragma: no cover -- runtime detection + """ + check if hashlib natively supports md4. + """ + try: + hashlib.new("md4") + return True + except ValueError: + # not supported - ssl probably missing (e.g. ironpython) + return False + + +@skipUnless(has_native_md4(), "hashlib lacks ssl/md4 support") +class MD4_SSL_Test(_Common_MD4_Test): + descriptionPrefix = "hashlib.new('md4')" + + # NOTE: we trust ssl got md4 implementation right, + # this is more to test our test is correct :) + + def setUp(self): + super(MD4_SSL_Test, self).setUp() + + # make sure we're using right constructor. 
+ self.assertEqual(self.get_md4_const().__module__, "hashlib") + + +class MD4_Builtin_Test(_Common_MD4_Test): + descriptionPrefix = "passlib.crypto._md4.md4()" + + def setUp(self): + super(MD4_Builtin_Test, self).setUp() + + if has_native_md4(): + + # Temporarily make lookup_hash() use builtin pure-python implementation, + # by monkeypatching hashlib.new() to ensure we fall back to passlib's md4 class. + orig = hashlib.new + def wrapper(name, *args): + if name == "md4": + raise ValueError("md4 disabled for testing") + return orig(name, *args) + self.patchAttr(hashlib, "new", wrapper) + + # flush cache before & after test, since we're mucking with it. + lookup_hash.clear_cache() + self.addCleanup(lookup_hash.clear_cache) + + # make sure we're using right constructor. + self.assertEqual(self.get_md4_const().__module__, "passlib.crypto._md4") + + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_crypto_des.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_crypto_des.py new file mode 100644 index 000000000..ab31845ec --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_crypto_des.py @@ -0,0 +1,194 @@ +"""passlib.tests -- unittests for passlib.crypto.des""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement, division +# core +from functools import partial +# site +# pkg +# module +from passlib.utils import getrandbytes +from passlib.tests.utils import TestCase + +#============================================================================= +# test DES routines +#============================================================================= +class DesTest(TestCase): + descriptionPrefix = "passlib.crypto.des" + + # test vectors taken from http://www.skepticfiles.org/faq/testdes.htm + des_test_vectors = [ + # key, plaintext, ciphertext + (0x0000000000000000, 0x0000000000000000, 0x8CA64DE9C1B123A7), + (0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, 0x7359B2163E4EDC58), + (0x3000000000000000, 0x1000000000000001, 0x958E6E627A05557B), + (0x1111111111111111, 0x1111111111111111, 0xF40379AB9E0EC533), + (0x0123456789ABCDEF, 0x1111111111111111, 0x17668DFC7292532D), + (0x1111111111111111, 0x0123456789ABCDEF, 0x8A5AE1F81AB8F2DD), + (0x0000000000000000, 0x0000000000000000, 0x8CA64DE9C1B123A7), + (0xFEDCBA9876543210, 0x0123456789ABCDEF, 0xED39D950FA74BCC4), + (0x7CA110454A1A6E57, 0x01A1D6D039776742, 0x690F5B0D9A26939B), + (0x0131D9619DC1376E, 0x5CD54CA83DEF57DA, 0x7A389D10354BD271), + (0x07A1133E4A0B2686, 0x0248D43806F67172, 0x868EBB51CAB4599A), + (0x3849674C2602319E, 0x51454B582DDF440A, 0x7178876E01F19B2A), + (0x04B915BA43FEB5B6, 0x42FD443059577FA2, 0xAF37FB421F8C4095), + (0x0113B970FD34F2CE, 0x059B5E0851CF143A, 0x86A560F10EC6D85B), + (0x0170F175468FB5E6, 0x0756D8E0774761D2, 0x0CD3DA020021DC09), + (0x43297FAD38E373FE, 0x762514B829BF486A, 0xEA676B2CB7DB2B7A), + (0x07A7137045DA2A16, 0x3BDD119049372802, 0xDFD64A815CAF1A0F), + (0x04689104C2FD3B2F, 0x26955F6835AF609A, 0x5C513C9C4886C088), + (0x37D06BB516CB7546, 0x164D5E404F275232, 0x0A2AEEAE3FF4AB77), + (0x1F08260D1AC2465E, 0x6B056E18759F5CCA, 0xEF1BF03E5DFA575A), + (0x584023641ABA6176, 0x004BD6EF09176062, 0x88BF0DB6D70DEE56), + (0x025816164629B007, 0x480D39006EE762F2, 0xA1F9915541020B56), + 
(0x49793EBC79B3258F, 0x437540C8698F3CFA, 0x6FBF1CAFCFFD0556), + (0x4FB05E1515AB73A7, 0x072D43A077075292, 0x2F22E49BAB7CA1AC), + (0x49E95D6D4CA229BF, 0x02FE55778117F12A, 0x5A6B612CC26CCE4A), + (0x018310DC409B26D6, 0x1D9D5C5018F728C2, 0x5F4C038ED12B2E41), + (0x1C587F1C13924FEF, 0x305532286D6F295A, 0x63FAC0D034D9F793), + (0x0101010101010101, 0x0123456789ABCDEF, 0x617B3A0CE8F07100), + (0x1F1F1F1F0E0E0E0E, 0x0123456789ABCDEF, 0xDB958605F8C8C606), + (0xE0FEE0FEF1FEF1FE, 0x0123456789ABCDEF, 0xEDBFD1C66C29CCC7), + (0x0000000000000000, 0xFFFFFFFFFFFFFFFF, 0x355550B2150E2451), + (0xFFFFFFFFFFFFFFFF, 0x0000000000000000, 0xCAAAAF4DEAF1DBAE), + (0x0123456789ABCDEF, 0x0000000000000000, 0xD5D44FF720683D0D), + (0xFEDCBA9876543210, 0xFFFFFFFFFFFFFFFF, 0x2A2BB008DF97C2F2), + ] + + def test_01_expand(self): + """expand_des_key()""" + from passlib.crypto.des import expand_des_key, shrink_des_key, \ + _KDATA_MASK, INT_56_MASK + + # make sure test vectors are preserved (sans parity bits) + # uses ints, bytes are tested under # 02 + for key1, _, _ in self.des_test_vectors: + key2 = shrink_des_key(key1) + key3 = expand_des_key(key2) + # NOTE: this assumes expand_des_key() sets parity bits to 0 + self.assertEqual(key3, key1 & _KDATA_MASK) + + # type checks + self.assertRaises(TypeError, expand_des_key, 1.0) + + # too large + self.assertRaises(ValueError, expand_des_key, INT_56_MASK+1) + self.assertRaises(ValueError, expand_des_key, b"\x00"*8) + + # too small + self.assertRaises(ValueError, expand_des_key, -1) + self.assertRaises(ValueError, expand_des_key, b"\x00"*6) + + def test_02_shrink(self): + """shrink_des_key()""" + from passlib.crypto.des import expand_des_key, shrink_des_key, INT_64_MASK + rng = self.getRandom() + + # make sure reverse works for some random keys + # uses bytes, ints are tested under # 01 + for i in range(20): + key1 = getrandbytes(rng, 7) + key2 = expand_des_key(key1) + key3 = shrink_des_key(key2) + self.assertEqual(key3, key1) + + # type checks + self.assertRaises(TypeError, shrink_des_key, 1.0) + + # too large + self.assertRaises(ValueError, shrink_des_key, INT_64_MASK+1) + self.assertRaises(ValueError, shrink_des_key, b"\x00"*9) + + # too small + self.assertRaises(ValueError, shrink_des_key, -1) + self.assertRaises(ValueError, shrink_des_key, b"\x00"*7) + + def _random_parity(self, key): + """randomize parity bits""" + from passlib.crypto.des import _KDATA_MASK, _KPARITY_MASK, INT_64_MASK + rng = self.getRandom() + return (key & _KDATA_MASK) | (rng.randint(0,INT_64_MASK) & _KPARITY_MASK) + + def test_03_encrypt_bytes(self): + """des_encrypt_block()""" + from passlib.crypto.des import (des_encrypt_block, shrink_des_key, + _pack64, _unpack64) + + # run through test vectors + for key, plaintext, correct in self.des_test_vectors: + # convert to bytes + key = _pack64(key) + plaintext = _pack64(plaintext) + correct = _pack64(correct) + + # test 64-bit key + result = des_encrypt_block(key, plaintext) + self.assertEqual(result, correct, "key=%r plaintext=%r:" % + (key, plaintext)) + + # test 56-bit version + key2 = shrink_des_key(key) + result = des_encrypt_block(key2, plaintext) + self.assertEqual(result, correct, "key=%r shrink(key)=%r plaintext=%r:" % + (key, key2, plaintext)) + + # test with random parity bits + for _ in range(20): + key3 = _pack64(self._random_parity(_unpack64(key))) + result = des_encrypt_block(key3, plaintext) + self.assertEqual(result, correct, "key=%r rndparity(key)=%r plaintext=%r:" % + (key, key3, plaintext)) + + # check invalid keys + stub = b'\x00' * 8 + 
self.assertRaises(TypeError, des_encrypt_block, 0, stub) + self.assertRaises(ValueError, des_encrypt_block, b'\x00'*6, stub) + + # check invalid input + self.assertRaises(TypeError, des_encrypt_block, stub, 0) + self.assertRaises(ValueError, des_encrypt_block, stub, b'\x00'*7) + + # check invalid salts + self.assertRaises(ValueError, des_encrypt_block, stub, stub, salt=-1) + self.assertRaises(ValueError, des_encrypt_block, stub, stub, salt=1<<24) + + # check invalid rounds + self.assertRaises(ValueError, des_encrypt_block, stub, stub, 0, rounds=0) + + def test_04_encrypt_ints(self): + """des_encrypt_int_block()""" + from passlib.crypto.des import des_encrypt_int_block + + # run through test vectors + for key, plaintext, correct in self.des_test_vectors: + # test 64-bit key + result = des_encrypt_int_block(key, plaintext) + self.assertEqual(result, correct, "key=%r plaintext=%r:" % + (key, plaintext)) + + # test with random parity bits + for _ in range(20): + key3 = self._random_parity(key) + result = des_encrypt_int_block(key3, plaintext) + self.assertEqual(result, correct, "key=%r rndparity(key)=%r plaintext=%r:" % + (key, key3, plaintext)) + + # check invalid keys + self.assertRaises(TypeError, des_encrypt_int_block, b'\x00', 0) + self.assertRaises(ValueError, des_encrypt_int_block, -1, 0) + + # check invalid input + self.assertRaises(TypeError, des_encrypt_int_block, 0, b'\x00') + self.assertRaises(ValueError, des_encrypt_int_block, 0, -1) + + # check invalid salts + self.assertRaises(ValueError, des_encrypt_int_block, 0, 0, salt=-1) + self.assertRaises(ValueError, des_encrypt_int_block, 0, 0, salt=1<<24) + + # check invalid rounds + self.assertRaises(ValueError, des_encrypt_int_block, 0, 0, 0, rounds=0) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_crypto_digest.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_crypto_digest.py new file mode 100644 index 000000000..461d20965 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_crypto_digest.py @@ -0,0 +1,544 @@ +"""tests for passlib.utils.(des|pbkdf2|md4)""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement, division +# core +from binascii import hexlify +import hashlib +import warnings +# site +# pkg +# module +from passlib.exc import UnknownHashError +from passlib.utils.compat import PY3, u, JYTHON +from passlib.tests.utils import TestCase, TEST_MODE, skipUnless, hb + +#============================================================================= +# test assorted crypto helpers +#============================================================================= +class HashInfoTest(TestCase): + """test various crypto functions""" + descriptionPrefix = "passlib.crypto.digest" + + #: list of formats norm_hash_name() should support + norm_hash_formats = ["hashlib", "iana"] + + #: test cases for norm_hash_name() + #: each row contains (iana name, hashlib name, ... 
0+ unnormalized names) + norm_hash_samples = [ + # real hashes + ("md5", "md5", "SCRAM-MD5-PLUS", "MD-5"), + ("sha1", "sha-1", "SCRAM-SHA-1", "SHA1"), + ("sha256", "sha-256", "SHA_256", "sha2-256"), + ("ripemd160", "ripemd-160", "SCRAM-RIPEMD-160", "RIPEmd160", + # NOTE: there was an older "RIPEMD" & "RIPEMD-128", but python treates "RIPEMD" + # as alias for "RIPEMD-160" + "ripemd", "SCRAM-RIPEMD"), + + # fake hashes (to check if fallback normalization behaves sanely) + ("sha4_256", "sha4-256", "SHA4-256", "SHA-4-256"), + ("test128", "test-128", "TEST128"), + ("test2", "test2", "TEST-2"), + ("test3_128", "test3-128", "TEST-3-128"), + ] + + def test_norm_hash_name(self): + """norm_hash_name()""" + from itertools import chain + from passlib.crypto.digest import norm_hash_name, _known_hash_names + + # snapshot warning state, ignore unknown hash warnings + ctx = warnings.catch_warnings() + ctx.__enter__() + self.addCleanup(ctx.__exit__) + warnings.filterwarnings("ignore", '.*unknown hash') + warnings.filterwarnings("ignore", '.*unsupported hash') + + # test string types + self.assertEqual(norm_hash_name(u("MD4")), "md4") + self.assertEqual(norm_hash_name(b"MD4"), "md4") + self.assertRaises(TypeError, norm_hash_name, None) + + # test selected results + for row in chain(_known_hash_names, self.norm_hash_samples): + for idx, format in enumerate(self.norm_hash_formats): + correct = row[idx] + for value in row: + result = norm_hash_name(value, format) + self.assertEqual(result, correct, + "name=%r, format=%r:" % (value, + format)) + + def test_lookup_hash_ctor(self): + """lookup_hash() -- constructor""" + from passlib.crypto.digest import lookup_hash + + # invalid/unknown names should be rejected + self.assertRaises(ValueError, lookup_hash, "new") + self.assertRaises(ValueError, lookup_hash, "__name__") + self.assertRaises(ValueError, lookup_hash, "sha4") + + # 1. should return hashlib builtin if found + self.assertEqual(lookup_hash("md5"), (hashlib.md5, 16, 64)) + + # 2. should return wrapper around hashlib.new() if found + try: + hashlib.new("sha") + has_sha = True + except ValueError: + has_sha = False + if has_sha: + record = lookup_hash("sha") + const = record[0] + self.assertEqual(record, (const, 20, 64)) + self.assertEqual(hexlify(const(b"abc").digest()), + b"0164b8a914cd2a5e74c4f7ff082c4d97f1edf880") + + else: + self.assertRaises(ValueError, lookup_hash, "sha") + + # 3. 
should fall back to builtin md4 + try: + hashlib.new("md4") + has_md4 = True + except ValueError: + has_md4 = False + record = lookup_hash("md4") + const = record[0] + if not has_md4: + from passlib.crypto._md4 import md4 + self.assertIs(const, md4) + self.assertEqual(record, (const, 16, 64)) + self.assertEqual(hexlify(const(b"abc").digest()), + b"a448017aaf21d8525fc10ae87aa6729d") + + # should memoize records + self.assertIs(lookup_hash("md5"), lookup_hash("md5")) + + def test_lookup_hash_w_unknown_name(self): + """lookup_hash() -- unknown hash name""" + from passlib.crypto.digest import lookup_hash + + # unknown names should be rejected by default + self.assertRaises(UnknownHashError, lookup_hash, "xxx256") + + # required=False should return stub record instead + info = lookup_hash("xxx256", required=False) + self.assertFalse(info.supported) + self.assertRaisesRegex(UnknownHashError, "unknown hash: 'xxx256'", info.const) + self.assertEqual(info.name, "xxx256") + self.assertEqual(info.digest_size, None) + self.assertEqual(info.block_size, None) + + # should cache stub records + info2 = lookup_hash("xxx256", required=False) + self.assertIs(info2, info) + + def test_mock_fips_mode(self): + """ + lookup_hash() -- test set_mock_fips_mode() + """ + from passlib.crypto.digest import lookup_hash, _set_mock_fips_mode + + # check if md5 is available so we can test mock helper + if not lookup_hash("md5", required=False).supported: + raise self.skipTest("md5 not supported") + + # enable monkeypatch to mock up fips mode + _set_mock_fips_mode() + self.addCleanup(_set_mock_fips_mode, False) + + pat = "'md5' hash disabled for fips" + self.assertRaisesRegex(UnknownHashError, pat, lookup_hash, "md5") + + info = lookup_hash("md5", required=False) + self.assertRegex(info.error_text, pat) + self.assertRaisesRegex(UnknownHashError, pat, info.const) + + # should use hardcoded fallback info + self.assertEqual(info.digest_size, 16) + self.assertEqual(info.block_size, 64) + + def test_lookup_hash_metadata(self): + """lookup_hash() -- metadata""" + + from passlib.crypto.digest import lookup_hash + + # quick test of metadata using known reference - sha256 + info = lookup_hash("sha256") + self.assertEqual(info.name, "sha256") + self.assertEqual(info.iana_name, "sha-256") + self.assertEqual(info.block_size, 64) + self.assertEqual(info.digest_size, 32) + self.assertIs(lookup_hash("SHA2-256"), info) + + # quick test of metadata using known reference - md5 + info = lookup_hash("md5") + self.assertEqual(info.name, "md5") + self.assertEqual(info.iana_name, "md5") + self.assertEqual(info.block_size, 64) + self.assertEqual(info.digest_size, 16) + + def test_lookup_hash_alt_types(self): + """lookup_hash() -- alternate types""" + + from passlib.crypto.digest import lookup_hash + + info = lookup_hash("sha256") + self.assertIs(lookup_hash(info), info) + self.assertIs(lookup_hash(info.const), info) + + self.assertRaises(TypeError, lookup_hash, 123) + + # TODO: write full test of compile_hmac() -- currently relying on pbkdf2_hmac() tests + +#============================================================================= +# test PBKDF1 support +#============================================================================= +class Pbkdf1_Test(TestCase): + """test kdf helpers""" + descriptionPrefix = "passlib.crypto.digest.pbkdf1" + + pbkdf1_tests = [ + # (password, salt, rounds, keylen, hash, result) + + # + # from http://www.di-mgt.com.au/cryptoKDFs.html + # + (b'password', hb('78578E5A5D63CB06'), 1000, 16, 'sha1', 
hb('dc19847e05c64d2faf10ebfb4a3d2a20')), + + # + # custom + # + (b'password', b'salt', 1000, 0, 'md5', b''), + (b'password', b'salt', 1000, 1, 'md5', hb('84')), + (b'password', b'salt', 1000, 8, 'md5', hb('8475c6a8531a5d27')), + (b'password', b'salt', 1000, 16, 'md5', hb('8475c6a8531a5d27e386cd496457812c')), + (b'password', b'salt', 1000, None, 'md5', hb('8475c6a8531a5d27e386cd496457812c')), + (b'password', b'salt', 1000, None, 'sha1', hb('4a8fd48e426ed081b535be5769892fa396293efb')), + ] + if not JYTHON: # FIXME: find out why not jython, or reenable this. + pbkdf1_tests.append( + (b'password', b'salt', 1000, None, 'md4', hb('f7f2e91100a8f96190f2dd177cb26453')) + ) + + def test_known(self): + """test reference vectors""" + from passlib.crypto.digest import pbkdf1 + for secret, salt, rounds, keylen, digest, correct in self.pbkdf1_tests: + result = pbkdf1(digest, secret, salt, rounds, keylen) + self.assertEqual(result, correct) + + def test_border(self): + """test border cases""" + from passlib.crypto.digest import pbkdf1 + def helper(secret=b'secret', salt=b'salt', rounds=1, keylen=1, hash='md5'): + return pbkdf1(hash, secret, salt, rounds, keylen) + helper() + + # salt/secret wrong type + self.assertRaises(TypeError, helper, secret=1) + self.assertRaises(TypeError, helper, salt=1) + + # non-existent hashes + self.assertRaises(ValueError, helper, hash='missing') + + # rounds < 1 and wrong type + self.assertRaises(ValueError, helper, rounds=0) + self.assertRaises(TypeError, helper, rounds='1') + + # keylen < 0, keylen > block_size, and wrong type + self.assertRaises(ValueError, helper, keylen=-1) + self.assertRaises(ValueError, helper, keylen=17, hash='md5') + self.assertRaises(TypeError, helper, keylen='1') + +#============================================================================= +# test PBKDF2-HMAC support +#============================================================================= + +# import the test subject +from passlib.crypto.digest import pbkdf2_hmac, PBKDF2_BACKENDS + +# NOTE: relying on tox to verify this works under all the various backends. 
+class Pbkdf2Test(TestCase): + """test pbkdf2() support""" + descriptionPrefix = "passlib.crypto.digest.pbkdf2_hmac() <backends: %s>" % ", ".join(PBKDF2_BACKENDS) + + pbkdf2_test_vectors = [ + # (result, secret, salt, rounds, keylen, digest="sha1") + + # + # from rfc 3962 + # + + # test case 1 / 128 bit + ( + hb("cdedb5281bb2f801565a1122b2563515"), + b"password", b"ATHENA.MIT.EDUraeburn", 1, 16 + ), + + # test case 2 / 128 bit + ( + hb("01dbee7f4a9e243e988b62c73cda935d"), + b"password", b"ATHENA.MIT.EDUraeburn", 2, 16 + ), + + # test case 2 / 256 bit + ( + hb("01dbee7f4a9e243e988b62c73cda935da05378b93244ec8f48a99e61ad799d86"), + b"password", b"ATHENA.MIT.EDUraeburn", 2, 32 + ), + + # test case 3 / 256 bit + ( + hb("5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13"), + b"password", b"ATHENA.MIT.EDUraeburn", 1200, 32 + ), + + # test case 4 / 256 bit + ( + hb("d1daa78615f287e6a1c8b120d7062a493f98d203e6be49a6adf4fa574b6e64ee"), + b"password", b'\x12\x34\x56\x78\x78\x56\x34\x12', 5, 32 + ), + + # test case 5 / 256 bit + ( + hb("139c30c0966bc32ba55fdbf212530ac9c5ec59f1a452f5cc9ad940fea0598ed1"), + b"X"*64, b"pass phrase equals block size", 1200, 32 + ), + + # test case 6 / 256 bit + ( + hb("9ccad6d468770cd51b10e6a68721be611a8b4d282601db3b36be9246915ec82a"), + b"X"*65, b"pass phrase exceeds block size", 1200, 32 + ), + + # + # from rfc 6070 + # + ( + hb("0c60c80f961f0e71f3a9b524af6012062fe037a6"), + b"password", b"salt", 1, 20, + ), + + ( + hb("ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957"), + b"password", b"salt", 2, 20, + ), + + ( + hb("4b007901b765489abead49d926f721d065a429c1"), + b"password", b"salt", 4096, 20, + ), + + # just runs too long - could enable if ALL option is set + ##( + ## + ## hb("eefe3d61cd4da4e4e9945b3d6ba2158c2634e984"), + ## "password", "salt", 16777216, 20, + ##), + + ( + hb("3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038"), + b"passwordPASSWORDpassword", + b"saltSALTsaltSALTsaltSALTsaltSALTsalt", + 4096, 25, + ), + + ( + hb("56fa6aa75548099dcc37d7f03425e0c3"), + b"pass\00word", b"sa\00lt", 4096, 16, + ), + + # + # from example in http://grub.enbug.org/Authentication + # + ( + hb("887CFF169EA8335235D8004242AA7D6187A41E3187DF0CE14E256D85ED" + "97A97357AAA8FF0A3871AB9EEFF458392F462F495487387F685B7472FC" + "6C29E293F0A0"), + b"hello", + hb("9290F727ED06C38BA4549EF7DE25CF5642659211B7FC076F2D28FEFD71" + "784BB8D8F6FB244A8CC5C06240631B97008565A120764C0EE9C2CB0073" + "994D79080136"), + 10000, 64, "sha512" + ), + + # + # test vectors from fastpbkdf2 + # + ( + hb('55ac046e56e3089fec1691c22544b605f94185216dde0465e68b9d57c20dacbc' + '49ca9cccf179b645991664b39d77ef317c71b845b1e30bd509112041d3a19783'), + b'passwd', b'salt', 1, 64, 'sha256', + ), + + ( + hb('4ddcd8f60b98be21830cee5ef22701f9641a4418d04c0414aeff08876b34ab56' + 'a1d425a1225833549adb841b51c9b3176a272bdebba1d078478f62b397f33c8d'), + b'Password', b'NaCl', 80000, 64, 'sha256', + ), + + ( + hb('120fb6cffcf8b32c43e7225256c4f837a86548c92ccc35480805987cb70be17b'), + b'password', b'salt', 1, 32, 'sha256', + ), + + ( + hb('ae4d0c95af6b46d32d0adff928f06dd02a303f8ef3c251dfd6e2d85a95474c43'), + b'password', b'salt', 2, 32, 'sha256', + ), + + ( + hb('c5e478d59288c841aa530db6845c4c8d962893a001ce4e11a4963873aa98134a'), + b'password', b'salt', 4096, 32, 'sha256', + ), + + ( + hb('348c89dbcbd32b2f32d814b8116e84cf2b17347ebc1800181c4e2a1fb8dd53e1c' + '635518c7dac47e9'), + b'passwordPASSWORDpassword', b'saltSALTsaltSALTsaltSALTsaltSALTsalt', + 4096, 40, 'sha256', + ), + + ( + 
hb('9e83f279c040f2a11aa4a02b24c418f2d3cb39560c9627fa4f47e3bcc2897c3d'), + b'', b'salt', 1024, 32, 'sha256', + ), + + ( + hb('ea5808411eb0c7e830deab55096cee582761e22a9bc034e3ece925225b07bf46'), + b'password', b'', 1024, 32, 'sha256', + ), + + ( + hb('89b69d0516f829893c696226650a8687'), + b'pass\x00word', b'sa\x00lt', 4096, 16, 'sha256', + ), + + ( + hb('867f70cf1ade02cff3752599a3a53dc4af34c7a669815ae5d513554e1c8cf252'), + b'password', b'salt', 1, 32, 'sha512', + ), + + ( + hb('e1d9c16aa681708a45f5c7c4e215ceb66e011a2e9f0040713f18aefdb866d53c'), + b'password', b'salt', 2, 32, 'sha512', + ), + + ( + hb('d197b1b33db0143e018b12f3d1d1479e6cdebdcc97c5c0f87f6902e072f457b5'), + b'password', b'salt', 4096, 32, 'sha512', + ), + + ( + hb('6e23f27638084b0f7ea1734e0d9841f55dd29ea60a834466f3396bac801fac1eeb' + '63802f03a0b4acd7603e3699c8b74437be83ff01ad7f55dac1ef60f4d56480c35e' + 'e68fd52c6936'), + b'passwordPASSWORDpassword', b'saltSALTsaltSALTsaltSALTsaltSALTsalt', + 1, 72, 'sha512', + ), + + ( + hb('0c60c80f961f0e71f3a9b524af6012062fe037a6'), + b'password', b'salt', 1, 20, 'sha1', + ), + + # + # custom tests + # + ( + hb('e248fb6b13365146f8ac6307cc222812'), + b"secret", b"salt", 10, 16, "sha1", + ), + ( + hb('e248fb6b13365146f8ac6307cc2228127872da6d'), + b"secret", b"salt", 10, None, "sha1", + ), + ( + hb('b1d5485772e6f76d5ebdc11b38d3eff0a5b2bd50dc11f937e86ecacd0cd40d1b' + '9113e0734e3b76a3'), + b"secret", b"salt", 62, 40, "md5", + ), + ( + hb('ea014cc01f78d3883cac364bb5d054e2be238fb0b6081795a9d84512126e3129' + '062104d2183464c4'), + b"secret", b"salt", 62, 40, "md4", + ), + ] + + def test_known(self): + """test reference vectors""" + for row in self.pbkdf2_test_vectors: + correct, secret, salt, rounds, keylen = row[:5] + digest = row[5] if len(row) == 6 else "sha1" + result = pbkdf2_hmac(digest, secret, salt, rounds, keylen) + self.assertEqual(result, correct) + + def test_backends(self): + """verify expected backends are present""" + from passlib.crypto.digest import PBKDF2_BACKENDS + + # check for fastpbkdf2 + try: + import fastpbkdf2 + has_fastpbkdf2 = True + except ImportError: + has_fastpbkdf2 = False + self.assertEqual("fastpbkdf2" in PBKDF2_BACKENDS, has_fastpbkdf2) + + # check for hashlib + try: + from hashlib import pbkdf2_hmac + has_hashlib_ssl = pbkdf2_hmac.__module__ != "hashlib" + except ImportError: + has_hashlib_ssl = False + self.assertEqual("hashlib-ssl" in PBKDF2_BACKENDS, has_hashlib_ssl) + + # check for appropriate builtin + from passlib.utils.compat import PY3 + if PY3: + self.assertIn("builtin-from-bytes", PBKDF2_BACKENDS) + else: + # XXX: only true as long as this is preferred over hexlify + self.assertIn("builtin-unpack", PBKDF2_BACKENDS) + + def test_border(self): + """test border cases""" + def helper(secret=b'password', salt=b'salt', rounds=1, keylen=None, digest="sha1"): + return pbkdf2_hmac(digest, secret, salt, rounds, keylen) + helper() + + # invalid rounds + self.assertRaises(ValueError, helper, rounds=-1) + self.assertRaises(ValueError, helper, rounds=0) + self.assertRaises(TypeError, helper, rounds='x') + + # invalid keylen + helper(keylen=1) + self.assertRaises(ValueError, helper, keylen=-1) + self.assertRaises(ValueError, helper, keylen=0) + # NOTE: hashlib actually throws error for keylen>=MAX_SINT32, + # but pbkdf2 forbids anything > MAX_UINT32 * digest_size + self.assertRaises(OverflowError, helper, keylen=20*(2**32-1)+1) + self.assertRaises(TypeError, helper, keylen='x') + + # invalid secret/salt type + self.assertRaises(TypeError, helper, salt=5) + 
self.assertRaises(TypeError, helper, secret=5) + + # invalid hash + self.assertRaises(ValueError, helper, digest='foo') + self.assertRaises(TypeError, helper, digest=5) + + def test_default_keylen(self): + """test keylen==None""" + def helper(secret=b'password', salt=b'salt', rounds=1, keylen=None, digest="sha1"): + return pbkdf2_hmac(digest, secret, salt, rounds, keylen) + self.assertEqual(len(helper(digest='sha1')), 20) + self.assertEqual(len(helper(digest='sha256')), 32) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_crypto_scrypt.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_crypto_scrypt.py new file mode 100644 index 000000000..73ff1fa0d --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_crypto_scrypt.py @@ -0,0 +1,634 @@ +"""tests for passlib.utils.scrypt""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify +import hashlib +import logging; log = logging.getLogger(__name__) +import struct +import warnings +warnings.filterwarnings("ignore", ".*using builtin scrypt backend.*") +# site +# pkg +from passlib import exc +from passlib.utils import getrandbytes +from passlib.utils.compat import PYPY, u, bascii_to_str +from passlib.utils.decor import classproperty +from passlib.tests.utils import TestCase, skipUnless, TEST_MODE, hb +# subject +from passlib.crypto import scrypt as scrypt_mod +# local +__all__ = [ + "ScryptEngineTest", + "BuiltinScryptTest", + "FastScryptTest", +] + +#============================================================================= +# support functions +#============================================================================= +def hexstr(data): + """return bytes as hex str""" + return bascii_to_str(hexlify(data)) + +def unpack_uint32_list(data, check_count=None): + """unpack bytes as list of uint32 values""" + count = len(data) // 4 + assert check_count is None or check_count == count + return struct.unpack("<%dI" % count, data) + +def seed_bytes(seed, count): + """ + generate random reference bytes from specified seed. + used to generate some predictable test vectors. 
+ """ + if hasattr(seed, "encode"): + seed = seed.encode("ascii") + buf = b'' + i = 0 + while len(buf) < count: + buf += hashlib.sha256(seed + struct.pack("" % cls.backend + backend = None + + #============================================================================= + # setup + #============================================================================= + def setUp(self): + assert self.backend + scrypt_mod._set_backend(self.backend) + super(_CommonScryptTest, self).setUp() + + #============================================================================= + # reference vectors + #============================================================================= + + reference_vectors = [ + # entry format: (secret, salt, n, r, p, keylen, result) + + #------------------------------------------------------------------------ + # test vectors from scrypt whitepaper -- + # http://www.tarsnap.com/scrypt/scrypt.pdf, appendix b + # + # also present in (expired) scrypt rfc draft -- + # https://tools.ietf.org/html/draft-josefsson-scrypt-kdf-01, section 11 + #------------------------------------------------------------------------ + ("", "", 16, 1, 1, 64, hb(""" + 77 d6 57 62 38 65 7b 20 3b 19 ca 42 c1 8a 04 97 + f1 6b 48 44 e3 07 4a e8 df df fa 3f ed e2 14 42 + fc d0 06 9d ed 09 48 f8 32 6a 75 3a 0f c8 1f 17 + e8 d3 e0 fb 2e 0d 36 28 cf 35 e2 0c 38 d1 89 06 + """)), + + ("password", "NaCl", 1024, 8, 16, 64, hb(""" + fd ba be 1c 9d 34 72 00 78 56 e7 19 0d 01 e9 fe + 7c 6a d7 cb c8 23 78 30 e7 73 76 63 4b 37 31 62 + 2e af 30 d9 2e 22 a3 88 6f f1 09 27 9d 98 30 da + c7 27 af b9 4a 83 ee 6d 83 60 cb df a2 cc 06 40 + """)), + + # NOTE: the following are skipped for all backends unless TEST_MODE="full" + + ("pleaseletmein", "SodiumChloride", 16384, 8, 1, 64, hb(""" + 70 23 bd cb 3a fd 73 48 46 1c 06 cd 81 fd 38 eb + fd a8 fb ba 90 4f 8e 3e a9 b5 43 f6 54 5d a1 f2 + d5 43 29 55 61 3f 0f cf 62 d4 97 05 24 2a 9a f9 + e6 1e 85 dc 0d 65 1e 40 df cf 01 7b 45 57 58 87 + """)), + + # NOTE: the following are always skipped for the builtin backend, + # (just takes too long to be worth it) + + ("pleaseletmein", "SodiumChloride", 1048576, 8, 1, 64, hb(""" + 21 01 cb 9b 6a 51 1a ae ad db be 09 cf 70 f8 81 + ec 56 8d 57 4a 2f fd 4d ab e5 ee 98 20 ad aa 47 + 8e 56 fd 8f 4b a5 d0 9f fa 1c 6d 92 7c 40 f4 c3 + 37 30 40 49 e8 a9 52 fb cb f4 5c 6f a7 7a 41 a4 + """)), + ] + + def test_reference_vectors(self): + """reference vectors""" + for secret, salt, n, r, p, keylen, result in self.reference_vectors: + if n >= 1024 and TEST_MODE(max="default"): + # skip large values unless we're running full test suite + continue + if n > 16384 and self.backend == "builtin": + # skip largest vector for builtin, takes WAAY too long + # (46s under pypy, ~5m under cpython) + continue + log.debug("scrypt reference vector: %r %r n=%r r=%r p=%r", secret, salt, n, r, p) + self.assertEqual(scrypt_mod.scrypt(secret, salt, n, r, p, keylen), result) + + #============================================================================= + # fuzz testing + #============================================================================= + + _already_tested_others = None + + def test_other_backends(self): + """compare output to other backends""" + # only run once, since test is symetric. + # maybe this means it should go somewhere else? 
+ if self._already_tested_others: + raise self.skipTest("already run under %r backend test" % self._already_tested_others) + self._already_tested_others = self.backend + rng = self.getRandom() + + # get available backends + orig = scrypt_mod.backend + available = set(name for name in scrypt_mod.backend_values + if scrypt_mod._has_backend(name)) + scrypt_mod._set_backend(orig) + available.discard(self.backend) + if not available: + raise self.skipTest("no other backends found") + + warnings.filterwarnings("ignore", "(?i)using builtin scrypt backend", + category=exc.PasslibSecurityWarning) + + # generate some random options, and cross-check output + for _ in range(10): + # NOTE: keeping values low due to builtin test + secret = getrandbytes(rng, rng.randint(0, 64)) + salt = getrandbytes(rng, rng.randint(0, 64)) + n = 1 << rng.randint(1, 10) + r = rng.randint(1, 8) + p = rng.randint(1, 3) + ks = rng.randint(1, 64) + previous = None + backends = set() + for name in available: + scrypt_mod._set_backend(name) + self.assertNotIn(scrypt_mod._scrypt, backends) + backends.add(scrypt_mod._scrypt) + result = hexstr(scrypt_mod.scrypt(secret, salt, n, r, p, ks)) + self.assertEqual(len(result), 2*ks) + if previous is not None: + self.assertEqual(result, previous, + msg="%r output differs from others %r: %r" % + (name, available, [secret, salt, n, r, p, ks])) + + #============================================================================= + # test input types + #============================================================================= + def test_backend(self): + """backend management""" + # clobber backend + scrypt_mod.backend = None + scrypt_mod._scrypt = None + self.assertRaises(TypeError, scrypt_mod.scrypt, 's', 's', 2, 2, 2, 16) + + # reload backend + scrypt_mod._set_backend(self.backend) + self.assertEqual(scrypt_mod.backend, self.backend) + scrypt_mod.scrypt('s', 's', 2, 2, 2, 16) + + # throw error for unknown backend + self.assertRaises(ValueError, scrypt_mod._set_backend, 'xxx') + self.assertEqual(scrypt_mod.backend, self.backend) + + def test_secret_param(self): + """'secret' parameter""" + + def run_scrypt(secret): + return hexstr(scrypt_mod.scrypt(secret, "salt", 2, 2, 2, 16)) + + # unicode + TEXT = u("abc\u00defg") + self.assertEqual(run_scrypt(TEXT), '05717106997bfe0da42cf4779a2f8bd8') + + # utf8 bytes + TEXT_UTF8 = b'abc\xc3\x9efg' + self.assertEqual(run_scrypt(TEXT_UTF8), '05717106997bfe0da42cf4779a2f8bd8') + + # latin1 bytes + TEXT_LATIN1 = b'abc\xdefg' + self.assertEqual(run_scrypt(TEXT_LATIN1), '770825d10eeaaeaf98e8a3c40f9f441d') + + # accept empty string + self.assertEqual(run_scrypt(""), 'ca1399e5fae5d3b9578dcd2b1faff6e2') + + # reject other types + self.assertRaises(TypeError, run_scrypt, None) + self.assertRaises(TypeError, run_scrypt, 1) + + def test_salt_param(self): + """'salt' parameter""" + + def run_scrypt(salt): + return hexstr(scrypt_mod.scrypt("secret", salt, 2, 2, 2, 16)) + + # unicode + TEXT = u("abc\u00defg") + self.assertEqual(run_scrypt(TEXT), 'a748ec0f4613929e9e5f03d1ab741d88') + + # utf8 bytes + TEXT_UTF8 = b'abc\xc3\x9efg' + self.assertEqual(run_scrypt(TEXT_UTF8), 'a748ec0f4613929e9e5f03d1ab741d88') + + # latin1 bytes + TEXT_LATIN1 = b'abc\xdefg' + self.assertEqual(run_scrypt(TEXT_LATIN1), '91d056fb76fb6e9a7d1cdfffc0a16cd1') + + # reject other types + self.assertRaises(TypeError, run_scrypt, None) + self.assertRaises(TypeError, run_scrypt, 1) + + def test_n_param(self): + """'n' (rounds) parameter""" + + def run_scrypt(n): + return 
hexstr(scrypt_mod.scrypt("secret", "salt", n, 2, 2, 16)) + + # must be > 1, and a power of 2 + self.assertRaises(ValueError, run_scrypt, -1) + self.assertRaises(ValueError, run_scrypt, 0) + self.assertRaises(ValueError, run_scrypt, 1) + self.assertEqual(run_scrypt(2), 'dacf2bca255e2870e6636fa8c8957a66') + self.assertRaises(ValueError, run_scrypt, 3) + self.assertRaises(ValueError, run_scrypt, 15) + self.assertEqual(run_scrypt(16), '0272b8fc72bc54b1159340ed99425233') + + def test_r_param(self): + """'r' (block size) parameter""" + def run_scrypt(r, n=2, p=2): + return hexstr(scrypt_mod.scrypt("secret", "salt", n, r, p, 16)) + + # must be > 1 + self.assertRaises(ValueError, run_scrypt, -1) + self.assertRaises(ValueError, run_scrypt, 0) + self.assertEqual(run_scrypt(1), '3d630447d9f065363b8a79b0b3670251') + self.assertEqual(run_scrypt(2), 'dacf2bca255e2870e6636fa8c8957a66') + self.assertEqual(run_scrypt(5), '114f05e985a903c27237b5578e763736') + + # reject r*p >= 2**30 + self.assertRaises(ValueError, run_scrypt, (1<<30), p=1) + self.assertRaises(ValueError, run_scrypt, (1<<30) / 2, p=2) + + def test_p_param(self): + """'p' (parallelism) parameter""" + def run_scrypt(p, n=2, r=2): + return hexstr(scrypt_mod.scrypt("secret", "salt", n, r, p, 16)) + + # must be > 1 + self.assertRaises(ValueError, run_scrypt, -1) + self.assertRaises(ValueError, run_scrypt, 0) + self.assertEqual(run_scrypt(1), 'f2960ea8b7d48231fcec1b89b784a6fa') + self.assertEqual(run_scrypt(2), 'dacf2bca255e2870e6636fa8c8957a66') + self.assertEqual(run_scrypt(5), '848a0eeb2b3543e7f543844d6ca79782') + + # reject r*p >= 2**30 + self.assertRaises(ValueError, run_scrypt, (1<<30), r=1) + self.assertRaises(ValueError, run_scrypt, (1<<30) / 2, r=2) + + def test_keylen_param(self): + """'keylen' parameter""" + rng = self.getRandom() + + def run_scrypt(keylen): + return hexstr(scrypt_mod.scrypt("secret", "salt", 2, 2, 2, keylen)) + + # must be > 0 + self.assertRaises(ValueError, run_scrypt, -1) + self.assertRaises(ValueError, run_scrypt, 0) + self.assertEqual(run_scrypt(1), 'da') + + # pick random value + ksize = rng.randint(1, 1 << 10) + self.assertEqual(len(run_scrypt(ksize)), 2*ksize) # 2 hex chars per output + + # one more than upper bound + self.assertRaises(ValueError, run_scrypt, ((2**32) - 1) * 32 + 1) + + #============================================================================= + # eoc + #============================================================================= + + +#----------------------------------------------------------------------- +# check what backends 'should' be available +#----------------------------------------------------------------------- + +def _can_import_cffi_scrypt(): + try: + import scrypt + except ImportError as err: + if "scrypt" in str(err): + return False + raise + return True + +has_cffi_scrypt = _can_import_cffi_scrypt() + + +def _can_import_stdlib_scrypt(): + try: + from hashlib import scrypt + return True + except ImportError: + return False + +has_stdlib_scrypt = _can_import_stdlib_scrypt() + +#----------------------------------------------------------------------- +# test individual backends +#----------------------------------------------------------------------- + +# NOTE: builtin version runs VERY slow (except under PyPy, where it's only 11x slower), +# so skipping under quick test mode. 
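A minimal usage sketch tying the constraints exercised above together (not part of the vendored file): it calls scrypt_mod.scrypt() positionally, exactly as the tests do, using the "password"/"NaCl" whitepaper vector quoted in the reference list, and assumes only that passlib is importable from this tree.

    from binascii import hexlify
    from passlib.crypto import scrypt as scrypt_mod

    # n must be a power of two greater than 1, r*p must stay below 2**30,
    # and keylen must be positive -- the same rules the border tests assert.
    derived = scrypt_mod.scrypt("password", "NaCl", 1024, 8, 16, 64)

    # leading bytes of the whitepaper reference output quoted above
    assert hexlify(derived).startswith(b"fdbabe1c9d347200")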
+@skipUnless(PYPY or TEST_MODE(min="default"), "skipped under current test mode") +class BuiltinScryptTest(_CommonScryptTest): + backend = "builtin" + + def setUp(self): + super(BuiltinScryptTest, self).setUp() + warnings.filterwarnings("ignore", "(?i)using builtin scrypt backend", + category=exc.PasslibSecurityWarning) + + def test_missing_backend(self): + """backend management -- missing backend""" + if has_stdlib_scrypt or has_cffi_scrypt: + raise self.skipTest("non-builtin backend is present") + self.assertRaises(exc.MissingBackendError, scrypt_mod._set_backend, 'scrypt') + + +@skipUnless(has_cffi_scrypt, "'scrypt' package not found") +class ScryptPackageTest(_CommonScryptTest): + backend = "scrypt" + + def test_default_backend(self): + """backend management -- default backend""" + if has_stdlib_scrypt: + raise self.skipTest("higher priority backend present") + scrypt_mod._set_backend("default") + self.assertEqual(scrypt_mod.backend, "scrypt") + + +@skipUnless(has_stdlib_scrypt, "'hashlib.scrypt()' not found") +class StdlibScryptTest(_CommonScryptTest): + backend = "stdlib" + + def test_default_backend(self): + """backend management -- default backend""" + scrypt_mod._set_backend("default") + self.assertEqual(scrypt_mod.backend, "stdlib") + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_ext_django.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_ext_django.py new file mode 100644 index 000000000..2a0b418a8 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_ext_django.py @@ -0,0 +1,1080 @@ +"""test passlib.ext.django""" +#============================================================================= +# imports +#============================================================================= +# core +from __future__ import absolute_import, division, print_function +import logging; log = logging.getLogger(__name__) +import sys +import re +# site +# pkg +from passlib import apps as _apps, exc, registry +from passlib.apps import django10_context, django14_context, django16_context +from passlib.context import CryptContext +from passlib.ext.django.utils import ( + DJANGO_VERSION, MIN_DJANGO_VERSION, DjangoTranslator, quirks, +) +from passlib.utils.compat import iteritems, get_method_function, u +from passlib.utils.decor import memoized_property +# tests +from passlib.tests.utils import TestCase, TEST_MODE, handler_derived_from +from passlib.tests.test_handlers import get_handler_case +# local +__all__ = [ + "DjangoBehaviorTest", + "ExtensionBehaviorTest", + "DjangoExtensionTest", + + "_ExtensionSupport", + "_ExtensionTest", +] +#============================================================================= +# configure django settings for testcases +#============================================================================= + +# whether we have supported django version +has_min_django = DJANGO_VERSION >= MIN_DJANGO_VERSION + +# import and configure empty django settings +# NOTE: we don't want to set up entirety of django, so not using django.setup() directly. +# instead, manually configuring the settings, and setting it up w/ no apps installed. +# in future, may need to alter this so we call django.setup() after setting +# DJANGO_SETTINGS_MODULE to a custom settings module w/ a dummy django app. 
+if has_min_django: + # + # initialize django settings manually + # + from django.conf import settings, LazySettings + + if not isinstance(settings, LazySettings): + # this probably means django globals have been configured already, + # which we don't want, since test cases reset and manipulate settings. + raise RuntimeError("expected django.conf.settings to be LazySettings: %r" % (settings,)) + + # else configure a blank settings instance for the unittests + if not settings.configured: + settings.configure() + + # + # init django apps w/ NO installed apps. + # NOTE: required for django >= 1.9 + # + from django.apps import apps + apps.populate(["django.contrib.contenttypes", "django.contrib.auth"]) + +# log a warning if tested w/ newer version. +# NOTE: this is mainly here as place to mark what version it was run against before release. +if DJANGO_VERSION >= (3, 2): + log.info("this release hasn't been tested against Django %r", DJANGO_VERSION) + +#============================================================================= +# support funcs +#============================================================================= + +# flag for update_settings() to remove specified key entirely +UNSET = object() + +def update_settings(**kwds): + """helper to update django settings from kwds""" + for k,v in iteritems(kwds): + if v is UNSET: + if hasattr(settings, k): + delattr(settings, k) + else: + setattr(settings, k, v) + +if has_min_django: + from django.contrib.auth.models import User + + class FakeUser(User): + """mock user object for use in testing""" + # NOTE: this mainly just overrides .save() to test commit behavior. + + # NOTE: .Meta.app_label required for django >= 1.9 + class Meta: + app_label = __name__ + + @memoized_property + def saved_passwords(self): + return [] + + def pop_saved_passwords(self): + try: + return self.saved_passwords[:] + finally: + del self.saved_passwords[:] + + def save(self, update_fields=None): + # NOTE: ignoring update_fields for test purposes + self.saved_passwords.append(self.password) + +def create_mock_setter(): + state = [] + def setter(password): + state.append(password) + def popstate(): + try: + return state[:] + finally: + del state[:] + setter.popstate = popstate + return setter + + +def check_django_hasher_has_backend(name): + """ + check whether django hasher is available; + or if it should be skipped because django lacks third-party library. + """ + assert name + from django.contrib.auth.hashers import make_password + try: + make_password("", hasher=name) + return True + except ValueError as err: + if re.match("Couldn't load '.*?' algorithm .* No module named .*", str(err)): + return False + raise + +#============================================================================= +# work up stock django config +#============================================================================= + +def _modify_django_config(kwds, sha_rounds=None): + """ + helper to build django CryptContext config matching expected setup for stock django deploy. + :param kwds: + :param sha_rounds: + :return: + """ + # make sure we have dict + if hasattr(kwds, "to_dict"): + # type: CryptContext + kwds = kwds.to_dict() + + # update defaults + kwds.update( + # TODO: push this to passlib.apps django contexts + deprecated="auto", + ) + + # fill in default rounds for current django version, so our sample hashes come back + # unchanged, instead of being upgraded in-place by check_password(). 
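The rounds have to line up because django's check_password() re-hashes and saves a password whenever the stored hash no longer matches the preferred hasher's parameters. A minimal sketch of that mechanism (not part of the vendored file), assuming a configured Django settings module; the setter callback name is illustrative:

    from django.contrib.auth.hashers import check_password, make_password

    stored = make_password("not a password")     # hashed with the current default iterations

    def setter(raw_password):
        # invoked by check_password() only when `stored` would need an upgrade,
        # e.g. because its iteration count differs from the preferred hasher's
        print("would re-save:", make_password(raw_password))

    # setter is not called here: the stored hash already uses the preferred parameters
    check_password("not a password", stored, setter=setter)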
+ if sha_rounds is None and has_min_django: + from django.contrib.auth.hashers import PBKDF2PasswordHasher + sha_rounds = PBKDF2PasswordHasher.iterations + + # modify rounds + if sha_rounds: + kwds.update( + django_pbkdf2_sha1__default_rounds=sha_rounds, + django_pbkdf2_sha256__default_rounds=sha_rounds, + ) + + return kwds + +#---------------------------------------------------- +# build config dict that matches stock django +#---------------------------------------------------- + +# XXX: replace this with code that interrogates default django config directly? +# could then separate out "validation of djangoXX_context objects" +# and "validation that individual hashers match django". +# or maybe add a "get_django_context(django_version)" helper to passlib.apps? +if DJANGO_VERSION >= (2, 1): + stock_config = _modify_django_config(_apps.django21_context) +elif DJANGO_VERSION >= (1, 10): + stock_config = _modify_django_config(_apps.django110_context) +else: + # assert DJANGO_VERSION >= (1, 8) + stock_config = _modify_django_config(_apps.django16_context) + +#---------------------------------------------------- +# override sample hashes used in test cases +#---------------------------------------------------- +from passlib.hash import django_pbkdf2_sha256 +sample_hashes = dict( + django_pbkdf2_sha256=("not a password", django_pbkdf2_sha256 + .using(rounds=stock_config.get("django_pbkdf2_sha256__default_rounds")) + .hash("not a password")) +) + +#============================================================================= +# test utils +#============================================================================= + +class _ExtensionSupport(object): + """ + test support funcs for loading/unloading extension. + this class is mixed in to various TestCase subclasses. + """ + #=================================================================== + # support funcs + #=================================================================== + + @classmethod + def _iter_patch_candidates(cls): + """helper to scan for monkeypatches. + + returns tuple containing: + * object (module or class) + * attribute of object + * value of attribute + * whether it should or should not be patched + """ + # XXX: this and assert_unpatched() could probably be refactored to use + # the PatchManager class to do the heavy lifting. 
+ from django.contrib.auth import models, hashers + user_attrs = ["check_password", "set_password"] + model_attrs = ["check_password", "make_password"] + hasher_attrs = ["check_password", "make_password", "get_hasher", "identify_hasher", + "get_hashers"] + objs = [(models, model_attrs), + (models.User, user_attrs), + (hashers, hasher_attrs), + ] + for obj, patched in objs: + for attr in dir(obj): + if attr.startswith("_"): + continue + value = obj.__dict__.get(attr, UNSET) # can't use getattr() due to GAE + if value is UNSET and attr not in patched: + continue + value = get_method_function(value) + source = getattr(value, "__module__", None) + if source: + yield obj, attr, source, (attr in patched) + + #=================================================================== + # verify current patch state + #=================================================================== + + def assert_unpatched(self): + """ + test that django is in unpatched state + """ + # make sure we aren't currently patched + mod = sys.modules.get("passlib.ext.django.models") + self.assertFalse(mod and mod.adapter.patched, "patch should not be enabled") + + # make sure no objects have been replaced, by checking __module__ + for obj, attr, source, patched in self._iter_patch_candidates(): + if patched: + self.assertTrue(source.startswith("django.contrib.auth."), + "obj=%r attr=%r was not reverted: %r" % + (obj, attr, source)) + else: + self.assertFalse(source.startswith("passlib."), + "obj=%r attr=%r should not have been patched: %r" % + (obj, attr, source)) + + def assert_patched(self, context=None): + """ + helper to ensure django HAS been patched, and is using specified config + """ + # make sure we're currently patched + mod = sys.modules.get("passlib.ext.django.models") + self.assertTrue(mod and mod.adapter.patched, "patch should have been enabled") + + # make sure only the expected objects have been patched + for obj, attr, source, patched in self._iter_patch_candidates(): + if patched: + self.assertTrue(source == "passlib.ext.django.utils", + "obj=%r attr=%r should have been patched: %r" % + (obj, attr, source)) + else: + self.assertFalse(source.startswith("passlib."), + "obj=%r attr=%r should not have been patched: %r" % + (obj, attr, source)) + + # check context matches + if context is not None: + context = CryptContext._norm_source(context) + self.assertEqual(mod.password_context.to_dict(resolve=True), + context.to_dict(resolve=True)) + + #=================================================================== + # load / unload the extension (and verify it worked) + #=================================================================== + + _config_keys = ["PASSLIB_CONFIG", "PASSLIB_CONTEXT", "PASSLIB_GET_CATEGORY"] + + def load_extension(self, check=True, **kwds): + """ + helper to load extension with specified config & patch django + """ + self.unload_extension() + if check: + config = kwds.get("PASSLIB_CONFIG") or kwds.get("PASSLIB_CONTEXT") + for key in self._config_keys: + kwds.setdefault(key, UNSET) + update_settings(**kwds) + import passlib.ext.django.models + if check: + self.assert_patched(context=config) + + def unload_extension(self): + """ + helper to remove patches and unload extension + """ + # remove patches and unload module + mod = sys.modules.get("passlib.ext.django.models") + if mod: + mod.adapter.remove_patch() + del sys.modules["passlib.ext.django.models"] + # wipe config from django settings + update_settings(**dict((key, UNSET) for key in self._config_keys)) + # check everything's gone + 
self.assert_unpatched() + + #=================================================================== + # eoc + #=================================================================== + + +# XXX: rename to ExtensionFixture? +# NOTE: would roll this into _ExtensionSupport class; +# but we have to mix that one into django's TestCase classes as well; +# and our TestCase class (and this setUp() method) would foul things up. +class _ExtensionTest(TestCase, _ExtensionSupport): + """ + TestCase mixin which makes sure extension is unloaded before test; + and make sure it's unloaded after test as well. + """ + #============================================================================= + # setup + #============================================================================= + + def setUp(self): + super(_ExtensionTest, self).setUp() + + self.require_TEST_MODE("default") + + if not DJANGO_VERSION: + raise self.skipTest("Django not installed") + elif not has_min_django: + raise self.skipTest("Django version too old") + + # reset to baseline, and verify it worked + self.unload_extension() + + # and do the same when the test exits + self.addCleanup(self.unload_extension) + + #============================================================================= + # eoc + #============================================================================= + +#============================================================================= +# extension tests +#============================================================================= + +#: static passwords used by DjangoBehaviorTest methods +PASS1 = "toomanysecrets" +WRONG1 = "letmein" + + +class DjangoBehaviorTest(_ExtensionTest): + """ + tests model to verify it matches django's behavior. + + running this class verifies the tests correctly assert what Django itself does. + + running the ExtensionBehaviorTest subclass below verifies "passlib.ext.django" + matches what the tests assert. + """ + #============================================================================= + # class attrs + #============================================================================= + + descriptionPrefix = "verify django behavior" + + #: tracks whether tests should assume "passlib.ext.django" monkeypatch is applied. + #: (set to True by ExtensionBehaviorTest subclass) + patched = False + + #: dict containing CryptContext() config which should match current django deploy. + #: used by tests to verify expected behavior. + config = stock_config + + # NOTE: if this test fails, it means we're not accounting for + # some part of django's hashing logic, or that this is + # running against an untested version of django with a new + # hashing policy. + + #============================================================================= + # test helpers + #============================================================================= + + @memoized_property + def context(self): + """ + per-test CryptContext() created from .config. + """ + return CryptContext._norm_source(self.config) + + def assert_unusable_password(self, user): + """ + check that user object is set to 'unusable password' constant + """ + self.assertTrue(user.password.startswith("!")) + self.assertFalse(user.has_usable_password()) + self.assertEqual(user.pop_saved_passwords(), []) + + def assert_valid_password(self, user, hash=UNSET, saved=None): + """ + check that user object has a usable password hash. 
+ :param hash: optionally check it has this exact hash + :param saved: check that mock commit history for user.password matches this list + """ + if hash is UNSET: + self.assertNotEqual(user.password, "!") + self.assertNotEqual(user.password, None) + else: + self.assertEqual(user.password, hash) + self.assertTrue(user.has_usable_password(), + "hash should be usable: %r" % (user.password,)) + self.assertEqual(user.pop_saved_passwords(), + [] if saved is None else [saved]) + + #============================================================================= + # test hashing interface + #----------------------------------------------------------------------------- + # these functions are run against both the actual django code, + # to verify the assumptions of the unittests are correct; + # and run against the passlib extension, to verify it matches those assumptions. + # + # these tests check the following django methods: + # User.set_password() + # User.check_password() + # make_password() -- 1.4 only + # check_password() + # identify_hasher() + # User.has_usable_password() + # User.set_unusable_password() + # + # XXX: this take a while to run. what could be trimmed? + # + # TODO: add get_hasher() checks where appropriate in tests below. + #============================================================================= + + def test_extension_config(self): + """ + test extension config is loaded correctly + """ + if not self.patched: + raise self.skipTest("extension not loaded") + + ctx = self.context + + # contexts should match + from django.contrib.auth.hashers import check_password + from passlib.ext.django.models import password_context + self.assertEqual(password_context.to_dict(resolve=True), ctx.to_dict(resolve=True)) + + # should have patched both places + from django.contrib.auth.models import check_password as check_password2 + self.assertEqual(check_password2, check_password) + + def test_default_algorithm(self): + """ + test django's default algorithm + """ + ctx = self.context + + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import make_password + + # User.set_password() should use default alg + user = FakeUser() + user.set_password(PASS1) + self.assertTrue(ctx.handler().verify(PASS1, user.password)) + self.assert_valid_password(user) + + # User.check_password() - n/a + + # make_password() should use default alg + hash = make_password(PASS1) + self.assertTrue(ctx.handler().verify(PASS1, hash)) + + # check_password() - n/a + + def test_empty_password(self): + """ + test how methods handle empty string as password + """ + ctx = self.context + + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import ( + check_password, + make_password, + is_password_usable, + identify_hasher, + ) + + # User.set_password() should use default alg + user = FakeUser() + user.set_password('') + hash = user.password + self.assertTrue(ctx.handler().verify('', hash)) + self.assert_valid_password(user, hash) + + # User.check_password() should return True + self.assertTrue(user.check_password("")) + self.assert_valid_password(user, hash) + + # XXX: test make_password() ? 
+ + # TODO: is_password_usable() + + # identify_hasher() -- na + + # check_password() should return True + self.assertTrue(check_password("", hash)) + + def test_unusable_flag(self): + """ + test how methods handle 'unusable flag' in hash + """ + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import ( + check_password, + make_password, + is_password_usable, + identify_hasher, + ) + + # sanity check via user.set_unusable_password() + user = FakeUser() + user.set_unusable_password() + self.assert_unusable_password(user) + + # ensure User.set_password() sets unusable flag + user = FakeUser() + user.set_password(None) + self.assert_unusable_password(user) + + # User.check_password() should always fail + self.assertFalse(user.check_password(None)) + self.assertFalse(user.check_password('None')) + self.assertFalse(user.check_password('')) + self.assertFalse(user.check_password(PASS1)) + self.assertFalse(user.check_password(WRONG1)) + self.assert_unusable_password(user) + + # make_password() should also set flag + self.assertTrue(make_password(None).startswith("!")) + + # check_password() should return False (didn't handle disabled under 1.3) + self.assertFalse(check_password(PASS1, '!')) + + # identify_hasher() and is_password_usable() should reject it + self.assertFalse(is_password_usable(user.password)) + self.assertRaises(ValueError, identify_hasher, user.password) + + def test_none_hash_value(self): + """ + test how methods handle None as hash value + """ + patched = self.patched + + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import ( + check_password, + make_password, + is_password_usable, + identify_hasher, + ) + + # User.set_password() - n/a + + # User.check_password() - returns False + user = FakeUser() + user.password = None + if quirks.none_causes_check_password_error and not patched: + # django 2.1+ + self.assertRaises(TypeError, user.check_password, PASS1) + else: + self.assertFalse(user.check_password(PASS1)) + + self.assertEqual(user.has_usable_password(), + quirks.empty_is_usable_password) + + # TODO: is_password_usable() + + # make_password() - n/a + + # check_password() - error + if quirks.none_causes_check_password_error and not patched: + self.assertRaises(TypeError, check_password, PASS1, None) + else: + self.assertFalse(check_password(PASS1, None)) + + # identify_hasher() - error + self.assertRaises(TypeError, identify_hasher, None) + + def test_empty_hash_value(self): + """ + test how methods handle empty string as hash value + """ + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import ( + check_password, + make_password, + is_password_usable, + identify_hasher, + ) + + # User.set_password() - n/a + + # User.check_password() + # As of django 1.5, blank hash returns False (django issue 18453) + user = FakeUser() + user.password = "" + self.assertFalse(user.check_password(PASS1)) + + # verify hash wasn't changed/upgraded during check_password() call + self.assertEqual(user.password, "") + self.assertEqual(user.pop_saved_passwords(), []) + + # User.has_usable_password() + self.assertEqual(user.has_usable_password(), quirks.empty_is_usable_password) + + # TODO: is_password_usable() + + # make_password() - n/a + + # check_password() + self.assertFalse(check_password(PASS1, "")) + + # identify_hasher() - throws error + self.assertRaises(ValueError, 
identify_hasher, "") + + def test_invalid_hash_values(self): + """ + test how methods handle invalid hash values. + """ + for hash in [ + "$789$foo", # empty identifier + ]: + with self.subTest(hash=hash): + self._do_test_invalid_hash_value(hash) + + def _do_test_invalid_hash_value(self, hash): + + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import ( + check_password, + make_password, + is_password_usable, + identify_hasher, + ) + + # User.set_password() - n/a + + # User.check_password() + # As of django 1.5, invalid hash returns False (side effect of django issue 18453) + user = FakeUser() + user.password = hash + self.assertFalse(user.check_password(PASS1)) + + # verify hash wasn't changed/upgraded during check_password() call + self.assertEqual(user.password, hash) + self.assertEqual(user.pop_saved_passwords(), []) + + # User.has_usable_password() + self.assertEqual(user.has_usable_password(), quirks.invalid_is_usable_password) + + # TODO: is_password_usable() + + # make_password() - n/a + + # check_password() + self.assertFalse(check_password(PASS1, hash)) + + # identify_hasher() - throws error + self.assertRaises(ValueError, identify_hasher, hash) + + def test_available_schemes(self): + """ + run a bunch of subtests for each hasher available in the default django setup + (as determined by reading self.context) + """ + for scheme in self.context.schemes(): + with self.subTest(scheme=scheme): + self._do_test_available_scheme(scheme) + + def _do_test_available_scheme(self, scheme): + """ + helper to test how specific hasher behaves. + :param scheme: *passlib* name of hasher (e.g. "django_pbkdf2_sha256") + """ + log = self.getLogger() + ctx = self.context + patched = self.patched + setter = create_mock_setter() + + # NOTE: import has to be done w/in method, in case monkeypatching is applied by setUp() + from django.contrib.auth.hashers import ( + check_password, + make_password, + is_password_usable, + identify_hasher, + ) + + #------------------------------------------------------- + # setup constants & imports, pick a sample secret/hash combo + #------------------------------------------------------- + handler = ctx.handler(scheme) + log.debug("testing scheme: %r => %r", scheme, handler) + deprecated = ctx.handler(scheme).deprecated + assert not deprecated or scheme != ctx.default_scheme() + try: + testcase = get_handler_case(scheme) + except exc.MissingBackendError: + raise self.skipTest("backend not available") + assert handler_derived_from(handler, testcase.handler) + if handler.is_disabled: + raise self.skipTest("skip disabled hasher") + + # verify that django has a backend available + # (since our hasher may use different set of backends, + # get_handler_case() above may work, but django will have nothing) + if not patched and not check_django_hasher_has_backend(handler.django_name): + assert scheme in ["django_bcrypt", "django_bcrypt_sha256", "django_argon2"], \ + "%r scheme should always have active backend" % scheme + log.warning("skipping scheme %r due to missing django dependency", scheme) + raise self.skipTest("skip due to missing dependency") + + # find a sample (secret, hash) pair to test with + try: + secret, hash = sample_hashes[scheme] + except KeyError: + get_sample_hash = testcase("setUp").get_sample_hash + while True: + secret, hash = get_sample_hash() + if secret: # don't select blank passwords + break + other = 'dontletmein' + + #------------------------------------------------------- + # 
User.set_password() - not tested here + #------------------------------------------------------- + + #------------------------------------------------------- + # User.check_password()+migration against known hash + #------------------------------------------------------- + user = FakeUser() + user.password = hash + + # check against invalid password + self.assertFalse(user.check_password(None)) + ##self.assertFalse(user.check_password('')) + self.assertFalse(user.check_password(other)) + self.assert_valid_password(user, hash) + + # check against valid password + self.assertTrue(user.check_password(secret)) + + # check if it upgraded the hash + # NOTE: needs_update kept separate in case we need to test rounds. + needs_update = deprecated + if needs_update: + self.assertNotEqual(user.password, hash) + self.assertFalse(handler.identify(user.password)) + self.assertTrue(ctx.handler().verify(secret, user.password)) + self.assert_valid_password(user, saved=user.password) + else: + self.assert_valid_password(user, hash) + + # don't need to check rest for most deployments + if TEST_MODE(max="default"): + return + + #------------------------------------------------------- + # make_password() correctly selects algorithm + #------------------------------------------------------- + alg = DjangoTranslator().passlib_to_django_name(scheme) + hash2 = make_password(secret, hasher=alg) + self.assertTrue(handler.verify(secret, hash2)) + + #------------------------------------------------------- + # check_password()+setter against known hash + #------------------------------------------------------- + # should call setter only if it needs_update + self.assertTrue(check_password(secret, hash, setter=setter)) + self.assertEqual(setter.popstate(), [secret] if needs_update else []) + + # should not call setter + self.assertFalse(check_password(other, hash, setter=setter)) + self.assertEqual(setter.popstate(), []) + + ### check preferred kwd is ignored (feature we don't currently support fully) + ##self.assertTrue(check_password(secret, hash, setter=setter, preferred='fooey')) + ##self.assertEqual(setter.popstate(), [secret]) + + # TODO: get_hasher() + + #------------------------------------------------------- + # identify_hasher() recognizes known hash + #------------------------------------------------------- + self.assertTrue(is_password_usable(hash)) + name = DjangoTranslator().django_to_passlib_name(identify_hasher(hash).algorithm) + self.assertEqual(name, scheme) + + #=================================================================== + # eoc + #=================================================================== + +#=================================================================== +# extension fidelity tests +#=================================================================== + +class ExtensionBehaviorTest(DjangoBehaviorTest): + """ + test that "passlib.ext.django" conforms to behavioral assertions in DjangoBehaviorTest + """ + descriptionPrefix = "verify extension behavior" + + config = dict( + schemes="sha256_crypt,md5_crypt,des_crypt", + deprecated="des_crypt", + ) + + def setUp(self): + super(ExtensionBehaviorTest, self).setUp() + + # always load extension before each test + self.load_extension(PASSLIB_CONFIG=self.config) + self.patched = True + +#=================================================================== +# extension internal tests +#=================================================================== + +class DjangoExtensionTest(_ExtensionTest): + """ + test the ``passlib.ext.django`` plugin 
+ """ + #=================================================================== + # class attrs + #=================================================================== + + descriptionPrefix = "passlib.ext.django plugin" + + #=================================================================== + # monkeypatch testing + #=================================================================== + + def test_00_patch_control(self): + """test set_django_password_context patch/unpatch""" + + # check config="disabled" + self.load_extension(PASSLIB_CONFIG="disabled", check=False) + self.assert_unpatched() + + # check legacy config=None + with self.assertWarningList("PASSLIB_CONFIG=None is deprecated"): + self.load_extension(PASSLIB_CONFIG=None, check=False) + self.assert_unpatched() + + # try stock django 1.0 context + self.load_extension(PASSLIB_CONFIG="django-1.0", check=False) + self.assert_patched(context=django10_context) + + # try to remove patch + self.unload_extension() + + # patch to use stock django 1.4 context + self.load_extension(PASSLIB_CONFIG="django-1.4", check=False) + self.assert_patched(context=django14_context) + + # try to remove patch again + self.unload_extension() + + def test_01_overwrite_detection(self): + """test detection of foreign monkeypatching""" + # NOTE: this sets things up, and spot checks two methods, + # this should be enough to verify patch manager is working. + # TODO: test unpatch behavior honors flag. + + # configure plugin to use sample context + config = "[passlib]\nschemes=des_crypt\n" + self.load_extension(PASSLIB_CONFIG=config) + + # setup helpers + import django.contrib.auth.models as models + from passlib.ext.django.models import adapter + def dummy(): + pass + + # mess with User.set_password, make sure it's detected + orig = models.User.set_password + models.User.set_password = dummy + with self.assertWarningList("another library has patched.*User\.set_password"): + adapter._manager.check_all() + models.User.set_password = orig + + # mess with models.check_password, make sure it's detected + orig = models.check_password + models.check_password = dummy + with self.assertWarningList("another library has patched.*models:check_password"): + adapter._manager.check_all() + models.check_password = orig + + def test_02_handler_wrapper(self): + """test Hasher-compatible handler wrappers""" + from django.contrib.auth import hashers + + passlib_to_django = DjangoTranslator().passlib_to_django + + # should return native django hasher if available + if DJANGO_VERSION > (1, 10): + self.assertRaises(ValueError, passlib_to_django, "hex_md5") + else: + hasher = passlib_to_django("hex_md5") + self.assertIsInstance(hasher, hashers.UnsaltedMD5PasswordHasher) + + # should return native django hasher + # NOTE: present but not enabled by default in django as of 2.1 + # (see _builtin_django_hashers) + hasher = passlib_to_django("django_bcrypt") + self.assertIsInstance(hasher, hashers.BCryptPasswordHasher) + + # otherwise should return wrapper + from passlib.hash import sha256_crypt + hasher = passlib_to_django("sha256_crypt") + self.assertEqual(hasher.algorithm, "passlib_sha256_crypt") + + # and wrapper should return correct hash + encoded = hasher.encode("stub") + self.assertTrue(sha256_crypt.verify("stub", encoded)) + self.assertTrue(hasher.verify("stub", encoded)) + self.assertFalse(hasher.verify("xxxx", encoded)) + + # test wrapper accepts options + encoded = hasher.encode("stub", "abcd"*4, rounds=1234) + self.assertEqual(encoded, "$5$rounds=1234$abcdabcdabcdabcd$" + 
"v2RWkZQzctPdejyRqmmTDQpZN6wTh7.RUy9zF2LftT6") + self.assertEqual(hasher.safe_summary(encoded), + {'algorithm': 'sha256_crypt', + 'salt': u('abcdab**********'), + 'rounds': 1234, + 'hash': u('v2RWkZ*************************************'), + }) + + # made up name should throw error + # XXX: should this throw ValueError instead, to match django? + self.assertRaises(KeyError, passlib_to_django, "does_not_exist") + + #=================================================================== + # PASSLIB_CONFIG settings + #=================================================================== + def test_11_config_disabled(self): + """test PASSLIB_CONFIG='disabled'""" + # test config=None (deprecated) + with self.assertWarningList("PASSLIB_CONFIG=None is deprecated"): + self.load_extension(PASSLIB_CONFIG=None, check=False) + self.assert_unpatched() + + # test disabled config + self.load_extension(PASSLIB_CONFIG="disabled", check=False) + self.assert_unpatched() + + def test_12_config_presets(self): + """test PASSLIB_CONFIG=''""" + # test django presets + self.load_extension(PASSLIB_CONTEXT="django-default", check=False) + ctx = django16_context + self.assert_patched(ctx) + + self.load_extension(PASSLIB_CONFIG="django-1.0", check=False) + self.assert_patched(django10_context) + + self.load_extension(PASSLIB_CONFIG="django-1.4", check=False) + self.assert_patched(django14_context) + + def test_13_config_defaults(self): + """test PASSLIB_CONFIG default behavior""" + # check implicit default + from passlib.ext.django.utils import PASSLIB_DEFAULT + default = CryptContext.from_string(PASSLIB_DEFAULT) + self.load_extension() + self.assert_patched(PASSLIB_DEFAULT) + + # check default preset + self.load_extension(PASSLIB_CONTEXT="passlib-default", check=False) + self.assert_patched(PASSLIB_DEFAULT) + + # check explicit string + self.load_extension(PASSLIB_CONTEXT=PASSLIB_DEFAULT, check=False) + self.assert_patched(PASSLIB_DEFAULT) + + def test_14_config_invalid(self): + """test PASSLIB_CONFIG type checks""" + update_settings(PASSLIB_CONTEXT=123, PASSLIB_CONFIG=UNSET) + self.assertRaises(TypeError, __import__, 'passlib.ext.django.models') + + self.unload_extension() + update_settings(PASSLIB_CONFIG="missing-preset", PASSLIB_CONTEXT=UNSET) + self.assertRaises(ValueError, __import__, 'passlib.ext.django.models') + + #=================================================================== + # PASSLIB_GET_CATEGORY setting + #=================================================================== + def test_21_category_setting(self): + """test PASSLIB_GET_CATEGORY parameter""" + # define config where rounds can be used to detect category + config = dict( + schemes = ["sha256_crypt"], + sha256_crypt__default_rounds = 1000, + staff__sha256_crypt__default_rounds = 2000, + superuser__sha256_crypt__default_rounds = 3000, + ) + from passlib.hash import sha256_crypt + + def run(**kwds): + """helper to take in user opts, return rounds used in password""" + user = FakeUser(**kwds) + user.set_password("stub") + return sha256_crypt.from_string(user.password).rounds + + # test default get_category + self.load_extension(PASSLIB_CONFIG=config) + self.assertEqual(run(), 1000) + self.assertEqual(run(is_staff=True), 2000) + self.assertEqual(run(is_superuser=True), 3000) + + # test patch uses explicit get_category function + def get_category(user): + return user.first_name or None + self.load_extension(PASSLIB_CONTEXT=config, + PASSLIB_GET_CATEGORY=get_category) + self.assertEqual(run(), 1000) + self.assertEqual(run(first_name='other'), 1000) + 
self.assertEqual(run(first_name='staff'), 2000) + self.assertEqual(run(first_name='superuser'), 3000) + + # test patch can disable get_category entirely + def get_category(user): + return None + self.load_extension(PASSLIB_CONTEXT=config, + PASSLIB_GET_CATEGORY=get_category) + self.assertEqual(run(), 1000) + self.assertEqual(run(first_name='other'), 1000) + self.assertEqual(run(first_name='staff', is_staff=True), 1000) + self.assertEqual(run(first_name='superuser', is_superuser=True), 1000) + + # test bad value + self.assertRaises(TypeError, self.load_extension, PASSLIB_CONTEXT=config, + PASSLIB_GET_CATEGORY='x') + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_ext_django_source.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_ext_django_source.py new file mode 100644 index 000000000..4b42e59bc --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_ext_django_source.py @@ -0,0 +1,250 @@ +""" +test passlib.ext.django against django source tests +""" +#============================================================================= +# imports +#============================================================================= +from __future__ import absolute_import, division, print_function +# core +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils.compat import suppress_cause +from passlib.ext.django.utils import DJANGO_VERSION, DjangoTranslator, _PasslibHasherWrapper +# tests +from passlib.tests.utils import TestCase, TEST_MODE +from .test_ext_django import ( + has_min_django, stock_config, _ExtensionSupport, +) +if has_min_django: + from .test_ext_django import settings +# local +__all__ = [ + "HashersTest", +] +#============================================================================= +# HashersTest -- +# hack up the some of the real django tests to run w/ extension loaded, +# to ensure we mimic their behavior. +# however, the django tests were moved out of the package, and into a source-only location +# as of django 1.7. so we disable tests from that point on unless test-runner specifies +#============================================================================= + +#: ref to django unittest root module (if found) +test_hashers_mod = None + +#: message about why test module isn't present (if not found) +hashers_skip_msg = None + +#---------------------------------------------------------------------- +# try to load django's tests/auth_tests/test_hasher.py module, +# or note why we failed. 
+#---------------------------------------------------------------------- +if TEST_MODE(max="quick"): + hashers_skip_msg = "requires >= 'default' test mode" + +elif has_min_django: + import os + import sys + source_path = os.environ.get("PASSLIB_TESTS_DJANGO_SOURCE_PATH") + + if source_path: + if not os.path.exists(source_path): + raise EnvironmentError("django source path not found: %r" % source_path) + if not all(os.path.exists(os.path.join(source_path, name)) + for name in ["django", "tests"]): + raise EnvironmentError("invalid django source path: %r" % source_path) + log.info("using django tests from source path: %r", source_path) + tests_path = os.path.join(source_path, "tests") + sys.path.insert(0, tests_path) + try: + from auth_tests import test_hashers as test_hashers_mod + except ImportError as err: + raise suppress_cause( + EnvironmentError("error trying to import django tests " + "from source path (%r): %r" % + (source_path, err))) + finally: + sys.path.remove(tests_path) + + else: + hashers_skip_msg = "requires PASSLIB_TESTS_DJANGO_SOURCE_PATH to be set" + + if TEST_MODE("full"): + # print warning so user knows what's happening + sys.stderr.write("\nWARNING: $PASSLIB_TESTS_DJANGO_SOURCE_PATH is not set; " + "can't run Django's own unittests against passlib.ext.django\n") + +elif DJANGO_VERSION: + hashers_skip_msg = "django version too old" + +else: + hashers_skip_msg = "django not installed" + +#---------------------------------------------------------------------- +# if found module, create wrapper to run django's own tests, +# but with passlib monkeypatched in. +#---------------------------------------------------------------------- +if test_hashers_mod: + from django.core.signals import setting_changed + from django.dispatch import receiver + from django.utils.module_loading import import_string + from passlib.utils.compat import get_unbound_method_function + + class HashersTest(test_hashers_mod.TestUtilsHashPass, _ExtensionSupport): + """ + Run django's hasher unittests against passlib's extension + and workalike implementations + """ + + #================================================================== + # helpers + #================================================================== + + # port patchAttr() helper method from passlib.tests.utils.TestCase + patchAttr = get_unbound_method_function(TestCase.patchAttr) + + #================================================================== + # custom setup + #================================================================== + def setUp(self): + #--------------------------------------------------------- + # install passlib.ext.django adapter, and get context + #--------------------------------------------------------- + self.load_extension(PASSLIB_CONTEXT=stock_config, check=False) + from passlib.ext.django.models import adapter + context = adapter.context + + #--------------------------------------------------------- + # patch tests module to use our versions of patched funcs + # (which should be installed in hashers module) + #--------------------------------------------------------- + from django.contrib.auth import hashers + for attr in ["make_password", + "check_password", + "identify_hasher", + "is_password_usable", + "get_hasher"]: + self.patchAttr(test_hashers_mod, attr, getattr(hashers, attr)) + + #--------------------------------------------------------- + # django tests expect empty django_des_crypt salt field + #--------------------------------------------------------- + from passlib.hash import django_des_crypt + 
self.patchAttr(django_des_crypt, "use_duplicate_salt", False) + + #--------------------------------------------------------- + # install receiver to update scheme list if test changes settings + #--------------------------------------------------------- + django_to_passlib_name = DjangoTranslator().django_to_passlib_name + + @receiver(setting_changed, weak=False) + def update_schemes(**kwds): + if kwds and kwds['setting'] != 'PASSWORD_HASHERS': + return + assert context is adapter.context + schemes = [ + django_to_passlib_name(import_string(hash_path)()) + for hash_path in settings.PASSWORD_HASHERS + ] + # workaround for a few tests that only specify hex_md5, + # but test for django_salted_md5 format. + if "hex_md5" in schemes and "django_salted_md5" not in schemes: + schemes.append("django_salted_md5") + schemes.append("django_disabled") + context.update(schemes=schemes, deprecated="auto") + adapter.reset_hashers() + + self.addCleanup(setting_changed.disconnect, update_schemes) + + update_schemes() + + #--------------------------------------------------------- + # need password_context to keep up to date with django_hasher.iterations, + # which is frequently patched by django tests. + # + # HACK: to fix this, inserting wrapper around a bunch of context + # methods so that any time adapter calls them, + # attrs are resynced first. + #--------------------------------------------------------- + + def update_rounds(): + """ + sync django hasher config -> passlib hashers + """ + for handler in context.schemes(resolve=True): + if 'rounds' not in handler.setting_kwds: + continue + hasher = adapter.passlib_to_django(handler) + if isinstance(hasher, _PasslibHasherWrapper): + continue + rounds = getattr(hasher, "rounds", None) or \ + getattr(hasher, "iterations", None) + if rounds is None: + continue + # XXX: this doesn't modify the context, which would + # cause other weirdness (since it would replace handler factories completely, + # instead of just updating their state) + handler.min_desired_rounds = handler.max_desired_rounds = handler.default_rounds = rounds + + _in_update = [False] + + def update_wrapper(wrapped, *args, **kwds): + """ + wrapper around arbitrary func, that first triggers sync + """ + if not _in_update[0]: + _in_update[0] = True + try: + update_rounds() + finally: + _in_update[0] = False + return wrapped(*args, **kwds) + + # sync before any context call + for attr in ["schemes", "handler", "default_scheme", "hash", + "verify", "needs_update", "verify_and_update"]: + self.patchAttr(context, attr, update_wrapper, wrap=True) + + # sync whenever adapter tries to resolve passlib hasher + self.patchAttr(adapter, "django_to_passlib", update_wrapper, wrap=True) + + def tearDown(self): + # NOTE: could rely on addCleanup() instead, but need py26 compat + self.unload_extension() + super(HashersTest, self).tearDown() + + #================================================================== + # skip a few methods that can't be replicated properly + # *want to minimize these as much as possible* + #================================================================== + + _OMIT = lambda self: self.skipTest("omitted by passlib") + + # XXX: this test registers two classes w/ same algorithm id, + # something we don't support -- how does django sanely handle + # that anyways? get_hashers_by_algorithm() should throw KeyError, right? + test_pbkdf2_upgrade_new_hasher = _OMIT + + # TODO: support wrapping django's harden-runtime feature? + # would help pass their tests. 
+ test_check_password_calls_harden_runtime = _OMIT + test_bcrypt_harden_runtime = _OMIT + test_pbkdf2_harden_runtime = _OMIT + + #================================================================== + # eoc + #================================================================== + +else: + # otherwise leave a stub so test log tells why test was skipped. + + class HashersTest(TestCase): + + def test_external_django_hasher_tests(self): + """external django hasher tests""" + raise self.skipTest(hashers_skip_msg) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers.py new file mode 100644 index 000000000..cad5ef992 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers.py @@ -0,0 +1,1819 @@ +"""passlib.tests.test_handlers - tests for passlib hash algorithms""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import os +import sys +import warnings +# site +# pkg +from passlib import exc, hash +from passlib.utils import repeat_string +from passlib.utils.compat import irange, PY3, u, get_method_function +from passlib.tests.utils import TestCase, HandlerCase, skipUnless, \ + TEST_MODE, UserHandlerMixin, EncodingHandlerMixin +# module + +#============================================================================= +# constants & support +#============================================================================= + +# some common unicode passwords which are used as test cases +UPASS_WAV = u('\u0399\u03c9\u03b1\u03bd\u03bd\u03b7\u03c2') +UPASS_USD = u("\u20AC\u00A5$") +UPASS_TABLE = u("t\u00e1\u0411\u2113\u0259") + +PASS_TABLE_UTF8 = b't\xc3\xa1\xd0\x91\xe2\x84\x93\xc9\x99' # utf-8 + +# handlers which support multiple backends, but don't have multi-backend tests. +_omitted_backend_tests = ["django_bcrypt", "django_bcrypt_sha256", "django_argon2"] + +#: modules where get_handler_case() should search for test cases. +_handler_test_modules = [ + "test_handlers", + "test_handlers_argon2", + "test_handlers_bcrypt", + "test_handlers_cisco", + "test_handlers_django", + "test_handlers_pbkdf2", + "test_handlers_scrypt", +] + +def get_handler_case(scheme): + """ + return HandlerCase instance for scheme, used by other tests. + + :param scheme: name of hasher to locate test for (e.g. "bcrypt") + + :raises KeyError: + if scheme isn't a known hasher. + + :raises MissingBackendError: + if hasher doesn't have any available backends. + + :returns: + HandlerCase subclass (which derives from TestCase) + """ + from passlib.registry import get_crypt_handler + handler = get_crypt_handler(scheme) + if hasattr(handler, "backends") and scheme not in _omitted_backend_tests: + # XXX: if no backends available, could proceed to pick first backend for test lookup; + # should investigate if that would be useful to callers. + try: + backend = handler.get_backend() + except exc.MissingBackendError: + assert scheme in conditionally_available_hashes + raise + name = "%s_%s_test" % (scheme, backend) + else: + name = "%s_test" % scheme + for module in _handler_test_modules: + modname = "passlib.tests." 
+ module + __import__(modname) + mod = sys.modules[modname] + try: + return getattr(mod, name) + except AttributeError: + pass + # every hasher should have a test suite, so if we get here, it means the test is either missing, + # misnamed, or the _handler_test_modules list is out of date. + raise RuntimeError("can't find test case named %r for %r" % (name, scheme)) + +#: hashes for which there may not be a backend available, +#: and get_handler_case() may (correctly) throw a MissingBackendError +conditionally_available_hashes = ["argon2", "bcrypt", "bcrypt_sha256"] + +#============================================================================= +# apr md5 crypt +#============================================================================= +class apr_md5_crypt_test(HandlerCase): + handler = hash.apr_md5_crypt + + known_correct_hashes = [ + # + # http://httpd.apache.org/docs/2.2/misc/password_encryptions.html + # + ('myPassword', '$apr1$r31.....$HqJZimcKQFAMYayBlzkrA/'), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$apr1$bzYrOHUx$a1FcpXuQDJV3vPY20CS6N1'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash ----\/ + '$apr1$r31.....$HqJZimcKQFAMYayBlzkrA!' + ] + +#============================================================================= +# bigcrypt +#============================================================================= +class bigcrypt_test(HandlerCase): + handler = hash.bigcrypt + + # TODO: find an authoritative source of test vectors + known_correct_hashes = [ + + # + # various docs & messages on the web. + # + ("passphrase", "qiyh4XPJGsOZ2MEAyLkfWqeQ"), + ("This is very long passwd", "f8.SVpL2fvwjkAnxn8/rgTkwvrif6bjYB5c"), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, 'SEChBAyMbMNhgGLyP7kD1HZU'), + ] + + known_unidentified_hashes = [ + # one char short (10 % 11) + "qiyh4XPJGsOZ2MEAyLkfWqe" + + # one char too many (1 % 11) + "f8.SVpL2fvwjkAnxn8/rgTkwvrif6bjYB5cd" + ] + + # omit des_crypt from known_other since it's a valid bigcrypt hash too. + known_other_hashes = [row for row in HandlerCase.known_other_hashes + if row[0] != "des_crypt"] + + def test_90_internal(self): + # check that _norm_checksum() also validates checksum size. 
+ # (current code uses regex in parser) + self.assertRaises(ValueError, hash.bigcrypt, use_defaults=True, + checksum=u('yh4XPJGsOZ')) + +#============================================================================= +# bsdi crypt +#============================================================================= +class _bsdi_crypt_test(HandlerCase): + """test BSDiCrypt algorithm""" + handler = hash.bsdi_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', '_J9..CCCCXBrJUJV154M'), + ('U*U***U', '_J9..CCCCXUhOBTXzaiE'), + ('U*U***U*', '_J9..CCCC4gQ.mB/PffM'), + ('*U*U*U*U', '_J9..XXXXvlzQGqpPPdk'), + ('*U*U*U*U*', '_J9..XXXXsqM/YSSP..Y'), + ('*U*U*U*U*U*U*U*U', '_J9..XXXXVL7qJCnku0I'), + ('*U*U*U*U*U*U*U*U*', '_J9..XXXXAj8cFbP5scI'), + ('ab1234567', '_J9..SDizh.vll5VED9g'), + ('cr1234567', '_J9..SDizRjWQ/zePPHc'), + ('zxyDPWgydbQjgq', '_J9..SDizxmRI1GjnQuE'), + ('726 even', '_K9..SaltNrQgIYUAeoY'), + ('', '_J9..SDSD5YGyRCr4W4c'), + + # + # custom + # + (" ", "_K1..crsmZxOLzfJH8iw"), + ("my", '_KR/.crsmykRplHbAvwA'), # <-- to detect old 12-bit rounds bug + ("my socra", "_K1..crsmf/9NzZr1fLM"), + ("my socrates", '_K1..crsmOv1rbde9A9o'), + ("my socrates note", "_K1..crsm/2qeAhdISMA"), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '_7C/.ABw0WIKy0ILVqo2'), + ] + known_unidentified_hashes = [ + # bad char in otherwise correctly formatted hash + # \/ + "_K1.!crsmZxOLzfJH8iw" + ] + + platform_crypt_support = [ + # openbsd 5.8 dropped everything except bcrypt + ("openbsd[6789]", False), + ("openbsd5", None), + ("openbsd", True), + + ("freebsd|netbsd|darwin", True), + ("solaris", False), + ("linux", None), # may be present if libxcrypt is in use + ] + + def test_77_fuzz_input(self, **kwds): + # we want to generate even rounds to verify it's correct, but want to ignore warnings + warnings.filterwarnings("ignore", "bsdi_crypt rounds should be odd.*") + super(_bsdi_crypt_test, self).test_77_fuzz_input(**kwds) + + def test_needs_update_w_even_rounds(self): + """needs_update() should flag even rounds""" + handler = self.handler + even_hash = '_Y/../cG0zkJa6LY6k4c' + odd_hash = '_Z/..TgFg0/ptQtpAgws' + secret = 'test' + + # don't issue warning + self.assertTrue(handler.verify(secret, even_hash)) + self.assertTrue(handler.verify(secret, odd_hash)) + + # *do* signal as needing updates + self.assertTrue(handler.needs_update(even_hash)) + self.assertFalse(handler.needs_update(odd_hash)) + + # new hashes shouldn't have even rounds + new_hash = handler.hash("stub") + self.assertFalse(handler.needs_update(new_hash)) + +# create test cases for specific backends +bsdi_crypt_os_crypt_test = _bsdi_crypt_test.create_backend_case("os_crypt") +bsdi_crypt_builtin_test = _bsdi_crypt_test.create_backend_case("builtin") + +#============================================================================= +# crypt16 +#============================================================================= +class crypt16_test(HandlerCase): + handler = hash.crypt16 + + # TODO: find an authortative source of test vectors + known_correct_hashes = [ + # + # from messages around the web, including + # http://seclists.org/bugtraq/1999/Mar/76 + # + ("passphrase", "qi8H8R7OM4xMUNMPuRAZxlY."), + ("printf", "aaCjFz4Sh8Eg2QSqAReePlq6"), + ("printf", "AA/xje2RyeiSU0iBY3PDwjYo"), + ("LOLOAQICI82QB4IP", "/.FcK3mad6JwYt8LVmDqz9Lc"), + ("LOLOAQICI", "/.FcK3mad6JwYSaRHJoTPzY2"), + ("LOLOAQIC", "/.FcK3mad6JwYelhbtlysKy6"), + ("L", "/.CIu/PzYCkl6elhbtlysKy6"), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, 
'YeDc9tKkkmDvwP7buzpwhoqQ'), + ] + +#============================================================================= +# des crypt +#============================================================================= +class _des_crypt_test(HandlerCase): + """test des-crypt algorithm""" + handler = hash.des_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', 'CCNf8Sbh3HDfQ'), + ('U*U***U', 'CCX.K.MFy4Ois'), + ('U*U***U*', 'CC4rMpbg9AMZ.'), + ('*U*U*U*U', 'XXxzOu6maQKqQ'), + ('', 'SDbsugeBiC58A'), + + # + # custom + # + ('', 'OgAwTx2l6NADI'), + (' ', '/Hk.VPuwQTXbc'), + ('test', 'N1tQbOFcM5fpg'), + ('Compl3X AlphaNu3meric', 'um.Wguz3eVCx2'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', 'sNYqfOyauIyic'), + ('AlOtBsOl', 'cEpWz5IUCShqM'), + + # ensures utf-8 used for unicode + (u('hell\u00D6'), 'saykDgk3BPZ9E'), + ] + known_unidentified_hashes = [ + # bad char in otherwise correctly formatted hash + #\/ + '!gAwTx2l6NADI', + + # wrong size + 'OgAwTx2l6NAD', + 'OgAwTx2l6NADIj', + ] + + platform_crypt_support = [ + # openbsd 5.8 dropped everything except bcrypt + ("openbsd[6789]", False), + ("openbsd5", None), + ("openbsd", True), + + ("freebsd|netbsd|linux|solaris|darwin", True), + ] + +# create test cases for specific backends +des_crypt_os_crypt_test = _des_crypt_test.create_backend_case("os_crypt") +des_crypt_builtin_test = _des_crypt_test.create_backend_case("builtin") + +#============================================================================= +# fshp +#============================================================================= +class fshp_test(HandlerCase): + """test fshp algorithm""" + handler = hash.fshp + + known_correct_hashes = [ + # + # test vectors from FSHP reference implementation + # https://github.com/bdd/fshp-is-not-secure-anymore/blob/master/python/test.py + # + ('test', '{FSHP0|0|1}qUqP5cyxm6YcTAhz05Hph5gvu9M='), + + ('test', + '{FSHP1|8|4096}MTIzNDU2NzjTdHcmoXwNc0f' + 'f9+ArUHoN0CvlbPZpxFi1C6RDM/MHSA==' + ), + + ('OrpheanBeholderScryDoubt', + '{FSHP1|8|4096}GVSUFDAjdh0vBosn1GUhz' + 'GLHP7BmkbCZVH/3TQqGIjADXpc+6NCg3g==' + ), + ('ExecuteOrder66', + '{FSHP3|16|8192}0aY7rZQ+/PR+Rd5/I9ss' + 'RM7cjguyT8ibypNaSp/U1uziNO3BVlg5qPU' + 'ng+zHUDQC3ao/JbzOnIBUtAeWHEy7a2vZeZ' + '7jAwyJJa2EqOsq4Io=' + ), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, '{FSHP1|16|16384}9v6/l3Lu/d9by5nznpOS' + 'cqQo8eKu/b/CKli3RCkgYg4nRTgZu5y659YV8cCZ68UL'), + ] + + known_unidentified_hashes = [ + # incorrect header + '{FSHX0|0|1}qUqP5cyxm6YcTAhz05Hph5gvu9M=', + 'FSHP0|0|1}qUqP5cyxm6YcTAhz05Hph5gvu9M=', + ] + + known_malformed_hashes = [ + # bad base64 padding + '{FSHP0|0|1}qUqP5cyxm6YcTAhz05Hph5gvu9M', + + # wrong salt size + '{FSHP0|1|1}qUqP5cyxm6YcTAhz05Hph5gvu9M=', + + # bad rounds + '{FSHP0|0|A}qUqP5cyxm6YcTAhz05Hph5gvu9M=', + ] + + def test_90_variant(self): + """test variant keyword""" + handler = self.handler + kwds = dict(salt=b'a', rounds=1) + + # accepts ints + handler(variant=1, **kwds) + + # accepts bytes or unicode + handler(variant=u('1'), **kwds) + handler(variant=b'1', **kwds) + + # aliases + handler(variant=u('sha256'), **kwds) + handler(variant=b'sha256', **kwds) + + # rejects None + self.assertRaises(TypeError, handler, variant=None, **kwds) + + # rejects other types + self.assertRaises(TypeError, handler, variant=complex(1,1), **kwds) + + # invalid variant + self.assertRaises(ValueError, handler, variant='9', **kwds) + self.assertRaises(ValueError, handler, variant=9, **kwds) + 
+#============================================================================= +# hex digests +#============================================================================= +class hex_md4_test(HandlerCase): + handler = hash.hex_md4 + known_correct_hashes = [ + ("password", '8a9d093f14f8701df17732b2bb182c74'), + (UPASS_TABLE, '876078368c47817ce5f9115f3a42cf74'), + ] + +class hex_md5_test(HandlerCase): + handler = hash.hex_md5 + known_correct_hashes = [ + ("password", '5f4dcc3b5aa765d61d8327deb882cf99'), + (UPASS_TABLE, '05473f8a19f66815e737b33264a0d0b0'), + ] + + # XXX: should test this for ALL the create_hex_md5() hashers. + def test_mock_fips_mode(self): + """ + if md5 isn't available, a dummy instance should be created. + (helps on FIPS systems). + """ + from passlib.exc import UnknownHashError + from passlib.crypto.digest import lookup_hash, _set_mock_fips_mode + + # check if md5 is available so we can test mock helper + supported = lookup_hash("md5", required=False).supported + self.assertEqual(self.handler.supported, supported) + if supported: + _set_mock_fips_mode() + self.addCleanup(_set_mock_fips_mode, False) + + # HACK: have to recreate hasher, since underlying HashInfo has changed. + # could reload module and re-import, but this should be good enough. + from passlib.handlers.digests import create_hex_hash + hasher = create_hex_hash("md5", required=False) + self.assertFalse(hasher.supported) + + # can identify hashes even if disabled + ref1 = '5f4dcc3b5aa765d61d8327deb882cf99' + ref2 = 'xxx' + self.assertTrue(hasher.identify(ref1)) + self.assertFalse(hasher.identify(ref2)) + + # throw error if try to use it + pat = "'md5' hash disabled for fips" + self.assertRaisesRegex(UnknownHashError, pat, hasher.hash, "password") + self.assertRaisesRegex(UnknownHashError, pat, hasher.verify, "password", ref1) + + +class hex_sha1_test(HandlerCase): + handler = hash.hex_sha1 + known_correct_hashes = [ + ("password", '5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8'), + (UPASS_TABLE, 'e059b2628e3a3e2de095679de9822c1d1466e0f0'), + ] + +class hex_sha256_test(HandlerCase): + handler = hash.hex_sha256 + known_correct_hashes = [ + ("password", '5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8'), + (UPASS_TABLE, '6ed729e19bf24d3d20f564375820819932029df05547116cfc2cc868a27b4493'), + ] + +class hex_sha512_test(HandlerCase): + handler = hash.hex_sha512 + known_correct_hashes = [ + ("password", 'b109f3bbbc244eb82441917ed06d618b9008dd09b3befd1b5e07394c' + '706a8bb980b1d7785e5976ec049b46df5f1326af5a2ea6d103fd07c95385ffab0cac' + 'bc86'), + (UPASS_TABLE, 'd91bb0a23d66dca07a1781fd63ae6a05f6919ee5fc368049f350c9f' + '293b078a18165d66097cf0d89fdfbeed1ad6e7dba2344e57348cd6d51308c843a06f' + '29caf'), + ] + +#============================================================================= +# htdigest hash +#============================================================================= +class htdigest_test(UserHandlerMixin, HandlerCase): + handler = hash.htdigest + + known_correct_hashes = [ + # secret, user, realm + + # from RFC 2617 + (("Circle Of Life", "Mufasa", "testrealm@host.com"), + '939e7578ed9e3c518a452acee763bce9'), + + # custom + ((UPASS_TABLE, UPASS_USD, UPASS_WAV), + '4dabed2727d583178777fab468dd1f17'), + ] + + known_unidentified_hashes = [ + # bad char \/ - currently rejecting upper hex chars, may change + '939e7578edAe3c518a452acee763bce9', + + # bad char \/ + '939e7578edxe3c518a452acee763bce9', + ] + + def test_80_user(self): + raise self.skipTest("test case doesn't support 'realm' keyword") + 
+ def populate_context(self, secret, kwds): + """insert username into kwds""" + if isinstance(secret, tuple): + secret, user, realm = secret + else: + user, realm = "user", "realm" + kwds.setdefault("user", user) + kwds.setdefault("realm", realm) + return secret + +#============================================================================= +# ldap hashes +#============================================================================= +class ldap_md5_test(HandlerCase): + handler = hash.ldap_md5 + known_correct_hashes = [ + ("helloworld", '{MD5}/F4DjTilcDIIVEHn/nAQsA=='), + (UPASS_TABLE, '{MD5}BUc/ihn2aBXnN7MyZKDQsA=='), + ] + +class ldap_sha1_test(HandlerCase): + handler = hash.ldap_sha1 + known_correct_hashes = [ + ("helloworld", '{SHA}at+xg6SiyUovktq1redipHiJpaE='), + (UPASS_TABLE, '{SHA}4FmyYo46Pi3glWed6YIsHRRm4PA='), + ] + +class ldap_salted_md5_test(HandlerCase): + handler = hash.ldap_salted_md5 + known_correct_hashes = [ + ("testing1234", '{SMD5}UjFY34os/pnZQ3oQOzjqGu4yeXE='), + (UPASS_TABLE, '{SMD5}Z0ioJ58LlzUeRxm3K6JPGAvBGIM='), + + # alternate salt sizes (8, 15, 16) + ('test', '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P4cw'), + ('test', '{SMD5}XRlncfRzvGi0FDzgR98tUgBg7B3jXOs9p9S615qTkg=='), + ('test', '{SMD5}FbAkzOMOxRbMp6Nn4hnZuel9j9Gas7a2lvI+x5hT6j0='), + ] + + known_malformed_hashes = [ + # salt too small (3) + '{SMD5}IGVhwK+anvspmfDt2t0vgGjt/Q==', + + # incorrect base64 encoding + '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P4c', + '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P4cw' + '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P4cw=', + '{SMD5}LnuZPJhiaY95/4lmV=pg548xBsD4P4cw', + '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P===', + ] + +class ldap_salted_sha1_test(HandlerCase): + handler = hash.ldap_salted_sha1 + known_correct_hashes = [ + ("testing123", '{SSHA}0c0blFTXXNuAMHECS4uxrj3ZieMoWImr'), + ("secret", "{SSHA}0H+zTv8o4MR4H43n03eCsvw1luG8LdB7"), + (UPASS_TABLE, '{SSHA}3yCSD1nLZXznra4N8XzZgAL+s1sQYsx5'), + + # alternate salt sizes (8, 15, 16) + ('test', '{SSHA}P90+qijSp8MJ1tN25j5o1PflUvlqjXHOGeOckw=='), + ('test', '{SSHA}/ZMF5KymNM+uEOjW+9STKlfCFj51bg3BmBNCiPHeW2ttbU0='), + ('test', '{SSHA}Pfx6Vf48AT9x3FVv8znbo8WQkEVSipHSWovxXmvNWUvp/d/7'), + ] + + known_malformed_hashes = [ + # salt too small (3) + '{SSHA}ZQK3Yvtvl6wtIRoISgMGPkcWU7Nfq5U=', + + # incorrect base64 encoding + '{SSHA}P90+qijSp8MJ1tN25j5o1PflUvlqjXHOGeOck', + '{SSHA}P90+qijSp8MJ1tN25j5o1PflUvlqjXHOGeOckw=', + '{SSHA}P90+qijSp8MJ1tN25j5o1Pf=UvlqjXHOGeOckw==', + '{SSHA}P90+qijSp8MJ1tN25j5o1PflUvlqjXHOGeOck===', + ] + + +class ldap_salted_sha256_test(HandlerCase): + handler = hash.ldap_salted_sha256 + known_correct_hashes = [ + # generated locally + # salt size = 8 + ("password", '{SSHA256}x1tymSTVjozxQ2PtT46ysrzhZxbcskK0o2f8hEFx7fAQQmhtDSEkJA=='), + ("test", '{SSHA256}xfqc9aOR6z15YaEk3/Ufd7UL9+JozB/1EPmCDTizL0GkdA7BuNda6w=='), + ("toomanysecrets", '{SSHA256}RrTKrg6HFXcjJ+eDAq4UtbODxOr9RLeG+I69FoJvutcbY0zpfU+p1Q=='), + (u('letm\xe8\xefn'), '{SSHA256}km7UjUTBZN8a+gf1ND2/qn15N7LsO/jmGYJXvyTfJKAbI0RoLWWslQ=='), + + # alternate salt sizes (4, 15, 16) + # generated locally + ('test', '{SSHA256}TFv2RpwyO0U9mA0Hk8FsXRa1I+4dNUtv27Qa8dzGVLinlDIm'), + ('test', '{SSHA256}J6MFQdkfjdmXz9UyUPb773kekJdm4dgSL4y8WQEQW11VipHSundOKaV0LsV4L6U='), + ('test', '{SSHA256}uBLazLaiBaPb6Cpnvq2XTYDkvXbYIuqRW1anMKk85d1/j1GqFQIgpHSOMUYIIcS4'), + ] + + known_malformed_hashes = [ + # salt too small (3) + '{SSHA256}Lpdyr1+lR+rtxgp3SpQnUuNw33ENivTl28nzF2ZI4Gm41/o=', + + # incorrect base64 encoding + '{SSHA256}TFv2RpwyO0U9mA0Hk8FsXRa1I+4dNUtv27Qa8dzGVLinlDI@', + 
'{SSHA256}TFv2RpwyO0U9mA0Hk8FsXRa1I+4dNUtv27Qa8dzGVLinlDI', + '{SSHA256}TFv2RpwyO0U9mA0Hk8FsXRa1I+4dNUtv27Qa8dzGVLinlDIm===', + ] + + + +class ldap_salted_sha512_test(HandlerCase): + handler = hash.ldap_salted_sha512 + known_correct_hashes = [ + # generated by testing ldap server web interface (see issue 124 comments) + # salt size = 8 + ("toomanysecrets", '{SSHA512}wExp4xjiCHS0zidJDC4UJq9EEeIebAQPJ1PWSwfhxWjfutI9XiiKuHm2AE41cEFfK+8HyI8bh+ztbczUGsvVFIgICWWPt7qu'), + (u('letm\xe8\xefn'), '{SSHA512}mpNUSmZc3TNx+RnPwkIAVMf7ocEKLPrIoQNsg4Eu8dHvyCeb2xzHp5A6n4tF7ntknSvfvRZaJII4ImvNJlYsgiwAm0FMqR+3'), + + # generated locally + # salt size = 8 + ("password", '{SSHA512}f/lFQskkl7PdMsTGJxHZq8LDt/l+UqRMm6/pj4pV7/xZkcOaKCgvQqp+KCeXc/Vd4RY6vEHWn4y0DnFcQ6wgyv9fyxk='), + ("test", '{SSHA512}Tgx/uhHnlM9/GgQvI31dN7cheDXg7WypZwaaIkyRsgV/BKIzBG3G/wUd9o1dpi06p3SYzMedg0lvTc3b6CtdO0Xo/f9/L+Uc'), + + # alternate salt sizes (4, 15, 16) + # generated locally + ('test', '{SSHA512}Yg9DQ2wURCFGwobu7R2O6cq7nVbnGMPrFCX0aPQ9kj/y1hd6k9PEzkgWCB5aXdPwPzNrVb0PkiHiBnG1CxFiT+B8L8U='), + ('test', '{SSHA512}5ecDGWs5RY4xLszUO6hAcl90W3wAozGQoI4Gqj8xSZdcfU1lVEM4aY8s+4xVeLitcn7BO8i7xkzMFWLoxas7SeHc23sP4dx77937PyeE0A=='), + ('test', '{SSHA512}6FQv5W47HGg2MFBFZofoiIbO8KRW75Pm51NKoInpthYQQ5ujazHGhVGzrj3JXgA7j0k+UNmkHdbJjdY5xcUHPzynFEII4fwfIySEcG5NKSU='), + ] + + known_malformed_hashes = [ + # salt too small (3) + '{SSHA512}zFnn4/8x8GveUaMqgrYWyIWqFQ0Irt6gADPtRk4Uv3nUC6uR5cD8+YdQni/0ZNij9etm6p17kSFuww3M6l+d6AbAeA==', + + # incorrect base64 encoding + '{SSHA512}Tgx/uhHnlM9/GgQvI31dN7cheDXg7WypZwaaIkyRsgV/BKIzBG3G/wUd9o1dpi06p3SYzMedg0lvTc3b6CtdO0Xo/f9/L+U', + '{SSHA512}Tgx/uhHnlM9/GgQvI31dN7cheDXg7WypZwaaIkyRsgV/BKIzBG3G/wUd9o1dpi06p3SYzMedg0lvTc3b6CtdO0Xo/f9/L+U@', + '{SSHA512}Tgx/uhHnlM9/GgQvI31dN7cheDXg7WypZwaaIkyRsgV/BKIzBG3G/wUd9o1dpi06p3SYzMedg0lvTc3b6CtdO0Xo/f9/L+U===', + ] + + +class ldap_plaintext_test(HandlerCase): + # TODO: integrate EncodingHandlerMixin + handler = hash.ldap_plaintext + known_correct_hashes = [ + ("password", 'password'), + (UPASS_TABLE, UPASS_TABLE if PY3 else PASS_TABLE_UTF8), + (PASS_TABLE_UTF8, UPASS_TABLE if PY3 else PASS_TABLE_UTF8), + ] + known_unidentified_hashes = [ + "{FOO}bar", + + # NOTE: this hash currently rejects the empty string. + "", + ] + + known_other_hashes = [ + ("ldap_md5", "{MD5}/F4DjTilcDIIVEHn/nAQsA==") + ] + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def random_password(self): + # NOTE: this hash currently rejects the empty string. 
+ while True: + pwd = super(ldap_plaintext_test.FuzzHashGenerator, self).random_password() + if pwd: + return pwd + +class _ldap_md5_crypt_test(HandlerCase): + # NOTE: since the ldap_{crypt} handlers are all wrappers, don't need + # separate test; this is just to test the codebase end-to-end + handler = hash.ldap_md5_crypt + + known_correct_hashes = [ + # + # custom + # + ('', '{CRYPT}$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.'), + (' ', '{CRYPT}$1$m/5ee7ol$bZn0kIBFipq39e.KDXX8I0'), + ('test', '{CRYPT}$1$ec6XvcoW$ghEtNK2U1MC5l.Dwgi3020'), + ('Compl3X AlphaNu3meric', '{CRYPT}$1$nX1e7EeI$ljQn72ZUgt6Wxd9hfvHdV0'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '{CRYPT}$1$jQS7o98J$V6iTcr71CGgwW2laf17pi1'), + ('test', '{CRYPT}$1$SuMrG47N$ymvzYjr7QcEQjaK5m1PGx1'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '{CRYPT}$1$d6/Ky1lU$/xpf8m7ftmWLF.TjHCqel0'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash + '{CRYPT}$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o!', + ] + +# create test cases for specific backends +ldap_md5_crypt_os_crypt_test =_ldap_md5_crypt_test.create_backend_case("os_crypt") +ldap_md5_crypt_builtin_test =_ldap_md5_crypt_test.create_backend_case("builtin") + +class _ldap_sha1_crypt_test(HandlerCase): + # NOTE: this isn't for testing the hash (see ldap_md5_crypt note) + # but as a self-test of the os_crypt patching code in HandlerCase. + handler = hash.ldap_sha1_crypt + + known_correct_hashes = [ + ('password', '{CRYPT}$sha1$10$c.mcTzCw$gF8UeYst9yXX7WNZKc5Fjkq0.au7'), + (UPASS_TABLE, '{CRYPT}$sha1$10$rnqXlOsF$aGJf.cdRPewJAXo1Rn1BkbaYh0fP'), + ] + + def populate_settings(self, kwds): + kwds.setdefault("rounds", 10) + super(_ldap_sha1_crypt_test, self).populate_settings(kwds) + + def test_77_fuzz_input(self, **ignored): + raise self.skipTest("unneeded") + +# create test cases for specific backends +ldap_sha1_crypt_os_crypt_test = _ldap_sha1_crypt_test.create_backend_case("os_crypt") + +#============================================================================= +# lanman +#============================================================================= +class lmhash_test(EncodingHandlerMixin, HandlerCase): + handler = hash.lmhash + secret_case_insensitive = True + + known_correct_hashes = [ + # + # http://msdn.microsoft.com/en-us/library/cc245828(v=prot.10).aspx + # + ("OLDPASSWORD", "c9b81d939d6fd80cd408e6b105741864"), + ("NEWPASSWORD", '09eeab5aa415d6e4d408e6b105741864'), + ("welcome", "c23413a8a1e7665faad3b435b51404ee"), + + # + # custom + # + ('', 'aad3b435b51404eeaad3b435b51404ee'), + ('zzZZZzz', 'a5e6066de61c3e35aad3b435b51404ee'), + ('passphrase', '855c3697d9979e78ac404c4ba2c66533'), + ('Yokohama', '5ecd9236d21095ce7584248b8d2c9f9e'), + + # ensures cp437 used for unicode + (u('ENCYCLOP\xC6DIA'), 'fed6416bffc9750d48462b9d7aaac065'), + (u('encyclop\xE6dia'), 'fed6416bffc9750d48462b9d7aaac065'), + + # test various encoding values + ((u("\xC6"), None), '25d8ab4a0659c97aaad3b435b51404ee'), + ((u("\xC6"), "cp437"), '25d8ab4a0659c97aaad3b435b51404ee'), + ((u("\xC6"), "latin-1"), '184eecbbe9991b44aad3b435b51404ee'), + ((u("\xC6"), "utf-8"), '00dd240fcfab20b8aad3b435b51404ee'), + ] + + known_unidentified_hashes = [ + # bad char in otherwise correct hash + '855c3697d9979e78ac404c4ba2c6653X', + ] + + def test_90_raw(self): + """test lmhash.raw() method""" + from binascii import unhexlify + from passlib.utils.compat import str_to_bascii + lmhash = self.handler + for secret, hash in self.known_correct_hashes: + kwds = {} + secret = self.populate_context(secret, kwds) + data = 
unhexlify(str_to_bascii(hash)) + self.assertEqual(lmhash.raw(secret, **kwds), data) + self.assertRaises(TypeError, lmhash.raw, 1) + +#============================================================================= +# md5 crypt +#============================================================================= +class _md5_crypt_test(HandlerCase): + handler = hash.md5_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', '$1$dXc3I7Rw$ctlgjDdWJLMT.qwHsWhXR1'), + ('U*U***U', '$1$dXc3I7Rw$94JPyQc/eAgQ3MFMCoMF.0'), + ('U*U***U*', '$1$dXc3I7Rw$is1mVIAEtAhIzSdfn5JOO0'), + ('*U*U*U*U', '$1$eQT9Hwbt$XtuElNJD.eW5MN5UCWyTQ0'), + ('', '$1$Eu.GHtia$CFkL/nE1BYTlEPiVx1VWX0'), + + # + # custom + # + + # NOTE: would need to patch HandlerCase to coerce hashes + # to native str for this first one to work under py3. +## ('', b('$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.')), + ('', '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.'), + (' ', '$1$m/5ee7ol$bZn0kIBFipq39e.KDXX8I0'), + ('test', '$1$ec6XvcoW$ghEtNK2U1MC5l.Dwgi3020'), + ('Compl3X AlphaNu3meric', '$1$nX1e7EeI$ljQn72ZUgt6Wxd9hfvHdV0'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '$1$jQS7o98J$V6iTcr71CGgwW2laf17pi1'), + ('test', '$1$SuMrG47N$ymvzYjr7QcEQjaK5m1PGx1'), + (b'test', '$1$SuMrG47N$ymvzYjr7QcEQjaK5m1PGx1'), + (u('s'), '$1$ssssssss$YgmLTApYTv12qgTwBoj8i/'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$1$d6/Ky1lU$/xpf8m7ftmWLF.TjHCqel0'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash \/ + '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o!', + + # too many fields + '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.$', + ] + + platform_crypt_support = [ + # openbsd 5.8 dropped everything except bcrypt + ("openbsd[6789]", False), + ("openbsd5", None), + ("openbsd", True), + + ("freebsd|netbsd|linux|solaris", True), + ("darwin", False), + ] + +# create test cases for specific backends +md5_crypt_os_crypt_test = _md5_crypt_test.create_backend_case("os_crypt") +md5_crypt_builtin_test = _md5_crypt_test.create_backend_case("builtin") + +#============================================================================= +# msdcc 1 & 2 +#============================================================================= +class msdcc_test(UserHandlerMixin, HandlerCase): + handler = hash.msdcc + user_case_insensitive = True + + known_correct_hashes = [ + + # + # http://www.jedge.com/wordpress/windows-password-cache/ + # + (("Asdf999", "sevans"), "b1176c2587478785ec1037e5abc916d0"), + + # + # http://infosecisland.com/blogview/12156-Cachedump-for-Meterpreter-in-Action.html + # + (("ASDqwe123", "jdoe"), "592cdfbc3f1ef77ae95c75f851e37166"), + + # + # http://comments.gmane.org/gmane.comp.security.openwall.john.user/1917 + # + (("test1", "test1"), "64cd29e36a8431a2b111378564a10631"), + (("test2", "test2"), "ab60bdb4493822b175486810ac2abe63"), + (("test3", "test3"), "14dd041848e12fc48c0aa7a416a4a00c"), + (("test4", "test4"), "b945d24866af4b01a6d89b9d932a153c"), + + # + # http://ciscoit.wordpress.com/2011/04/13/metasploit-hashdump-vs-cachedump/ + # + (("1234qwer!@#$", "Administrator"), "7b69d06ef494621e3f47b9802fe7776d"), + + # + # http://www.securiteam.com/tools/5JP0I2KFPA.html + # + (("password", "user"), "2d9f0b052932ad18b87f315641921cda"), + + # + # from JTR 1.7.9 + # + (("", "root"), "176a4c2bd45ac73687676c2f09045353"), + (("test1", "TEST1"), "64cd29e36a8431a2b111378564a10631"), + (("okolada", "nineteen_characters"), "290efa10307e36a79b3eebf2a6b29455"), + ((u("\u00FC"), u("\u00FC")), "48f84e6f73d6d5305f6558a33fa2c9bb"), + ((u("\u00FC\u00FC"), u("\u00FC\u00FC")), 
"593246a8335cf0261799bda2a2a9c623"), + ((u("\u20AC\u20AC"), "user"), "9121790702dda0fa5d353014c334c2ce"), + + # + # custom + # + + # ensures utf-8 used for unicode + ((UPASS_TABLE, 'bob'), 'fcb82eb4212865c7ac3503156ca3f349'), + ] + + known_alternate_hashes = [ + # check uppercase accepted. + ("B1176C2587478785EC1037E5ABC916D0", ("Asdf999", "sevans"), + "b1176c2587478785ec1037e5abc916d0"), + ] + +class msdcc2_test(UserHandlerMixin, HandlerCase): + handler = hash.msdcc2 + user_case_insensitive = True + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + (("test1", "test1"), "607bbe89611e37446e736f7856515bf8"), + (("qerwt", "Joe"), "e09b38f84ab0be586b730baf61781e30"), + (("12345", "Joe"), "6432f517a900b3fc34ffe57f0f346e16"), + (("", "bin"), "c0cbe0313a861062e29f92ede58f9b36"), + (("w00t", "nineteen_characters"), "87136ae0a18b2dafe4a41d555425b2ed"), + (("w00t", "eighteencharacters"), "fc5df74eca97afd7cd5abb0032496223"), + (("longpassword", "twentyXXX_characters"), "cfc6a1e33eb36c3d4f84e4c2606623d2"), + (("longpassword", "twentyoneX_characters"), "99ff74cea552799da8769d30b2684bee"), + (("longpassword", "twentytwoXX_characters"), "0a721bdc92f27d7fb23b87a445ec562f"), + (("test2", "TEST2"), "c6758e5be7fc943d00b97972a8a97620"), + (("test3", "test3"), "360e51304a2d383ea33467ab0b639cc4"), + (("test4", "test4"), "6f79ee93518306f071c47185998566ae"), + ((u("\u00FC"), "joe"), "bdb80f2c4656a8b8591bd27d39064a54"), + ((u("\u20AC\u20AC"), "joe"), "1e1e20f482ff748038e47d801d0d1bda"), + ((u("\u00FC\u00FC"), "admin"), "0839e4a07c00f18a8c65cf5b985b9e73"), + + # + # custom + # + + # custom unicode test + ((UPASS_TABLE, 'bob'), 'cad511dc9edefcf69201da72efb6bb55'), + ] + +#============================================================================= +# mssql 2000 & 2005 +#============================================================================= +class mssql2000_test(HandlerCase): + handler = hash.mssql2000 + secret_case_insensitive = "verify-only" + # FIXME: fix UT framework - this hash is sensitive to password case, but verify() is not + + known_correct_hashes = [ + # + # http://hkashfi.blogspot.com/2007/08/breaking-sql-server-2005-hashes.html + # + ('Test', '0x010034767D5C0CFA5FDCA28C4A56085E65E882E71CB0ED2503412FD54D6119FFF04129A1D72E7C3194F7284A7F3A'), + ('TEST', '0x010034767D5C2FD54D6119FFF04129A1D72E7C3194F7284A7F3A2FD54D6119FFF04129A1D72E7C3194F7284A7F3A'), + + # + # http://www.sqlmag.com/forums/aft/68438 + # + ('x', '0x010086489146C46DD7318D2514D1AC706457CBF6CD3DF8407F071DB4BBC213939D484BF7A766E974F03C96524794'), + + # + # http://stackoverflow.com/questions/173329/how-to-decrypt-a-password-from-sql-server + # + ('AAAA', '0x0100CF465B7B12625EF019E157120D58DD46569AC7BF4118455D12625EF019E157120D58DD46569AC7BF4118455D'), + + # + # http://msmvps.com/blogs/gladchenko/archive/2005/04/06/41083.aspx + # + ('123', '0x01002D60BA07FE612C8DE537DF3BFCFA49CD9968324481C1A8A8FE612C8DE537DF3BFCFA49CD9968324481C1A8A8'), + + # + # http://www.simple-talk.com/sql/t-sql-programming/temporarily-changing-an-unknown-password-of-the-sa-account-/ + # + ('12345', '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + + # + # XXX: sample is incomplete, password unknown + # https://anthonystechblog.wordpress.com/2011/04/20/password-encryption-in-sql-server-how-to-tell-if-a-user-is-using-a-weak-password/ + # (????, '0x0100813F782D66EF15E40B1A3FDF7AB88B322F51401A87D8D3E3A8483C4351A3D96FC38499E6CDD2B6F?????????'), + # + + # + # from JTR 1.7.9 + # + ('foo', 
'0x0100A607BA7C54A24D17B565C59F1743776A10250F581D482DA8B6D6261460D3F53B279CC6913CE747006A2E3254'), + ('bar', '0x01000508513EADDF6DB7DDD270CCA288BF097F2FF69CC2DB74FBB9644D6901764F999BAB9ECB80DE578D92E3F80D'), + ('canard', '0x01008408C523CF06DCB237835D701C165E68F9460580132E28ED8BC558D22CEDF8801F4503468A80F9C52A12C0A3'), + ('lapin', '0x0100BF088517935FC9183FE39FDEC77539FD5CB52BA5F5761881E5B9638641A79DBF0F1501647EC941F3355440A2'), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_USD, '0x0100624C0961B28E39FEE13FD0C35F57B4523F0DA1861C11D5A5B28E39FEE13FD0C35F57B4523F0DA1861C11D5A5'), + (UPASS_TABLE, '0x010083104228FAD559BE52477F2131E538BE9734E5C4B0ADEFD7F6D784B03C98585DC634FE2B8CA3A6DFFEC729B4'), + + ] + + known_alternate_hashes = [ + # lower case hex + ('0x01005b20054332752e1bc2e7c5df0f9ebfe486e9bee063e8d3b332752e1bc2e7c5df0f9ebfe486e9bee063e8d3b3', + '12345', '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + ] + + known_unidentified_hashes = [ + # malformed start + '0X01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + + # wrong magic value + '0x02005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + + # wrong size + '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3', + '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3AF', + + # mssql2005 + '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + ] + + known_malformed_hashes = [ + # non-hex char -----\/ + b'0x01005B200543327G2E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + u('0x01005B200543327G2E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + ] + +class mssql2005_test(HandlerCase): + handler = hash.mssql2005 + + known_correct_hashes = [ + # + # http://hkashfi.blogspot.com/2007/08/breaking-sql-server-2005-hashes.html + # + ('TEST', '0x010034767D5C2FD54D6119FFF04129A1D72E7C3194F7284A7F3A'), + + # + # http://www.openwall.com/lists/john-users/2009/07/14/2 + # + ('toto', '0x01004086CEB6BF932BC4151A1AF1F13CD17301D70816A8886908'), + + # + # http://msmvps.com/blogs/gladchenko/archive/2005/04/06/41083.aspx + # + ('123', '0x01004A335DCEDB366D99F564D460B1965B146D6184E4E1025195'), + ('123', '0x0100E11D573F359629B344990DCD3D53DE82CF8AD6BBA7B638B6'), + + # + # XXX: password unknown + # http://www.simple-talk.com/sql/t-sql-programming/temporarily-changing-an-unknown-password-of-the-sa-account-/ + # (???, '0x01004086CEB6301EEC0A994E49E30DA235880057410264030797'), + # + + # + # http://therelentlessfrontend.com/2010/03/26/encrypting-and-decrypting-passwords-in-sql-server/ + # + ('AAAA', '0x010036D726AE86834E97F20B198ACD219D60B446AC5E48C54F30'), + + # + # from JTR 1.7.9 + # + ("toto", "0x01004086CEB6BF932BC4151A1AF1F13CD17301D70816A8886908"), + ("titi", "0x01004086CEB60ED526885801C23B366965586A43D3DEAC6DD3FD"), + ("foo", "0x0100A607BA7C54A24D17B565C59F1743776A10250F581D482DA8"), + ("bar", "0x01000508513EADDF6DB7DDD270CCA288BF097F2FF69CC2DB74FB"), + ("canard", "0x01008408C523CF06DCB237835D701C165E68F9460580132E28ED"), + ("lapin", "0x0100BF088517935FC9183FE39FDEC77539FD5CB52BA5F5761881"), + + # + # adapted from mssql2000.known_correct_hashes (above) + # + ('Test', '0x010034767D5C0CFA5FDCA28C4A56085E65E882E71CB0ED250341'), + ('Test', '0x0100993BF2315F36CC441485B35C4D84687DC02C78B0E680411F'), + ('x', '0x010086489146C46DD7318D2514D1AC706457CBF6CD3DF8407F07'), + 
('AAAA', '0x0100CF465B7B12625EF019E157120D58DD46569AC7BF4118455D'), + ('123', '0x01002D60BA07FE612C8DE537DF3BFCFA49CD9968324481C1A8A8'), + ('12345', '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_USD, '0x0100624C0961B28E39FEE13FD0C35F57B4523F0DA1861C11D5A5'), + (UPASS_TABLE, '0x010083104228FAD559BE52477F2131E538BE9734E5C4B0ADEFD7'), + ] + + known_alternate_hashes = [ + # lower case hex + ('0x01005b20054332752e1bc2e7c5df0f9ebfe486e9bee063e8d3b3', + '12345', '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + ] + + known_unidentified_hashes = [ + # malformed start + '0X010036D726AE86834E97F20B198ACD219D60B446AC5E48C54F30', + + # wrong magic value + '0x020036D726AE86834E97F20B198ACD219D60B446AC5E48C54F30', + + # wrong size + '0x010036D726AE86834E97F20B198ACD219D60B446AC5E48C54F', + '0x010036D726AE86834E97F20B198ACD219D60B446AC5E48C54F3012', + + # mssql2000 + '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + ] + + known_malformed_hashes = [ + # non-hex char --\/ + '0x010036D726AE86G34E97F20B198ACD219D60B446AC5E48C54F30', + ] + +#============================================================================= +# mysql 323 & 41 +#============================================================================= +class mysql323_test(HandlerCase): + handler = hash.mysql323 + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('drew', '697a7de87c5390b2'), + ('password', "5d2e19393cc5ef67"), + + # + # custom + # + ('mypass', '6f8c114b58f2ce9e'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '4ef327ca5491c8d7'), + ] + + known_unidentified_hashes = [ + # bad char in otherwise correct hash + '6z8c114b58f2ce9e', + ] + + def test_90_whitespace(self): + """check whitespace is ignored per spec""" + h = self.do_encrypt("mypass") + h2 = self.do_encrypt("my pass") + self.assertEqual(h, h2) + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def accept_password_pair(self, secret, other): + # override to handle whitespace + return secret.replace(" ","") != other.replace(" ","") + +class mysql41_test(HandlerCase): + handler = hash.mysql41 + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('verysecretpassword', '*2C905879F74F28F8570989947D06A8429FB943E6'), + ('12345678123456781234567812345678', '*F9F1470004E888963FB466A5452C9CBD9DF6239C'), + ("' OR 1 /*'", '*97CF7A3ACBE0CA58D5391AC8377B5D9AC11D46D9'), + + # + # custom + # + ('mypass', '*6C8989366EAF75BB670AD8EA7A7FC1176A95CEF4'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '*E7AFE21A9CFA2FC9D15D942AE8FB5C240FE5837B'), + ] + known_unidentified_hashes = [ + # bad char in otherwise correct hash + '*6Z8989366EAF75BB670AD8EA7A7FC1176A95CEF4', + ] + +#============================================================================= +# NTHASH +#============================================================================= +class nthash_test(HandlerCase): + handler = hash.nthash + + known_correct_hashes = [ + # + # http://msdn.microsoft.com/en-us/library/cc245828(v=prot.10).aspx + # + ("OLDPASSWORD", u("6677b2c394311355b54f25eec5bfacf5")), + ("NEWPASSWORD", u("256781a62031289d3c2c98c14f1efc8c")), + + # + # from JTR 1.7.9 + # + + # ascii + ('', '31d6cfe0d16ae931b73c59d7e0c089c0'), + ('tigger', 'b7e0ea9fbffcf6dd83086e905089effd'), + + # utf-8 + (b'\xC3\xBC', '8bd6e4fb88e01009818749c5443ea712'), + (b'\xC3\xBC\xC3\xBC', 'cc1260adb6985ca749f150c7e0b22063'), + (b'\xE2\x82\xAC', '030926b781938db4365d46adc7cfbcb8'), + 
(b'\xE2\x82\xAC\xE2\x82\xAC','682467b963bb4e61943e170a04f7db46'), + + # + # custom + # + ('passphrase', '7f8fe03093cc84b267b109625f6bbf4b'), + ] + + known_unidentified_hashes = [ + # bad char in otherwise correct hash + '7f8fe03093cc84b267b109625f6bbfxb', + ] + +class bsd_nthash_test(HandlerCase): + handler = hash.bsd_nthash + + known_correct_hashes = [ + ('passphrase', '$3$$7f8fe03093cc84b267b109625f6bbf4b'), + (b'\xC3\xBC', '$3$$8bd6e4fb88e01009818749c5443ea712'), + ] + + known_unidentified_hashes = [ + # bad char in otherwise correct hash --\/ + '$3$$7f8fe03093cc84b267b109625f6bbfxb', + ] + +#============================================================================= +# oracle 10 & 11 +#============================================================================= +class oracle10_test(UserHandlerMixin, HandlerCase): + handler = hash.oracle10 + secret_case_insensitive = True + user_case_insensitive = True + + # TODO: get more test vectors (especially ones which properly test unicode) + known_correct_hashes = [ + # ((secret,user),hash) + + # + # http://www.petefinnigan.com/default/default_password_list.htm + # + (('tiger', 'scott'), 'F894844C34402B67'), + ((u('ttTiGGeR'), u('ScO')), '7AA1A84E31ED7771'), + (("d_syspw", "SYSTEM"), '1B9F1F9A5CB9EB31'), + (("strat_passwd", "strat_user"), 'AEBEDBB4EFB5225B'), + + # + # http://openwall.info/wiki/john/sample-hashes + # + (('#95LWEIGHTS', 'USER'), '000EA4D72A142E29'), + (('CIAO2010', 'ALFREDO'), 'EB026A76F0650F7B'), + + # + # from JTR 1.7.9 + # + (('GLOUGlou', 'Bob'), 'CDC6B483874B875B'), + (('GLOUGLOUTER', 'bOB'), 'EF1F9139DB2D5279'), + (('LONG_MOT_DE_PASSE_OUI', 'BOB'), 'EC8147ABB3373D53'), + + # + # custom + # + ((UPASS_TABLE, 'System'), 'B915A853F297B281'), + ] + + known_unidentified_hashes = [ + # bad char in hash --\ + 'F894844C34402B6Z', + ] + +class oracle11_test(HandlerCase): + handler = hash.oracle11 + # TODO: find more test vectors (especially ones which properly test unicode) + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ("abc123", "S:5FDAB69F543563582BA57894FE1C1361FB8ED57B903603F2C52ED1B4D642"), + ("SyStEm123!@#", "S:450F957ECBE075D2FA009BA822A9E28709FBC3DA82B44D284DDABEC14C42"), + ("oracle", "S:3437FF72BD69E3FB4D10C750B92B8FB90B155E26227B9AB62D94F54E5951"), + ("11g", "S:61CE616647A4F7980AFD7C7245261AF25E0AFE9C9763FCF0D54DA667D4E6"), + ("11g", "S:B9E7556F53500C8C78A58F50F24439D79962DE68117654B6700CE7CC71CF"), + + # + # source? 
+ # + ("SHAlala", "S:2BFCFDF5895014EE9BB2B9BA067B01E0389BB5711B7B5F82B7235E9E182C"), + + # + # custom + # + (UPASS_TABLE, 'S:51586343E429A6DF024B8F242F2E9F8507B1096FACD422E29142AA4974B0'), + ] + +#============================================================================= +# PHPass Portable Crypt +#============================================================================= +class phpass_test(HandlerCase): + handler = hash.phpass + + known_correct_hashes = [ + # + # from official 0.3 implementation + # http://www.openwall.com/phpass/ + # + ('test12345', '$P$9IQRaTwmfeRo7ud9Fh4E2PdI0S3r.L0'), # from the source + + # + # from JTR 1.7.9 + # + ('test1', '$H$9aaaaaSXBjgypwqm.JsMssPLiS8YQ00'), + ('123456', '$H$9PE8jEklgZhgLmZl5.HYJAzfGCQtzi1'), + ('123456', '$H$9pdx7dbOW3Nnt32sikrjAxYFjX8XoK1'), + ('thisisalongertestPW', '$P$912345678LIjjb6PhecupozNBmDndU0'), + ('JohnRipper', '$P$612345678si5M0DDyPpmRCmcltU/YW/'), + ('JohnRipper', '$H$712345678WhEyvy1YWzT4647jzeOmo0'), + ('JohnRipper', '$P$B12345678L6Lpt4BxNotVIMILOa9u81'), + + # + # custom + # + ('', '$P$7JaFQsPzJSuenezefD/3jHgt5hVfNH0'), + ('compL3X!', '$P$FiS0N5L672xzQx1rt1vgdJQRYKnQM9/'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$P$7SMy8VxnfsIy2Sxm7fJxDSdil.h7TW.'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash + # ---\/ + '$P$9IQRaTwmfeRo7ud9Fh4E2PdI0S3r!L0', + ] + +#============================================================================= +# plaintext +#============================================================================= +class plaintext_test(HandlerCase): + # TODO: integrate EncodingHandlerMixin + handler = hash.plaintext + accepts_all_hashes = True + + known_correct_hashes = [ + ('',''), + ('password', 'password'), + + # ensure unicode uses utf-8 + (UPASS_TABLE, UPASS_TABLE if PY3 else PASS_TABLE_UTF8), + (PASS_TABLE_UTF8, UPASS_TABLE if PY3 else PASS_TABLE_UTF8), + ] + +#============================================================================= +# postgres_md5 +#============================================================================= +class postgres_md5_test(UserHandlerMixin, HandlerCase): + handler = hash.postgres_md5 + known_correct_hashes = [ + # ((secret,user),hash) + + # + # generated using postgres 8.1 + # + (('mypass', 'postgres'), 'md55fba2ea04fd36069d2574ea71c8efe9d'), + (('mypass', 'root'), 'md540c31989b20437833f697e485811254b'), + (("testpassword",'testuser'), 'md5d4fc5129cc2c25465a5370113ae9835f'), + + # + # custom + # + + # verify unicode->utf8 + ((UPASS_TABLE, 'postgres'), 'md5cb9f11283265811ce076db86d18a22d2'), + ] + known_unidentified_hashes = [ + # bad 'z' char in otherwise correct hash + 'md54zc31989b20437833f697e485811254b', + ] + +#============================================================================= +# (netbsd's) sha1 crypt +#============================================================================= +class _sha1_crypt_test(HandlerCase): + handler = hash.sha1_crypt + + known_correct_hashes = [ + # + # custom + # + ("password", "$sha1$19703$iVdJqfSE$v4qYKl1zqYThwpjJAoKX6UvlHq/a"), + ("password", "$sha1$21773$uV7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH"), + (UPASS_TABLE, '$sha1$40000$uJ3Sp7LE$.VEmLO5xntyRFYihC7ggd3297T/D'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash + '$sha1$21773$u!7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH', + + # zero padded rounds + '$sha1$01773$uV7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH', + + # too many fields + '$sha1$21773$uV7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH$', + + # empty rounds field + 
'$sha1$$uV7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH$', + ] + + platform_crypt_support = [ + ("netbsd", True), + ("freebsd|openbsd|solaris|darwin", False), + ("linux", None), # may be present if libxcrypt is in use + ] + +# create test cases for specific backends +sha1_crypt_os_crypt_test = _sha1_crypt_test.create_backend_case("os_crypt") +sha1_crypt_builtin_test = _sha1_crypt_test.create_backend_case("builtin") + +#============================================================================= +# roundup +#============================================================================= + +# NOTE: all roundup hashes use PrefixWrapper, +# so there's nothing natively to test. +# so we just have a few quick cases... + +class RoundupTest(TestCase): + + def _test_pair(self, h, secret, hash): + self.assertTrue(h.verify(secret, hash)) + self.assertFalse(h.verify('x'+secret, hash)) + + def test_pairs(self): + self._test_pair( + hash.ldap_hex_sha1, + "sekrit", + '{SHA}8d42e738c7adee551324955458b5e2c0b49ee655') + + self._test_pair( + hash.ldap_hex_md5, + "sekrit", + '{MD5}ccbc53f4464604e714f69dd11138d8b5') + + self._test_pair( + hash.ldap_des_crypt, + "sekrit", + '{CRYPT}nFia0rj2TT59A') + + self._test_pair( + hash.roundup_plaintext, + "sekrit", + '{plaintext}sekrit') + + self._test_pair( + hash.ldap_pbkdf2_sha1, + "sekrit", + '{PBKDF2}5000$7BvbBq.EZzz/O0HuwX3iP.nAG3s$g3oPnFFaga2BJaX5PoPRljl4XIE') + +#============================================================================= +# sha256-crypt +#============================================================================= +class _sha256_crypt_test(HandlerCase): + handler = hash.sha256_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', '$5$LKO/Ute40T3FNF95$U0prpBQd4PloSGU0pnpM4z9wKn4vZ1.jsrzQfPqxph9'), + ('U*U***U', '$5$LKO/Ute40T3FNF95$fdgfoJEBoMajNxCv3Ru9LyQ0xZgv0OBMQoq80LQ/Qd.'), + ('U*U***U*', '$5$LKO/Ute40T3FNF95$8Ry82xGnnPI/6HtFYnvPBTYgOL23sdMXn8C29aO.x/A'), + ('*U*U*U*U', '$5$9mx1HkCz7G1xho50$O7V7YgleJKLUhcfk9pgzdh3RapEaWqMtEp9UUBAKIPA'), + ('', '$5$kc7lRD1fpYg0g.IP$d7CMTcEqJyTXyeq8hTdu/jB/I6DGkoo62NXbHIR7S43'), + + # + # custom tests + # + ('', '$5$rounds=10428$uy/jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMe.ZGsGx2aBvxTvDFI613c3'), + (' ', '$5$rounds=10376$I5lNtXtRmf.OoMd8$Ko3AI1VvTANdyKhBPavaRjJzNpSatKU6QVN9uwS9MH.'), + ('test', '$5$rounds=11858$WH1ABM5sKhxbkgCK$aTQsjPkz0rBsH3lQlJxw9HDTDXPKBxC0LlVeV69P.t1'), + ('Compl3X AlphaNu3meric', '$5$rounds=10350$o.pwkySLCzwTdmQX$nCMVsnF3TXWcBPOympBUUSQi6LGGloZoOsVJMGJ09UB'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '$5$rounds=11944$9dhlu07dQMRWvTId$LyUI5VWkGFwASlzntk1RLurxX54LUhgAcJZIt0pYGT7'), + (u('with unic\u00D6de'), '$5$rounds=1000$IbG0EuGQXw5EkMdP$LQ5AfPf13KufFsKtmazqnzSGZ4pxtUNw3woQ.ELRDF4'), + ] + + if TEST_MODE("full"): + # builtin alg was changed in 1.6, and had possibility of fencepost + # errors near rounds that are multiples of 42. these hashes test rounds + # 1004..1012 (42*24=1008 +/- 4) to ensure no mistakes were made. + # (also relying on fuzz testing against os_crypt backend). 
+ known_correct_hashes.extend([ + ("secret", '$5$rounds=1004$nacl$oiWPbm.kQ7.jTCZoOtdv7/tO5mWv/vxw5yTqlBagVR7'), + ("secret", '$5$rounds=1005$nacl$6Mo/TmGDrXxg.bMK9isRzyWH3a..6HnSVVsJMEX7ud/'), + ("secret", '$5$rounds=1006$nacl$I46VwuAiUBwmVkfPFakCtjVxYYaOJscsuIeuZLbfKID'), + ("secret", '$5$rounds=1007$nacl$9fY4j1AV3N/dV/YMUn1enRHKH.7nEL4xf1wWB6wfDD4'), + ("secret", '$5$rounds=1008$nacl$CiFWCfn8ODmWs0I1xAdXFo09tM8jr075CyP64bu3by9'), + ("secret", '$5$rounds=1009$nacl$QtpFX.CJHgVQ9oAjVYStxAeiU38OmFILWm684c6FyED'), + ("secret", '$5$rounds=1010$nacl$ktAwXuT5WbjBW/0ZU1eNMpqIWY1Sm4twfRE1zbZyo.B'), + ("secret", '$5$rounds=1011$nacl$QJWLBEhO9qQHyMx4IJojSN9sS41P1Yuz9REddxdO721'), + ("secret", '$5$rounds=1012$nacl$mmf/k2PkbBF4VCtERgky3bEVavmLZKFwAcvxD1p3kV2'), + ]) + + known_malformed_hashes = [ + # bad char in otherwise correct hash + '$5$rounds=10428$uy/:jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMeZGsGx2aBvxTvDFI613c3', + + # zero-padded rounds + '$5$rounds=010428$uy/jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMe.ZGsGx2aBvxTvDFI613c3', + + # extra "$" + '$5$rounds=10428$uy/jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMe.ZGsGx2aBvxTvDFI613c3$', + ] + + known_correct_configs = [ + # config, secret, result + + # + # taken from official specification at http://www.akkadia.org/drepper/SHA-crypt.txt + # + ( "$5$saltstring", "Hello world!", + "$5$saltstring$5B8vYYiY.CVt1RlTTf8KbXBH3hsxY/GNooZaBBGWEc5" ), + ( "$5$rounds=10000$saltstringsaltstring", "Hello world!", + "$5$rounds=10000$saltstringsaltst$3xv.VbSHBb41AL9AvLeujZkZRBAwqFMz2." + "opqey6IcA" ), + ( "$5$rounds=5000$toolongsaltstring", "This is just a test", + "$5$rounds=5000$toolongsaltstrin$Un/5jzAHMgOGZ5.mWJpuVolil07guHPvOW8" + "mGRcvxa5" ), + ( "$5$rounds=1400$anotherlongsaltstring", + "a very much longer text to encrypt. This one even stretches over more" + "than one line.", + "$5$rounds=1400$anotherlongsalts$Rx.j8H.h8HjEDGomFU8bDkXm3XIUnzyxf12" + "oP84Bnq1" ), + ( "$5$rounds=77777$short", + "we have a short salt string but not a short password", + "$5$rounds=77777$short$JiO1O3ZpDAxGJeaDIuqCoEFysAe1mZNJRs3pw0KQRd/" ), + ( "$5$rounds=123456$asaltof16chars..", "a short string", + "$5$rounds=123456$asaltof16chars..$gP3VQ/6X7UUEW3HkBn2w1/Ptq2jxPyzV/" + "cZKmF/wJvD" ), + ( "$5$rounds=10$roundstoolow", "the minimum number is still observed", + "$5$rounds=1000$roundstoolow$yfvwcWrQ8l/K0DAWyuPMDNHpIVlTQebY9l/gL97" + "2bIC" ), + ] + + filter_config_warnings = True # rounds too low, salt too small + + platform_crypt_support = [ + ("freebsd(9|1\d)|linux", True), + ("freebsd8", None), # added in freebsd 8.3 + ("freebsd|openbsd|netbsd|darwin", False), + ("solaris", None), # depends on policy + ] + +# create test cases for specific backends +sha256_crypt_os_crypt_test = _sha256_crypt_test.create_backend_case("os_crypt") +sha256_crypt_builtin_test = _sha256_crypt_test.create_backend_case("builtin") + +#============================================================================= +# test sha512-crypt +#============================================================================= +class _sha512_crypt_test(HandlerCase): + handler = hash.sha512_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', "$6$LKO/Ute40T3FNF95$6S/6T2YuOIHY0N3XpLKABJ3soYcXD9mB7uVbtEZDj/LNscVhZoZ9DEH.sBciDrMsHOWOoASbNLTypH/5X26gN0"), + ('U*U***U', "$6$LKO/Ute40T3FNF95$wK80cNqkiAUzFuVGxW6eFe8J.fSVI65MD5yEm8EjYMaJuDrhwe5XXpHDJpwF/kY.afsUs1LlgQAaOapVNbggZ1"), + ('U*U***U*', "$6$LKO/Ute40T3FNF95$YS81pp1uhOHTgKLhSMtQCr2cDiUiN03Ud3gyD4ameviK1Zqz.w3oXsMgO6LrqmIEcG3hiqaUqHi/WEE2zrZqa/"), + ('*U*U*U*U', 
"$6$OmBOuxFYBZCYAadG$WCckkSZok9xhp4U1shIZEV7CCVwQUwMVea7L3A77th6SaE9jOPupEMJB.z0vIWCDiN9WLh2m9Oszrj5G.gt330"), + ('', "$6$ojWH1AiTee9x1peC$QVEnTvRVlPRhcLQCk/HnHaZmlGAAjCfrAN0FtOsOnUk5K5Bn/9eLHHiRzrTzaIKjW9NTLNIBUCtNVOowWS2mN."), + + # + # custom tests + # + ('', '$6$rounds=11021$KsvQipYPWpr93wWP$v7xjI4X6vyVptJjB1Y02vZC5SaSijBkGmq1uJhPr3cvqvvkd42Xvo48yLVPFt8dvhCsnlUgpX.//Cxn91H4qy1'), + (' ', '$6$rounds=11104$ED9SA4qGmd57Fq2m$q/.PqACDM/JpAHKmr86nkPzzuR5.YpYa8ZJJvI8Zd89ZPUYTJExsFEIuTYbM7gAGcQtTkCEhBKmp1S1QZwaXx0'), + ('test', '$6$rounds=11531$G/gkPn17kHYo0gTF$Kq.uZBHlSBXyzsOJXtxJruOOH4yc0Is13uY7yK0PvAvXxbvc1w8DO1RzREMhKsc82K/Jh8OquV8FZUlreYPJk1'), + ('Compl3X AlphaNu3meric', '$6$rounds=10787$wakX8nGKEzgJ4Scy$X78uqaX1wYXcSCtS4BVYw2trWkvpa8p7lkAtS9O/6045fK4UB2/Jia0Uy/KzCpODlfVxVNZzCCoV9s2hoLfDs/'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '$6$rounds=11065$5KXQoE1bztkY5IZr$Jf6krQSUKKOlKca4hSW07MSerFFzVIZt/N3rOTsUgKqp7cUdHrwV8MoIVNCk9q9WL3ZRMsdbwNXpVk0gVxKtz1'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$6$rounds=40000$PEZTJDiyzV28M3.m$GTlnzfzGB44DGd1XqlmC4erAJKCP.rhvLvrYxiT38htrNzVGBnplFOHjejUGVrCfusGWxLQCc3pFO0A/1jYYr0'), + ] + + known_malformed_hashes = [ + # zero-padded rounds + '$6$rounds=011021$KsvQipYPWpr93wWP$v7xjI4X6vyVptJjB1Y02vZC5SaSijBkGmq1uJhPr3cvqvvkd42Xvo48yLVPFt8dvhCsnlUgpX.//Cxn91H4qy1', + # bad char in otherwise correct hash + '$6$rounds=11021$KsvQipYPWpr9:wWP$v7xjI4X6vyVptJjB1Y02vZC5SaSijBkGmq1uJhPr3cvqvvkd42Xvo48yLVPFt8dvhCsnlUgpX.//Cxn91H4qy1', + ] + + known_correct_configs = [ + # config, secret, result + + # + # taken from official specification at http://www.akkadia.org/drepper/SHA-crypt.txt + # + ("$6$saltstring", "Hello world!", + "$6$saltstring$svn8UoSVapNtMuq1ukKS4tPQd8iKwSMHWjl/O817G3uBnIFNjnQJu" + "esI68u4OTLiBFdcbYEdFCoEOfaS35inz1" ), + + ( "$6$rounds=10000$saltstringsaltstring", "Hello world!", + "$6$rounds=10000$saltstringsaltst$OW1/O6BYHV6BcXZu8QVeXbDWra3Oeqh0sb" + "HbbMCVNSnCM/UrjmM0Dp8vOuZeHBy/YTBmSK6H9qs/y3RnOaw5v." ), + + ( "$6$rounds=5000$toolongsaltstring", "This is just a test", + "$6$rounds=5000$toolongsaltstrin$lQ8jolhgVRVhY4b5pZKaysCLi0QBxGoNeKQ" + "zQ3glMhwllF7oGDZxUhx1yxdYcz/e1JSbq3y6JMxxl8audkUEm0" ), + + ( "$6$rounds=1400$anotherlongsaltstring", + "a very much longer text to encrypt. This one even stretches over more" + "than one line.", + "$6$rounds=1400$anotherlongsalts$POfYwTEok97VWcjxIiSOjiykti.o/pQs.wP" + "vMxQ6Fm7I6IoYN3CmLs66x9t0oSwbtEW7o7UmJEiDwGqd8p4ur1" ), + + ( "$6$rounds=77777$short", + "we have a short salt string but not a short password", + "$6$rounds=77777$short$WuQyW2YR.hBNpjjRhpYD/ifIw05xdfeEyQoMxIXbkvr0g" + "ge1a1x3yRULJ5CCaUeOxFmtlcGZelFl5CxtgfiAc0" ), + + ( "$6$rounds=123456$asaltof16chars..", "a short string", + "$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywWvt0RLE8uZ4oPwc" + "elCjmw2kSYu.Ec6ycULevoBK25fs2xXgMNrCzIMVcgEJAstJeonj1" ), + + ( "$6$rounds=10$roundstoolow", "the minimum number is still observed", + "$6$rounds=1000$roundstoolow$kUMsbe306n21p9R.FRkW3IGn.S9NPN0x50YhH1x" + "hLsPuWGsUSklZt58jaTfF4ZEQpyUNGc0dqbpBYYBaHHrsX." 
), + ] + + filter_config_warnings = True # rounds too low, salt too small + + platform_crypt_support = _sha256_crypt_test.platform_crypt_support + +# create test cases for specific backends +sha512_crypt_os_crypt_test = _sha512_crypt_test.create_backend_case("os_crypt") +sha512_crypt_builtin_test = _sha512_crypt_test.create_backend_case("builtin") + +#============================================================================= +# sun md5 crypt +#============================================================================= +class sun_md5_crypt_test(HandlerCase): + handler = hash.sun_md5_crypt + + # TODO: this scheme needs some real test vectors, especially due to + # the "bare salt" issue which plagued the official parser. + known_correct_hashes = [ + # + # http://forums.halcyoninc.com/showthread.php?t=258 + # + ("Gpcs3_adm", "$md5$zrdhpMlZ$$wBvMOEqbSjU.hu5T2VEP01"), + + # + # http://www.c0t0d0s0.org/archives/4453-Less-known-Solaris-features-On-passwords-Part-2-Using-stronger-password-hashing.html + # + ("aa12345678", "$md5$vyy8.OVF$$FY4TWzuauRl4.VQNobqMY."), + + # + # http://www.cuddletech.com/blog/pivot/entry.php?id=778 + # + ("this", "$md5$3UqYqndY$$6P.aaWOoucxxq.l00SS9k0"), + + # + # http://compgroups.net/comp.unix.solaris/password-file-in-linux-and-solaris-8-9 + # + ("passwd", "$md5$RPgLF6IJ$WTvAlUJ7MqH5xak2FMEwS/"), + + # + # source: http://solaris-training.com/301_HTML/docs/deepdiv.pdf page 27 + # FIXME: password unknown + # "$md5,rounds=8000$kS9FT1JC$$mnUrRO618lLah5iazwJ9m1" + + # + # source: http://www.visualexams.com/310-303.htm + # XXX: this has 9 salt chars unlike all other hashes. is that valid? + # FIXME: password unknown + # "$md5,rounds=2006$2amXesSj5$$kCF48vfPsHDjlKNXeEw7V." + # + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$md5,rounds=5000$10VYDzAA$$1arAVtMA3trgE1qJ2V0Ez1'), + ] + + known_correct_configs = [ + # (config, secret, hash) + + #--------------------------- + # test salt string handling + # + # these tests attempt to verify that passlib is handling + # the "bare salt" issue (see sun md5 crypt docs) + # in a sane manner + #--------------------------- + + # config with "$" suffix, hash strings with "$$" suffix, + # should all be treated the same, with one "$" added to salt digest. + ("$md5$3UqYqndY$", + "this", "$md5$3UqYqndY$$6P.aaWOoucxxq.l00SS9k0"), + ("$md5$3UqYqndY$$.................DUMMY", + "this", "$md5$3UqYqndY$$6P.aaWOoucxxq.l00SS9k0"), + + # config with no suffix, hash strings with "$" suffix, + # should all be treated the same, and no suffix added to salt digest. + # NOTE: this is just a guess re: config w/ no suffix, + # but otherwise there's no sane way to encode bare_salt=False + # within config string. + ("$md5$3UqYqndY", + "this", "$md5$3UqYqndY$HIZVnfJNGCPbDZ9nIRSgP1"), + ("$md5$3UqYqndY$.................DUMMY", + "this", "$md5$3UqYqndY$HIZVnfJNGCPbDZ9nIRSgP1"), + ] + + known_malformed_hashes = [ + # unexpected end of hash + "$md5,rounds=5000", + + # bad rounds + "$md5,rounds=500A$xxxx", + "$md5,rounds=0500$xxxx", + "$md5,rounds=0$xxxx", + + # bad char in otherwise correct hash + "$md5$RPgL!6IJ$WTvAlUJ7MqH5xak2FMEwS/", + + # digest too short + "$md5$RPgLa6IJ$WTvAlUJ7MqH5xak2FMEwS", + + # digest too long + "$md5$RPgLa6IJ$WTvAlUJ7MqH5xak2FMEwS/.", + + # 2+ "$" at end of salt in config + # NOTE: not sure what correct behavior is, so forbidding format for now. + "$md5$3UqYqndY$$", + + # 3+ "$" at end of salt in hash + # NOTE: not sure what correct behavior is, so forbidding format for now. 
+ "$md5$RPgLa6IJ$$$WTvAlUJ7MqH5xak2FMEwS/", + + ] + + platform_crypt_support = [ + ("solaris", True), + ("freebsd|openbsd|netbsd|linux|darwin", False), + ] + def do_verify(self, secret, hash): + # Override to fake error for "$..." hash string listed in known_correct_configs (above) + # These have to be hash strings, in order to test bare salt issue. + if isinstance(hash, str) and hash.endswith("$.................DUMMY"): + raise ValueError("pretending '$...' stub hash is config string") + return self.handler.verify(secret, hash) + +#============================================================================= +# unix disabled / fallback +#============================================================================= +class unix_disabled_test(HandlerCase): + handler = hash.unix_disabled +# accepts_all_hashes = True # TODO: turn this off. + + known_correct_hashes = [ + # everything should hash to "!" (or "*" on BSD), + # and nothing should verify against either string + ("password", "!"), + (UPASS_TABLE, "*"), + ] + + known_unidentified_hashes = [ + # should never identify anything crypt() could return... + "$1$xxx", + "abc", + "./az", + "{SHA}xxx", + ] + + def test_76_hash_border(self): + # so empty strings pass + self.accepts_all_hashes = True + super(unix_disabled_test, self).test_76_hash_border() + + def test_90_special(self): + """test marker option & special behavior""" + warnings.filterwarnings("ignore", "passing settings to .*.hash\(\) is deprecated") + handler = self.handler + + # preserve hash if provided + self.assertEqual(handler.genhash("stub", "!asd"), "!asd") + + # use marker if no hash + self.assertEqual(handler.genhash("stub", ""), handler.default_marker) + self.assertEqual(handler.hash("stub"), handler.default_marker) + self.assertEqual(handler.using().default_marker, handler.default_marker) + + # custom marker + self.assertEqual(handler.genhash("stub", "", marker="*xxx"), "*xxx") + self.assertEqual(handler.hash("stub", marker="*xxx"), "*xxx") + self.assertEqual(handler.using(marker="*xxx").hash("stub"), "*xxx") + + # reject invalid marker + self.assertRaises(ValueError, handler.genhash, 'stub', "", marker='abc') + self.assertRaises(ValueError, handler.hash, 'stub', marker='abc') + self.assertRaises(ValueError, handler.using, marker='abc') + +class unix_fallback_test(HandlerCase): + handler = hash.unix_fallback + accepts_all_hashes = True + + known_correct_hashes = [ + # *everything* should hash to "!", and nothing should verify + ("password", "!"), + (UPASS_TABLE, "!"), + ] + + # silence annoying deprecation warning + def setUp(self): + super(unix_fallback_test, self).setUp() + warnings.filterwarnings("ignore", "'unix_fallback' is deprecated") + + def test_90_wildcard(self): + """test enable_wildcard flag""" + h = self.handler + self.assertTrue(h.verify('password','', enable_wildcard=True)) + self.assertFalse(h.verify('password','')) + for c in "!*x": + self.assertFalse(h.verify('password',c, enable_wildcard=True)) + self.assertFalse(h.verify('password',c)) + + def test_91_preserves_existing(self): + """test preserves existing disabled hash""" + handler = self.handler + + # use marker if no hash + self.assertEqual(handler.genhash("stub", ""), "!") + self.assertEqual(handler.hash("stub"), "!") + + # use hash if provided and valid + self.assertEqual(handler.genhash("stub", "!asd"), "!asd") + +#============================================================================= +# eof +#============================================================================= diff --git 
a/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_argon2.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_argon2.py new file mode 100644 index 000000000..e77176934 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_argon2.py @@ -0,0 +1,507 @@ +"""passlib.tests.test_handlers_argon2 - tests for passlib hash algorithms""" +#============================================================================= +# imports +#============================================================================= +# core +import logging +log = logging.getLogger(__name__) +import re +import warnings +# site +# pkg +from passlib import hash +from passlib.utils.compat import unicode +from passlib.tests.utils import HandlerCase, TEST_MODE +from passlib.tests.test_handlers import UPASS_TABLE, PASS_TABLE_UTF8 +# module + +#============================================================================= +# a bunch of tests lifted nearlky verbatim from official argon2 UTs... +# https://github.com/P-H-C/phc-winner-argon2/blob/master/src/test.c +#============================================================================= +def hashtest(version, t, logM, p, secret, salt, hex_digest, hash): + return dict(version=version, rounds=t, logM=logM, memory_cost=1< max uint32 + "$argon2i$v=19$m=65536,t=8589934592,p=4$c29tZXNhbHQAAAAAAAAAAA$QWLzI4TY9HkL2ZTLc8g6SinwdhZewYrzz9zxCo0bkGY", + + # unexpected param + "$argon2i$v=19$m=65536,t=2,p=4,q=5$c29tZXNhbHQAAAAAAAAAAA$QWLzI4TY9HkL2ZTLc8g6SinwdhZewYrzz9zxCo0bkGY", + + # wrong param order + "$argon2i$v=19$t=2,m=65536,p=4,q=5$c29tZXNhbHQAAAAAAAAAAA$QWLzI4TY9HkL2ZTLc8g6SinwdhZewYrzz9zxCo0bkGY", + + # constraint violation: m < 8 * p + "$argon2i$v=19$m=127,t=2,p=16$c29tZXNhbHQ$IMit9qkFULCMA/ViizL57cnTLOa5DiVM9eMwpAvPwr4", + ] + + known_parsehash_results = [ + ('$argon2i$v=19$m=256,t=2,p=3$c29tZXNhbHQ$AJFIsNZTMKTAewB4+ETN1A', + dict(type="i", memory_cost=256, rounds=2, parallelism=3, salt=b'somesalt', + checksum=b'\x00\x91H\xb0\xd6S0\xa4\xc0{\x00x\xf8D\xcd\xd4')), + ] + + def setUpWarnings(self): + super(_base_argon2_test, self).setUpWarnings() + warnings.filterwarnings("ignore", ".*Using argon2pure backend.*") + + def do_stub_encrypt(self, handler=None, **settings): + if self.backend == "argon2_cffi": + # overriding default since no way to get stub config from argon2._calc_hash() + # (otherwise test_21b_max_rounds blocks trying to do max rounds) + handler = (handler or self.handler).using(**settings) + self = handler(use_defaults=True) + self.checksum = self._stub_checksum + assert self.checksum + return self.to_string() + else: + return super(_base_argon2_test, self).do_stub_encrypt(handler, **settings) + + def test_03_legacy_hash_workflow(self): + # override base method + raise self.skipTest("legacy 1.6 workflow not supported") + + def test_keyid_parameter(self): + # NOTE: keyid parameter currently not supported by official argon2 hash parser, + # even though it's mentioned in the format spec. + # we're trying to be consistent w/ this, so hashes w/ keyid should + # always through a NotImplementedError. + self.assertRaises(NotImplementedError, self.handler.verify, 'password', + "$argon2i$v=19$m=65536,t=2,p=4,keyid=ABCD$c29tZXNhbHQ$" + "IMit9qkFULCMA/ViizL57cnTLOa5DiVM9eMwpAvPwr4") + + def test_data_parameter(self): + # NOTE: argon2 c library doesn't support passing in a data parameter to argon2_hash(); + # but argon2_verify() appears to parse that info... but then discards it (!?). 
+ # not sure what proper behavior is, filed issue -- https://github.com/P-H-C/phc-winner-argon2/issues/143 + # For now, replicating behavior we have for the two backends, to detect when things change. + handler = self.handler + + # ref hash of 'password' when 'data' is correctly passed into argon2() + sample1 = '$argon2i$v=19$m=512,t=2,p=2,data=c29tZWRhdGE$c29tZXNhbHQ$KgHyCesFyyjkVkihZ5VNFw' + + # ref hash of 'password' when 'data' is silently discarded (same digest as w/o data) + sample2 = '$argon2i$v=19$m=512,t=2,p=2,data=c29tZWRhdGE$c29tZXNhbHQ$uEeXt1dxN1iFKGhklseW4w' + + # hash of 'password' w/o the data field + sample3 = '$argon2i$v=19$m=512,t=2,p=2$c29tZXNhbHQ$uEeXt1dxN1iFKGhklseW4w' + + # + # test sample 1 + # + + if self.backend == "argon2_cffi": + # argon2_cffi v16.1 would incorrectly return False here. + # but v16.2 patches so it throws error on data parameter. + # our code should detect that, and adapt it into a NotImplementedError + self.assertRaises(NotImplementedError, handler.verify, "password", sample1) + + # incorrectly returns sample3, dropping data parameter + self.assertEqual(handler.genhash("password", sample1), sample3) + + else: + assert self.backend == "argon2pure" + # should parse and verify + self.assertTrue(handler.verify("password", sample1)) + + # should preserve sample1 + self.assertEqual(handler.genhash("password", sample1), sample1) + + # + # test sample 2 + # + + if self.backend == "argon2_cffi": + # argon2_cffi v16.1 would incorrectly return True here. + # but v16.2 patches so it throws error on data parameter. + # our code should detect that, and adapt it into a NotImplementedError + self.assertRaises(NotImplementedError, handler.verify,"password", sample2) + + # incorrectly returns sample3, dropping data parameter + self.assertEqual(handler.genhash("password", sample1), sample3) + + else: + assert self.backend == "argon2pure" + # should parse, but fail to verify + self.assertFalse(self.handler.verify("password", sample2)) + + # should return sample1 (corrected digest) + self.assertEqual(handler.genhash("password", sample2), sample1) + + def test_keyid_and_data_parameters(self): + # test combination of the two, just in case + self.assertRaises(NotImplementedError, self.handler.verify, 'stub', + "$argon2i$v=19$m=65536,t=2,p=4,keyid=ABCD,data=EFGH$c29tZXNhbHQ$" + "IMit9qkFULCMA/ViizL57cnTLOa5DiVM9eMwpAvPwr4") + + def test_type_kwd(self): + cls = self.handler + + # XXX: this mirrors test_30_HasManyIdents(); + # maybe switch argon2 class to use that mixin instead of "type" kwd? + + # check settings + self.assertTrue("type" in cls.setting_kwds) + + # check supported type_values + for value in cls.type_values: + self.assertIsInstance(value, unicode) + self.assertTrue("i" in cls.type_values) + self.assertTrue("d" in cls.type_values) + + # check default + self.assertTrue(cls.type in cls.type_values) + + # check constructor validates ident correctly. + handler = cls + hash = self.get_sample_hash()[1] + kwds = handler.parsehash(hash) + del kwds['type'] + + # ... accepts good type + handler(type=cls.type, **kwds) + + # XXX: this is policy "ident" uses, maybe switch to it? + # # ... requires type w/o defaults + # self.assertRaises(TypeError, handler, **kwds) + handler(**kwds) + + # ... supplies default type + handler(use_defaults=True, **kwds) + + # ... 
rejects bad type + self.assertRaises(ValueError, handler, type='xXx', **kwds) + + def test_type_using(self): + handler = self.handler + + # XXX: this mirrors test_has_many_idents_using(); + # maybe switch argon2 class to use that mixin instead of "type" kwd? + + orig_type = handler.type + for alt_type in handler.type_values: + if alt_type != orig_type: + break + else: + raise AssertionError("expected to find alternate type: default=%r values=%r" % + (orig_type, handler.type_values)) + + def effective_type(cls): + return cls(use_defaults=True).type + + # keep default if nothing else specified + subcls = handler.using() + self.assertEqual(subcls.type, orig_type) + + # accepts alt type + subcls = handler.using(type=alt_type) + self.assertEqual(subcls.type, alt_type) + self.assertEqual(handler.type, orig_type) + + # check subcls actually *generates* default type, + # and that we didn't affect orig handler + self.assertEqual(effective_type(subcls), alt_type) + self.assertEqual(effective_type(handler), orig_type) + + # rejects bad type + self.assertRaises(ValueError, handler.using, type='xXx') + + # honor 'type' alias + subcls = handler.using(type=alt_type) + self.assertEqual(subcls.type, alt_type) + self.assertEqual(handler.type, orig_type) + + # check type aliases are being honored + self.assertEqual(effective_type(handler.using(type="I")), "i") + + def test_needs_update_w_type(self): + handler = self.handler + + hash = handler.hash("stub") + self.assertFalse(handler.needs_update(hash)) + + hash2 = re.sub(r"\$argon2\w+\$", "$argon2d$", hash) + self.assertTrue(handler.needs_update(hash2)) + + def test_needs_update_w_version(self): + handler = self.handler.using(memory_cost=65536, time_cost=2, parallelism=4, + digest_size=32) + hash = ("$argon2i$m=65536,t=2,p=4$c29tZXNhbHQAAAAAAAAAAA$" + "QWLzI4TY9HkL2ZTLc8g6SinwdhZewYrzz9zxCo0bkGY") + if handler.max_version == 0x10: + self.assertFalse(handler.needs_update(hash)) + else: + self.assertTrue(handler.needs_update(hash)) + + def test_argon_byte_encoding(self): + """verify we're using right base64 encoding for argon2""" + handler = self.handler + if handler.version != 0x13: + # TODO: make this fatal, and add refs for other version. 
+ raise self.skipTest("handler uses wrong version for sample hashes") + + # 8 byte salt + salt = b'somesalt' + temp = handler.using(memory_cost=256, time_cost=2, parallelism=2, salt=salt, + checksum_size=32, type="i") + hash = temp.hash("password") + self.assertEqual(hash, "$argon2i$v=19$m=256,t=2,p=2" + "$c29tZXNhbHQ" + "$T/XOJ2mh1/TIpJHfCdQan76Q5esCFVoT5MAeIM1Oq2E") + + # 16 byte salt + salt = b'somesalt\x00\x00\x00\x00\x00\x00\x00\x00' + temp = handler.using(memory_cost=256, time_cost=2, parallelism=2, salt=salt, + checksum_size=32, type="i") + hash = temp.hash("password") + self.assertEqual(hash, "$argon2i$v=19$m=256,t=2,p=2" + "$c29tZXNhbHQAAAAAAAAAAA" + "$rqnbEp1/jFDUEKZZmw+z14amDsFqMDC53dIe57ZHD38") + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + settings_map = HandlerCase.FuzzHashGenerator.settings_map.copy() + settings_map.update(memory_cost="random_memory_cost", type="random_type") + + def random_type(self): + return self.rng.choice(self.handler.type_values) + + def random_memory_cost(self): + if self.test.backend == "argon2pure": + return self.randintgauss(128, 384, 256, 128) + else: + return self.randintgauss(128, 32767, 16384, 4096) + + # TODO: fuzz parallelism, digest_size + +#----------------------------------------- +# test suites for specific backends +#----------------------------------------- + +class argon2_argon2_cffi_test(_base_argon2_test.create_backend_case("argon2_cffi")): + + # add some more test vectors that take too long under argon2pure + known_correct_hashes = _base_argon2_test.known_correct_hashes + [ + # + # sample hashes from argon2 cffi package's unittests, + # which in turn were generated by official argon2 cmdline tool. + # + + # v1.2, type I, w/o a version tag + ('password', "$argon2i$m=65536,t=2,p=4$c29tZXNhbHQAAAAAAAAAAA$" + "QWLzI4TY9HkL2ZTLc8g6SinwdhZewYrzz9zxCo0bkGY"), + + # v1.3, type I + ('password', "$argon2i$v=19$m=65536,t=2,p=4$c29tZXNhbHQ$" + "IMit9qkFULCMA/ViizL57cnTLOa5DiVM9eMwpAvPwr4"), + + # v1.3, type D + ('password', "$argon2d$v=19$m=65536,t=2,p=4$c29tZXNhbHQ$" + "cZn5d+rFh+ZfuRhm2iGUGgcrW5YLeM6q7L3vBsdmFA0"), + + # v1.3, type ID + ('password', "$argon2id$v=19$m=65536,t=2,p=4$c29tZXNhbHQ$" + "GpZ3sK/oH9p7VIiV56G/64Zo/8GaUw434IimaPqxwCo"), + + # + # custom + # + + # ensure trailing null bytes handled correctly + ('password\x00', "$argon2i$v=19$m=65536,t=2,p=4$c29tZXNhbHQ$" + "Vpzuc0v0SrP88LcVvmg+z5RoOYpMDKH/lt6O+CZabIQ"), + + ] + + # add reference hashes from argon2 clib tests + known_correct_hashes.extend( + (info['secret'], info['hash']) for info in reference_data + if info['logM'] <= (18 if TEST_MODE("full") else 16) + ) + +class argon2_argon2pure_test(_base_argon2_test.create_backend_case("argon2pure")): + + # XXX: setting max_threads at 1 to prevent argon2pure from using multiprocessing, + # which causes big problems when testing under pypy. + # would like a "pure_use_threads" option instead, to make it use multiprocessing.dummy instead. + handler = hash.argon2.using(memory_cost=32, parallelism=2) + + # don't use multiprocessing for unittests, makes it a lot harder to ctrl-c + # XXX: make this controlled by env var? 
+ handler.pure_use_threads = True + + # add reference hashes from argon2 clib tests + known_correct_hashes = _base_argon2_test.known_correct_hashes[:] + + known_correct_hashes.extend( + (info['secret'], info['hash']) for info in reference_data + if info['logM'] < 16 + ) + + class FuzzHashGenerator(_base_argon2_test.FuzzHashGenerator): + + def random_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. + return self.randintgauss(1, 3, 2, 1) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_bcrypt.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_bcrypt.py new file mode 100644 index 000000000..64fc8bff9 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_bcrypt.py @@ -0,0 +1,688 @@ +"""passlib.tests.test_handlers - tests for passlib hash algorithms""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import os +import warnings +# site +# pkg +from passlib import hash +from passlib.handlers.bcrypt import IDENT_2, IDENT_2X +from passlib.utils import repeat_string, to_bytes, is_safe_crypt_input +from passlib.utils.compat import irange, PY3 +from passlib.tests.utils import HandlerCase, TEST_MODE +from passlib.tests.test_handlers import UPASS_TABLE +# module + +#============================================================================= +# bcrypt +#============================================================================= +class _bcrypt_test(HandlerCase): + """base for BCrypt test cases""" + handler = hash.bcrypt + reduce_default_rounds = True + fuzz_salts_need_bcrypt_repair = True + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', '$2a$05$c92SVSfjeiCD6F2nAD6y0uBpJDjdRkt0EgeC4/31Rf2LUZbDRDE.O'), + ('U*U***U', '$2a$05$WY62Xk2TXZ7EvVDQ5fmjNu7b0GEzSzUXUh2cllxJwhtOeMtWV3Ujq'), + ('U*U***U*', '$2a$05$Fa0iKV3E2SYVUlMknirWU.CFYGvJ67UwVKI1E2FP6XeLiZGcH3MJi'), + ('*U*U*U*U', '$2a$05$.WRrXibc1zPgIdRXYfv.4uu6TD1KWf0VnHzq/0imhUhuxSxCyeBs2'), + ('', '$2a$05$Otz9agnajgrAe0.kFVF9V.tzaStZ2s1s4ZWi/LY4sw2k/MTVFj/IO'), + + # + # test vectors from http://www.openwall.com/crypt v1.2 + # note that this omits any hashes that depend on crypt_blowfish's + # various CVE-2011-2483 workarounds (hash 2a and \xff\xff in password, + # and any 2x hashes); and only contain hashes which are correct + # under both crypt_blowfish 1.2 AND OpenBSD. 
+ # + ('U*U', '$2a$05$CCCCCCCCCCCCCCCCCCCCC.E5YPO9kmyuRGyh0XouQYb4YMJKvyOeW'), + ('U*U*', '$2a$05$CCCCCCCCCCCCCCCCCCCCC.VGOzA784oUp/Z0DY336zx7pLYAy0lwK'), + ('U*U*U', '$2a$05$XXXXXXXXXXXXXXXXXXXXXOAcXxm9kjPGEMsLznoKqmqw7tc8WCx4a'), + ('', '$2a$05$CCCCCCCCCCCCCCCCCCCCC.7uG0VCzI2bS7j6ymqJi9CdcdxiRTWNy'), + ('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' + '0123456789chars after 72 are ignored', + '$2a$05$abcdefghijklmnopqrstuu5s2v8.iXieOjg/.AySBTTZIIVFJeBui'), + (b'\xa3', + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.Sa7shbm4.OzKpvFnX1pQLmQW96oUlCq'), + (b'\xff\xa3345', + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.nRht2l/HRhr6zmCp9vYUvvsqynflf9e'), + (b'\xa3ab', + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.6IflQkJytoRVc1yuaNtHfiuq.FRlSIS'), + (b'\xaa'*72 + b'chars after 72 are ignored as usual', + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.swQOIzjOiJ9GHEPuhEkvqrUyvWhEMx6'), + (b'\xaa\x55'*36, + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.R9xrDjiycxMbQE2bp.vgqlYpW5wx2yy'), + (b'\x55\xaa\xff'*24, + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.9tQZzcJfm3uj2NvJ/n5xkhpqLrMpWCe'), + + # keeping one of their 2y tests, because we are supporting that. + (b'\xa3', + '$2y$05$/OK.fbVrR/bpIqNJ5ianF.Sa7shbm4.OzKpvFnX1pQLmQW96oUlCq'), + + # + # 8bit bug (fixed in 2y/2b) + # + + # NOTE: see assert_lacks_8bit_bug() for origins of this test vector. + (b"\xd1\x91", "$2y$05$6bNw2HLQYeqHYyBfLMsv/OUcZd0LKP39b87nBw3.S2tVZSqiQX6eu"), + + # + # bsd wraparound bug (fixed in 2b) + # + + # NOTE: if backend is vulnerable, password will hash the same as '0'*72 + # ("$2a$04$R1lJ2gkNaoPGdafE.H.16.nVyh2niHsGJhayOHLMiXlI45o8/DU.6"), + # rather than same as ("0123456789"*8)[:72] + # 255 should be sufficient, but checking + (("0123456789"*26)[:254], '$2a$04$R1lJ2gkNaoPGdafE.H.16.1MKHPvmKwryeulRe225LKProWYwt9Oi'), + (("0123456789"*26)[:255], '$2a$04$R1lJ2gkNaoPGdafE.H.16.1MKHPvmKwryeulRe225LKProWYwt9Oi'), + (("0123456789"*26)[:256], '$2a$04$R1lJ2gkNaoPGdafE.H.16.1MKHPvmKwryeulRe225LKProWYwt9Oi'), + (("0123456789"*26)[:257], '$2a$04$R1lJ2gkNaoPGdafE.H.16.1MKHPvmKwryeulRe225LKProWYwt9Oi'), + + + # + # from py-bcrypt tests + # + ('', '$2a$06$DCq7YPn5Rq63x1Lad4cll.TV4S6ytwfsfvkgY8jIucDrjc8deX1s.'), + ('a', '$2a$10$k87L/MF28Q673VKh8/cPi.SUl7MU/rWuSiIDDFayrKk/1tBsSQu4u'), + ('abc', '$2a$10$WvvTPHKwdBJ3uk0Z37EMR.hLA2W6N9AEBhEgrAOljy2Ae5MtaSIUi'), + ('abcdefghijklmnopqrstuvwxyz', + '$2a$10$fVH8e28OQRj9tqiDXs1e1uxpsjN0c7II7YPKXua2NAKYvM6iQk7dq'), + ('~!@#$%^&*() ~!@#$%^&*()PNBFRD', + '$2a$10$LgfYWkbzEvQ4JakH7rOvHe0y8pHKF9OaFgwUZ2q7W2FFZmZzJYlfS'), + + # + # custom test vectors + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, + '$2a$05$Z17AXnnlpzddNUvnC6cZNOSwMA/8oNiKnHTHTwLlBijfucQQlHjaG'), + + # ensure 2b support + (UPASS_TABLE, + '$2b$05$Z17AXnnlpzddNUvnC6cZNOSwMA/8oNiKnHTHTwLlBijfucQQlHjaG'), + + ] + + if TEST_MODE("full"): + # + # add some extra tests related to 2/2a + # + CONFIG_2 = '$2$05$' + '.'*22 + CONFIG_A = '$2a$05$' + '.'*22 + known_correct_hashes.extend([ + ("", CONFIG_2 + 'J2ihDv8vVf7QZ9BsaRrKyqs2tkn55Yq'), + ("", CONFIG_A + 'J2ihDv8vVf7QZ9BsaRrKyqs2tkn55Yq'), + ("abc", CONFIG_2 + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc", CONFIG_A + 'ev6gDwpVye3oMCUpLY85aTpfBNHD0Ga'), + ("abc"*23, CONFIG_2 + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc"*23, CONFIG_A + '2kIdfSj/4/R/Q6n847VTvc68BXiRYZC'), + ("abc"*24, CONFIG_2 + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc"*24, CONFIG_A + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc"*24+'x', CONFIG_2 + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc"*24+'x', CONFIG_A + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ]) + + known_correct_configs = [ + 
('$2a$04$uM6csdM8R9SXTex/gbTaye', UPASS_TABLE, + '$2a$04$uM6csdM8R9SXTex/gbTayezuvzFEufYGd2uB6of7qScLjQ4GwcD4G'), + ] + + known_unidentified_hashes = [ + # invalid minor version + "$2f$12$EXRkfkdmXnagzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q", + "$2`$12$EXRkfkdmXnagzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q", + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash + # \/ + "$2a$12$EXRkfkdmXn!gzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q", + + # unsupported (but recognized) minor version + "$2x$12$EXRkfkdmXnagzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q", + + # rounds not zero-padded (py-bcrypt rejects this, therefore so do we) + '$2a$6$DCq7YPn5Rq63x1Lad4cll.TV4S6ytwfsfvkgY8jIucDrjc8deX1s.' + + # NOTE: salts with padding bits set are technically malformed, + # but we can reliably correct & issue a warning for that. + ] + + platform_crypt_support = [ + ("freedbsd|openbsd|netbsd", True), + ("darwin", False), + ("linux", None), # may be present via addon, e.g. debian's libpam-unix2 + ("solaris", None), # depends on system policy + ] + + #=================================================================== + # override some methods + #=================================================================== + def setUp(self): + # ensure builtin is enabled for duration of test. + if TEST_MODE("full") and self.backend == "builtin": + key = "PASSLIB_BUILTIN_BCRYPT" + orig = os.environ.get(key) + if orig: + self.addCleanup(os.environ.__setitem__, key, orig) + else: + self.addCleanup(os.environ.__delitem__, key) + os.environ[key] = "true" + + super(_bcrypt_test, self).setUp() + + # silence this warning, will come up a bunch during testing of old 2a hashes. + warnings.filterwarnings("ignore", ".*backend is vulnerable to the bsd wraparound bug.*") + + def populate_settings(self, kwds): + # builtin is still just way too slow. + if self.backend == "builtin": + kwds.setdefault("rounds", 4) + super(_bcrypt_test, self).populate_settings(kwds) + + #=================================================================== + # fuzz testing + #=================================================================== + def crypt_supports_variant(self, hash): + """check if OS crypt is expected to support given ident""" + from passlib.handlers.bcrypt import bcrypt, IDENT_2X, IDENT_2Y + from passlib.utils import safe_crypt + ident = bcrypt.from_string(hash) + return (safe_crypt("test", ident + "04$5BJqKfqMQvV7nS.yUguNcu") or "").startswith(ident) + + fuzz_verifiers = HandlerCase.fuzz_verifiers + ( + "fuzz_verifier_bcrypt", + "fuzz_verifier_pybcrypt", + "fuzz_verifier_bcryptor", + ) + + def fuzz_verifier_bcrypt(self): + # test against bcrypt, if available + from passlib.handlers.bcrypt import IDENT_2, IDENT_2A, IDENT_2B, IDENT_2X, IDENT_2Y, _detect_pybcrypt + from passlib.utils import to_native_str, to_bytes + try: + import bcrypt + except ImportError: + return + if _detect_pybcrypt(): + return + def check_bcrypt(secret, hash): + """bcrypt""" + secret = to_bytes(secret, self.FuzzHashGenerator.password_encoding) + if hash.startswith(IDENT_2B): + # bcrypt <1.1 lacks 2B support + hash = IDENT_2A + hash[4:] + elif hash.startswith(IDENT_2): + # bcrypt doesn't support $2$ hashes; but we can fake it + # using the $2a$ algorithm, by repeating the password until + # it's 72 chars in length. 
+ hash = IDENT_2A + hash[3:] + if secret: + secret = repeat_string(secret, 72) + elif hash.startswith(IDENT_2Y) and bcrypt.__version__ == "3.0.0": + hash = IDENT_2B + hash[4:] + hash = to_bytes(hash) + try: + return bcrypt.hashpw(secret, hash) == hash + except ValueError: + raise ValueError("bcrypt rejected hash: %r (secret=%r)" % (hash, secret)) + return check_bcrypt + + def fuzz_verifier_pybcrypt(self): + # test against py-bcrypt, if available + from passlib.handlers.bcrypt import ( + IDENT_2, IDENT_2A, IDENT_2B, IDENT_2X, IDENT_2Y, + _PyBcryptBackend, + ) + from passlib.utils import to_native_str + + loaded = _PyBcryptBackend._load_backend_mixin("pybcrypt", False) + if not loaded: + return + + from passlib.handlers.bcrypt import _pybcrypt as bcrypt_mod + + lock = _PyBcryptBackend._calc_lock # reuse threadlock workaround for pybcrypt 0.2 + + def check_pybcrypt(secret, hash): + """pybcrypt""" + secret = to_native_str(secret, self.FuzzHashGenerator.password_encoding) + if len(secret) > 200: # vulnerable to wraparound bug + secret = secret[:200] + if hash.startswith((IDENT_2B, IDENT_2Y)): + hash = IDENT_2A + hash[4:] + try: + if lock: + with lock: + return bcrypt_mod.hashpw(secret, hash) == hash + else: + return bcrypt_mod.hashpw(secret, hash) == hash + except ValueError: + raise ValueError("py-bcrypt rejected hash: %r" % (hash,)) + return check_pybcrypt + + def fuzz_verifier_bcryptor(self): + # test against bcryptor if available + from passlib.handlers.bcrypt import IDENT_2, IDENT_2A, IDENT_2Y, IDENT_2B + from passlib.utils import to_native_str + try: + from bcryptor.engine import Engine + except ImportError: + return + def check_bcryptor(secret, hash): + """bcryptor""" + secret = to_native_str(secret, self.FuzzHashGenerator.password_encoding) + if hash.startswith((IDENT_2B, IDENT_2Y)): + hash = IDENT_2A + hash[4:] + elif hash.startswith(IDENT_2): + # bcryptor doesn't support $2$ hashes; but we can fake it + # using the $2a$ algorithm, by repeating the password until + # it's 72 chars in length. + hash = IDENT_2A + hash[3:] + if secret: + secret = repeat_string(secret, 72) + return Engine(False).hash_key(secret, hash) == hash + return check_bcryptor + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def generate(self): + opts = super(_bcrypt_test.FuzzHashGenerator, self).generate() + + secret = opts['secret'] + other = opts['other'] + settings = opts['settings'] + ident = settings.get('ident') + + if ident == IDENT_2X: + # 2x is just recognized, not supported. don't test with it. + del settings['ident'] + + elif ident == IDENT_2 and other and repeat_string(to_bytes(other), len(to_bytes(secret))) == to_bytes(secret): + # avoid false failure due to flaw in 0-revision bcrypt: + # repeated strings like 'abc' and 'abcabc' hash identically. + opts['secret'], opts['other'] = self.random_password_pair() + + return opts + + def random_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. 
+ return self.randintgauss(5, 8, 6, 1) + + #=================================================================== + # custom tests + #=================================================================== + known_incorrect_padding = [ + # password, bad hash, good hash + + # 2 bits of salt padding set +# ("loppux", # \/ +# "$2a$12$oaQbBqq8JnSM1NHRPQGXORm4GCUMqp7meTnkft4zgSnrbhoKdDV0C", +# "$2a$12$oaQbBqq8JnSM1NHRPQGXOOm4GCUMqp7meTnkft4zgSnrbhoKdDV0C"), + ("test", # \/ + '$2a$04$oaQbBqq8JnSM1NHRPQGXORY4Vw3bdHKLIXTecPDRAcJ98cz1ilveO', + '$2a$04$oaQbBqq8JnSM1NHRPQGXOOY4Vw3bdHKLIXTecPDRAcJ98cz1ilveO'), + + # all 4 bits of salt padding set +# ("Passlib11", # \/ +# "$2a$12$M8mKpW9a2vZ7PYhq/8eJVcUtKxpo6j0zAezu0G/HAMYgMkhPu4fLK", +# "$2a$12$M8mKpW9a2vZ7PYhq/8eJVOUtKxpo6j0zAezu0G/HAMYgMkhPu4fLK"), + ("test", # \/ + "$2a$04$yjDgE74RJkeqC0/1NheSScrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS", + "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS"), + + # bad checksum padding + ("test", # \/ + "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIV", + "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS"), + ] + + def test_90_bcrypt_padding(self): + """test passlib correctly handles bcrypt padding bits""" + self.require_TEST_MODE("full") + # + # prevents reccurrence of issue 25 (https://code.google.com/p/passlib/issues/detail?id=25) + # were some unused bits were incorrectly set in bcrypt salt strings. + # (fixed since 1.5.3) + # + bcrypt = self.handler + corr_desc = ".*incorrectly set padding bits" + + # + # test hash() / genconfig() don't generate invalid salts anymore + # + def check_padding(hash): + assert hash.startswith(("$2a$", "$2b$")) and len(hash) >= 28, \ + "unexpectedly malformed hash: %r" % (hash,) + self.assertTrue(hash[28] in '.Oeu', + "unused bits incorrectly set in hash: %r" % (hash,)) + for i in irange(6): + check_padding(bcrypt.genconfig()) + for i in irange(3): + check_padding(bcrypt.using(rounds=bcrypt.min_rounds).hash("bob")) + + # + # test genconfig() corrects invalid salts & issues warning. + # + with self.assertWarningList(["salt too large", corr_desc]): + hash = bcrypt.genconfig(salt="."*21 + "A.", rounds=5, relaxed=True) + self.assertEqual(hash, "$2b$05$" + "." 
* (22 + 31)) + + # + # test public methods against good & bad hashes + # + samples = self.known_incorrect_padding + for pwd, bad, good in samples: + + # make sure genhash() corrects bad configs, leaves good unchanged + with self.assertWarningList([corr_desc]): + self.assertEqual(bcrypt.genhash(pwd, bad), good) + with self.assertWarningList([]): + self.assertEqual(bcrypt.genhash(pwd, good), good) + + # make sure verify() works correctly with good & bad hashes + with self.assertWarningList([corr_desc]): + self.assertTrue(bcrypt.verify(pwd, bad)) + with self.assertWarningList([]): + self.assertTrue(bcrypt.verify(pwd, good)) + + # make sure normhash() corrects bad hashes, leaves good unchanged + with self.assertWarningList([corr_desc]): + self.assertEqual(bcrypt.normhash(bad), good) + with self.assertWarningList([]): + self.assertEqual(bcrypt.normhash(good), good) + + # make sure normhash() leaves non-bcrypt hashes alone + self.assertEqual(bcrypt.normhash("$md5$abc"), "$md5$abc") + + def test_needs_update_w_padding(self): + """needs_update corrects bcrypt padding""" + # NOTE: see padding test above for details about issue this detects + bcrypt = self.handler.using(rounds=4) + + # PASS1 = "test" + # bad contains invalid 'c' char at end of salt: + # \/ + BAD1 = "$2a$04$yjDgE74RJkeqC0/1NheSScrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS" + GOOD1 = "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS" + + self.assertTrue(bcrypt.needs_update(BAD1)) + self.assertFalse(bcrypt.needs_update(GOOD1)) + + #=================================================================== + # eoc + #=================================================================== + +# create test cases for specific backends +bcrypt_bcrypt_test = _bcrypt_test.create_backend_case("bcrypt") +bcrypt_pybcrypt_test = _bcrypt_test.create_backend_case("pybcrypt") +bcrypt_bcryptor_test = _bcrypt_test.create_backend_case("bcryptor") + +class bcrypt_os_crypt_test(_bcrypt_test.create_backend_case("os_crypt")): + + # os crypt doesn't support non-utf8 secret bytes + known_correct_hashes = [row for row in _bcrypt_test.known_correct_hashes + if is_safe_crypt_input(row[0])] + + # os crypt backend doesn't currently implement a per-call fallback if it fails + has_os_crypt_fallback = False + +bcrypt_builtin_test = _bcrypt_test.create_backend_case("builtin") + +#============================================================================= +# bcrypt +#============================================================================= +class _bcrypt_sha256_test(HandlerCase): + "base for BCrypt-SHA256 test cases" + handler = hash.bcrypt_sha256 + reduce_default_rounds = True + forbidden_characters = None + fuzz_salts_need_bcrypt_repair = True + + known_correct_hashes = [ + #------------------------------------------------------------------- + # custom test vectors for old v1 format + #------------------------------------------------------------------- + + # empty + ("", + '$bcrypt-sha256$2a,5$E/e/2AOhqM5W/KJTFQzLce$F6dYSxOdAEoJZO2eoHUZWZljW/e0TXO'), + + # ascii + ("password", + '$bcrypt-sha256$2a,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu'), + + # unicode / utf8 + (UPASS_TABLE, + '$bcrypt-sha256$2a,5$.US1fQ4TQS.ZTz/uJ5Kyn.$QNdPDOTKKT5/sovNz1iWg26quOU4Pje'), + (UPASS_TABLE.encode("utf-8"), + '$bcrypt-sha256$2a,5$.US1fQ4TQS.ZTz/uJ5Kyn.$QNdPDOTKKT5/sovNz1iWg26quOU4Pje'), + + # ensure 2b support + ("password", + '$bcrypt-sha256$2b,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu'), + (UPASS_TABLE, + 
'$bcrypt-sha256$2b,5$.US1fQ4TQS.ZTz/uJ5Kyn.$QNdPDOTKKT5/sovNz1iWg26quOU4Pje'), + + # test >72 chars is hashed correctly -- under bcrypt these hash the same. + # NOTE: test_60_truncate_size() handles this already, this is just for overkill :) + (repeat_string("abc123", 72), + '$bcrypt-sha256$2b,5$X1g1nh3g0v4h6970O68cxe$r/hyEtqJ0teqPEmfTLoZ83ciAI1Q74.'), + (repeat_string("abc123", 72) + "qwr", + '$bcrypt-sha256$2b,5$X1g1nh3g0v4h6970O68cxe$021KLEif6epjot5yoxk0m8I0929ohEa'), + (repeat_string("abc123", 72) + "xyz", + '$bcrypt-sha256$2b,5$X1g1nh3g0v4h6970O68cxe$7.1kgpHduMGEjvM3fX6e/QCvfn6OKja'), + + #------------------------------------------------------------------- + # custom test vectors for v2 format + # TODO: convert to v2 format + #------------------------------------------------------------------- + + # empty + ("", + '$bcrypt-sha256$v=2,t=2b,r=5$E/e/2AOhqM5W/KJTFQzLce$WFPIZKtDDTriqWwlmRFfHiOTeheAZWe'), + + # ascii + ("password", + '$bcrypt-sha256$v=2,t=2b,r=5$5Hg1DKFqPE8C2aflZ5vVoe$wOK1VFFtS8IGTrGa7.h5fs0u84qyPbS'), + + # unicode / utf8 + (UPASS_TABLE, + '$bcrypt-sha256$v=2,t=2b,r=5$.US1fQ4TQS.ZTz/uJ5Kyn.$pzzgp40k8reM1CuQb03PvE0IDPQSdV6'), + (UPASS_TABLE.encode("utf-8"), + '$bcrypt-sha256$v=2,t=2b,r=5$.US1fQ4TQS.ZTz/uJ5Kyn.$pzzgp40k8reM1CuQb03PvE0IDPQSdV6'), + + # test >72 chars is hashed correctly -- under bcrypt these hash the same. + # NOTE: test_60_truncate_size() handles this already, this is just for overkill :) + (repeat_string("abc123", 72), + '$bcrypt-sha256$v=2,t=2b,r=5$X1g1nh3g0v4h6970O68cxe$zu1cloESVFIOsUIo7fCEgkdHaI9SSue'), + (repeat_string("abc123", 72) + "qwr", + '$bcrypt-sha256$v=2,t=2b,r=5$X1g1nh3g0v4h6970O68cxe$CBF9csfEdW68xv3DwE6xSULXMtqEFP.'), + (repeat_string("abc123", 72) + "xyz", + '$bcrypt-sha256$v=2,t=2b,r=5$X1g1nh3g0v4h6970O68cxe$zC/1UDUG2ofEXB6Onr2vvyFzfhEOS3S'), + ] + + known_correct_configs =[ + # v1 + ('$bcrypt-sha256$2a,5$5Hg1DKFqPE8C2aflZ5vVoe', + "password", '$bcrypt-sha256$2a,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu'), + # v2 + ('$bcrypt-sha256$v=2,t=2b,r=5$5Hg1DKFqPE8C2aflZ5vVoe', + "password", '$bcrypt-sha256$v=2,t=2b,r=5$5Hg1DKFqPE8C2aflZ5vVoe$wOK1VFFtS8IGTrGa7.h5fs0u84qyPbS'), + ] + + known_malformed_hashes = [ + #------------------------------------------------------------------- + # v1 format + #------------------------------------------------------------------- + + # bad char in otherwise correct hash + # \/ + '$bcrypt-sha256$2a,5$5Hg1DKF!PE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unrecognized bcrypt variant + '$bcrypt-sha256$2c,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unsupported bcrypt variant + '$bcrypt-sha256$2x,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # rounds zero-padded + '$bcrypt-sha256$2a,05$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # config string w/ $ added + '$bcrypt-sha256$2a,5$5Hg1DKFqPE8C2aflZ5vVoe$', + + #------------------------------------------------------------------- + # v2 format + #------------------------------------------------------------------- + + # bad char in otherwise correct hash + # \/ + '$bcrypt-sha256$v=2,t=2b,r=5$5Hg1DKF!PE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unsupported version (for this format) + '$bcrypt-sha256$v=1,t=2b,r=5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unrecognized version + '$bcrypt-sha256$v=3,t=2b,r=5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unrecognized bcrypt variant + 
'$bcrypt-sha256$v=2,t=2c,r=5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unsupported bcrypt variant + '$bcrypt-sha256$v=2,t=2a,r=5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + '$bcrypt-sha256$v=2,t=2x,r=5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # rounds zero-padded + '$bcrypt-sha256$v=2,t=2b,r=05$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # config string w/ $ added + '$bcrypt-sha256$v=2,t=2b,r=5$5Hg1DKFqPE8C2aflZ5vVoe$', + ] + + #=================================================================== + # override some methods -- cloned from bcrypt + #=================================================================== + def setUp(self): + # ensure builtin is enabled for duration of test. + if TEST_MODE("full") and self.backend == "builtin": + key = "PASSLIB_BUILTIN_BCRYPT" + orig = os.environ.get(key) + if orig: + self.addCleanup(os.environ.__setitem__, key, orig) + else: + self.addCleanup(os.environ.__delitem__, key) + os.environ[key] = "enabled" + super(_bcrypt_sha256_test, self).setUp() + warnings.filterwarnings("ignore", ".*backend is vulnerable to the bsd wraparound bug.*") + + def populate_settings(self, kwds): + # builtin is still just way too slow. + if self.backend == "builtin": + kwds.setdefault("rounds", 4) + super(_bcrypt_sha256_test, self).populate_settings(kwds) + + #=================================================================== + # override ident tests for now + #=================================================================== + + def require_many_idents(self): + raise self.skipTest("multiple idents not supported") + + def test_30_HasOneIdent(self): + # forbidding ident keyword, we only support "2b" for now + handler = self.handler + handler(use_defaults=True) + self.assertRaises(ValueError, handler, ident="$2y$", use_defaults=True) + + #=================================================================== + # fuzz testing -- cloned from bcrypt + #=================================================================== + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def random_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. 
+ return self.randintgauss(5, 8, 6, 1) + + def random_ident(self): + return "2b" + + #=================================================================== + # custom tests + #=================================================================== + + def test_using_version(self): + # default to v2 + handler = self.handler + self.assertEqual(handler.version, 2) + + # allow v1 explicitly + subcls = handler.using(version=1) + self.assertEqual(subcls.version, 1) + + # forbid unknown ver + self.assertRaises(ValueError, handler.using, version=999) + + # allow '2a' only for v1 + subcls = handler.using(version=1, ident="2a") + self.assertRaises(ValueError, handler.using, ident="2a") + + def test_calc_digest_v2(self): + """ + test digest calc v2 matches bcrypt() + """ + from passlib.hash import bcrypt + from passlib.crypto.digest import compile_hmac + from passlib.utils.binary import b64encode + + # manually calc intermediary digest + salt = "nyKYxTAvjmy6lMDYMl11Uu" + secret = "test" + temp_digest = compile_hmac("sha256", salt.encode("ascii"))(secret.encode("ascii")) + temp_digest = b64encode(temp_digest).decode("ascii") + self.assertEqual(temp_digest, "J5TlyIDm+IcSWmKiDJm+MeICndBkFVPn4kKdJW8f+xY=") + + # manually final hash from intermediary + # XXX: genhash() could be useful here + bcrypt_digest = bcrypt(ident="2b", salt=salt, rounds=12)._calc_checksum(temp_digest) + self.assertEqual(bcrypt_digest, "M0wE0Ov/9LXoQFCe.jRHu3MSHPF54Ta") + self.assertTrue(bcrypt.verify(temp_digest, "$2b$12$" + salt + bcrypt_digest)) + + # confirm handler outputs same thing. + # XXX: genhash() could be useful here + result = self.handler(ident="2b", salt=salt, rounds=12)._calc_checksum(secret) + self.assertEqual(result, bcrypt_digest) + + #=================================================================== + # eoc + #=================================================================== + +# create test cases for specific backends +bcrypt_sha256_bcrypt_test = _bcrypt_sha256_test.create_backend_case("bcrypt") +bcrypt_sha256_pybcrypt_test = _bcrypt_sha256_test.create_backend_case("pybcrypt") +bcrypt_sha256_bcryptor_test = _bcrypt_sha256_test.create_backend_case("bcryptor") + +class bcrypt_sha256_os_crypt_test(_bcrypt_sha256_test.create_backend_case("os_crypt")): + + @classmethod + def _get_safe_crypt_handler_backend(cls): + return bcrypt_os_crypt_test._get_safe_crypt_handler_backend() + + has_os_crypt_fallback = False + +bcrypt_sha256_builtin_test = _bcrypt_sha256_test.create_backend_case("builtin") + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_cisco.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_cisco.py new file mode 100644 index 000000000..ea6594bf8 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_cisco.py @@ -0,0 +1,457 @@ +""" +passlib.tests.test_handlers_cisco - tests for Cisco-specific algorithms +""" +#============================================================================= +# imports +#============================================================================= +from __future__ import absolute_import, division, print_function +# core +import logging +log = logging.getLogger(__name__) +# site +# pkg +from passlib import hash, exc +from passlib.utils.compat import u +from .utils import UserHandlerMixin, HandlerCase, repeat_string +from .test_handlers import 
UPASS_TABLE +# module +__all__ = [ + "cisco_pix_test", + "cisco_asa_test", + "cisco_type7_test", +] +#============================================================================= +# shared code for cisco PIX & ASA +#============================================================================= + +class _PixAsaSharedTest(UserHandlerMixin, HandlerCase): + """ + class w/ shared info for PIX & ASA tests. + """ + __unittest_skip = True # for TestCase + requires_user = False # for UserHandlerMixin + + #: shared list of hashes which should be identical under pix & asa7 + #: (i.e. combined secret + user < 17 bytes) + pix_asa_shared_hashes = [ + # + # http://www.perlmonks.org/index.pl?node_id=797623 + # + (("cisco", ""), "2KFQnbNIdI.2KYOU"), # confirmed ASA 9.6 + + # + # http://www.hsc.fr/ressources/breves/pix_crack.html.en + # + (("hsc", ""), "YtT8/k6Np8F1yz2c"), # confirmed ASA 9.6 + + # + # www.freerainbowtables.com/phpBB3/viewtopic.php?f=2&t=1441 + # + (("", ""), "8Ry2YjIyt7RRXU24"), # confirmed ASA 9.6 + (("cisco", "john"), "hN7LzeyYjw12FSIU"), + (("cisco", "jack"), "7DrfeZ7cyOj/PslD"), + + # + # http://comments.gmane.org/gmane.comp.security.openwall.john.user/2529 + # + (("ripper", "alex"), "h3mJrcH0901pqX/m"), + (("cisco", "cisco"), "3USUcOPFUiMCO4Jk"), + (("cisco", "cisco1"), "3USUcOPFUiMCO4Jk"), + (("CscFw-ITC!", "admcom"), "lZt7HSIXw3.QP7.R"), + ("cangetin", "TynyB./ftknE77QP"), + (("cangetin", "rramsey"), "jgBZqYtsWfGcUKDi"), + + # + # http://openwall.info/wiki/john/sample-hashes + # + (("phonehome", "rharris"), "zyIIMSYjiPm0L7a6"), + + # + # http://www.openwall.com/lists/john-users/2010/08/08/3 + # + (("cangetin", ""), "TynyB./ftknE77QP"), + (("cangetin", "rramsey"), "jgBZqYtsWfGcUKDi"), + + # + # from JTR 1.7.9 + # + ("test1", "TRPEas6f/aa6JSPL"), + ("test2", "OMT6mXmAvGyzrCtp"), + ("test3", "gTC7RIy1XJzagmLm"), + ("test4", "oWC1WRwqlBlbpf/O"), + ("password", "NuLKvvWGg.x9HEKO"), + ("0123456789abcdef", ".7nfVBEIEu4KbF/1"), + + # + # http://www.cisco.com/en/US/docs/security/pix/pix50/configuration/guide/commands.html#wp5472 + # + (("1234567890123456", ""), "feCkwUGktTCAgIbD"), # canonical source + (("watag00s1am", ""), "jMorNbK0514fadBh"), # canonical source + + # + # custom + # + (("cisco1", "cisco1"), "jmINXNH6p1BxUppp"), + + # ensures utf-8 used for unicode + (UPASS_TABLE, 'CaiIvkLMu2TOHXGT'), + + # + # passlib reference vectors + # + # Some of these have been confirmed on various ASA firewalls, + # and the exact version is noted next to each hash. + # Would like to verify these under more PIX & ASA versions. + # + # Those without a note are generally an extrapolation, + # to ensure the code stays consistent, but for various reasons, + # hasn't been verified. + # + # * One such case is usernames w/ 1 & 2 digits -- + # ASA (9.6 at least) requires 3+ digits in username. + # + # The following hashes (below 13 chars) should be identical for PIX/ASA. + # Ones which differ are listed separately in the known_correct_hashes + # list for the two test classes. 
+ # + + # 4 char password + (('1234', ''), 'RLPMUQ26KL4blgFN'), # confirmed ASA 9.6 + + # 8 char password + (('01234567', ''), '0T52THgnYdV1tlOF'), # confirmed ASA 9.6 + (('01234567', '3'), '.z0dT9Alkdc7EIGS'), + (('01234567', '36'), 'CC3Lam53t/mHhoE7'), + (('01234567', '365'), '8xPrWpNnBdD2DzdZ'), # confirmed ASA 9.6 + (('01234567', '3333'), '.z0dT9Alkdc7EIGS'), # confirmed ASA 9.6 + (('01234567', '3636'), 'CC3Lam53t/mHhoE7'), # confirmed ASA 9.6 + (('01234567', '3653'), '8xPrWpNnBdD2DzdZ'), # confirmed ASA 9.6 + (('01234567', 'adm'), 'dfWs2qiao6KD/P2L'), # confirmed ASA 9.6 + (('01234567', 'adma'), 'dfWs2qiao6KD/P2L'), # confirmed ASA 9.6 + (('01234567', 'admad'), 'dfWs2qiao6KD/P2L'), # confirmed ASA 9.6 + (('01234567', 'user'), 'PNZ4ycbbZ0jp1.j1'), # confirmed ASA 9.6 + (('01234567', 'user1234'), 'PNZ4ycbbZ0jp1.j1'), # confirmed ASA 9.6 + + # 12 char password + (('0123456789ab', ''), 'S31BxZOGlAigndcJ'), # confirmed ASA 9.6 + (('0123456789ab', '36'), 'wFqSX91X5.YaRKsi'), + (('0123456789ab', '365'), 'qjgo3kNgTVxExbno'), # confirmed ASA 9.6 + (('0123456789ab', '3333'), 'mcXPL/vIZcIxLUQs'), # confirmed ASA 9.6 + (('0123456789ab', '3636'), 'wFqSX91X5.YaRKsi'), # confirmed ASA 9.6 + (('0123456789ab', '3653'), 'qjgo3kNgTVxExbno'), # confirmed ASA 9.6 + (('0123456789ab', 'user'), 'f.T4BKdzdNkjxQl7'), # confirmed ASA 9.6 + (('0123456789ab', 'user1234'), 'f.T4BKdzdNkjxQl7'), # confirmed ASA 9.6 + + # NOTE: remaining reference vectors for 13+ char passwords + # are split up between cisco_pix & cisco_asa tests. + + # unicode passwords + # ASA supposedly uses utf-8 encoding, but entering non-ascii + # chars is error-prone, and while UTF-8 appears to be intended, + # observed behaviors include: + # * ssh cli stripping non-ascii chars entirely + # * ASDM web iface double-encoding utf-8 strings + ((u("t\xe1ble").encode("utf-8"), 'user'), 'Og8fB4NyF0m5Ed9c'), + ((u("t\xe1ble").encode("utf-8").decode("latin-1").encode("utf-8"), + 'user'), 'cMvFC2XVBmK/68yB'), # confirmed ASA 9.6 when typed into ASDM + ] + + def test_calc_digest_spoiler(self): + """ + _calc_checksum() -- spoil oversize passwords during verify + + for details, see 'spoil_digest' flag instead that function. + this helps cisco_pix/cisco_asa implement their policy of + ``.truncate_verify_reject=True``. + """ + def calc(secret, for_hash=False): + return self.handler(use_defaults=for_hash)._calc_checksum(secret) + + # short (non-truncated) password + short_secret = repeat_string("1234", self.handler.truncate_size) + short_hash = calc(short_secret) + + # longer password should have totally different hash, + # to prevent verify from matching (i.e. "spoiled"). 
+ long_secret = short_secret + "X" + long_hash = calc(long_secret) + self.assertNotEqual(long_hash, short_hash) + + # spoiled hash should depend on whole secret, + # so that output isn't predictable + alt_long_secret = short_secret + "Y" + alt_long_hash = calc(alt_long_secret) + self.assertNotEqual(alt_long_hash, short_hash) + self.assertNotEqual(alt_long_hash, long_hash) + + # for hash(), should throw error if password too large + calc(short_secret, for_hash=True) + self.assertRaises(exc.PasswordSizeError, calc, long_secret, for_hash=True) + self.assertRaises(exc.PasswordSizeError, calc, alt_long_secret, for_hash=True) + +#============================================================================= +# cisco pix +#============================================================================= +class cisco_pix_test(_PixAsaSharedTest): + handler = hash.cisco_pix + + #: known correct pix hashes + known_correct_hashes = _PixAsaSharedTest.pix_asa_shared_hashes + [ + # + # passlib reference vectors (PIX-specific) + # + # NOTE: See 'pix_asa_shared_hashes' for general PIX+ASA vectors, + # and general notes about the 'passlib reference vectors' test set. + # + # All of the following are PIX-specific, as ASA starts + # to use a different padding size at 13 characters. + # + # TODO: these need confirming w/ an actual PIX system. + # + + # 13 char password + (('0123456789abc', ''), 'eacOpB7vE7ZDukSF'), + (('0123456789abc', '3'), 'ylJTd/qei66WZe3w'), + (('0123456789abc', '36'), 'hDx8QRlUhwd6bU8N'), + (('0123456789abc', '365'), 'vYOOtnkh1HXcMrM7'), + (('0123456789abc', '3333'), 'ylJTd/qei66WZe3w'), + (('0123456789abc', '3636'), 'hDx8QRlUhwd6bU8N'), + (('0123456789abc', '3653'), 'vYOOtnkh1HXcMrM7'), + (('0123456789abc', 'user'), 'f4/.SALxqDo59mfV'), + (('0123456789abc', 'user1234'), 'f4/.SALxqDo59mfV'), + + # 14 char password + (('0123456789abcd', ''), '6r8888iMxEoPdLp4'), + (('0123456789abcd', '3'), 'f5lvmqWYj9gJqkIH'), + (('0123456789abcd', '36'), 'OJJ1Khg5HeAYBH1c'), + (('0123456789abcd', '365'), 'OJJ1Khg5HeAYBH1c'), + (('0123456789abcd', '3333'), 'f5lvmqWYj9gJqkIH'), + (('0123456789abcd', '3636'), 'OJJ1Khg5HeAYBH1c'), + (('0123456789abcd', '3653'), 'OJJ1Khg5HeAYBH1c'), + (('0123456789abcd', 'adm'), 'DbPLCFIkHc2SiyDk'), + (('0123456789abcd', 'adma'), 'DbPLCFIkHc2SiyDk'), + (('0123456789abcd', 'user'), 'WfO2UiTapPkF/FSn'), + (('0123456789abcd', 'user1234'), 'WfO2UiTapPkF/FSn'), + + # 15 char password + (('0123456789abcde', ''), 'al1e0XFIugTYLai3'), + (('0123456789abcde', '3'), 'lYbwBu.f82OIApQB'), + (('0123456789abcde', '36'), 'lYbwBu.f82OIApQB'), + (('0123456789abcde', '365'), 'lYbwBu.f82OIApQB'), + (('0123456789abcde', '3333'), 'lYbwBu.f82OIApQB'), + (('0123456789abcde', '3636'), 'lYbwBu.f82OIApQB'), + (('0123456789abcde', '3653'), 'lYbwBu.f82OIApQB'), + (('0123456789abcde', 'adm'), 'KgKx1UQvdR/09i9u'), + (('0123456789abcde', 'adma'), 'KgKx1UQvdR/09i9u'), + (('0123456789abcde', 'user'), 'qLopkenJ4WBqxaZN'), + (('0123456789abcde', 'user1234'), 'qLopkenJ4WBqxaZN'), + + # 16 char password + (('0123456789abcdef', ''), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', '36'), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', '365'), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', '3333'), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', '3636'), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', '3653'), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', 'user'), '.7nfVBEIEu4KbF/1'), + (('0123456789abcdef', 'user1234'), '.7nfVBEIEu4KbF/1'), + ] + + +#============================================================================= +# cisco 
asa +#============================================================================= +class cisco_asa_test(_PixAsaSharedTest): + handler = hash.cisco_asa + + known_correct_hashes = _PixAsaSharedTest.pix_asa_shared_hashes + [ + # + # passlib reference vectors (ASA-specific) + # + # NOTE: See 'pix_asa_shared_hashes' for general PIX+ASA vectors, + # and general notes about the 'passlib reference vectors' test set. + # + + # 13 char password + # NOTE: past this point, ASA pads to 32 bytes instead of 16 + # for all cases where user is set (secret + 4 bytes > 16), + # but still uses 16 bytes for enable pwds (secret <= 16). + # hashes w/ user WON'T match PIX, but "enable" passwords will. + (('0123456789abc', ''), 'eacOpB7vE7ZDukSF'), # confirmed ASA 9.6 + (('0123456789abc', '36'), 'FRV9JG18UBEgX0.O'), + (('0123456789abc', '365'), 'NIwkusG9hmmMy6ZQ'), # confirmed ASA 9.6 + (('0123456789abc', '3333'), 'NmrkP98nT7RAeKZz'), # confirmed ASA 9.6 + (('0123456789abc', '3636'), 'FRV9JG18UBEgX0.O'), # confirmed ASA 9.6 + (('0123456789abc', '3653'), 'NIwkusG9hmmMy6ZQ'), # confirmed ASA 9.6 + (('0123456789abc', 'user'), '8Q/FZeam5ai1A47p'), # confirmed ASA 9.6 + (('0123456789abc', 'user1234'), '8Q/FZeam5ai1A47p'), # confirmed ASA 9.6 + + # 14 char password + (('0123456789abcd', ''), '6r8888iMxEoPdLp4'), # confirmed ASA 9.6 + (('0123456789abcd', '3'), 'yxGoujXKPduTVaYB'), + (('0123456789abcd', '36'), 'W0jckhnhjnr/DiT/'), + (('0123456789abcd', '365'), 'HuVOxfMQNahaoF8u'), # confirmed ASA 9.6 + (('0123456789abcd', '3333'), 'yxGoujXKPduTVaYB'), # confirmed ASA 9.6 + (('0123456789abcd', '3636'), 'W0jckhnhjnr/DiT/'), # confirmed ASA 9.6 + (('0123456789abcd', '3653'), 'HuVOxfMQNahaoF8u'), # confirmed ASA 9.6 + (('0123456789abcd', 'adm'), 'RtOmSeoCs4AUdZqZ'), # confirmed ASA 9.6 + (('0123456789abcd', 'adma'), 'RtOmSeoCs4AUdZqZ'), # confirmed ASA 9.6 + (('0123456789abcd', 'user'), 'rrucwrcM0h25pr.m'), # confirmed ASA 9.6 + (('0123456789abcd', 'user1234'), 'rrucwrcM0h25pr.m'), # confirmed ASA 9.6 + + # 15 char password + (('0123456789abcde', ''), 'al1e0XFIugTYLai3'), # confirmed ASA 9.6 + (('0123456789abcde', '3'), 'nAZrQoHaL.fgrIqt'), + (('0123456789abcde', '36'), '2GxIQ6ICE795587X'), + (('0123456789abcde', '365'), 'QmDsGwCRBbtGEKqM'), # confirmed ASA 9.6 + (('0123456789abcde', '3333'), 'nAZrQoHaL.fgrIqt'), # confirmed ASA 9.6 + (('0123456789abcde', '3636'), '2GxIQ6ICE795587X'), # confirmed ASA 9.6 + (('0123456789abcde', '3653'), 'QmDsGwCRBbtGEKqM'), # confirmed ASA 9.6 + (('0123456789abcde', 'adm'), 'Aj2aP0d.nk62wl4m'), # confirmed ASA 9.6 + (('0123456789abcde', 'adma'), 'Aj2aP0d.nk62wl4m'), # confirmed ASA 9.6 + (('0123456789abcde', 'user'), 'etxiXfo.bINJcXI7'), # confirmed ASA 9.6 + (('0123456789abcde', 'user1234'), 'etxiXfo.bINJcXI7'), # confirmed ASA 9.6 + + # 16 char password + (('0123456789abcdef', ''), '.7nfVBEIEu4KbF/1'), # confirmed ASA 9.6 + (('0123456789abcdef', '36'), 'GhI8.yFSC5lwoafg'), + (('0123456789abcdef', '365'), 'KFBI6cNQauyY6h/G'), # confirmed ASA 9.6 + (('0123456789abcdef', '3333'), 'Ghdi1IlsswgYzzMH'), # confirmed ASA 9.6 + (('0123456789abcdef', '3636'), 'GhI8.yFSC5lwoafg'), # confirmed ASA 9.6 + (('0123456789abcdef', '3653'), 'KFBI6cNQauyY6h/G'), # confirmed ASA 9.6 + (('0123456789abcdef', 'user'), 'IneB.wc9sfRzLPoh'), # confirmed ASA 9.6 + (('0123456789abcdef', 'user1234'), 'IneB.wc9sfRzLPoh'), # confirmed ASA 9.6 + + # 17 char password + # NOTE: past this point, ASA pads to 32 bytes instead of 16 + # for ALL cases, since secret > 16 bytes even for enable pwds; + # and so none of these rest 
here should match PIX. + (('0123456789abcdefq', ''), 'bKshl.EN.X3CVFRQ'), # confirmed ASA 9.6 + (('0123456789abcdefq', '36'), 'JAeTXHs0n30svlaG'), + (('0123456789abcdefq', '365'), '4fKSSUBHT1ChGqHp'), # confirmed ASA 9.6 + (('0123456789abcdefq', '3333'), 'USEJbxI6.VY4ecBP'), # confirmed ASA 9.6 + (('0123456789abcdefq', '3636'), 'JAeTXHs0n30svlaG'), # confirmed ASA 9.6 + (('0123456789abcdefq', '3653'), '4fKSSUBHT1ChGqHp'), # confirmed ASA 9.6 + (('0123456789abcdefq', 'user'), '/dwqyD7nGdwSrDwk'), # confirmed ASA 9.6 + (('0123456789abcdefq', 'user1234'), '/dwqyD7nGdwSrDwk'), # confirmed ASA 9.6 + + # 27 char password + (('0123456789abcdefqwertyuiopa', ''), '4wp19zS3OCe.2jt5'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopa', '36'), 'PjUoGqWBKPyV9qOe'), + (('0123456789abcdefqwertyuiopa', '365'), 'bfCy6xFAe5O/gzvM'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopa', '3333'), 'rd/ZMuGTJFIb2BNG'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopa', '3636'), 'PjUoGqWBKPyV9qOe'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopa', '3653'), 'bfCy6xFAe5O/gzvM'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopa', 'user'), 'zynfWw3UtszxLMgL'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopa', 'user1234'), 'zynfWw3UtszxLMgL'), # confirmed ASA 9.6 + + # 28 char password + # NOTE: past this point, ASA stops appending the username AT ALL, + # even though there's still room for the first few chars. + (('0123456789abcdefqwertyuiopas', ''), 'W6nbOddI0SutTK7m'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopas', '36'), 'W6nbOddI0SutTK7m'), + (('0123456789abcdefqwertyuiopas', '365'), 'W6nbOddI0SutTK7m'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopas', 'user'), 'W6nbOddI0SutTK7m'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopas', 'user1234'), 'W6nbOddI0SutTK7m'), # confirmed ASA 9.6 + + # 32 char password + # NOTE: this is max size that ASA allows, and throws error for larger + (('0123456789abcdefqwertyuiopasdfgh', ''), '5hPT/iC6DnoBxo6a'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopasdfgh', '36'), '5hPT/iC6DnoBxo6a'), + (('0123456789abcdefqwertyuiopasdfgh', '365'), '5hPT/iC6DnoBxo6a'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopasdfgh', 'user'), '5hPT/iC6DnoBxo6a'), # confirmed ASA 9.6 + (('0123456789abcdefqwertyuiopasdfgh', 'user1234'), '5hPT/iC6DnoBxo6a'), # confirmed ASA 9.6 + ] + + +#============================================================================= +# cisco type 7 +#============================================================================= +class cisco_type7_test(HandlerCase): + handler = hash.cisco_type7 + salt_bits = 4 + salt_type = int + + known_correct_hashes = [ + # + # http://mccltd.net/blog/?p=1034 + # + ("secure ", "04480E051A33490E"), + + # + # http://insecure.org/sploits/cisco.passwords.html + # + ("Its time to go to lunch!", + "153B1F1F443E22292D73212D5300194315591954465A0D0B59"), + + # + # http://blog.ioshints.info/2007/11/type-7-decryption-in-cisco-ios.html + # + ("t35t:pa55w0rd", "08351F1B1D431516475E1B54382F"), + + # + # http://www.m00nie.com/2011/09/cisco-type-7-password-decryption-and-encryption-with-perl/ + # + ("hiImTesting:)", "020E0D7206320A325847071E5F5E"), + + # + # http://packetlife.net/forums/thread/54/ + # + ("cisco123", "060506324F41584B56"), + ("cisco123", "1511021F07257A767B"), + + # + # source ? 
+ # + ('Supe&8ZUbeRp4SS', "06351A3149085123301517391C501918"), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, '0958EDC8A9F495F6F8A5FD'), + ] + + known_unidentified_hashes = [ + # salt with hex value + "0A480E051A33490E", + + # salt value > 52. this may in fact be valid, but we reject it for now + # (see docs for more). + '99400E4812', + ] + + def test_90_decode(self): + """test cisco_type7.decode()""" + from passlib.utils import to_unicode, to_bytes + + handler = self.handler + for secret, hash in self.known_correct_hashes: + usecret = to_unicode(secret) + bsecret = to_bytes(secret) + self.assertEqual(handler.decode(hash), usecret) + self.assertEqual(handler.decode(hash, None), bsecret) + + self.assertRaises(UnicodeDecodeError, handler.decode, + '0958EDC8A9F495F6F8A5FD', 'ascii') + + def test_91_salt(self): + """test salt value border cases""" + handler = self.handler + self.assertRaises(TypeError, handler, salt=None) + handler(salt=None, use_defaults=True) + self.assertRaises(TypeError, handler, salt='abc') + self.assertRaises(ValueError, handler, salt=-10) + self.assertRaises(ValueError, handler, salt=100) + + self.assertRaises(TypeError, handler.using, salt='abc') + self.assertRaises(ValueError, handler.using, salt=-10) + self.assertRaises(ValueError, handler.using, salt=100) + with self.assertWarningList("salt/offset must be.*"): + subcls = handler.using(salt=100, relaxed=True) + self.assertEqual(subcls(use_defaults=True).salt, 52) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_django.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_django.py new file mode 100644 index 000000000..f7c9a0d8f --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_django.py @@ -0,0 +1,413 @@ +"""passlib.tests.test_handlers_django - tests for passlib hash algorithms""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import re +import warnings +# site +# pkg +from passlib import hash +from passlib.utils import repeat_string +from passlib.utils.compat import u +from passlib.tests.utils import TestCase, HandlerCase, skipUnless, SkipTest +from passlib.tests.test_handlers import UPASS_USD, UPASS_TABLE +from passlib.tests.test_ext_django import DJANGO_VERSION, MIN_DJANGO_VERSION, \ + check_django_hasher_has_backend +# module + +#============================================================================= +# django +#============================================================================= + +# standard string django uses +UPASS_LETMEIN = u('l\xe8tmein') + +def vstr(version): + return ".".join(str(e) for e in version) + +class _DjangoHelper(TestCase): + """ + mixin for HandlerCase subclasses that are testing a hasher + which is also present in django. 
+ """ + __unittest_skip = True + + #: minimum django version where hash alg is present / that we support testing against + min_django_version = MIN_DJANGO_VERSION + + #: max django version where hash alg is present + #: TODO: for a bunch of the tests below, this is just max version where + #: settings.PASSWORD_HASHERS includes it by default -- could add helper to patch + #: desired django hasher back in for duration of test. + #: XXX: change this to "disabled_in_django_version" instead? + max_django_version = None + + def _require_django_support(self): + # make sure min django version + if DJANGO_VERSION < self.min_django_version: + raise self.skipTest("Django >= %s not installed" % vstr(self.min_django_version)) + if self.max_django_version and DJANGO_VERSION > self.max_django_version: + raise self.skipTest("Django <= %s not installed" % vstr(self.max_django_version)) + + # make sure django has a backend for specified hasher + name = self.handler.django_name + if not check_django_hasher_has_backend(name): + raise self.skipTest('django hasher %r not available' % name) + + return True + + extra_fuzz_verifiers = HandlerCase.fuzz_verifiers + ( + "fuzz_verifier_django", + ) + + def fuzz_verifier_django(self): + try: + self._require_django_support() + except SkipTest: + return None + from django.contrib.auth.hashers import check_password + + def verify_django(secret, hash): + """django/check_password""" + if self.handler.name == "django_bcrypt" and hash.startswith("bcrypt$$2y$"): + hash = hash.replace("$$2y$", "$$2a$") + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + return check_password(secret, hash) + return verify_django + + def test_90_django_reference(self): + """run known correct hashes through Django's check_password()""" + self._require_django_support() + # XXX: esp. when it's no longer supported by django, + # should verify it's *NOT* recognized + from django.contrib.auth.hashers import check_password + assert self.known_correct_hashes + for secret, hash in self.iter_known_hashes(): + self.assertTrue(check_password(secret, hash), + "secret=%r hash=%r failed to verify" % + (secret, hash)) + self.assertFalse(check_password('x' + secret, hash), + "mangled secret=%r hash=%r incorrect verified" % + (secret, hash)) + + def test_91_django_generation(self): + """test against output of Django's make_password()""" + self._require_django_support() + # XXX: esp. when it's no longer supported by django, + # should verify it's *NOT* recognized + from passlib.utils import tick + from django.contrib.auth.hashers import make_password + name = self.handler.django_name # set for all the django_* handlers + end = tick() + self.max_fuzz_time/2 + generator = self.FuzzHashGenerator(self, self.getRandom()) + while tick() < end: + secret, other = generator.random_password_pair() + if not secret: # django rejects empty passwords. 
+ continue + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + hash = make_password(secret, hasher=name) + self.assertTrue(self.do_identify(hash)) + self.assertTrue(self.do_verify(secret, hash)) + self.assertFalse(self.do_verify(other, hash)) + +class django_disabled_test(HandlerCase): + """test django_disabled""" + handler = hash.django_disabled + disabled_contains_salt = True + + known_correct_hashes = [ + # *everything* should hash to "!", and nothing should verify + ("password", "!"), + ("", "!"), + (UPASS_TABLE, "!"), + ] + + known_alternate_hashes = [ + # django 1.6 appends random alpnum string + ("!9wa845vn7098ythaehasldkfj", "password", "!"), + ] + +class django_des_crypt_test(HandlerCase, _DjangoHelper): + """test django_des_crypt""" + handler = hash.django_des_crypt + max_django_version = (1,9) + + known_correct_hashes = [ + # ensures only first two digits of salt count. + ("password", 'crypt$c2$c2M87q...WWcU'), + ("password", 'crypt$c2e86$c2M87q...WWcU'), + ("passwordignoreme", 'crypt$c2.AZ$c2M87q...WWcU'), + + # ensures utf-8 used for unicode + (UPASS_USD, 'crypt$c2e86$c2hN1Bxd6ZiWs'), + (UPASS_TABLE, 'crypt$0.aQs$0.wB.TT0Czvlo'), + (u("hell\u00D6"), "crypt$sa$saykDgk3BPZ9E"), + + # prevent regression of issue 22 + ("foo", 'crypt$MNVY.9ajgdvDQ$MNVY.9ajgdvDQ'), + ] + + known_alternate_hashes = [ + # ensure django 1.4 empty salt field is accepted; + # but that salt field is re-filled (for django 1.0 compatibility) + ('crypt$$c2M87q...WWcU', "password", 'crypt$c2$c2M87q...WWcU'), + ] + + known_unidentified_hashes = [ + 'sha1$aa$bb', + ] + + known_malformed_hashes = [ + # checksum too short + 'crypt$c2$c2M87q', + + # salt must be >2 + 'crypt$f$c2M87q...WWcU', + + # make sure first 2 chars of salt & chk field agree. + 'crypt$ffe86$c2M87q...WWcU', + ] + +class django_salted_md5_test(HandlerCase, _DjangoHelper): + """test django_salted_md5""" + handler = hash.django_salted_md5 + max_django_version = (1,9) + + known_correct_hashes = [ + # test extra large salt + ("password", 'md5$123abcdef$c8272612932975ee80e8a35995708e80'), + + # test django 1.4 alphanumeric salt + ("test", 'md5$3OpqnFAHW5CT$54b29300675271049a1ebae07b395e20'), + + # ensures utf-8 used for unicode + (UPASS_USD, 'md5$c2e86$92105508419a81a6babfaecf876a2fa0'), + (UPASS_TABLE, 'md5$d9eb8$01495b32852bffb27cf5d4394fe7a54c'), + ] + + known_unidentified_hashes = [ + 'sha1$aa$bb', + ] + + known_malformed_hashes = [ + # checksum too short + 'md5$aa$bb', + ] + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def random_salt_size(self): + # workaround for django14 regression -- + # 1.4 won't accept hashes with empty salt strings, unlike 1.3 and earlier. 
+ # looks to be fixed in a future release -- https://code.djangoproject.com/ticket/18144 + # for now, we avoid salt_size==0 under 1.4 + handler = self.handler + default = handler.default_salt_size + assert handler.min_salt_size == 0 + lower = 1 + upper = handler.max_salt_size or default*4 + return self.randintgauss(lower, upper, default, default*.5) + +class django_salted_sha1_test(HandlerCase, _DjangoHelper): + """test django_salted_sha1""" + handler = hash.django_salted_sha1 + max_django_version = (1,9) + + known_correct_hashes = [ + # test extra large salt + ("password",'sha1$123abcdef$e4a1877b0e35c47329e7ed7e58014276168a37ba'), + + # test django 1.4 alphanumeric salt + ("test", 'sha1$bcwHF9Hy8lxS$6b4cfa0651b43161c6f1471ce9523acf1f751ba3'), + + # ensures utf-8 used for unicode + (UPASS_USD, 'sha1$c2e86$0f75c5d7fbd100d587c127ef0b693cde611b4ada'), + (UPASS_TABLE, 'sha1$6d853$ef13a4d8fb57aed0cb573fe9c82e28dc7fd372d4'), + + # generic password + ("MyPassword", 'sha1$54123$893cf12e134c3c215f3a76bd50d13f92404a54d3'), + ] + + known_unidentified_hashes = [ + 'md5$aa$bb', + ] + + known_malformed_hashes = [ + # checksum too short + 'sha1$c2e86$0f75', + ] + + # reuse custom random_salt_size() helper... + FuzzHashGenerator = django_salted_md5_test.FuzzHashGenerator + +class django_pbkdf2_sha256_test(HandlerCase, _DjangoHelper): + """test django_pbkdf2_sha256""" + handler = hash.django_pbkdf2_sha256 + + known_correct_hashes = [ + # + # custom - generated via django 1.4 hasher + # + ('not a password', + 'pbkdf2_sha256$10000$kjVJaVz6qsnJ$5yPHw3rwJGECpUf70daLGhOrQ5+AMxIJdz1c3bqK1Rs='), + (UPASS_TABLE, + 'pbkdf2_sha256$10000$bEwAfNrH1TlQ$OgYUblFNUX1B8GfMqaCYUK/iHyO0pa7STTDdaEJBuY0='), + ] + +class django_pbkdf2_sha1_test(HandlerCase, _DjangoHelper): + """test django_pbkdf2_sha1""" + handler = hash.django_pbkdf2_sha1 + + known_correct_hashes = [ + # + # custom - generated via django 1.4 hashers + # + ('not a password', + 'pbkdf2_sha1$10000$wz5B6WkasRoF$atJmJ1o+XfJxKq1+Nu1f1i57Z5I='), + (UPASS_TABLE, + 'pbkdf2_sha1$10000$KZKWwvqb8BfL$rw5pWsxJEU4JrZAQhHTCO+u0f5Y='), + ] + +@skipUnless(hash.bcrypt.has_backend(), "no bcrypt backends available") +class django_bcrypt_test(HandlerCase, _DjangoHelper): + """test django_bcrypt""" + handler = hash.django_bcrypt + # XXX: not sure when this wasn't in default list anymore. somewhere in [2.0 - 2.2] + max_django_version = (2, 0) + fuzz_salts_need_bcrypt_repair = True + + known_correct_hashes = [ + # + # just copied and adapted a few test vectors from bcrypt (above), + # since django_bcrypt is just a wrapper for the real bcrypt class. + # + ('', 'bcrypt$$2a$06$DCq7YPn5Rq63x1Lad4cll.TV4S6ytwfsfvkgY8jIucDrjc8deX1s.'), + ('abcdefghijklmnopqrstuvwxyz', + 'bcrypt$$2a$10$fVH8e28OQRj9tqiDXs1e1uxpsjN0c7II7YPKXua2NAKYvM6iQk7dq'), + (UPASS_TABLE, + 'bcrypt$$2a$05$Z17AXnnlpzddNUvnC6cZNOSwMA/8oNiKnHTHTwLlBijfucQQlHjaG'), + ] + + # NOTE: the following have been cloned from _bcrypt_test() + + def populate_settings(self, kwds): + # speed up test w/ lower rounds + kwds.setdefault("rounds", 4) + super(django_bcrypt_test, self).populate_settings(kwds) + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def random_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. + return self.randintgauss(5, 8, 6, 1) + + def random_ident(self): + # omit multi-ident tests, only $2a$ counts for this class + # XXX: enable this to check 2a / 2b? 
+ return None + +@skipUnless(hash.bcrypt.has_backend(), "no bcrypt backends available") +class django_bcrypt_sha256_test(HandlerCase, _DjangoHelper): + """test django_bcrypt_sha256""" + handler = hash.django_bcrypt_sha256 + forbidden_characters = None + fuzz_salts_need_bcrypt_repair = True + + known_correct_hashes = [ + # + # custom - generated via django 1.6 hasher + # + ('', + 'bcrypt_sha256$$2a$06$/3OeRpbOf8/l6nPPRdZPp.nRiyYqPobEZGdNRBWihQhiFDh1ws1tu'), + (UPASS_LETMEIN, + 'bcrypt_sha256$$2a$08$NDjSAIcas.EcoxCRiArvT.MkNiPYVhrsrnJsRkLueZOoV1bsQqlmC'), + (UPASS_TABLE, + 'bcrypt_sha256$$2a$06$kCXUnRFQptGg491siDKNTu8RxjBGSjALHRuvhPYNFsa4Ea5d9M48u'), + + # test >72 chars is hashed correctly -- under bcrypt these hash the same. + (repeat_string("abc123",72), + 'bcrypt_sha256$$2a$06$Tg/oYyZTyAf.Nb3qSgN61OySmyXA8FoY4PjGizjE1QSDfuL5MXNni'), + (repeat_string("abc123",72)+"qwr", + 'bcrypt_sha256$$2a$06$Tg/oYyZTyAf.Nb3qSgN61Ocy0BEz1RK6xslSNi8PlaLX2pe7x/KQG'), + (repeat_string("abc123",72)+"xyz", + 'bcrypt_sha256$$2a$06$Tg/oYyZTyAf.Nb3qSgN61OvY2zoRVUa2Pugv2ExVOUT2YmhvxUFUa'), + ] + + known_malformed_hashers = [ + # data in django salt field + 'bcrypt_sha256$xyz$2a$06$/3OeRpbOf8/l6nPPRdZPp.nRiyYqPobEZGdNRBWihQhiFDh1ws1tu', + ] + + # NOTE: the following have been cloned from _bcrypt_test() + + def populate_settings(self, kwds): + # speed up test w/ lower rounds + kwds.setdefault("rounds", 4) + super(django_bcrypt_sha256_test, self).populate_settings(kwds) + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def random_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. + return self.randintgauss(5, 8, 6, 1) + + def random_ident(self): + # omit multi-ident tests, only $2a$ counts for this class + # XXX: enable this to check 2a / 2b? + return None + +from passlib.tests.test_handlers_argon2 import _base_argon2_test + +@skipUnless(hash.argon2.has_backend(), "no argon2 backends available") +class django_argon2_test(HandlerCase, _DjangoHelper): + """test django_bcrypt""" + handler = hash.django_argon2 + + # NOTE: most of this adapted from _base_argon2_test & argon2pure test + + known_correct_hashes = [ + # sample test + ("password", 'argon2$argon2i$v=19$m=256,t=1,p=1$c29tZXNhbHQ$AJFIsNZTMKTAewB4+ETN1A'), + + # sample w/ all parameters different + ("password", 'argon2$argon2i$v=19$m=380,t=2,p=2$c29tZXNhbHQ$SrssP8n7m/12VWPM8dvNrw'), + + # generated from django 1.10.3 + (UPASS_LETMEIN, 'argon2$argon2i$v=19$m=512,t=2,p=2$V25jN1l4UUJZWkR1$MxpA1BD2Gh7+D79gaAw6sQ'), + ] + + def setUpWarnings(self): + super(django_argon2_test, self).setUpWarnings() + warnings.filterwarnings("ignore", ".*Using argon2pure backend.*") + + def do_stub_encrypt(self, handler=None, **settings): + # overriding default since no way to get stub config from argon2._calc_hash() + # (otherwise test_21b_max_rounds blocks trying to do max rounds) + handler = (handler or self.handler).using(**settings) + self = handler.wrapped(use_defaults=True) + self.checksum = self._stub_checksum + assert self.checksum + return handler._wrap_hash(self.to_string()) + + def test_03_legacy_hash_workflow(self): + # override base method + raise self.skipTest("legacy 1.6 workflow not supported") + + class FuzzHashGenerator(_base_argon2_test.FuzzHashGenerator): + + def random_type(self): + # override default since django only uses type I (see note in class) + return "I" + + def random_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. 
+ return self.randintgauss(1, 3, 2, 1) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_pbkdf2.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_pbkdf2.py new file mode 100644 index 000000000..4d2f048f0 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_pbkdf2.py @@ -0,0 +1,480 @@ +"""passlib.tests.test_handlers - tests for passlib hash algorithms""" +#============================================================================= +# imports +#============================================================================= +# core +import logging +log = logging.getLogger(__name__) +import warnings +# site +# pkg +from passlib import hash +from passlib.utils.compat import u +from passlib.tests.utils import TestCase, HandlerCase +from passlib.tests.test_handlers import UPASS_WAV +# module + +#============================================================================= +# ldap_pbkdf2_{digest} +#============================================================================= +# NOTE: since these are all wrappers for the pbkdf2_{digest} hasehs, +# they don't extensive separate testing. + +class ldap_pbkdf2_test(TestCase): + + def test_wrappers(self): + """test ldap pbkdf2 wrappers""" + + self.assertTrue( + hash.ldap_pbkdf2_sha1.verify( + "password", + '{PBKDF2}1212$OB.dtnSEXZK8U5cgxU/GYQ$y5LKPOplRmok7CZp/aqVDVg8zGI', + ) + ) + + self.assertTrue( + hash.ldap_pbkdf2_sha256.verify( + "password", + '{PBKDF2-SHA256}1212$4vjV83LKPjQzk31VI4E0Vw$hsYF68OiOUPdDZ1Fg' + '.fJPeq1h/gXXY7acBp9/6c.tmQ' + ) + ) + + self.assertTrue( + hash.ldap_pbkdf2_sha512.verify( + "password", + '{PBKDF2-SHA512}1212$RHY0Fr3IDMSVO/RSZyb5ow$eNLfBK.eVozomMr.1gYa1' + '7k9B7KIK25NOEshvhrSX.esqY3s.FvWZViXz4KoLlQI.BzY/YTNJOiKc5gBYFYGww' + ) + ) + +#============================================================================= +# pbkdf2 hashes +#============================================================================= +class atlassian_pbkdf2_sha1_test(HandlerCase): + handler = hash.atlassian_pbkdf2_sha1 + + known_correct_hashes = [ + # + # generated using Jira + # + ("admin", '{PKCS5S2}c4xaeTQM0lUieMS3V5voiexyX9XhqC2dBd5ecVy60IPksHChwoTAVYFrhsgoq8/p'), + (UPASS_WAV, + "{PKCS5S2}cE9Yq6Am5tQGdHSHhky2XLeOnURwzaLBG2sur7FHKpvy2u0qDn6GcVGRjlmJoIUy"), + ] + + known_malformed_hashes = [ + # bad char ---\/ + '{PKCS5S2}c4xaeTQM0lUieMS3V5voiexyX9XhqC2dBd5ecVy!0IPksHChwoTAVYFrhsgoq8/p' + + # bad size, missing padding + '{PKCS5S2}c4xaeTQM0lUieMS3V5voiexyX9XhqC2dBd5ecVy60IPksHChwoTAVYFrhsgoq8/' + + # bad size, with correct padding + '{PKCS5S2}c4xaeTQM0lUieMS3V5voiexyX9XhqC2dBd5ecVy60IPksHChwoTAVYFrhsgoq8/=' + ] + +class pbkdf2_sha1_test(HandlerCase): + handler = hash.pbkdf2_sha1 + known_correct_hashes = [ + ("password", '$pbkdf2$1212$OB.dtnSEXZK8U5cgxU/GYQ$y5LKPOplRmok7CZp/aqVDVg8zGI'), + (UPASS_WAV, + '$pbkdf2$1212$THDqatpidANpadlLeTeOEg$HV3oi1k5C5LQCgG1BMOL.BX4YZc'), + ] + + known_malformed_hashes = [ + # zero padded rounds field + '$pbkdf2$01212$THDqatpidANpadlLeTeOEg$HV3oi1k5C5LQCgG1BMOL.BX4YZc', + + # empty rounds field + '$pbkdf2$$THDqatpidANpadlLeTeOEg$HV3oi1k5C5LQCgG1BMOL.BX4YZc', + + # too many field + '$pbkdf2$1212$THDqatpidANpadlLeTeOEg$HV3oi1k5C5LQCgG1BMOL.BX4YZc$', + ] + +class pbkdf2_sha256_test(HandlerCase): + handler = hash.pbkdf2_sha256 + known_correct_hashes = [ + ("password", + 
'$pbkdf2-sha256$1212$4vjV83LKPjQzk31VI4E0Vw$hsYF68OiOUPdDZ1Fg.fJPeq1h/gXXY7acBp9/6c.tmQ' + ), + (UPASS_WAV, + '$pbkdf2-sha256$1212$3SABFJGDtyhrQMVt1uABPw$WyaUoqCLgvz97s523nF4iuOqZNbp5Nt8do/cuaa7AiI' + ), + ] + +class pbkdf2_sha512_test(HandlerCase): + handler = hash.pbkdf2_sha512 + known_correct_hashes = [ + ("password", + '$pbkdf2-sha512$1212$RHY0Fr3IDMSVO/RSZyb5ow$eNLfBK.eVozomMr.1gYa1' + '7k9B7KIK25NOEshvhrSX.esqY3s.FvWZViXz4KoLlQI.BzY/YTNJOiKc5gBYFYGww' + ), + (UPASS_WAV, + '$pbkdf2-sha512$1212$KkbvoKGsAIcF8IslDR6skQ$8be/PRmd88Ps8fmPowCJt' + 'tH9G3vgxpG.Krjt3KT.NP6cKJ0V4Prarqf.HBwz0dCkJ6xgWnSj2ynXSV7MlvMa8Q' + ), + ] + +class cta_pbkdf2_sha1_test(HandlerCase): + handler = hash.cta_pbkdf2_sha1 + known_correct_hashes = [ + # + # test vectors from original implementation + # + (u("hashy the \N{SNOWMAN}"), '$p5k2$1000$ZxK4ZBJCfQg=$jJZVscWtO--p1-xIZl6jhO2LKR0='), + + # + # custom + # + ("password", "$p5k2$1$$h1TDLGSw9ST8UMAPeIE13i0t12c="), + (UPASS_WAV, + "$p5k2$4321$OTg3NjU0MzIx$jINJrSvZ3LXeIbUdrJkRpN62_WQ="), + ] + +class dlitz_pbkdf2_sha1_test(HandlerCase): + handler = hash.dlitz_pbkdf2_sha1 + known_correct_hashes = [ + # + # test vectors from original implementation + # + ('cloadm', '$p5k2$$exec$r1EWMCMk7Rlv3L/RNcFXviDefYa0hlql'), + ('gnu', '$p5k2$c$u9HvcT4d$Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g'), + ('dcl', '$p5k2$d$tUsch7fU$nqDkaxMDOFBeJsTSfABsyn.PYUXilHwL'), + ('spam', '$p5k2$3e8$H0NX9mT/$wk/sE8vv6OMKuMaqazCJYDSUhWY9YB2J'), + (UPASS_WAV, + '$p5k2$$KosHgqNo$9mjN8gqjt02hDoP0c2J0ABtLIwtot8cQ'), + ] + +class grub_pbkdf2_sha512_test(HandlerCase): + handler = hash.grub_pbkdf2_sha512 + known_correct_hashes = [ + # + # test vectors generated from cmd line tool + # + + # salt=32 bytes + (UPASS_WAV, + 'grub.pbkdf2.sha512.10000.BCAC1CEC5E4341C8C511C529' + '7FA877BE91C2817B32A35A3ECF5CA6B8B257F751.6968526A' + '2A5B1AEEE0A29A9E057336B48D388FFB3F600233237223C21' + '04DE1752CEC35B0DD1ED49563398A282C0F471099C2803FBA' + '47C7919CABC43192C68F60'), + + # salt=64 bytes + ('toomanysecrets', + 'grub.pbkdf2.sha512.10000.9B436BB6978682363D5C449B' + 'BEAB322676946C632208BC1294D51F47174A9A3B04A7E4785' + '986CD4EA7470FAB8FE9F6BD522D1FC6C51109A8596FB7AD48' + '7C4493.0FE5EF169AFFCB67D86E2581B1E251D88C777B98BA' + '2D3256ECC9F765D84956FC5CA5C4B6FD711AA285F0A04DCF4' + '634083F9A20F4B6F339A52FBD6BED618E527B'), + + ] + +#============================================================================= +# scram hash +#============================================================================= +class scram_test(HandlerCase): + handler = hash.scram + + # TODO: need a bunch more reference vectors from some real + # SCRAM transactions. + known_correct_hashes = [ + # + # taken from example in SCRAM specification (rfc 5802) + # + ('pencil', '$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30'), + + # + # custom + # + + # same as 5802 example hash, but with sha-256 & sha-512 added. 
+ ('pencil', '$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVY,' + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ'), + + # test unicode passwords & saslprep (all the passwords below + # should normalize to the same value: 'IX \xE0') + (u('IX \xE0'), '$scram$6400$0BojBCBE6P2/N4bQ$' + 'sha-1=YniLes.b8WFMvBhtSACZyyvxeCc'), + (u('\u2168\u3000a\u0300'), '$scram$6400$0BojBCBE6P2/N4bQ$' + 'sha-1=YniLes.b8WFMvBhtSACZyyvxeCc'), + (u('\u00ADIX \xE0'), '$scram$6400$0BojBCBE6P2/N4bQ$' + 'sha-1=YniLes.b8WFMvBhtSACZyyvxeCc'), + ] + + known_malformed_hashes = [ + # zero-padding in rounds + '$scram$04096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30', + + # non-digit in rounds + '$scram$409A$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30', + + # bad char in salt ---\/ + '$scram$4096$QSXCR.Q6sek8bf9-$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30', + + # bad char in digest ---\/ + '$scram$4096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX3-', + + # missing sections + '$scram$4096$QSXCR.Q6sek8bf92', + '$scram$4096$QSXCR.Q6sek8bf92$', + + # too many sections + '$scram$4096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30$', + + # missing separator + '$scram$4096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVY', + + # too many chars in alg name + '$scram$4096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'shaxxx-190=HZbuOlKbWl.eR8AfIposuKbhX30', + + # missing sha-1 alg + '$scram$4096$QSXCR.Q6sek8bf92$sha-256=HZbuOlKbWl.eR8AfIposuKbhX30', + + # non-iana name + '$scram$4096$QSXCR.Q6sek8bf92$sha1=HZbuOlKbWl.eR8AfIposuKbhX30', + ] + + def setUp(self): + super(scram_test, self).setUp() + + # some platforms lack stringprep (e.g. Jython, IronPython) + self.require_stringprep() + + # silence norm_hash_name() warning + warnings.filterwarnings("ignore", r"norm_hash_name\(\): unknown hash") + + def test_90_algs(self): + """test parsing of 'algs' setting""" + defaults = dict(salt=b'A'*10, rounds=1000) + def parse(algs, **kwds): + for k in defaults: + kwds.setdefault(k, defaults[k]) + return self.handler(algs=algs, **kwds).algs + + # None -> default list + self.assertEqual(parse(None, use_defaults=True), hash.scram.default_algs) + self.assertRaises(TypeError, parse, None) + + # strings should be parsed + self.assertEqual(parse("sha1"), ["sha-1"]) + self.assertEqual(parse("sha1, sha256, md5"), ["md5","sha-1","sha-256"]) + + # lists should be normalized + self.assertEqual(parse(["sha-1","sha256"]), ["sha-1","sha-256"]) + + # sha-1 required + self.assertRaises(ValueError, parse, ["sha-256"]) + self.assertRaises(ValueError, parse, algs=[], use_defaults=True) + + # alg names must be < 10 chars + self.assertRaises(ValueError, parse, ["sha-1","shaxxx-190"]) + + # alg & checksum mutually exclusive. + self.assertRaises(RuntimeError, parse, ['sha-1'], + checksum={"sha-1": b"\x00"*20}) + + def test_90_checksums(self): + """test internal parsing of 'checksum' keyword""" + # check non-bytes checksum values are rejected + self.assertRaises(TypeError, self.handler, use_defaults=True, + checksum={'sha-1': u('X')*20}) + + # check sha-1 is required + self.assertRaises(ValueError, self.handler, use_defaults=True, + checksum={'sha-256': b'X'*32}) + + # XXX: anything else that's not tested by the other code already? 
+ + def test_91_extract_digest_info(self): + """test scram.extract_digest_info()""" + edi = self.handler.extract_digest_info + + # return appropriate value or throw KeyError + h = "$scram$10$AAAAAA$sha-1=AQ,bbb=Ag,ccc=Aw" + s = b'\x00'*4 + self.assertEqual(edi(h,"SHA1"), (s,10, b'\x01')) + self.assertEqual(edi(h,"bbb"), (s,10, b'\x02')) + self.assertEqual(edi(h,"ccc"), (s,10, b'\x03')) + self.assertRaises(KeyError, edi, h, "ddd") + + # config strings should cause value error. + c = "$scram$10$....$sha-1,bbb,ccc" + self.assertRaises(ValueError, edi, c, "sha-1") + self.assertRaises(ValueError, edi, c, "bbb") + self.assertRaises(ValueError, edi, c, "ddd") + + def test_92_extract_digest_algs(self): + """test scram.extract_digest_algs()""" + eda = self.handler.extract_digest_algs + + self.assertEqual(eda('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30'), ["sha-1"]) + + self.assertEqual(eda('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30', format="hashlib"), + ["sha1"]) + + self.assertEqual(eda('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVY,' + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ'), + ["sha-1","sha-256","sha-512"]) + + def test_93_derive_digest(self): + """test scram.derive_digest()""" + # NOTE: this just does a light test, since derive_digest + # is used by hash / verify, and is tested pretty well via those. + hash = self.handler.derive_digest + + # check various encodings of password work. + s1 = b'\x01\x02\x03' + d1 = b'\xb2\xfb\xab\x82[tNuPnI\x8aZZ\x19\x87\xcen\xe9\xd3' + self.assertEqual(hash(u("\u2168"), s1, 1000, 'sha-1'), d1) + self.assertEqual(hash(b"\xe2\x85\xa8", s1, 1000, 'SHA-1'), d1) + self.assertEqual(hash(u("IX"), s1, 1000, 'sha1'), d1) + self.assertEqual(hash(b"IX", s1, 1000, 'SHA1'), d1) + + # check algs + self.assertEqual(hash("IX", s1, 1000, 'md5'), + b'3\x19\x18\xc0\x1c/\xa8\xbf\xe4\xa3\xc2\x8eM\xe8od') + self.assertRaises(ValueError, hash, "IX", s1, 1000, 'sha-666') + + # check rounds + self.assertRaises(ValueError, hash, "IX", s1, 0, 'sha-1') + + # unicode salts accepted as of passlib 1.7 (previous caused TypeError) + self.assertEqual(hash(u("IX"), s1.decode("latin-1"), 1000, 'sha1'), d1) + + def test_94_saslprep(self): + """test hash/verify use saslprep""" + # NOTE: this just does a light test that saslprep() is being + # called in various places, relying in saslpreps()'s tests + # to verify full normalization behavior. 
+ + # hash unnormalized + h = self.do_encrypt(u("I\u00ADX")) + self.assertTrue(self.do_verify(u("IX"), h)) + self.assertTrue(self.do_verify(u("\u2168"), h)) + + # hash normalized + h = self.do_encrypt(u("\xF3")) + self.assertTrue(self.do_verify(u("o\u0301"), h)) + self.assertTrue(self.do_verify(u("\u200Do\u0301"), h)) + + # throws error if forbidden char provided + self.assertRaises(ValueError, self.do_encrypt, u("\uFDD0")) + self.assertRaises(ValueError, self.do_verify, u("\uFDD0"), h) + + def test_94_using_w_default_algs(self, param="default_algs"): + """using() -- 'default_algs' parameter""" + # create subclass + handler = self.handler + orig = list(handler.default_algs) # in case it's modified in place + subcls = handler.using(**{param: "sha1,md5"}) + + # shouldn't have changed handler + self.assertEqual(handler.default_algs, orig) + + # should have own set + self.assertEqual(subcls.default_algs, ["md5", "sha-1"]) + + # test hash output + h1 = subcls.hash("dummy") + self.assertEqual(handler.extract_digest_algs(h1), ["md5", "sha-1"]) + + def test_94_using_w_algs(self): + """using() -- 'algs' parameter""" + self.test_94_using_w_default_algs(param="algs") + + def test_94_needs_update_algs(self): + """needs_update() -- algs setting""" + handler1 = self.handler.using(algs="sha1,md5") + + # shouldn't need update, has same algs + h1 = handler1.hash("dummy") + self.assertFalse(handler1.needs_update(h1)) + + # *currently* shouldn't need update, has superset of algs required by handler2 + # (may change this policy) + handler2 = handler1.using(algs="sha1") + self.assertFalse(handler2.needs_update(h1)) + + # should need update, doesn't have all algs required by handler3 + handler3 = handler1.using(algs="sha1,sha256") + self.assertTrue(handler3.needs_update(h1)) + + def test_95_context_algs(self): + """test handling of 'algs' in context object""" + handler = self.handler + from passlib.context import CryptContext + c1 = CryptContext(["scram"], scram__algs="sha1,md5") + + h = c1.hash("dummy") + self.assertEqual(handler.extract_digest_algs(h), ["md5", "sha-1"]) + self.assertFalse(c1.needs_update(h)) + + c2 = c1.copy(scram__algs="sha1") + self.assertFalse(c2.needs_update(h)) + + c2 = c1.copy(scram__algs="sha1,sha256") + self.assertTrue(c2.needs_update(h)) + + def test_96_full_verify(self): + """test verify(full=True) flag""" + def vpart(s, h): + return self.handler.verify(s, h) + def vfull(s, h): + return self.handler.verify(s, h, full=True) + + # reference + h = ('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVY,' + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ') + self.assertTrue(vfull('pencil', h)) + self.assertFalse(vfull('tape', h)) + + # catch truncated digests. + h = ('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhV,' # -1 char + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ') + self.assertRaises(ValueError, vfull, 'pencil', h) + + # catch padded digests. + h = ('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVYa,' # +1 char + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ') + self.assertRaises(ValueError, vfull, 'pencil', h) + + # catch hash containing digests belonging to diff passwords. 
+ # proper behavior for quick-verify (the default) is undefined, + # but full-verify should throw error. + h = ('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' # 'pencil' + 'sha-256=R7RJDWIbeKRTFwhE9oxh04kab0CllrQ3kCcpZUcligc,' # 'tape' + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' # 'pencil' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ') + self.assertTrue(vpart('tape', h)) + self.assertFalse(vpart('pencil', h)) + self.assertRaises(ValueError, vfull, 'pencil', h) + self.assertRaises(ValueError, vfull, 'tape', h) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_scrypt.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_scrypt.py new file mode 100644 index 000000000..5ab6d9fb5 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_handlers_scrypt.py @@ -0,0 +1,111 @@ +"""passlib.tests.test_handlers - tests for passlib hash algorithms""" +#============================================================================= +# imports +#============================================================================= +# core +import logging; log = logging.getLogger(__name__) +import warnings +warnings.filterwarnings("ignore", ".*using builtin scrypt backend.*") +# site +# pkg +from passlib import hash +from passlib.tests.utils import HandlerCase, TEST_MODE +from passlib.tests.test_handlers import UPASS_TABLE, PASS_TABLE_UTF8 +# module + +#============================================================================= +# scrypt hash +#============================================================================= +class _scrypt_test(HandlerCase): + handler = hash.scrypt + + known_correct_hashes = [ + # + # excepted from test vectors from scrypt whitepaper + # (http://www.tarsnap.com/scrypt/scrypt.pdf, appendix b), + # and encoded using passlib's custom format + # + + # salt=b"" + ("", "$scrypt$ln=4,r=1,p=1$$d9ZXYjhleyA7GcpCwYoEl/FrSETjB0ro39/6P+3iFEI"), + + # salt=b"NaCl" + ("password", "$scrypt$ln=10,r=8,p=16$TmFDbA$/bq+HJ00cgB4VucZDQHp/nxq18vII3gw53N2Y0s3MWI"), + + # + # custom + # + + # simple test + ("test", '$scrypt$ln=8,r=8,p=1$wlhLyXmP8b53bm1NKYVQqg$mTpvG8lzuuDk+DWz8HZIB6Vum6erDuUm0As5yU+VxWA'), + + # different block value + ("password", '$scrypt$ln=8,r=2,p=1$dO6d0xoDoLT2PofQGoNQag$g/Wf2A0vhHhaJM+addK61QPBthSmYB6uVTtQzh8CM3o'), + + # different rounds + (UPASS_TABLE, '$scrypt$ln=7,r=8,p=1$jjGmtDamdA4BQAjBeA9BSA$OiWRHhQtpDx7M/793x6UXK14AD512jg/qNm/hkWZG4M'), + + # alt encoding + (PASS_TABLE_UTF8, '$scrypt$ln=7,r=8,p=1$jjGmtDamdA4BQAjBeA9BSA$OiWRHhQtpDx7M/793x6UXK14AD512jg/qNm/hkWZG4M'), + + # diff block & parallel counts as well + ("nacl", '$scrypt$ln=1,r=4,p=2$yhnD+J+Tci4lZCwFgHCuVQ$fAsEWmxSHuC0cHKMwKVFPzrQukgvK09Sj+NueTSxKds') + ] + + if TEST_MODE("full"): + # add some hashes with larger rounds value. 
+ known_correct_hashes.extend([ + # + # from scrypt whitepaper + # + + # salt=b"SodiumChloride" + ("pleaseletmein", "$scrypt$ln=14,r=8,p=1$U29kaXVtQ2hsb3JpZGU" + "$cCO9yzr9c0hGHAbNgf046/2o+7qQT44+qbVD9lRdofI"), + + # + # openwall format (https://gitlab.com/jas/scrypt-unix-crypt/blob/master/unix-scrypt.txt) + # + ("pleaseletmein", + "$7$C6..../....SodiumChloride$kBGj9fHznVYFQMEn/qDCfrDevf9YDtcDdKvEqHJLV8D"), + + ]) + + known_malformed_hashes = [ + # missing 'p' value + '$scrypt$ln=10,r=1$wvif8/4fg1Cq9V7L2dv73w$bJcLia1lyfQ1X2x0xflehwVXPzWIUQWWdnlGwfVzBeQ', + + # rounds too low + '$scrypt$ln=0,r=1,p=1$wvif8/4fg1Cq9V7L2dv73w$bJcLia1lyfQ1X2x0xflehwVXPzWIUQWWdnlGwfVzBeQ', + + # invalid block size + '$scrypt$ln=10,r=A,p=1$wvif8/4fg1Cq9V7L2dv73w$bJcLia1lyfQ1X2x0xflehwVXPzWIUQWWdnlGwfVzBeQ', + + # r*p too large + '$scrypt$ln=10,r=134217728,p=8$wvif8/4fg1Cq9V7L2dv73w$bJcLia1lyfQ1X2x0xflehwVXPzWIUQWWdnlGwfVzBeQ', + ] + + def setUpWarnings(self): + super(_scrypt_test, self).setUpWarnings() + warnings.filterwarnings("ignore", ".*using builtin scrypt backend.*") + + def populate_settings(self, kwds): + # builtin is still just way too slow. + if self.backend == "builtin": + kwds.setdefault("rounds", 6) + super(_scrypt_test, self).populate_settings(kwds) + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + def random_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. + return self.randintgauss(4, 10, 6, 1) + +# create test cases for specific backends +scrypt_stdlib_test = _scrypt_test.create_backend_case("stdlib") +scrypt_scrypt_test = _scrypt_test.create_backend_case("scrypt") +scrypt_builtin_test = _scrypt_test.create_backend_case("builtin") + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_hosts.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_hosts.py new file mode 100644 index 000000000..cbf93ab7a --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_hosts.py @@ -0,0 +1,97 @@ +"""test passlib.hosts""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib import hosts, hash as hashmod +from passlib.utils import unix_crypt_schemes +from passlib.tests.utils import TestCase +# module + +#============================================================================= +# test predefined app contexts +#============================================================================= +class HostsTest(TestCase): + """perform general tests to make sure contexts work""" + # NOTE: these tests are not really comprehensive, + # since they would do little but duplicate + # the presets in apps.py + # + # they mainly try to ensure no typos + # or dynamic behavior foul-ups. 
+ + def check_unix_disabled(self, ctx): + for hash in [ + "", + "!", + "*", + "!$1$TXl/FX/U$BZge.lr.ux6ekjEjxmzwz0", + ]: + self.assertEqual(ctx.identify(hash), 'unix_disabled') + self.assertFalse(ctx.verify('test', hash)) + + def test_linux_context(self): + ctx = hosts.linux_context + for hash in [ + ('$6$rounds=41128$VoQLvDjkaZ6L6BIE$4pt.1Ll1XdDYduEwEYPCMOBiR6W6' + 'znsyUEoNlcVXpv2gKKIbQolgmTGe6uEEVJ7azUxuc8Tf7zV9SD2z7Ij751'), + ('$5$rounds=31817$iZGmlyBQ99JSB5n6$p4E.pdPBWx19OajgjLRiOW0itGny' + 'xDGgMlDcOsfaI17'), + '$1$TXl/FX/U$BZge.lr.ux6ekjEjxmzwz0', + 'kAJJz.Rwp0A/I', + ]: + self.assertTrue(ctx.verify("test", hash)) + self.check_unix_disabled(ctx) + + def test_bsd_contexts(self): + for ctx in [ + hosts.freebsd_context, + hosts.openbsd_context, + hosts.netbsd_context, + ]: + for hash in [ + '$1$TXl/FX/U$BZge.lr.ux6ekjEjxmzwz0', + 'kAJJz.Rwp0A/I', + ]: + self.assertTrue(ctx.verify("test", hash)) + h1 = "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS" + if hashmod.bcrypt.has_backend(): + self.assertTrue(ctx.verify("test", h1)) + else: + self.assertEqual(ctx.identify(h1), "bcrypt") + self.check_unix_disabled(ctx) + + def test_host_context(self): + ctx = getattr(hosts, "host_context", None) + if not ctx: + return self.skipTest("host_context not available on this platform") + + # validate schemes is non-empty, + # and contains unix_disabled + at least one real scheme + schemes = list(ctx.schemes()) + self.assertTrue(schemes, "appears to be unix system, but no known schemes supported by crypt") + self.assertTrue('unix_disabled' in schemes) + schemes.remove("unix_disabled") + self.assertTrue(schemes, "should have schemes beside fallback scheme") + self.assertTrue(set(unix_crypt_schemes).issuperset(schemes)) + + # check for hash support + self.check_unix_disabled(ctx) + for scheme, hash in [ + ("sha512_crypt", ('$6$rounds=41128$VoQLvDjkaZ6L6BIE$4pt.1Ll1XdDYduEwEYPCMOBiR6W6' + 'znsyUEoNlcVXpv2gKKIbQolgmTGe6uEEVJ7azUxuc8Tf7zV9SD2z7Ij751')), + ("sha256_crypt", ('$5$rounds=31817$iZGmlyBQ99JSB5n6$p4E.pdPBWx19OajgjLRiOW0itGny' + 'xDGgMlDcOsfaI17')), + ("md5_crypt", '$1$TXl/FX/U$BZge.lr.ux6ekjEjxmzwz0'), + ("des_crypt", 'kAJJz.Rwp0A/I'), + ]: + if scheme in schemes: + self.assertTrue(ctx.verify("test", hash)) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_pwd.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_pwd.py new file mode 100644 index 000000000..2c983cdf5 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_pwd.py @@ -0,0 +1,205 @@ +"""passlib.tests -- tests for passlib.pwd""" +#============================================================================= +# imports +#============================================================================= +# core +import itertools +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.tests.utils import TestCase +# local +__all__ = [ + "UtilsTest", + "GenerateTest", + "StrengthTest", +] + +#============================================================================= +# +#============================================================================= +class UtilsTest(TestCase): + """test internal utilities""" + descriptionPrefix = "passlib.pwd" + + def test_self_info_rate(self): + """_self_info_rate()""" + from passlib.pwd import _self_info_rate + + self.assertEqual(_self_info_rate(""), 0) + + 
self.assertEqual(_self_info_rate("a" * 8), 0) + + self.assertEqual(_self_info_rate("ab"), 1) + self.assertEqual(_self_info_rate("ab" * 8), 1) + + self.assertEqual(_self_info_rate("abcd"), 2) + self.assertEqual(_self_info_rate("abcd" * 8), 2) + self.assertAlmostEqual(_self_info_rate("abcdaaaa"), 1.5488, places=4) + + # def test_total_self_info(self): + # """_total_self_info()""" + # from passlib.pwd import _total_self_info + # + # self.assertEqual(_total_self_info(""), 0) + # + # self.assertEqual(_total_self_info("a" * 8), 0) + # + # self.assertEqual(_total_self_info("ab"), 2) + # self.assertEqual(_total_self_info("ab" * 8), 16) + # + # self.assertEqual(_total_self_info("abcd"), 8) + # self.assertEqual(_total_self_info("abcd" * 8), 64) + # self.assertAlmostEqual(_total_self_info("abcdaaaa"), 12.3904, places=4) + +#============================================================================= +# word generation +#============================================================================= + +# import subject +from passlib.pwd import genword, default_charsets +ascii_62 = default_charsets['ascii_62'] +hex = default_charsets['hex'] + +class WordGeneratorTest(TestCase): + """test generation routines""" + descriptionPrefix = "passlib.pwd.genword()" + + def setUp(self): + super(WordGeneratorTest, self).setUp() + + # patch some RNG references so they're reproducible. + from passlib.pwd import SequenceGenerator + self.patchAttr(SequenceGenerator, "rng", + self.getRandom("pwd generator")) + + def assertResultContents(self, results, count, chars, unique=True): + """check result list matches expected count & charset""" + self.assertEqual(len(results), count) + if unique: + if unique is True: + unique = count + self.assertEqual(len(set(results)), unique) + self.assertEqual(set("".join(results)), set(chars)) + + def test_general(self): + """general behavior""" + + # basic usage + result = genword() + self.assertEqual(len(result), 9) + + # malformed keyword should have useful error. 
+ self.assertRaisesRegex(TypeError, "(?i)unexpected keyword.*badkwd", genword, badkwd=True) + + def test_returns(self): + """'returns' keyword""" + # returns=int option + results = genword(returns=5000) + self.assertResultContents(results, 5000, ascii_62) + + # returns=iter option + gen = genword(returns=iter) + results = [next(gen) for _ in range(5000)] + self.assertResultContents(results, 5000, ascii_62) + + # invalid returns option + self.assertRaises(TypeError, genword, returns='invalid-type') + + def test_charset(self): + """'charset' & 'chars' options""" + # charset option + results = genword(charset="hex", returns=5000) + self.assertResultContents(results, 5000, hex) + + # chars option + # there are 3**3=27 possible combinations + results = genword(length=3, chars="abc", returns=5000) + self.assertResultContents(results, 5000, "abc", unique=27) + + # chars + charset + self.assertRaises(TypeError, genword, chars='abc', charset='hex') + + # TODO: test rng option + +#============================================================================= +# phrase generation +#============================================================================= + +# import subject +from passlib.pwd import genphrase +simple_words = ["alpha", "beta", "gamma"] + +class PhraseGeneratorTest(TestCase): + """test generation routines""" + descriptionPrefix = "passlib.pwd.genphrase()" + + def assertResultContents(self, results, count, words, unique=True, sep=" "): + """check result list matches expected count & charset""" + self.assertEqual(len(results), count) + if unique: + if unique is True: + unique = count + self.assertEqual(len(set(results)), unique) + out = set(itertools.chain.from_iterable(elem.split(sep) for elem in results)) + self.assertEqual(out, set(words)) + + def test_general(self): + """general behavior""" + + # basic usage + result = genphrase() + self.assertEqual(len(result.split(" ")), 4) # 48 / log(7776, 2) ~= 3.7 -> 4 + + # malformed keyword should have useful error. 
+ self.assertRaisesRegex(TypeError, "(?i)unexpected keyword.*badkwd", genphrase, badkwd=True) + + def test_entropy(self): + """'length' & 'entropy' keywords""" + + # custom entropy + result = genphrase(entropy=70) + self.assertEqual(len(result.split(" ")), 6) # 70 / log(7776, 2) ~= 5.4 -> 6 + + # custom length + result = genphrase(length=3) + self.assertEqual(len(result.split(" ")), 3) + + # custom length < entropy + result = genphrase(length=3, entropy=48) + self.assertEqual(len(result.split(" ")), 4) + + # custom length > entropy + result = genphrase(length=4, entropy=12) + self.assertEqual(len(result.split(" ")), 4) + + def test_returns(self): + """'returns' keyword""" + # returns=int option + results = genphrase(returns=1000, words=simple_words) + self.assertResultContents(results, 1000, simple_words) + + # returns=iter option + gen = genphrase(returns=iter, words=simple_words) + results = [next(gen) for _ in range(1000)] + self.assertResultContents(results, 1000, simple_words) + + # invalid returns option + self.assertRaises(TypeError, genphrase, returns='invalid-type') + + def test_wordset(self): + """'wordset' & 'words' options""" + # wordset option + results = genphrase(words=simple_words, returns=5000) + self.assertResultContents(results, 5000, simple_words) + + # words option + results = genphrase(length=3, words=simple_words, returns=5000) + self.assertResultContents(results, 5000, simple_words, unique=3**3) + + # words + wordset + self.assertRaises(TypeError, genphrase, words=simple_words, wordset='bip39') + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_registry.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_registry.py new file mode 100644 index 000000000..8cec48df0 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_registry.py @@ -0,0 +1,228 @@ +"""tests for passlib.hash -- (c) Assurance Technologies 2003-2009""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +from logging import getLogger +import warnings +import sys +# site +# pkg +from passlib import hash, registry, exc +from passlib.registry import register_crypt_handler, register_crypt_handler_path, \ + get_crypt_handler, list_crypt_handlers, _unload_handler_name as unload_handler_name +import passlib.utils.handlers as uh +from passlib.tests.utils import TestCase +# module +log = getLogger(__name__) + +#============================================================================= +# dummy handlers +# +# NOTE: these are defined outside of test case +# since they're used by test_register_crypt_handler_path(), +# which needs them to be available as module globals. +#============================================================================= +class dummy_0(uh.StaticHandler): + name = "dummy_0" + +class alt_dummy_0(uh.StaticHandler): + name = "dummy_0" + +dummy_x = 1 + +#============================================================================= +# test registry +#============================================================================= +class RegistryTest(TestCase): + + descriptionPrefix = "passlib.registry" + + def setUp(self): + super(RegistryTest, self).setUp() + + # backup registry state & restore it after test. 
+ locations = dict(registry._locations) + handlers = dict(registry._handlers) + def restore(): + registry._locations.clear() + registry._locations.update(locations) + registry._handlers.clear() + registry._handlers.update(handlers) + self.addCleanup(restore) + + def test_hash_proxy(self): + """test passlib.hash proxy object""" + # check dir works + dir(hash) + + # check repr works + repr(hash) + + # check non-existent attrs raise error + self.assertRaises(AttributeError, getattr, hash, 'fooey') + + # GAE tries to set __loader__, + # make sure that doesn't call register_crypt_handler. + old = getattr(hash, "__loader__", None) + test = object() + hash.__loader__ = test + self.assertIs(hash.__loader__, test) + if old is None: + del hash.__loader__ + self.assertFalse(hasattr(hash, "__loader__")) + else: + hash.__loader__ = old + self.assertIs(hash.__loader__, old) + + # check storing attr calls register_crypt_handler + class dummy_1(uh.StaticHandler): + name = "dummy_1" + hash.dummy_1 = dummy_1 + self.assertIs(get_crypt_handler("dummy_1"), dummy_1) + + # check storing under wrong name results in error + self.assertRaises(ValueError, setattr, hash, "dummy_1x", dummy_1) + + def test_register_crypt_handler_path(self): + """test register_crypt_handler_path()""" + # NOTE: this messes w/ internals of registry, shouldn't be used publically. + paths = registry._locations + + # check namespace is clear + self.assertTrue('dummy_0' not in paths) + self.assertFalse(hasattr(hash, 'dummy_0')) + + # check invalid names are rejected + self.assertRaises(ValueError, register_crypt_handler_path, + "dummy_0", ".test_registry") + self.assertRaises(ValueError, register_crypt_handler_path, + "dummy_0", __name__ + ":dummy_0:xxx") + self.assertRaises(ValueError, register_crypt_handler_path, + "dummy_0", __name__ + ":dummy_0.xxx") + + # try lazy load + register_crypt_handler_path('dummy_0', __name__) + self.assertTrue('dummy_0' in list_crypt_handlers()) + self.assertTrue('dummy_0' not in list_crypt_handlers(loaded_only=True)) + self.assertIs(hash.dummy_0, dummy_0) + self.assertTrue('dummy_0' in list_crypt_handlers(loaded_only=True)) + unload_handler_name('dummy_0') + + # try lazy load w/ alt + register_crypt_handler_path('dummy_0', __name__ + ':alt_dummy_0') + self.assertIs(hash.dummy_0, alt_dummy_0) + unload_handler_name('dummy_0') + + # check lazy load w/ wrong type fails + register_crypt_handler_path('dummy_x', __name__) + self.assertRaises(TypeError, get_crypt_handler, 'dummy_x') + + # check lazy load w/ wrong name fails + register_crypt_handler_path('alt_dummy_0', __name__) + self.assertRaises(ValueError, get_crypt_handler, "alt_dummy_0") + unload_handler_name("alt_dummy_0") + + # TODO: check lazy load which calls register_crypt_handler (warning should be issued) + sys.modules.pop("passlib.tests._test_bad_register", None) + register_crypt_handler_path("dummy_bad", "passlib.tests._test_bad_register") + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", "xxxxxxxxxx", DeprecationWarning) + h = get_crypt_handler("dummy_bad") + from passlib.tests import _test_bad_register as tbr + self.assertIs(h, tbr.alt_dummy_bad) + + def test_register_crypt_handler(self): + """test register_crypt_handler()""" + + self.assertRaises(TypeError, register_crypt_handler, {}) + + self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name=None))) + self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="AB_CD"))) + self.assertRaises(ValueError, 
register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="ab-cd"))) + self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="ab__cd"))) + self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="default"))) + + class dummy_1(uh.StaticHandler): + name = "dummy_1" + + class dummy_1b(uh.StaticHandler): + name = "dummy_1" + + self.assertTrue('dummy_1' not in list_crypt_handlers()) + + register_crypt_handler(dummy_1) + register_crypt_handler(dummy_1) + self.assertIs(get_crypt_handler("dummy_1"), dummy_1) + + self.assertRaises(KeyError, register_crypt_handler, dummy_1b) + self.assertIs(get_crypt_handler("dummy_1"), dummy_1) + + register_crypt_handler(dummy_1b, force=True) + self.assertIs(get_crypt_handler("dummy_1"), dummy_1b) + + self.assertTrue('dummy_1' in list_crypt_handlers()) + + def test_get_crypt_handler(self): + """test get_crypt_handler()""" + + class dummy_1(uh.StaticHandler): + name = "dummy_1" + + # without available handler + self.assertRaises(KeyError, get_crypt_handler, "dummy_1") + self.assertIs(get_crypt_handler("dummy_1", None), None) + + # already loaded handler + register_crypt_handler(dummy_1) + self.assertIs(get_crypt_handler("dummy_1"), dummy_1) + + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", "handler names should be lower-case, and use underscores instead of hyphens:.*", UserWarning) + + # already loaded handler, using incorrect name + self.assertIs(get_crypt_handler("DUMMY-1"), dummy_1) + + # lazy load of unloaded handler, using incorrect name + register_crypt_handler_path('dummy_0', __name__) + self.assertIs(get_crypt_handler("DUMMY-0"), dummy_0) + + # check system & private names aren't returned + from passlib import hash + hash.__dict__["_fake"] = "dummy" + for name in ["_fake", "__package__"]: + self.assertRaises(KeyError, get_crypt_handler, name) + self.assertIs(get_crypt_handler(name, None), None) + + def test_list_crypt_handlers(self): + """test list_crypt_handlers()""" + from passlib.registry import list_crypt_handlers + + # check system & private names aren't returned + hash.__dict__["_fake"] = "dummy" + for name in list_crypt_handlers(): + self.assertFalse(name.startswith("_"), "%r: " % name) + unload_handler_name("_fake") + + def test_handlers(self): + """verify we have tests for all builtin handlers""" + from passlib.registry import list_crypt_handlers + from passlib.tests.test_handlers import get_handler_case, conditionally_available_hashes + for name in list_crypt_handlers(): + # skip some wrappers that don't need independant testing + if name.startswith("ldap_") and name[5:] in list_crypt_handlers(): + continue + if name in ["roundup_plaintext"]: + continue + # check the remaining ones all have a handler + try: + self.assertTrue(get_handler_case(name)) + except exc.MissingBackendError: + if name in conditionally_available_hashes: # expected to fail on some setups + continue + raise + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_totp.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_totp.py new file mode 100644 index 000000000..604d2e98a --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_totp.py @@ -0,0 +1,1604 @@ +"""passlib.tests -- test passlib.totp""" 
+#============================================================================= +# imports +#============================================================================= +# core +import datetime +from functools import partial +import logging; log = logging.getLogger(__name__) +import sys +import time as _time +# site +# pkg +from passlib import exc +from passlib.utils.compat import unicode, u +from passlib.tests.utils import TestCase, time_call +# subject +from passlib import totp as totp_module +from passlib.totp import TOTP, AppWallet, AES_SUPPORT +# local +__all__ = [ + "EngineTest", +] + +#============================================================================= +# helpers +#============================================================================= + +# XXX: python 3 changed what error base64.b16decode() throws, from TypeError to base64.Error(). +# it wasn't until 3.3 that base32decode() also got changed. +# really should normalize this in the code to a single BinaryDecodeError, +# predicting this cross-version is getting unmanagable. +Base32DecodeError = Base16DecodeError = TypeError +if sys.version_info >= (3,0): + from binascii import Error as Base16DecodeError +if sys.version_info >= (3,3): + from binascii import Error as Base32DecodeError + +PASS1 = "abcdef" +PASS2 = b"\x00\xFF" +KEY1 = '4AOGGDBBQSYHNTUZ' +KEY1_RAW = b'\xe0\x1cc\x0c!\x84\xb0v\xce\x99' +KEY2_RAW = b'\xee]\xcb9\x870\x06 D\xc8y/\xa54&\xe4\x9c\x13\xc2\x18' +KEY3 = 'S3JDVB7QD2R7JPXX' # used in docstrings +KEY4 = 'JBSWY3DPEHPK3PXP' # from google keyuri spec +KEY4_RAW = b'Hello!\xde\xad\xbe\xef' + +# NOTE: for randtime() below, +# * want at least 7 bits on fractional side, to test fractional times to at least 0.01s precision +# * want at least 32 bits on integer side, to test for 32-bit epoch issues. +# most systems *should* have 53 bit mantissa, leaving plenty of room on both ends, +# so using (1<<37) as scale, to allocate 16 bits on fractional side, but generate reasonable # of > 1<<32 times. +# sanity check that we're above 44 ensures minimum requirements (44 - 37 int = 7 frac) +assert sys.float_info.radix == 2, "unexpected float_info.radix" +assert sys.float_info.mant_dig >= 44, "double precision unexpectedly small" + +def _get_max_time_t(): + """ + helper to calc max_time_t constant (see below) + """ + value = 1 << 30 # even for 32 bit systems will handle this + year = 0 + while True: + next_value = value << 1 + try: + next_year = datetime.datetime.utcfromtimestamp(next_value-1).year + except (ValueError, OSError, OverflowError): + # utcfromtimestamp() may throw any of the following: + # + # * year out of range for datetime: + # py < 3.6 throws ValueError. + # (py 3.6.0 returns odd value instead, see workaround below) + # + # * int out of range for host's gmtime/localtime: + # py2 throws ValueError, py3 throws OSError. + # + # * int out of range for host's time_t: + # py2 throws ValueError, py3 throws OverflowError. + # + break + + # Workaround for python 3.6.0 issue -- + # Instead of throwing ValueError if year out of range for datetime, + # Python 3.6 will do some weird behavior that masks high bits + # e.g. (1<<40) -> year 36812, but (1<<41) -> year 6118. + # (Appears to be bug http://bugs.python.org/issue29100) + # This check stops at largest non-wrapping bit size. + if next_year < year: + break + + value = next_value + + # 'value-1' is maximum. + value -= 1 + + # check for crazy case where we're beyond what datetime supports + # (caused by bug 29100 again). 
compare to max value that datetime + # module supports -- datetime.datetime(9999, 12, 31, 23, 59, 59, 999999) + max_datetime_timestamp = 253402318800 + return min(value, max_datetime_timestamp) + +#: Rough approximation of max value acceptable by hosts's time_t. +#: This is frequently ~2**37 on 64 bit, and ~2**31 on 32 bit systems. +max_time_t = _get_max_time_t() + +def to_b32_size(raw_size): + return (raw_size * 8 + 4) // 5 + +#============================================================================= +# wallet +#============================================================================= +class AppWalletTest(TestCase): + descriptionPrefix = "passlib.totp.AppWallet" + + #============================================================================= + # constructor + #============================================================================= + + def test_secrets_types(self): + """constructor -- 'secrets' param -- input types""" + + # no secrets + wallet = AppWallet() + self.assertEqual(wallet._secrets, {}) + self.assertFalse(wallet.has_secrets) + + # dict + ref = {"1": b"aaa", "2": b"bbb"} + wallet = AppWallet(ref) + self.assertEqual(wallet._secrets, ref) + self.assertTrue(wallet.has_secrets) + + # # list + # wallet = AppWallet(list(ref.items())) + # self.assertEqual(wallet._secrets, ref) + + # # iter + # wallet = AppWallet(iter(ref.items())) + # self.assertEqual(wallet._secrets, ref) + + # "tag:value" string + wallet = AppWallet("\n 1: aaa\n# comment\n \n2: bbb ") + self.assertEqual(wallet._secrets, ref) + + # ensure ":" allowed in secret + wallet = AppWallet("1: aaa: bbb \n# comment\n \n2: bbb ") + self.assertEqual(wallet._secrets, {"1": b"aaa: bbb", "2": b"bbb"}) + + # json dict + wallet = AppWallet('{"1":"aaa","2":"bbb"}') + self.assertEqual(wallet._secrets, ref) + + # # json list + # wallet = AppWallet('[["1","aaa"],["2","bbb"]]') + # self.assertEqual(wallet._secrets, ref) + + # invalid type + self.assertRaises(TypeError, AppWallet, 123) + + # invalid json obj + self.assertRaises(TypeError, AppWallet, "[123]") + + # # invalid list items + # self.assertRaises(ValueError, AppWallet, ["1", b"aaa"]) + + # forbid empty secret + self.assertRaises(ValueError, AppWallet, {"1": "aaa", "2": ""}) + + def test_secrets_tags(self): + """constructor -- 'secrets' param -- tag/value normalization""" + + # test reference + ref = {"1": b"aaa", "02": b"bbb", "C": b"ccc"} + wallet = AppWallet(ref) + self.assertEqual(wallet._secrets, ref) + + # accept unicode + wallet = AppWallet({u("1"): b"aaa", u("02"): b"bbb", u("C"): b"ccc"}) + self.assertEqual(wallet._secrets, ref) + + # normalize int tags + wallet = AppWallet({1: b"aaa", "02": b"bbb", "C": b"ccc"}) + self.assertEqual(wallet._secrets, ref) + + # forbid non-str/int tags + self.assertRaises(TypeError, AppWallet, {(1,): "aaa"}) + + # accept valid tags + wallet = AppWallet({"1-2_3.4": b"aaa"}) + + # forbid invalid tags + self.assertRaises(ValueError, AppWallet, {"-abc": "aaa"}) + self.assertRaises(ValueError, AppWallet, {"ab*$": "aaa"}) + + # coerce value to bytes + wallet = AppWallet({"1": u("aaa"), "02": "bbb", "C": b"ccc"}) + self.assertEqual(wallet._secrets, ref) + + # forbid invalid value types + self.assertRaises(TypeError, AppWallet, {"1": 123}) + self.assertRaises(TypeError, AppWallet, {"1": None}) + self.assertRaises(TypeError, AppWallet, {"1": []}) + + # TODO: test secrets_path + + def test_default_tag(self): + """constructor -- 'default_tag' param""" + + # should sort numerically + wallet = AppWallet({"1": "one", "02": "two"}) + 
self.assertEqual(wallet.default_tag, "02") + self.assertEqual(wallet.get_secret(wallet.default_tag), b"two") + + # should sort alphabetically if non-digit present + wallet = AppWallet({"1": "one", "02": "two", "A": "aaa"}) + self.assertEqual(wallet.default_tag, "A") + self.assertEqual(wallet.get_secret(wallet.default_tag), b"aaa") + + # should use honor custom tag + wallet = AppWallet({"1": "one", "02": "two", "A": "aaa"}, default_tag="1") + self.assertEqual(wallet.default_tag, "1") + self.assertEqual(wallet.get_secret(wallet.default_tag), b"one") + + # throw error on unknown value + self.assertRaises(KeyError, AppWallet, {"1": "one", "02": "two", "A": "aaa"}, + default_tag="B") + + # should be empty + wallet = AppWallet() + self.assertEqual(wallet.default_tag, None) + self.assertRaises(KeyError, wallet.get_secret, None) + + # TODO: test 'cost' param + + #============================================================================= + # encrypt_key() & decrypt_key() helpers + #============================================================================= + def require_aes_support(self, canary=None): + if AES_SUPPORT: + canary and canary() + else: + canary and self.assertRaises(RuntimeError, canary) + raise self.skipTest("'cryptography' package not installed") + + def test_decrypt_key(self): + """.decrypt_key()""" + + wallet = AppWallet({"1": PASS1, "2": PASS2}) + + # check for support + CIPHER1 = dict(v=1, c=13, s='6D7N7W53O7HHS37NLUFQ', + k='MHCTEGSNPFN5CGBJ', t='1') + self.require_aes_support(canary=partial(wallet.decrypt_key, CIPHER1)) + + # reference key + self.assertEqual(wallet.decrypt_key(CIPHER1)[0], KEY1_RAW) + + # different salt used to encrypt same raw key + CIPHER2 = dict(v=1, c=13, s='SPZJ54Y6IPUD2BYA4C6A', + k='ZGDXXTVQOWYLC2AU', t='1') + self.assertEqual(wallet.decrypt_key(CIPHER2)[0], KEY1_RAW) + + # different sized key, password, and cost + CIPHER3 = dict(v=1, c=8, s='FCCTARTIJWE7CPQHUDKA', + k='D2DRS32YESGHHINWFFCELKN7Z6NAHM4M', t='2') + self.assertEqual(wallet.decrypt_key(CIPHER3)[0], KEY2_RAW) + + # wrong password should silently result in wrong key + temp = CIPHER1.copy() + temp.update(t='2') + self.assertEqual(wallet.decrypt_key(temp)[0], b'\xafD6.F7\xeb\x19\x05Q') + + # missing tag should throw error + temp = CIPHER1.copy() + temp.update(t='3') + self.assertRaises(KeyError, wallet.decrypt_key, temp) + + # unknown version should throw error + temp = CIPHER1.copy() + temp.update(v=999) + self.assertRaises(ValueError, wallet.decrypt_key, temp) + + def test_decrypt_key_needs_recrypt(self): + """.decrypt_key() -- needs_recrypt flag""" + self.require_aes_support() + + wallet = AppWallet({"1": PASS1, "2": PASS2}, encrypt_cost=13) + + # ref should be accepted + ref = dict(v=1, c=13, s='AAAA', k='AAAA', t='2') + self.assertFalse(wallet.decrypt_key(ref)[1]) + + # wrong cost + temp = ref.copy() + temp.update(c=8) + self.assertTrue(wallet.decrypt_key(temp)[1]) + + # wrong tag + temp = ref.copy() + temp.update(t="1") + self.assertTrue(wallet.decrypt_key(temp)[1]) + + # XXX: should this check salt_size? 
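To make the encrypt_key()/decrypt_key() contract exercised above easier to follow, here is a minimal usage sketch. It assumes the optional 'cryptography' package is available (i.e. AES_SUPPORT is true); the passphrase and raw key bytes are made up for illustration only.

from passlib.totp import AppWallet

wallet = AppWallet({"1": "application passphrase"}, encrypt_cost=8)

enc = wallet.encrypt_key(b"0123456789")        # dict with the v / t / c / s / k fields seen above
raw, needs_recrypt = wallet.decrypt_key(enc)   # round-trips back to the original raw key
assert raw == b"0123456789"
assert not needs_recrypt                       # set only when the stored cost or tag is stale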
+ + def assertSaneResult(self, result, wallet, key, tag="1", + needs_recrypt=False): + """check encrypt_key() result has expected format""" + + self.assertEqual(set(result), set(["v", "t", "c", "s", "k"])) + + self.assertEqual(result['v'], 1) + self.assertEqual(result['t'], tag) + self.assertEqual(result['c'], wallet.encrypt_cost) + + self.assertEqual(len(result['s']), to_b32_size(wallet.salt_size)) + self.assertEqual(len(result['k']), to_b32_size(len(key))) + + result_key, result_needs_recrypt = wallet.decrypt_key(result) + self.assertEqual(result_key, key) + self.assertEqual(result_needs_recrypt, needs_recrypt) + + def test_encrypt_key(self): + """.encrypt_key()""" + + # check for support + wallet = AppWallet({"1": PASS1}, encrypt_cost=5) + self.require_aes_support(canary=partial(wallet.encrypt_key, KEY1_RAW)) + + # basic behavior + result = wallet.encrypt_key(KEY1_RAW) + self.assertSaneResult(result, wallet, KEY1_RAW) + + # creates new salt each time + other = wallet.encrypt_key(KEY1_RAW) + self.assertSaneResult(result, wallet, KEY1_RAW) + self.assertNotEqual(other['s'], result['s']) + self.assertNotEqual(other['k'], result['k']) + + # honors custom cost + wallet2 = AppWallet({"1": PASS1}, encrypt_cost=6) + result = wallet2.encrypt_key(KEY1_RAW) + self.assertSaneResult(result, wallet2, KEY1_RAW) + + # honors default tag + wallet2 = AppWallet({"1": PASS1, "2": PASS2}) + result = wallet2.encrypt_key(KEY1_RAW) + self.assertSaneResult(result, wallet2, KEY1_RAW, tag="2") + + # honor salt size + wallet2 = AppWallet({"1": PASS1}) + wallet2.salt_size = 64 + result = wallet2.encrypt_key(KEY1_RAW) + self.assertSaneResult(result, wallet2, KEY1_RAW) + + # larger key + result = wallet.encrypt_key(KEY2_RAW) + self.assertSaneResult(result, wallet, KEY2_RAW) + + # border case: empty key + # XXX: might want to allow this, but documenting behavior for now + self.assertRaises(ValueError, wallet.encrypt_key, b"") + + def test_encrypt_cost_timing(self): + """verify cost parameter via timing""" + self.require_aes_support() + + # time default cost + wallet = AppWallet({"1": "aaa"}) + wallet.encrypt_cost -= 2 + delta, _ = time_call(partial(wallet.encrypt_key, KEY1_RAW), maxtime=0) + + # this should take (2**3=8) times as long + wallet.encrypt_cost += 3 + delta2, _ = time_call(partial(wallet.encrypt_key, KEY1_RAW), maxtime=0) + + # TODO: rework timing test here to inject mock pbkdf2_hmac() function instead; + # and test that it's being invoked w/ proper options. + self.assertAlmostEqual(delta2, delta*8, delta=(delta*8)*0.5) + + #============================================================================= + # eoc + #============================================================================= + +#============================================================================= +# common OTP code +#============================================================================= + +#: used as base value for RFC test vector keys +RFC_KEY_BYTES_20 = "12345678901234567890".encode("ascii") +RFC_KEY_BYTES_32 = (RFC_KEY_BYTES_20*2)[:32] +RFC_KEY_BYTES_64 = (RFC_KEY_BYTES_20*4)[:64] + +# TODO: this class is separate from TotpTest due to historical issue, +# when there was a base class, and a separate HOTP class. +# these test case classes should probably be combined. 
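For context on the reference vectors used below: the expected tokens are plain RFC 4226 / RFC 6238 values, i.e. an HMAC over the big-endian time counter followed by dynamic truncation. A self-contained Python 3 sketch of that calculation (not passlib's own implementation) looks roughly like this:

import hashlib, hmac, struct

def reference_totp(key, timestamp, period=30, digits=8, alg="sha1"):
    # HMAC the 8-byte big-endian counter, then apply RFC 4226 dynamic truncation
    counter = int(timestamp) // period
    mac = hmac.new(key, struct.pack(">Q", counter), getattr(hashlib, alg)).digest()
    offset = mac[-1] & 0x0F
    code = struct.unpack(">I", mac[offset:offset + 4])[0] & 0x7FFFFFFF
    return "%0*d" % (digits, code % 10 ** digits)

# first sha1 vector from RFC 6238 appendix B: 20-byte ascii key, time=59
assert reference_totp(b"12345678901234567890", 59) == "94287082"

The sha256/sha512 rows follow the appendix's reference code in repeating that key out to 32 and 64 bytes respectively, as the comment in the vector list notes.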
+class TotpTest(TestCase): + """ + common code shared by TotpTest & HotpTest + """ + #============================================================================= + # class attrs + #============================================================================= + + descriptionPrefix = "passlib.totp.TOTP" + + #============================================================================= + # setup + #============================================================================= + def setUp(self): + super(TotpTest, self).setUp() + + # clear norm_hash_name() cache so 'unknown hash' warnings get emitted each time + from passlib.crypto.digest import lookup_hash + lookup_hash.clear_cache() + + # monkeypatch module's rng to be deterministic + self.patchAttr(totp_module, "rng", self.getRandom()) + + #============================================================================= + # general helpers + #============================================================================= + def randtime(self): + """ + helper to generate random epoch time + :returns float: epoch time + """ + return self.getRandom().random() * max_time_t + + def randotp(self, cls=None, **kwds): + """ + helper which generates a random TOTP instance. + """ + rng = self.getRandom() + if "key" not in kwds: + kwds['new'] = True + kwds.setdefault("digits", rng.randint(6, 10)) + kwds.setdefault("alg", rng.choice(["sha1", "sha256", "sha512"])) + kwds.setdefault("period", rng.randint(10, 120)) + return (cls or TOTP)(**kwds) + + def test_randotp(self): + """ + internal test -- randotp() + """ + otp1 = self.randotp() + otp2 = self.randotp() + + self.assertNotEqual(otp1.key, otp2.key, "key not randomized:") + + # NOTE: has (1/5)**10 odds of failure + for _ in range(10): + if otp1.digits != otp2.digits: + break + otp2 = self.randotp() + else: + self.fail("digits not randomized") + + # NOTE: has (1/3)**10 odds of failure + for _ in range(10): + if otp1.alg != otp2.alg: + break + otp2 = self.randotp() + else: + self.fail("alg not randomized") + + #============================================================================= + # reference vector helpers + #============================================================================= + + #: default options used by test vectors (unless otherwise stated) + vector_defaults = dict(format="base32", alg="sha1", period=30, digits=8) + + #: various TOTP test vectors, + #: each element in list has format [options, (time, token <, int(expires)>), ...] + vectors = [ + + #------------------------------------------------------------------------- + # passlib test vectors + #------------------------------------------------------------------------- + + # 10 byte key, 6 digits + [dict(key="ACDEFGHJKL234567", digits=6), + # test fencepost to make sure we're rounding right + (1412873399, '221105'), # == 29 mod 30 + (1412873400, '178491'), # == 0 mod 30 + (1412873401, '178491'), # == 1 mod 30 + (1412873429, '178491'), # == 29 mod 30 + (1412873430, '915114'), # == 0 mod 30 + ], + + # 10 byte key, 8 digits + [dict(key="ACDEFGHJKL234567", digits=8), + # should be same as 6 digits (above), but w/ 2 more digits on left side of token. 
+ (1412873399, '20221105'), # == 29 mod 30 + (1412873400, '86178491'), # == 0 mod 30 + (1412873401, '86178491'), # == 1 mod 30 + (1412873429, '86178491'), # == 29 mod 30 + (1412873430, '03915114'), # == 0 mod 30 + ], + + # sanity check on key used in docstrings + [dict(key="S3JD-VB7Q-D2R7-JPXX", digits=6), + (1419622709, '000492'), + (1419622739, '897212'), + ], + + #------------------------------------------------------------------------- + # reference vectors taken from http://tools.ietf.org/html/rfc6238, appendix B + # NOTE: while appendix B states same key used for all tests, the reference + # code in the appendix repeats the key up to the alg's block size, + # and uses *that* as the secret... so that's what we're doing here. + #------------------------------------------------------------------------- + + # sha1 test vectors + [dict(key=RFC_KEY_BYTES_20, format="raw", alg="sha1"), + (59, '94287082'), + (1111111109, '07081804'), + (1111111111, '14050471'), + (1234567890, '89005924'), + (2000000000, '69279037'), + (20000000000, '65353130'), + ], + + # sha256 test vectors + [dict(key=RFC_KEY_BYTES_32, format="raw", alg="sha256"), + (59, '46119246'), + (1111111109, '68084774'), + (1111111111, '67062674'), + (1234567890, '91819424'), + (2000000000, '90698825'), + (20000000000, '77737706'), + ], + + # sha512 test vectors + [dict(key=RFC_KEY_BYTES_64, format="raw", alg="sha512"), + (59, '90693936'), + (1111111109, '25091201'), + (1111111111, '99943326'), + (1234567890, '93441116'), + (2000000000, '38618901'), + (20000000000, '47863826'), + ], + + #------------------------------------------------------------------------- + # other test vectors + #------------------------------------------------------------------------- + + # generated at http://blog.tinisles.com/2011/10/google-authenticator-one-time-password-algorithm-in-javascript + [dict(key="JBSWY3DPEHPK3PXP", digits=6), (1409192430, '727248'), (1419890990, '122419')], + [dict(key="JBSWY3DPEHPK3PXP", digits=9, period=41), (1419891152, '662331049')], + + # found in https://github.com/eloquent/otis/blob/develop/test/suite/Totp/Value/TotpValueGeneratorTest.php, line 45 + [dict(key=RFC_KEY_BYTES_20, format="raw", period=60), (1111111111, '19360094')], + [dict(key=RFC_KEY_BYTES_32, format="raw", alg="sha256", period=60), (1111111111, '40857319')], + [dict(key=RFC_KEY_BYTES_64, format="raw", alg="sha512", period=60), (1111111111, '37023009')], + + ] + + def iter_test_vectors(self): + """ + helper to iterate over test vectors. + yields ``(totp, time, token, expires, prefix)`` tuples. + """ + from passlib.totp import TOTP + for row in self.vectors: + kwds = self.vector_defaults.copy() + kwds.update(row[0]) + for entry in row[1:]: + if len(entry) == 3: + time, token, expires = entry + else: + time, token = entry + expires = None + # NOTE: not re-using otp between calls so that stateful methods + # (like .match) don't have problems. 
+ log.debug("test vector: %r time=%r token=%r expires=%r", kwds, time, token, expires) + otp = TOTP(**kwds) + prefix = "alg=%r time=%r token=%r: " % (otp.alg, time, token) + yield otp, time, token, expires, prefix + + #============================================================================= + # constructor tests + #============================================================================= + def test_ctor_w_new(self): + """constructor -- 'new' parameter""" + + # exactly one of 'key' or 'new' is required + self.assertRaises(TypeError, TOTP) + self.assertRaises(TypeError, TOTP, key='4aoggdbbqsyhntuz', new=True) + + # generates new key + otp = TOTP(new=True) + otp2 = TOTP(new=True) + self.assertNotEqual(otp.key, otp2.key) + + def test_ctor_w_size(self): + """constructor -- 'size' parameter""" + + # should default to digest size, per RFC + self.assertEqual(len(TOTP(new=True, alg="sha1").key), 20) + self.assertEqual(len(TOTP(new=True, alg="sha256").key), 32) + self.assertEqual(len(TOTP(new=True, alg="sha512").key), 64) + + # explicit key size + self.assertEqual(len(TOTP(new=True, size=10).key), 10) + self.assertEqual(len(TOTP(new=True, size=16).key), 16) + + # for new=True, maximum size enforced (based on alg) + self.assertRaises(ValueError, TOTP, new=True, size=21, alg="sha1") + + # for new=True, minimum size enforced + self.assertRaises(ValueError, TOTP, new=True, size=9) + + # for existing key, minimum size is only warned about + with self.assertWarningList([ + dict(category=exc.PasslibSecurityWarning, message_re=".*for security purposes, secret key must be.*") + ]): + _ = TOTP('0A'*9, 'hex') + + def test_ctor_w_key_and_format(self): + """constructor -- 'key' and 'format' parameters""" + + # handle base32 encoding (the default) + self.assertEqual(TOTP(KEY1).key, KEY1_RAW) + + # .. w/ lower case + self.assertEqual(TOTP(KEY1.lower()).key, KEY1_RAW) + + # .. w/ spaces (e.g. user-entered data) + self.assertEqual(TOTP(' 4aog gdbb qsyh ntuz ').key, KEY1_RAW) + + # .. w/ invalid char + self.assertRaises(Base32DecodeError, TOTP, 'ao!ggdbbqsyhntuz') + + # handle hex encoding + self.assertEqual(TOTP('e01c630c2184b076ce99', 'hex').key, KEY1_RAW) + + # .. 
w/ invalid char + self.assertRaises(Base16DecodeError, TOTP, 'X01c630c2184b076ce99', 'hex') + + # handle raw bytes + self.assertEqual(TOTP(KEY1_RAW, "raw").key, KEY1_RAW) + + def test_ctor_w_alg(self): + """constructor -- 'alg' parameter""" + + # normalize hash names + self.assertEqual(TOTP(KEY1, alg="SHA-256").alg, "sha256") + self.assertEqual(TOTP(KEY1, alg="SHA256").alg, "sha256") + + # invalid alg + self.assertRaises(ValueError, TOTP, KEY1, alg="SHA-333") + + def test_ctor_w_digits(self): + """constructor -- 'digits' parameter""" + self.assertRaises(ValueError, TOTP, KEY1, digits=5) + self.assertEqual(TOTP(KEY1, digits=6).digits, 6) # min value + self.assertEqual(TOTP(KEY1, digits=10).digits, 10) # max value + self.assertRaises(ValueError, TOTP, KEY1, digits=11) + + def test_ctor_w_period(self): + """constructor -- 'period' parameter""" + + # default + self.assertEqual(TOTP(KEY1).period, 30) + + # explicit value + self.assertEqual(TOTP(KEY1, period=63).period, 63) + + # reject wrong type + self.assertRaises(TypeError, TOTP, KEY1, period=1.5) + self.assertRaises(TypeError, TOTP, KEY1, period='abc') + + # reject non-positive values + self.assertRaises(ValueError, TOTP, KEY1, period=0) + self.assertRaises(ValueError, TOTP, KEY1, period=-1) + + def test_ctor_w_label(self): + """constructor -- 'label' parameter""" + self.assertEqual(TOTP(KEY1).label, None) + self.assertEqual(TOTP(KEY1, label="foo@bar").label, "foo@bar") + self.assertRaises(ValueError, TOTP, KEY1, label="foo:bar") + + def test_ctor_w_issuer(self): + """constructor -- 'issuer' parameter""" + self.assertEqual(TOTP(KEY1).issuer, None) + self.assertEqual(TOTP(KEY1, issuer="foo.com").issuer, "foo.com") + self.assertRaises(ValueError, TOTP, KEY1, issuer="foo.com:bar") + + #============================================================================= + # using() tests + #============================================================================= + + # TODO: test using() w/ 'digits', 'alg', 'issue', 'wallet', **wallet_kwds + + def test_using_w_period(self): + """using() -- 'period' parameter""" + + # default + self.assertEqual(TOTP(KEY1).period, 30) + + # explicit value + self.assertEqual(TOTP.using(period=63)(KEY1).period, 63) + + # reject wrong type + self.assertRaises(TypeError, TOTP.using, period=1.5) + self.assertRaises(TypeError, TOTP.using, period='abc') + + # reject non-positive values + self.assertRaises(ValueError, TOTP.using, period=0) + self.assertRaises(ValueError, TOTP.using, period=-1) + + def test_using_w_now(self): + """using -- 'now' parameter""" + + # NOTE: reading time w/ normalize_time() to make sure custom .now actually has effect. 
+ + # default -- time.time + otp = self.randotp() + self.assertIs(otp.now, _time.time) + self.assertAlmostEqual(otp.normalize_time(None), int(_time.time())) + + # custom function + counter = [123.12] + def now(): + counter[0] += 1 + return counter[0] + otp = self.randotp(cls=TOTP.using(now=now)) + # NOTE: TOTP() constructor invokes this as part of test, using up counter values 124 & 125 + self.assertEqual(otp.normalize_time(None), 126) + self.assertEqual(otp.normalize_time(None), 127) + + # require callable + self.assertRaises(TypeError, TOTP.using, now=123) + + # require returns int/float + msg_re = r"now\(\) function must return non-negative" + self.assertRaisesRegex(AssertionError, msg_re, TOTP.using, now=lambda: 'abc') + + # require returns non-negative value + self.assertRaisesRegex(AssertionError, msg_re, TOTP.using, now=lambda: -1) + + #============================================================================= + # internal method tests + #============================================================================= + + def test_normalize_token_instance(self, otp=None): + """normalize_token() -- instance method""" + if otp is None: + otp = self.randotp(digits=7) + + # unicode & bytes + self.assertEqual(otp.normalize_token(u('1234567')), '1234567') + self.assertEqual(otp.normalize_token(b'1234567'), '1234567') + + # int + self.assertEqual(otp.normalize_token(1234567), '1234567') + + # int which needs 0 padding + self.assertEqual(otp.normalize_token(234567), '0234567') + + # reject wrong types (float, None) + self.assertRaises(TypeError, otp.normalize_token, 1234567.0) + self.assertRaises(TypeError, otp.normalize_token, None) + + # too few digits + self.assertRaises(exc.MalformedTokenError, otp.normalize_token, '123456') + + # too many digits + self.assertRaises(exc.MalformedTokenError, otp.normalize_token, '01234567') + self.assertRaises(exc.MalformedTokenError, otp.normalize_token, 12345678) + + def test_normalize_token_class(self): + """normalize_token() -- class method""" + self.test_normalize_token_instance(otp=TOTP.using(digits=7)) + + def test_normalize_time(self): + """normalize_time()""" + TotpFactory = TOTP.using() + otp = self.randotp(TotpFactory) + + for _ in range(10): + time = self.randtime() + tint = int(time) + + self.assertEqual(otp.normalize_time(time), tint) + self.assertEqual(otp.normalize_time(tint + 0.5), tint) + + self.assertEqual(otp.normalize_time(tint), tint) + + dt = datetime.datetime.utcfromtimestamp(time) + self.assertEqual(otp.normalize_time(dt), tint) + + orig = TotpFactory.now + try: + TotpFactory.now = staticmethod(lambda: time) + self.assertEqual(otp.normalize_time(None), tint) + finally: + TotpFactory.now = orig + + self.assertRaises(TypeError, otp.normalize_time, '1234') + + #============================================================================= + # key attr tests + #============================================================================= + + def test_key_attrs(self): + """pretty_key() and .key attributes""" + rng = self.getRandom() + + # test key attrs + otp = TOTP(KEY1_RAW, "raw") + self.assertEqual(otp.key, KEY1_RAW) + self.assertEqual(otp.hex_key, 'e01c630c2184b076ce99') + self.assertEqual(otp.base32_key, KEY1) + + # test pretty_key() + self.assertEqual(otp.pretty_key(), '4AOG-GDBB-QSYH-NTUZ') + self.assertEqual(otp.pretty_key(sep=" "), '4AOG GDBB QSYH NTUZ') + self.assertEqual(otp.pretty_key(sep=False), KEY1) + self.assertEqual(otp.pretty_key(format="hex"), 'e01c-630c-2184-b076-ce99') + + # quick fuzz test: make attr access works 
for random key & random size + otp = TOTP(new=True, size=rng.randint(10, 20)) + _ = otp.hex_key + _ = otp.base32_key + _ = otp.pretty_key() + + #============================================================================= + # generate() tests + #============================================================================= + def test_totp_token(self): + """generate() -- TotpToken() class""" + from passlib.totp import TOTP, TotpToken + + # test known set of values + otp = TOTP('s3jdvb7qd2r7jpxx') + result = otp.generate(1419622739) + self.assertIsInstance(result, TotpToken) + self.assertEqual(result.token, '897212') + self.assertEqual(result.counter, 47320757) + ##self.assertEqual(result.start_time, 1419622710) + self.assertEqual(result.expire_time, 1419622740) + self.assertEqual(result, ('897212', 1419622740)) + self.assertEqual(len(result), 2) + self.assertEqual(result[0], '897212') + self.assertEqual(result[1], 1419622740) + self.assertRaises(IndexError, result.__getitem__, -3) + self.assertRaises(IndexError, result.__getitem__, 2) + self.assertTrue(result) + + # time dependant bits... + otp.now = lambda : 1419622739.5 + self.assertEqual(result.remaining, 0.5) + self.assertTrue(result.valid) + + otp.now = lambda : 1419622741 + self.assertEqual(result.remaining, 0) + self.assertFalse(result.valid) + + # same time -- shouldn't return same object, but should be equal + result2 = otp.generate(1419622739) + self.assertIsNot(result2, result) + self.assertEqual(result2, result) + + # diff time in period -- shouldn't return same object, but should be equal + result3 = otp.generate(1419622711) + self.assertIsNot(result3, result) + self.assertEqual(result3, result) + + # shouldn't be equal + result4 = otp.generate(1419622999) + self.assertNotEqual(result4, result) + + def test_generate(self): + """generate()""" + from passlib.totp import TOTP + + # generate token + otp = TOTP(new=True) + time = self.randtime() + result = otp.generate(time) + token = result.token + self.assertIsInstance(token, unicode) + start_time = result.counter * 30 + + # should generate same token for next 29s + self.assertEqual(otp.generate(start_time + 29).token, token) + + # and new one at 30s + self.assertNotEqual(otp.generate(start_time + 30).token, token) + + # verify round-trip conversion of datetime + dt = datetime.datetime.utcfromtimestamp(time) + self.assertEqual(int(otp.normalize_time(dt)), int(time)) + + # handle datetime object + self.assertEqual(otp.generate(dt).token, token) + + # omitting value should use current time + otp2 = TOTP.using(now=lambda: time)(key=otp.base32_key) + self.assertEqual(otp2.generate().token, token) + + # reject invalid time + self.assertRaises(ValueError, otp.generate, -1) + + def test_generate_w_reference_vectors(self): + """generate() -- reference vectors""" + for otp, time, token, expires, prefix in self.iter_test_vectors(): + # should output correct token for specified time + result = otp.generate(time) + self.assertEqual(result.token, token, msg=prefix) + self.assertEqual(result.counter, time // otp.period, msg=prefix) + if expires: + self.assertEqual(result.expire_time, expires) + + #============================================================================= + # TotpMatch() tests + #============================================================================= + + def assertTotpMatch(self, match, time, skipped=0, period=30, window=30, msg=''): + from passlib.totp import TotpMatch + + # test type + self.assertIsInstance(match, TotpMatch) + + # totp sanity check + 
self.assertIsInstance(match.totp, TOTP) + self.assertEqual(match.totp.period, period) + + # test attrs + self.assertEqual(match.time, time, msg=msg + " matched time:") + expected = time // period + counter = expected + skipped + self.assertEqual(match.counter, counter, msg=msg + " matched counter:") + self.assertEqual(match.expected_counter, expected, msg=msg + " expected counter:") + self.assertEqual(match.skipped, skipped, msg=msg + " skipped:") + self.assertEqual(match.cache_seconds, period + window) + expire_time = (counter + 1) * period + self.assertEqual(match.expire_time, expire_time) + self.assertEqual(match.cache_time, expire_time + window) + + # test tuple + self.assertEqual(len(match), 2) + self.assertEqual(match, (counter, time)) + self.assertRaises(IndexError, match.__getitem__, -3) + self.assertEqual(match[0], counter) + self.assertEqual(match[1], time) + self.assertRaises(IndexError, match.__getitem__, 2) + + # test bool + self.assertTrue(match) + + def test_totp_match_w_valid_token(self): + """match() -- valid TotpMatch object""" + time = 141230981 + token = '781501' + otp = TOTP.using(now=lambda: time + 24 * 3600)(KEY3) + result = otp.match(token, time) + self.assertTotpMatch(result, time=time, skipped=0) + + def test_totp_match_w_older_token(self): + """match() -- valid TotpMatch object with future token""" + from passlib.totp import TotpMatch + + time = 141230981 + token = '781501' + otp = TOTP.using(now=lambda: time + 24 * 3600)(KEY3) + result = otp.match(token, time - 30) + self.assertTotpMatch(result, time=time - 30, skipped=1) + + def test_totp_match_w_new_token(self): + """match() -- valid TotpMatch object with past token""" + time = 141230981 + token = '781501' + otp = TOTP.using(now=lambda: time + 24 * 3600)(KEY3) + result = otp.match(token, time + 30) + self.assertTotpMatch(result, time=time + 30, skipped=-1) + + def test_totp_match_w_invalid_token(self): + """match() -- invalid TotpMatch object""" + time = 141230981 + token = '781501' + otp = TOTP.using(now=lambda: time + 24 * 3600)(KEY3) + self.assertRaises(exc.InvalidTokenError, otp.match, token, time + 60) + + #============================================================================= + # match() tests + #============================================================================= + + def assertVerifyMatches(self, expect_skipped, token, time, # * + otp, gen_time=None, **kwds): + """helper to test otp.match() output is correct""" + # NOTE: TotpMatch return type tested more throughly above ^^^ + msg = "key=%r alg=%r period=%r token=%r gen_time=%r time=%r:" % \ + (otp.base32_key, otp.alg, otp.period, token, gen_time, time) + result = otp.match(token, time, **kwds) + self.assertTotpMatch(result, + time=otp.normalize_time(time), + period=otp.period, + window=kwds.get("window", 30), + skipped=expect_skipped, + msg=msg) + + def assertVerifyRaises(self, exc_class, token, time, # * + otp, gen_time=None, + **kwds): + """helper to test otp.match() throws correct error""" + # NOTE: TotpMatch return type tested more throughly above ^^^ + msg = "key=%r alg=%r period=%r token=%r gen_time=%r time=%r:" % \ + (otp.base32_key, otp.alg, otp.period, token, gen_time, time) + return self.assertRaises(exc_class, otp.match, token, time, + __msg__=msg, **kwds) + + def test_match_w_window(self): + """match() -- 'time' and 'window' parameters""" + + # init generator & helper + otp = self.randotp() + period = otp.period + time = self.randtime() + token = otp.generate(time).token + common = dict(otp=otp, gen_time=time) + assertMatches 
= partial(self.assertVerifyMatches, **common) + assertRaises = partial(self.assertVerifyRaises, **common) + + #------------------------------- + # basic validation, and 'window' parameter + #------------------------------- + + # validate against previous counter (passes if window >= period) + assertRaises(exc.InvalidTokenError, token, time - period, window=0) + assertMatches(+1, token, time - period, window=period) + assertMatches(+1, token, time - period, window=2 * period) + + # validate against current counter + assertMatches(0, token, time, window=0) + + # validate against next counter (passes if window >= period) + assertRaises(exc.InvalidTokenError, token, time + period, window=0) + assertMatches(-1, token, time + period, window=period) + assertMatches(-1, token, time + period, window=2 * period) + + # validate against two time steps later (should never pass) + assertRaises(exc.InvalidTokenError, token, time + 2 * period, window=0) + assertRaises(exc.InvalidTokenError, token, time + 2 * period, window=period) + assertMatches(-2, token, time + 2 * period, window=2 * period) + + # TODO: test window values that aren't multiples of period + # (esp ensure counter rounding works correctly) + + #------------------------------- + # time normalization + #------------------------------- + + # handle datetimes + dt = datetime.datetime.utcfromtimestamp(time) + assertMatches(0, token, dt, window=0) + + # reject invalid time + assertRaises(ValueError, token, -1) + + def test_match_w_skew(self): + """match() -- 'skew' parameters""" + # init generator & helper + otp = self.randotp() + period = otp.period + time = self.randtime() + common = dict(otp=otp, gen_time=time) + assertMatches = partial(self.assertVerifyMatches, **common) + assertRaises = partial(self.assertVerifyRaises, **common) + + # assume client is running far behind server / has excessive transmission delay + skew = 3 * period + behind_token = otp.generate(time - skew).token + assertRaises(exc.InvalidTokenError, behind_token, time, window=0) + assertMatches(-3, behind_token, time, window=0, skew=-skew) + + # assume client is running far ahead of server + ahead_token = otp.generate(time + skew).token + assertRaises(exc.InvalidTokenError, ahead_token, time, window=0) + assertMatches(+3, ahead_token, time, window=0, skew=skew) + + # TODO: test skew + larger window + + def test_match_w_reuse(self): + """match() -- 'reuse' and 'last_counter' parameters""" + + # init generator & helper + otp = self.randotp() + period = otp.period + time = self.randtime() + tdata = otp.generate(time) + token = tdata.token + counter = tdata.counter + expire_time = tdata.expire_time + common = dict(otp=otp, gen_time=time) + assertMatches = partial(self.assertVerifyMatches, **common) + assertRaises = partial(self.assertVerifyRaises, **common) + + # last counter unset -- + # previous period's token should count as valid + assertMatches(-1, token, time + period, window=period) + + # last counter set 2 periods ago -- + # previous period's token should count as valid + assertMatches(-1, token, time + period, last_counter=counter-1, + window=period) + + # last counter set 2 periods ago -- + # 2 periods ago's token should NOT count as valid + assertRaises(exc.InvalidTokenError, token, time + 2 * period, + last_counter=counter, window=period) + + # last counter set 1 period ago -- + # previous period's token should now be rejected as 'used' + err = assertRaises(exc.UsedTokenError, token, time + period, + last_counter=counter, window=period) + 
self.assertEqual(err.expire_time, expire_time) + + # last counter set to current period -- + # current period's token should be rejected + err = assertRaises(exc.UsedTokenError, token, time, + last_counter=counter, window=0) + self.assertEqual(err.expire_time, expire_time) + + def test_match_w_token_normalization(self): + """match() -- token normalization""" + # setup test helper + otp = TOTP('otxl2f5cctbprpzx') + match = otp.match + time = 1412889861 + + # separators / spaces should be stripped (orig token '332136') + self.assertTrue(match(' 3 32-136 ', time)) + + # ascii bytes + self.assertTrue(match(b'332136', time)) + + # too few digits + self.assertRaises(exc.MalformedTokenError, match, '12345', time) + + # invalid char + self.assertRaises(exc.MalformedTokenError, match, '12345X', time) + + # leading zeros count towards size + self.assertRaises(exc.MalformedTokenError, match, '0123456', time) + + def test_match_w_reference_vectors(self): + """match() -- reference vectors""" + for otp, time, token, expires, msg in self.iter_test_vectors(): + # create wrapper + match = otp.match + + # token should match against time + result = match(token, time) + self.assertTrue(result) + self.assertEqual(result.counter, time // otp.period, msg=msg) + + # should NOT match against another time + self.assertRaises(exc.InvalidTokenError, match, token, time + 100, window=0) + + #============================================================================= + # verify() tests + #============================================================================= + def test_verify(self): + """verify()""" + # NOTE: since this is thin wrapper around .from_source() and .match(), + # just testing basic behavior here. + + from passlib.totp import TOTP + + time = 1412889861 + TotpFactory = TOTP.using(now=lambda: time) + + # successful match + source1 = dict(v=1, type="totp", key='otxl2f5cctbprpzx') + match = TotpFactory.verify('332136', source1) + self.assertTotpMatch(match, time=time) + + # failed match + source1 = dict(v=1, type="totp", key='otxl2f5cctbprpzx') + self.assertRaises(exc.InvalidTokenError, TotpFactory.verify, '332155', source1) + + # bad source + source1 = dict(v=1, type="totp") + self.assertRaises(ValueError, TotpFactory.verify, '332155', source1) + + # successful match -- json source + source1json = '{"v": 1, "type": "totp", "key": "otxl2f5cctbprpzx"}' + match = TotpFactory.verify('332136', source1json) + self.assertTotpMatch(match, time=time) + + # successful match -- URI + source1uri = 'otpauth://totp/Label?secret=otxl2f5cctbprpzx' + match = TotpFactory.verify('332136', source1uri) + self.assertTotpMatch(match, time=time) + + #============================================================================= + # serialization frontend tests + #============================================================================= + def test_from_source(self): + """from_source()""" + from passlib.totp import TOTP + from_source = TOTP.from_source + + # uri (unicode) + otp = from_source(u("otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "issuer=Example")) + self.assertEqual(otp.key, KEY4_RAW) + + # uri (bytes) + otp = from_source(b"otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&" + b"issuer=Example") + self.assertEqual(otp.key, KEY4_RAW) + + # dict + otp = from_source(dict(v=1, type="totp", key=KEY4)) + self.assertEqual(otp.key, KEY4_RAW) + + # json (unicode) + otp = from_source(u('{"v": 1, "type": "totp", "key": "JBSWY3DPEHPK3PXP"}')) + self.assertEqual(otp.key, KEY4_RAW) + + # json 
(bytes) + otp = from_source(b'{"v": 1, "type": "totp", "key": "JBSWY3DPEHPK3PXP"}') + self.assertEqual(otp.key, KEY4_RAW) + + # TOTP object -- return unchanged + self.assertIs(from_source(otp), otp) + + # TOTP object w/ different wallet -- return new one. + wallet1 = AppWallet() + otp1 = TOTP.using(wallet=wallet1).from_source(otp) + self.assertIsNot(otp1, otp) + self.assertEqual(otp1.to_dict(), otp.to_dict()) + + # TOTP object w/ same wallet -- return original + otp2 = TOTP.using(wallet=wallet1).from_source(otp1) + self.assertIs(otp2, otp1) + + # random string + self.assertRaises(ValueError, from_source, u("foo")) + self.assertRaises(ValueError, from_source, b"foo") + + #============================================================================= + # uri serialization tests + #============================================================================= + def test_from_uri(self): + """from_uri()""" + from passlib.totp import TOTP + from_uri = TOTP.from_uri + + # URIs from https://code.google.com/p/google-authenticator/wiki/KeyUriFormat + + #-------------------------------------------------------------------------------- + # canonical uri + #-------------------------------------------------------------------------------- + otp = from_uri("otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "issuer=Example") + self.assertIsInstance(otp, TOTP) + self.assertEqual(otp.key, KEY4_RAW) + self.assertEqual(otp.label, "alice@google.com") + self.assertEqual(otp.issuer, "Example") + self.assertEqual(otp.alg, "sha1") # default + self.assertEqual(otp.period, 30) # default + self.assertEqual(otp.digits, 6) # default + + #-------------------------------------------------------------------------------- + # secret param + #-------------------------------------------------------------------------------- + + # secret case insensitive + otp = from_uri("otpauth://totp/Example:alice@google.com?secret=jbswy3dpehpk3pxp&" + "issuer=Example") + self.assertEqual(otp.key, KEY4_RAW) + + # missing secret + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?digits=6") + + # undecodable secret + self.assertRaises(Base32DecodeError, from_uri, "otpauth://totp/Example:alice@google.com?" + "secret=JBSWY3DPEHP@3PXP") + + #-------------------------------------------------------------------------------- + # label param + #-------------------------------------------------------------------------------- + + # w/ encoded space + otp = from_uri("otpauth://totp/Provider1:Alice%20Smith?secret=JBSWY3DPEHPK3PXP&" + "issuer=Provider1") + self.assertEqual(otp.label, "Alice Smith") + self.assertEqual(otp.issuer, "Provider1") + + # w/ encoded space and colon + # (note url has leading space before 'alice') -- taken from KeyURI spec + otp = from_uri("otpauth://totp/Big%20Corporation%3A%20alice@bigco.com?" 
+ "secret=JBSWY3DPEHPK3PXP") + self.assertEqual(otp.label, "alice@bigco.com") + self.assertEqual(otp.issuer, "Big Corporation") + + #-------------------------------------------------------------------------------- + # issuer param / prefix + #-------------------------------------------------------------------------------- + + # 'new style' issuer only + otp = from_uri("otpauth://totp/alice@bigco.com?secret=JBSWY3DPEHPK3PXP&issuer=Big%20Corporation") + self.assertEqual(otp.label, "alice@bigco.com") + self.assertEqual(otp.issuer, "Big Corporation") + + # new-vs-old issuer mismatch + self.assertRaises(ValueError, TOTP.from_uri, + "otpauth://totp/Provider1:alice?secret=JBSWY3DPEHPK3PXP&issuer=Provider2") + + #-------------------------------------------------------------------------------- + # algorithm param + #-------------------------------------------------------------------------------- + + # custom alg + otp = from_uri("otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&algorithm=SHA256") + self.assertEqual(otp.alg, "sha256") + + # unknown alg + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?" + "secret=JBSWY3DPEHPK3PXP&algorithm=SHA333") + + #-------------------------------------------------------------------------------- + # digit param + #-------------------------------------------------------------------------------- + + # custom digits + otp = from_uri("otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&digits=8") + self.assertEqual(otp.digits, 8) + + # digits out of range / invalid + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&digits=A") + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&digits=%20") + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&digits=15") + + #-------------------------------------------------------------------------------- + # period param + #-------------------------------------------------------------------------------- + + # custom period + otp = from_uri("otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&period=63") + self.assertEqual(otp.period, 63) + + # reject period < 1 + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?" + "secret=JBSWY3DPEHPK3PXP&period=0") + + self.assertRaises(ValueError, from_uri, "otpauth://totp/Example:alice@google.com?" 
+ "secret=JBSWY3DPEHPK3PXP&period=-1") + + #-------------------------------------------------------------------------------- + # unrecognized param + #-------------------------------------------------------------------------------- + + # should issue warning, but otherwise ignore extra param + with self.assertWarningList([ + dict(category=exc.PasslibRuntimeWarning, message_re="unexpected parameters encountered") + ]): + otp = from_uri("otpauth://totp/Example:alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "foo=bar&period=63") + self.assertEqual(otp.base32_key, KEY4) + self.assertEqual(otp.period, 63) + + def test_to_uri(self): + """to_uri()""" + + #------------------------------------------------------------------------- + # label & issuer parameters + #------------------------------------------------------------------------- + + # with label & issuer + otp = TOTP(KEY4, alg="sha1", digits=6, period=30) + self.assertEqual(otp.to_uri("alice@google.com", "Example Org"), + "otpauth://totp/Example%20Org:alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "issuer=Example%20Org") + + # label is required + self.assertRaises(ValueError, otp.to_uri, None, "Example Org") + + # with label only + self.assertEqual(otp.to_uri("alice@google.com"), + "otpauth://totp/alice@google.com?secret=JBSWY3DPEHPK3PXP") + + # with default label from constructor + otp.label = "alice@google.com" + self.assertEqual(otp.to_uri(), + "otpauth://totp/alice@google.com?secret=JBSWY3DPEHPK3PXP") + + # with default label & default issuer from constructor + otp.issuer = "Example Org" + self.assertEqual(otp.to_uri(), + "otpauth://totp/Example%20Org:alice@google.com?secret=JBSWY3DPEHPK3PXP" + "&issuer=Example%20Org") + + # reject invalid label + self.assertRaises(ValueError, otp.to_uri, "label:with:semicolons") + + # reject invalid issuer + self.assertRaises(ValueError, otp.to_uri, "alice@google.com", "issuer:with:semicolons") + + #------------------------------------------------------------------------- + # algorithm parameter + #------------------------------------------------------------------------- + self.assertEqual(TOTP(KEY4, alg="sha256").to_uri("alice@google.com"), + "otpauth://totp/alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "algorithm=SHA256") + + #------------------------------------------------------------------------- + # digits parameter + #------------------------------------------------------------------------- + self.assertEqual(TOTP(KEY4, digits=8).to_uri("alice@google.com"), + "otpauth://totp/alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "digits=8") + + #------------------------------------------------------------------------- + # period parameter + #------------------------------------------------------------------------- + self.assertEqual(TOTP(KEY4, period=63).to_uri("alice@google.com"), + "otpauth://totp/alice@google.com?secret=JBSWY3DPEHPK3PXP&" + "period=63") + + #============================================================================= + # dict serialization tests + #============================================================================= + def test_from_dict(self): + """from_dict()""" + from passlib.totp import TOTP + from_dict = TOTP.from_dict + + #-------------------------------------------------------------------------------- + # canonical simple example + #-------------------------------------------------------------------------------- + otp = from_dict(dict(v=1, type="totp", key=KEY4, label="alice@google.com", issuer="Example")) + self.assertIsInstance(otp, TOTP) + self.assertEqual(otp.key, KEY4_RAW) 
+ self.assertEqual(otp.label, "alice@google.com") + self.assertEqual(otp.issuer, "Example") + self.assertEqual(otp.alg, "sha1") # default + self.assertEqual(otp.period, 30) # default + self.assertEqual(otp.digits, 6) # default + + #-------------------------------------------------------------------------------- + # metadata + #-------------------------------------------------------------------------------- + + # missing version + self.assertRaises(ValueError, from_dict, dict(type="totp", key=KEY4)) + + # invalid version + self.assertRaises(ValueError, from_dict, dict(v=0, type="totp", key=KEY4)) + self.assertRaises(ValueError, from_dict, dict(v=999, type="totp", key=KEY4)) + + # missing type + self.assertRaises(ValueError, from_dict, dict(v=1, key=KEY4)) + + #-------------------------------------------------------------------------------- + # secret param + #-------------------------------------------------------------------------------- + + # secret case insensitive + otp = from_dict(dict(v=1, type="totp", key=KEY4.lower(), label="alice@google.com", issuer="Example")) + self.assertEqual(otp.key, KEY4_RAW) + + # missing secret + self.assertRaises(ValueError, from_dict, dict(v=1, type="totp")) + + # undecodable secret + self.assertRaises(Base32DecodeError, from_dict, + dict(v=1, type="totp", key="JBSWY3DPEHP@3PXP")) + + #-------------------------------------------------------------------------------- + # label & issuer params + #-------------------------------------------------------------------------------- + + otp = from_dict(dict(v=1, type="totp", key=KEY4, label="Alice Smith", issuer="Provider1")) + self.assertEqual(otp.label, "Alice Smith") + self.assertEqual(otp.issuer, "Provider1") + + #-------------------------------------------------------------------------------- + # algorithm param + #-------------------------------------------------------------------------------- + + # custom alg + otp = from_dict(dict(v=1, type="totp", key=KEY4, alg="sha256")) + self.assertEqual(otp.alg, "sha256") + + # unknown alg + self.assertRaises(ValueError, from_dict, dict(v=1, type="totp", key=KEY4, alg="sha333")) + + #-------------------------------------------------------------------------------- + # digit param + #-------------------------------------------------------------------------------- + + # custom digits + otp = from_dict(dict(v=1, type="totp", key=KEY4, digits=8)) + self.assertEqual(otp.digits, 8) + + # digits out of range / invalid + self.assertRaises(TypeError, from_dict, dict(v=1, type="totp", key=KEY4, digits="A")) + self.assertRaises(ValueError, from_dict, dict(v=1, type="totp", key=KEY4, digits=15)) + + #-------------------------------------------------------------------------------- + # period param + #-------------------------------------------------------------------------------- + + # custom period + otp = from_dict(dict(v=1, type="totp", key=KEY4, period=63)) + self.assertEqual(otp.period, 63) + + # reject period < 1 + self.assertRaises(ValueError, from_dict, dict(v=1, type="totp", key=KEY4, period=0)) + self.assertRaises(ValueError, from_dict, dict(v=1, type="totp", key=KEY4, period=-1)) + + #-------------------------------------------------------------------------------- + # unrecognized param + #-------------------------------------------------------------------------------- + self.assertRaises(TypeError, from_dict, dict(v=1, type="totp", key=KEY4, INVALID=123)) + + def test_to_dict(self): + """to_dict()""" + + 
#------------------------------------------------------------------------- + # label & issuer parameters + #------------------------------------------------------------------------- + + # without label or issuer + otp = TOTP(KEY4, alg="sha1", digits=6, period=30) + self.assertEqual(otp.to_dict(), dict(v=1, type="totp", key=KEY4)) + + # with label & issuer from constructor + otp = TOTP(KEY4, alg="sha1", digits=6, period=30, + label="alice@google.com", issuer="Example Org") + self.assertEqual(otp.to_dict(), + dict(v=1, type="totp", key=KEY4, + label="alice@google.com", issuer="Example Org")) + + # with label only + otp = TOTP(KEY4, alg="sha1", digits=6, period=30, + label="alice@google.com") + self.assertEqual(otp.to_dict(), + dict(v=1, type="totp", key=KEY4, + label="alice@google.com")) + + # with issuer only + otp = TOTP(KEY4, alg="sha1", digits=6, period=30, + issuer="Example Org") + self.assertEqual(otp.to_dict(), + dict(v=1, type="totp", key=KEY4, + issuer="Example Org")) + + # don't serialize default issuer + TotpFactory = TOTP.using(issuer="Example Org") + otp = TotpFactory(KEY4) + self.assertEqual(otp.to_dict(), dict(v=1, type="totp", key=KEY4)) + + # don't serialize default issuer *even if explicitly set* + otp = TotpFactory(KEY4, issuer="Example Org") + self.assertEqual(otp.to_dict(), dict(v=1, type="totp", key=KEY4)) + + #------------------------------------------------------------------------- + # algorithm parameter + #------------------------------------------------------------------------- + self.assertEqual(TOTP(KEY4, alg="sha256").to_dict(), + dict(v=1, type="totp", key=KEY4, alg="sha256")) + + #------------------------------------------------------------------------- + # digits parameter + #------------------------------------------------------------------------- + self.assertEqual(TOTP(KEY4, digits=8).to_dict(), + dict(v=1, type="totp", key=KEY4, digits=8)) + + #------------------------------------------------------------------------- + # period parameter + #------------------------------------------------------------------------- + self.assertEqual(TOTP(KEY4, period=63).to_dict(), + dict(v=1, type="totp", key=KEY4, period=63)) + + # TODO: to_dict() + # with encrypt=False + # with encrypt="auto" + wallet + secrets + # with encrypt="auto" + wallet + no secrets + # with encrypt="auto" + no wallet + # with encrypt=True + wallet + secrets + # with encrypt=True + wallet + no secrets + # with encrypt=True + no wallet + # that 'changed' is set for old versions, and old encryption tags. + + #============================================================================= + # json serialization tests + #============================================================================= + + # TODO: from_json() / to_json(). 
+ # (skipped for right now cause just wrapper for from_dict/to_dict) + + #============================================================================= + # eoc + #============================================================================= + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_utils.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_utils.py new file mode 100644 index 000000000..59ba160f2 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_utils.py @@ -0,0 +1,1171 @@ +"""tests for passlib.util""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +from functools import partial +import warnings +# site +# pkg +# module +from passlib.utils import is_ascii_safe, to_bytes +from passlib.utils.compat import irange, PY2, PY3, u, unicode, join_bytes, PYPY +from passlib.tests.utils import TestCase, hb, run_with_fixed_seeds + +#============================================================================= +# byte funcs +#============================================================================= +class MiscTest(TestCase): + """tests various parts of utils module""" + + # NOTE: could test xor_bytes(), but it's exercised well enough by pbkdf2 test + + def test_compat(self): + """test compat's lazymodule""" + from passlib.utils import compat + # "" + self.assertRegex(repr(compat), + r"^$") + + # test synthentic dir() + dir(compat) + self.assertTrue('UnicodeIO' in dir(compat)) + self.assertTrue('irange' in dir(compat)) + + def test_classproperty(self): + from passlib.utils.decor import classproperty + + class test(object): + xvar = 1 + @classproperty + def xprop(cls): + return cls.xvar + + self.assertEqual(test.xprop, 1) + prop = test.__dict__['xprop'] + self.assertIs(prop.im_func, prop.__func__) + + def test_deprecated_function(self): + from passlib.utils.decor import deprecated_function + # NOTE: not comprehensive, just tests the basic behavior + + @deprecated_function(deprecated="1.6", removed="1.8") + def test_func(*args): + """test docstring""" + return args + + self.assertTrue(".. deprecated::" in test_func.__doc__) + + with self.assertWarningList(dict(category=DeprecationWarning, + message="the function passlib.tests.test_utils.test_func() " + "is deprecated as of Passlib 1.6, and will be " + "removed in Passlib 1.8." 
+ )): + self.assertEqual(test_func(1,2), (1,2)) + + def test_memoized_property(self): + from passlib.utils.decor import memoized_property + + class dummy(object): + counter = 0 + + @memoized_property + def value(self): + value = self.counter + self.counter = value+1 + return value + + d = dummy() + self.assertEqual(d.value, 0) + self.assertEqual(d.value, 0) + self.assertEqual(d.counter, 1) + + prop = dummy.value + if not PY3: + self.assertIs(prop.im_func, prop.__func__) + + def test_getrandbytes(self): + """getrandbytes()""" + from passlib.utils import getrandbytes + wrapper = partial(getrandbytes, self.getRandom()) + self.assertEqual(len(wrapper(0)), 0) + a = wrapper(10) + b = wrapper(10) + self.assertIsInstance(a, bytes) + self.assertEqual(len(a), 10) + self.assertEqual(len(b), 10) + self.assertNotEqual(a, b) + + @run_with_fixed_seeds(count=1024) + def test_getrandstr(self, seed): + """getrandstr()""" + from passlib.utils import getrandstr + + wrapper = partial(getrandstr, self.getRandom(seed=seed)) + + # count 0 + self.assertEqual(wrapper('abc',0), '') + + # count <0 + self.assertRaises(ValueError, wrapper, 'abc', -1) + + # letters 0 + self.assertRaises(ValueError, wrapper, '', 0) + + # letters 1 + self.assertEqual(wrapper('a', 5), 'aaaaa') + + # NOTE: the following parts are non-deterministic, + # with a small chance of failure (outside chance it may pick + # a string w/o one char, even more remote chance of picking + # same string). to combat this, we run it against multiple + # fixed seeds (using run_with_fixed_seeds decorator), + # and hope that they're sufficient to test the range of behavior. + + # letters + x = wrapper(u('abc'), 32) + y = wrapper(u('abc'), 32) + self.assertIsInstance(x, unicode) + self.assertNotEqual(x,y) + self.assertEqual(sorted(set(x)), [u('a'),u('b'),u('c')]) + + # bytes + x = wrapper(b'abc', 32) + y = wrapper(b'abc', 32) + self.assertIsInstance(x, bytes) + self.assertNotEqual(x,y) + # NOTE: decoding this due to py3 bytes + self.assertEqual(sorted(set(x.decode("ascii"))), [u('a'),u('b'),u('c')]) + + def test_generate_password(self): + """generate_password()""" + from passlib.utils import generate_password + warnings.filterwarnings("ignore", "The function.*generate_password\(\) is deprecated") + self.assertEqual(len(generate_password(15)), 15) + + def test_is_crypt_context(self): + """test is_crypt_context()""" + from passlib.utils import is_crypt_context + from passlib.context import CryptContext + cc = CryptContext(["des_crypt"]) + self.assertTrue(is_crypt_context(cc)) + self.assertFalse(not is_crypt_context(cc)) + + def test_genseed(self): + """test genseed()""" + import random + from passlib.utils import genseed + rng = random.Random(genseed()) + a = rng.randint(0, 10**10) + + rng = random.Random(genseed()) + b = rng.randint(0, 10**10) + + self.assertNotEqual(a,b) + + rng.seed(genseed(rng)) + + def test_crypt(self): + """test crypt.crypt() wrappers""" + from passlib.utils import has_crypt, safe_crypt, test_crypt + from passlib.registry import get_supported_os_crypt_schemes, get_crypt_handler + + # test everything is disabled + supported = get_supported_os_crypt_schemes() + if not has_crypt: + self.assertEqual(supported, ()) + self.assertEqual(safe_crypt("test", "aa"), None) + self.assertFalse(test_crypt("test", "aaqPiZY5xR5l.")) # des_crypt() hash of "test" + raise self.skipTest("crypt.crypt() not available") + + # expect there to be something supported, if crypt() is present + if not supported: + # NOTE: failures here should be investigated. 
usually means one of: + # 1) at least one of passlib's os_crypt detection routines is giving false negative + # 2) crypt() ONLY supports some hash alg which passlib doesn't know about + # 3) crypt() is present but completely disabled (never encountered this yet) + raise self.fail("crypt() present, but no supported schemes found!") + + # pick cheap alg if possible, with minimum rounds, to speed up this test. + # NOTE: trusting hasher class works properly (should have been verified using it's own UTs) + for scheme in ("md5_crypt", "sha256_crypt"): + if scheme in supported: + break + else: + scheme = supported[-1] + hasher = get_crypt_handler(scheme) + if getattr(hasher, "min_rounds", None): + hasher = hasher.using(rounds=hasher.min_rounds) + + # helpers to generate hashes & config strings to work with + def get_hash(secret): + assert isinstance(secret, unicode) + hash = hasher.hash(secret) + if isinstance(hash, bytes): # py2 + hash = hash.decode("utf-8") + assert isinstance(hash, unicode) + return hash + + # test ascii password & return type + s1 = u("test") + h1 = get_hash(s1) + result = safe_crypt(s1, h1) + self.assertIsInstance(result, unicode) + self.assertEqual(result, h1) + self.assertEqual(safe_crypt(to_bytes(s1), to_bytes(h1)), h1) + + # make sure crypt doesn't just blindly return h1 for whatever we pass in + h1x = h1[:-2] + 'xx' + self.assertEqual(safe_crypt(s1, h1x), h1) + + # test utf-8 / unicode password + s2 = u('test\u1234') + h2 = get_hash(s2) + self.assertEqual(safe_crypt(s2, h2), h2) + self.assertEqual(safe_crypt(to_bytes(s2), to_bytes(h2)), h2) + + # test rejects null chars in password + self.assertRaises(ValueError, safe_crypt, '\x00', h1) + + # check test_crypt() + self.assertTrue(test_crypt("test", h1)) + self.assertFalse(test_crypt("test", h1x)) + + # check crypt returning variant error indicators + # some platforms return None on errors, others empty string, + # The BSDs in some cases return ":" + import passlib.utils as mod + orig = mod._crypt + try: + retval = None + mod._crypt = lambda secret, hash: retval + + for retval in [None, "", ":", ":0", "*0"]: + self.assertEqual(safe_crypt("test", h1), None) + self.assertFalse(test_crypt("test", h1)) + + retval = 'xxx' + self.assertEqual(safe_crypt("test", h1), "xxx") + self.assertFalse(test_crypt("test", h1)) + + finally: + mod._crypt = orig + + def test_consteq(self): + """test consteq()""" + # NOTE: this test is kind of over the top, but that's only because + # this is used for the critical task of comparing hashes for equality. + from passlib.utils import consteq, str_consteq + + # ensure error raises for wrong types + self.assertRaises(TypeError, consteq, u(''), b'') + self.assertRaises(TypeError, consteq, u(''), 1) + self.assertRaises(TypeError, consteq, u(''), None) + + self.assertRaises(TypeError, consteq, b'', u('')) + self.assertRaises(TypeError, consteq, b'', 1) + self.assertRaises(TypeError, consteq, b'', None) + + self.assertRaises(TypeError, consteq, None, u('')) + self.assertRaises(TypeError, consteq, None, b'') + self.assertRaises(TypeError, consteq, 1, u('')) + self.assertRaises(TypeError, consteq, 1, b'') + + def consteq_supports_string(value): + # under PY2, it supports all unicode strings (when present at all), + # under PY3, compare_digest() only supports ascii unicode strings. 
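Aside on the consteq() behaviour exercised here: on interpreters where it is available, passlib's consteq() maps onto the standard library's hmac.compare_digest() (the per-platform notes above refer to that primitive), with a pure-Python loop as fallback. A minimal, illustrative sketch of the constant-time check these tests rely on; the helper name ct_equal is invented for the example and is not part of passlib:

    import hmac

    def ct_equal(a, b):
        # constant-time equality for two byte strings: the comparison time does
        # not depend on where the inputs first differ
        return hmac.compare_digest(a, b)

    assert ct_equal(b"deadbeef", b"deadbeef")
    assert not ct_equal(b"deadbeef", b"deadbeee")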
+ # confirmed for: cpython 2.7.9, cpython 3.4, pypy, pypy3, pyston + return (consteq is str_consteq or PY2 or is_ascii_safe(value)) + + # check equal inputs compare correctly + for value in [ + u("a"), + u("abc"), + u("\xff\xa2\x12\x00")*10, + ]: + if consteq_supports_string(value): + self.assertTrue(consteq(value, value), "value %r:" % (value,)) + else: + self.assertRaises(TypeError, consteq, value, value) + self.assertTrue(str_consteq(value, value), "value %r:" % (value,)) + + value = value.encode("latin-1") + self.assertTrue(consteq(value, value), "value %r:" % (value,)) + + # check non-equal inputs compare correctly + for l,r in [ + # check same-size comparisons with differing contents fail. + (u("a"), u("c")), + (u("abcabc"), u("zbaabc")), + (u("abcabc"), u("abzabc")), + (u("abcabc"), u("abcabz")), + ((u("\xff\xa2\x12\x00")*10)[:-1] + u("\x01"), + u("\xff\xa2\x12\x00")*10), + + # check different-size comparisons fail. + (u(""), u("a")), + (u("abc"), u("abcdef")), + (u("abc"), u("defabc")), + (u("qwertyuiopasdfghjklzxcvbnm"), u("abc")), + ]: + if consteq_supports_string(l) and consteq_supports_string(r): + self.assertFalse(consteq(l, r), "values %r %r:" % (l,r)) + self.assertFalse(consteq(r, l), "values %r %r:" % (r,l)) + else: + self.assertRaises(TypeError, consteq, l, r) + self.assertRaises(TypeError, consteq, r, l) + self.assertFalse(str_consteq(l, r), "values %r %r:" % (l,r)) + self.assertFalse(str_consteq(r, l), "values %r %r:" % (r,l)) + + l = l.encode("latin-1") + r = r.encode("latin-1") + self.assertFalse(consteq(l, r), "values %r %r:" % (l,r)) + self.assertFalse(consteq(r, l), "values %r %r:" % (r,l)) + + # TODO: add some tests to ensure we take THETA(strlen) time. + # this might be hard to do reproducably. + # NOTE: below code was used to generate stats for analysis + ##from math import log as logb + ##import timeit + ##multipliers = [ 1< encode() -> decode() -> raw + # + + # generate some random bytes + size = rng.randint(1 if saw_zero else 0, 12) + if not size: + saw_zero = True + enc_size = (4*size+2)//3 + raw = getrandbytes(rng, size) + + # encode them, check invariants + encoded = engine.encode_bytes(raw) + self.assertEqual(len(encoded), enc_size) + + # make sure decode returns original + result = engine.decode_bytes(encoded) + self.assertEqual(result, raw) + + # + # test encoded -> decode() -> encode() -> encoded + # + + # generate some random encoded data + if size % 4 == 1: + size += rng.choice([-1,1,2]) + raw_size = 3*size//4 + encoded = getrandstr(rng, engine.bytemap, size) + + # decode them, check invariants + raw = engine.decode_bytes(encoded) + self.assertEqual(len(raw), raw_size, "encoded %d:" % size) + + # make sure encode returns original (barring padding bits) + result = engine.encode_bytes(raw) + if size % 4: + self.assertEqual(result[:-1], encoded[:-1]) + else: + self.assertEqual(result, encoded) + + def test_repair_unused(self): + """test repair_unused()""" + # NOTE: this test relies on encode_bytes() always returning clear + # padding bits - which should be ensured by test vectors. 
+ from passlib.utils import getrandstr + rng = self.getRandom() + engine = self.engine + check_repair_unused = self.engine.check_repair_unused + i = 0 + while i < 300: + size = rng.randint(0,23) + cdata = getrandstr(rng, engine.charmap, size).encode("ascii") + if size & 3 == 1: + # should throw error + self.assertRaises(ValueError, check_repair_unused, cdata) + continue + rdata = engine.encode_bytes(engine.decode_bytes(cdata)) + if rng.random() < .5: + cdata = cdata.decode("ascii") + rdata = rdata.decode("ascii") + if cdata == rdata: + # should leave unchanged + ok, result = check_repair_unused(cdata) + self.assertFalse(ok) + self.assertEqual(result, rdata) + else: + # should repair bits + self.assertNotEqual(size % 4, 0) + ok, result = check_repair_unused(cdata) + self.assertTrue(ok) + self.assertEqual(result, rdata) + i += 1 + + #=================================================================== + # test transposed encode/decode - encoding independant + #=================================================================== + # NOTE: these tests assume normal encode/decode has been tested elsewhere. + + transposed = [ + # orig, result, transpose map + (b"\x33\x22\x11", b"\x11\x22\x33",[2,1,0]), + (b"\x22\x33\x11", b"\x11\x22\x33",[1,2,0]), + ] + + transposed_dups = [ + # orig, result, transpose projection + (b"\x11\x11\x22", b"\x11\x22\x33",[0,0,1]), + ] + + def test_encode_transposed_bytes(self): + """test encode_transposed_bytes()""" + engine = self.engine + for result, input, offsets in self.transposed + self.transposed_dups: + tmp = engine.encode_transposed_bytes(input, offsets) + out = engine.decode_bytes(tmp) + self.assertEqual(out, result) + + self.assertRaises(TypeError, engine.encode_transposed_bytes, u("a"), []) + + def test_decode_transposed_bytes(self): + """test decode_transposed_bytes()""" + engine = self.engine + for input, result, offsets in self.transposed: + tmp = engine.encode_bytes(input) + out = engine.decode_transposed_bytes(tmp, offsets) + self.assertEqual(out, result) + + def test_decode_transposed_bytes_bad(self): + """test decode_transposed_bytes() fails if map is a one-way""" + engine = self.engine + for input, _, offsets in self.transposed_dups: + tmp = engine.encode_bytes(input) + self.assertRaises(TypeError, engine.decode_transposed_bytes, tmp, + offsets) + + #=================================================================== + # test 6bit handling + #=================================================================== + def check_int_pair(self, bits, encoded_pairs): + """helper to check encode_intXX & decode_intXX functions""" + rng = self.getRandom() + engine = self.engine + encode = getattr(engine, "encode_int%s" % bits) + decode = getattr(engine, "decode_int%s" % bits) + pad = -bits % 6 + chars = (bits+pad)//6 + upper = 1< block_size, and wrong type + self.assertRaises(ValueError, helper, keylen=-1) + self.assertRaises(ValueError, helper, keylen=17, hash='md5') + self.assertRaises(TypeError, helper, keylen='1') + +#============================================================================= +# test PBKDF2 support +#============================================================================= +class Pbkdf2_Test(TestCase): + """test pbkdf2() support""" + descriptionPrefix = "passlib.utils.pbkdf2.pbkdf2()" + + pbkdf2_test_vectors = [ + # (result, secret, salt, rounds, keylen, prf="sha1") + + # + # from rfc 3962 + # + + # test case 1 / 128 bit + ( + hb("cdedb5281bb2f801565a1122b2563515"), + b"password", b"ATHENA.MIT.EDUraeburn", 1, 16 + ), + + # test case 2 / 
128 bit + ( + hb("01dbee7f4a9e243e988b62c73cda935d"), + b"password", b"ATHENA.MIT.EDUraeburn", 2, 16 + ), + + # test case 2 / 256 bit + ( + hb("01dbee7f4a9e243e988b62c73cda935da05378b93244ec8f48a99e61ad799d86"), + b"password", b"ATHENA.MIT.EDUraeburn", 2, 32 + ), + + # test case 3 / 256 bit + ( + hb("5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13"), + b"password", b"ATHENA.MIT.EDUraeburn", 1200, 32 + ), + + # test case 4 / 256 bit + ( + hb("d1daa78615f287e6a1c8b120d7062a493f98d203e6be49a6adf4fa574b6e64ee"), + b"password", b'\x12\x34\x56\x78\x78\x56\x34\x12', 5, 32 + ), + + # test case 5 / 256 bit + ( + hb("139c30c0966bc32ba55fdbf212530ac9c5ec59f1a452f5cc9ad940fea0598ed1"), + b"X"*64, b"pass phrase equals block size", 1200, 32 + ), + + # test case 6 / 256 bit + ( + hb("9ccad6d468770cd51b10e6a68721be611a8b4d282601db3b36be9246915ec82a"), + b"X"*65, b"pass phrase exceeds block size", 1200, 32 + ), + + # + # from rfc 6070 + # + ( + hb("0c60c80f961f0e71f3a9b524af6012062fe037a6"), + b"password", b"salt", 1, 20, + ), + + ( + hb("ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957"), + b"password", b"salt", 2, 20, + ), + + ( + hb("4b007901b765489abead49d926f721d065a429c1"), + b"password", b"salt", 4096, 20, + ), + + # just runs too long - could enable if ALL option is set + ##( + ## + ## unhexlify("eefe3d61cd4da4e4e9945b3d6ba2158c2634e984"), + ## "password", "salt", 16777216, 20, + ##), + + ( + hb("3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038"), + b"passwordPASSWORDpassword", + b"saltSALTsaltSALTsaltSALTsaltSALTsalt", + 4096, 25, + ), + + ( + hb("56fa6aa75548099dcc37d7f03425e0c3"), + b"pass\00word", b"sa\00lt", 4096, 16, + ), + + # + # from example in http://grub.enbug.org/Authentication + # + ( + hb("887CFF169EA8335235D8004242AA7D6187A41E3187DF0CE14E256D85ED" + "97A97357AAA8FF0A3871AB9EEFF458392F462F495487387F685B7472FC" + "6C29E293F0A0"), + b"hello", + hb("9290F727ED06C38BA4549EF7DE25CF5642659211B7FC076F2D28FEFD71" + "784BB8D8F6FB244A8CC5C06240631B97008565A120764C0EE9C2CB0073" + "994D79080136"), + 10000, 64, "hmac-sha512" + ), + + # + # custom + # + ( + hb('e248fb6b13365146f8ac6307cc222812'), + b"secret", b"salt", 10, 16, "hmac-sha1", + ), + ( + hb('e248fb6b13365146f8ac6307cc2228127872da6d'), + b"secret", b"salt", 10, None, "hmac-sha1", + ), + + ] + + def setUp(self): + super(Pbkdf2_Test, self).setUp() + warnings.filterwarnings("ignore", ".*passlib.utils.pbkdf2.*deprecated", DeprecationWarning) + + def test_known(self): + """test reference vectors""" + from passlib.utils.pbkdf2 import pbkdf2 + for row in self.pbkdf2_test_vectors: + correct, secret, salt, rounds, keylen = row[:5] + prf = row[5] if len(row) == 6 else "hmac-sha1" + result = pbkdf2(secret, salt, rounds, keylen, prf) + self.assertEqual(result, correct) + + def test_border(self): + """test border cases""" + from passlib.utils.pbkdf2 import pbkdf2 + def helper(secret=b'password', salt=b'salt', rounds=1, keylen=None, prf="hmac-sha1"): + return pbkdf2(secret, salt, rounds, keylen, prf) + helper() + + # invalid rounds + self.assertRaises(ValueError, helper, rounds=-1) + self.assertRaises(ValueError, helper, rounds=0) + self.assertRaises(TypeError, helper, rounds='x') + + # invalid keylen + self.assertRaises(ValueError, helper, keylen=-1) + self.assertRaises(ValueError, helper, keylen=0) + helper(keylen=1) + self.assertRaises(OverflowError, helper, keylen=20*(2**32-1)+1) + self.assertRaises(TypeError, helper, keylen='x') + + # invalid secret/salt type + self.assertRaises(TypeError, helper, salt=5) + self.assertRaises(TypeError, 
helper, secret=5) + + # invalid hash + self.assertRaises(ValueError, helper, prf='hmac-foo') + self.assertRaises(NotImplementedError, helper, prf='foo') + self.assertRaises(TypeError, helper, prf=5) + + def test_default_keylen(self): + """test keylen==None""" + from passlib.utils.pbkdf2 import pbkdf2 + def helper(secret=b'password', salt=b'salt', rounds=1, keylen=None, prf="hmac-sha1"): + return pbkdf2(secret, salt, rounds, keylen, prf) + self.assertEqual(len(helper(prf='hmac-sha1')), 20) + self.assertEqual(len(helper(prf='hmac-sha256')), 32) + + def test_custom_prf(self): + """test custom prf function""" + from passlib.utils.pbkdf2 import pbkdf2 + def prf(key, msg): + return hashlib.md5(key+msg+b'fooey').digest() + self.assertRaises(NotImplementedError, pbkdf2, b'secret', b'salt', 1000, 20, prf) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/test_win32.py b/ansible/lib/python3.11/site-packages/passlib/tests/test_win32.py new file mode 100644 index 000000000..e818b62b9 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/test_win32.py @@ -0,0 +1,50 @@ +"""tests for passlib.win32 -- (c) Assurance Technologies 2003-2009""" +#============================================================================= +# imports +#============================================================================= +# core +import warnings +# site +# pkg +from passlib.tests.utils import TestCase +# module +from passlib.utils.compat import u + +#============================================================================= +# +#============================================================================= +class UtilTest(TestCase): + """test util funcs in passlib.win32""" + + ##test hashes from http://msdn.microsoft.com/en-us/library/cc245828(v=prot.10).aspx + ## among other places + + def setUp(self): + super(UtilTest, self).setUp() + warnings.filterwarnings("ignore", + "the 'passlib.win32' module is deprecated") + + def test_lmhash(self): + from passlib.win32 import raw_lmhash + for secret, hash in [ + ("OLDPASSWORD", u("c9b81d939d6fd80cd408e6b105741864")), + ("NEWPASSWORD", u('09eeab5aa415d6e4d408e6b105741864')), + ("welcome", u("c23413a8a1e7665faad3b435b51404ee")), + ]: + result = raw_lmhash(secret, hex=True) + self.assertEqual(result, hash) + + def test_nthash(self): + warnings.filterwarnings("ignore", + r"nthash\.raw_nthash\(\) is deprecated") + from passlib.win32 import raw_nthash + for secret, hash in [ + ("OLDPASSWORD", u("6677b2c394311355b54f25eec5bfacf5")), + ("NEWPASSWORD", u("256781a62031289d3c2c98c14f1efc8c")), + ]: + result = raw_nthash(secret, hex=True) + self.assertEqual(result, hash) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/tox_support.py b/ansible/lib/python3.11/site-packages/passlib/tests/tox_support.py new file mode 100644 index 000000000..43170bc40 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/tox_support.py @@ -0,0 +1,83 @@ +"""passlib.tests.tox_support - helper script for tox tests""" +#============================================================================= +# init script env +#============================================================================= +import os, sys 
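Aside on the RFC 6070 vectors quoted above: they can be cross-checked with the standard library's hashlib.pbkdf2_hmac(), independently of the deprecated passlib.utils.pbkdf2 wrapper under test. A minimal sketch using one of the vectors listed earlier:

    import hashlib
    from binascii import hexlify

    # RFC 6070 vector: P="password", S="salt", c=4096, dkLen=20, PRF=HMAC-SHA1
    dk = hashlib.pbkdf2_hmac("sha1", b"password", b"salt", 4096, dklen=20)
    assert hexlify(dk) == b"4b007901b765489abead49d926f721d065a429c1"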
+root_dir = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir) +sys.path.insert(0, root_dir) + +#============================================================================= +# imports +#============================================================================= +# core +import re +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils.compat import print_ +# local +__all__ = [ +] + +#============================================================================= +# main +#============================================================================= +TH_PATH = "passlib.tests.test_handlers" + +def do_hash_tests(*args): + """return list of hash algorithm tests that match regexes""" + if not args: + print(TH_PATH) + return + suffix = '' + args = list(args) + while True: + if args[0] == "--method": + suffix = '.' + args[1] + del args[:2] + else: + break + from passlib.tests import test_handlers + names = [TH_PATH + ":" + name + suffix for name in dir(test_handlers) + if not name.startswith("_") and any(re.match(arg,name) for arg in args)] + print_("\n".join(names)) + return not names + +def do_preset_tests(name): + """return list of preset test names""" + if name == "django" or name == "django-hashes": + do_hash_tests("django_.*_test", "hex_md5_test") + if name == "django": + print_("passlib.tests.test_ext_django") + else: + raise ValueError("unknown name: %r" % name) + +def do_setup_gae(path, runtime): + """write fake GAE ``app.yaml`` to current directory so nosegae will work""" + from passlib.tests.utils import set_file + set_file(os.path.join(path, "app.yaml"), """\ +application: fake-app +version: 2 +runtime: %s +api_version: 1 +threadsafe: no + +handlers: +- url: /.* + script: dummy.py + +libraries: +- name: django + version: "latest" +""" % runtime) + +def main(cmd, *args): + return globals()["do_" + cmd](*args) + +if __name__ == "__main__": + import sys + sys.exit(main(*sys.argv[1:]) or 0) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/tests/utils.py b/ansible/lib/python3.11/site-packages/passlib/tests/utils.py new file mode 100644 index 000000000..79a9f9fc4 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/tests/utils.py @@ -0,0 +1,3621 @@ +"""helpers for passlib unittests""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +from binascii import unhexlify +import contextlib +from functools import wraps, partial +import hashlib +import logging; log = logging.getLogger(__name__) +import random +import re +import os +import sys +import tempfile +import threading +import time +from passlib.exc import PasslibHashWarning, PasslibConfigWarning +from passlib.utils.compat import PY3, JYTHON +import warnings +from warnings import warn +# site +# pkg +from passlib import exc +from passlib.exc import MissingBackendError +import passlib.registry as registry +from passlib.tests.backports import TestCase as _TestCase, skip, skipIf, skipUnless, SkipTest +from passlib.utils import has_rounds_info, has_salt_info, rounds_cost_values, \ + rng as sys_rng, getrandstr, is_ascii_safe, to_native_str, \ + repeat_string, tick, batch +from passlib.utils.compat import iteritems, irange, u, unicode, PY2, nullcontext 
+from passlib.utils.decor import classproperty +import passlib.utils.handlers as uh +# local +__all__ = [ + # util funcs + 'TEST_MODE', + 'set_file', 'get_file', + + # unit testing + 'TestCase', + 'HandlerCase', +] + +#============================================================================= +# environment detection +#============================================================================= +# figure out if we're running under GAE; +# some tests (e.g. FS writing) should be skipped. +# XXX: is there better way to do this? +try: + import google.appengine +except ImportError: + GAE = False +else: + GAE = True + +def ensure_mtime_changed(path): + """ensure file's mtime has changed""" + # NOTE: this is hack to deal w/ filesystems whose mtime resolution is >= 1s, + # when a test needs to be sure the mtime changed after writing to the file. + last = os.path.getmtime(path) + while os.path.getmtime(path) == last: + time.sleep(0.1) + os.utime(path, None) + +def _get_timer_resolution(timer): + def sample(): + start = cur = timer() + while start == cur: + cur = timer() + return cur-start + return min(sample() for _ in range(3)) +TICK_RESOLUTION = _get_timer_resolution(tick) + +#============================================================================= +# test mode +#============================================================================= +_TEST_MODES = ["quick", "default", "full"] +_test_mode = _TEST_MODES.index(os.environ.get("PASSLIB_TEST_MODE", + "default").strip().lower()) + +def TEST_MODE(min=None, max=None): + """check if test for specified mode should be enabled. + + ``"quick"`` + run the bare minimum tests to ensure functionality. + variable-cost hashes are tested at their lowest setting. + hash algorithms are only tested against the backend that will + be used on the current host. no fuzz testing is done. + + ``"default"`` + same as ``"quick"``, except: hash algorithms are tested + at default levels, and a brief round of fuzz testing is done + for each hash. + + ``"full"`` + extra regression and internal tests are enabled, hash algorithms are tested + against all available backends, unavailable ones are mocked whre possible, + additional time is devoted to fuzz testing. + """ + if min and _test_mode < _TEST_MODES.index(min): + return False + if max and _test_mode > _TEST_MODES.index(max): + return False + return True + +#============================================================================= +# hash object inspection +#============================================================================= +def has_relaxed_setting(handler): + """check if handler supports 'relaxed' kwd""" + # FIXME: I've been lazy, should probably just add 'relaxed' kwd + # to all handlers that derive from GenericHandler + + # ignore wrapper classes for now.. though could introspec. 
+ if hasattr(handler, "orig_prefix"): + return False + + return 'relaxed' in handler.setting_kwds or issubclass(handler, + uh.GenericHandler) + +def get_effective_rounds(handler, rounds=None): + """get effective rounds value from handler""" + handler = unwrap_handler(handler) + return handler(rounds=rounds, use_defaults=True).rounds + +def is_default_backend(handler, backend): + """check if backend is the default for source""" + try: + orig = handler.get_backend() + except MissingBackendError: + return False + try: + handler.set_backend("default") + return handler.get_backend() == backend + finally: + handler.set_backend(orig) + +def iter_alt_backends(handler, current=None, fallback=False): + """ + iterate over alternate backends available to handler. + + .. warning:: + not thread-safe due to has_backend() call + """ + if current is None: + current = handler.get_backend() + backends = handler.backends + idx = backends.index(current)+1 if fallback else 0 + for backend in backends[idx:]: + if backend != current and handler.has_backend(backend): + yield backend + +def get_alt_backend(*args, **kwds): + for backend in iter_alt_backends(*args, **kwds): + return backend + return None + +def unwrap_handler(handler): + """return original handler, removing any wrapper objects""" + while hasattr(handler, "wrapped"): + handler = handler.wrapped + return handler + +def handler_derived_from(handler, base): + """ + test if was derived from via . + """ + # XXX: need way to do this more formally via ifc, + # for now just hacking in the cases we encounter in testing. + if handler == base: + return True + elif isinstance(handler, uh.PrefixWrapper): + while handler: + if handler == base: + return True + # helper set by PrefixWrapper().using() just for this case... + handler = handler._derived_from + return False + elif isinstance(handler, type) and issubclass(handler, uh.MinimalHandler): + return issubclass(handler, base) + else: + raise NotImplementedError("don't know how to inspect handler: %r" % (handler,)) + +@contextlib.contextmanager +def patch_calc_min_rounds(handler): + """ + internal helper for do_config_encrypt() -- + context manager which temporarily replaces handler's _calc_checksum() + with one that uses min_rounds; useful when trying to generate config + with high rounds value, but don't care if output is correct. + """ + if isinstance(handler, type) and issubclass(handler, uh.HasRounds): + # XXX: also require GenericHandler for this branch? 
+ wrapped = handler._calc_checksum + def wrapper(self, *args, **kwds): + rounds = self.rounds + try: + self.rounds = self.min_rounds + return wrapped(self, *args, **kwds) + finally: + self.rounds = rounds + handler._calc_checksum = wrapper + try: + yield + finally: + handler._calc_checksum = wrapped + elif isinstance(handler, uh.PrefixWrapper): + with patch_calc_min_rounds(handler.wrapped): + yield + else: + yield + return + +#============================================================================= +# misc helpers +#============================================================================= +def set_file(path, content): + """set file to specified bytes""" + if isinstance(content, unicode): + content = content.encode("utf-8") + with open(path, "wb") as fh: + fh.write(content) + +def get_file(path): + """read file as bytes""" + with open(path, "rb") as fh: + return fh.read() + +def tonn(source): + """convert native string to non-native string""" + if not isinstance(source, str): + return source + elif PY3: + return source.encode("utf-8") + else: + try: + return source.decode("utf-8") + except UnicodeDecodeError: + return source.decode("latin-1") + +def hb(source): + """ + helper for represent byte strings in hex. + + usage: ``hb("deadbeef23")`` + """ + return unhexlify(re.sub(r"\s", "", source)) + +def limit(value, lower, upper): + if value < lower: + return lower + elif value > upper: + return upper + return value + +def quicksleep(delay): + """because time.sleep() doesn't even have 10ms accuracy on some OSes""" + start = tick() + while tick()-start < delay: + pass + +def time_call(func, setup=None, maxtime=1, bestof=10): + """ + timeit() wrapper which tries to get as accurate a measurement as possible w/in maxtime seconds. + + :returns: + ``(avg_seconds_per_call, log10_number_of_repetitions)`` + """ + from timeit import Timer + from math import log + timer = Timer(func, setup=setup or '') + number = 1 + end = tick() + maxtime + while True: + delta = min(timer.repeat(bestof, number)) + if tick() >= end: + return delta/number, int(log(number, 10)) + number *= 10 + +def run_with_fixed_seeds(count=128, master_seed=0x243F6A8885A308D3): + """ + decorator run test method w/ multiple fixed seeds. + """ + def builder(func): + @wraps(func) + def wrapper(*args, **kwds): + rng = random.Random(master_seed) + for _ in irange(count): + kwds['seed'] = rng.getrandbits(32) + func(*args, **kwds) + return wrapper + return builder + +#============================================================================= +# custom test harness +#============================================================================= + +class TestCase(_TestCase): + """passlib-specific test case class + + this class adds a number of features to the standard TestCase... 
+ * common prefix for all test descriptions + * resets warnings filter & registry for every test + * tweaks to message formatting + * __msg__ kwd added to assertRaises() + * suite of methods for matching against warnings + """ + #=================================================================== + # add various custom features + #=================================================================== + + #--------------------------------------------------------------- + # make it easy for test cases to add common prefix to shortDescription + #--------------------------------------------------------------- + + # string prepended to all tests in TestCase + descriptionPrefix = None + + def shortDescription(self): + """wrap shortDescription() method to prepend descriptionPrefix""" + desc = super(TestCase, self).shortDescription() + prefix = self.descriptionPrefix + if prefix: + desc = "%s: %s" % (prefix, desc or str(self)) + return desc + + #--------------------------------------------------------------- + # hack things so nose and ut2 both skip subclasses who have + # "__unittest_skip=True" set, or whose names start with "_" + #--------------------------------------------------------------- + @classproperty + def __unittest_skip__(cls): + # NOTE: this attr is technically a unittest2 internal detail. + name = cls.__name__ + return name.startswith("_") or \ + getattr(cls, "_%s__unittest_skip" % name, False) + + @classproperty + def __test__(cls): + # make nose just proxy __unittest_skip__ + return not cls.__unittest_skip__ + + # flag to skip *this* class + __unittest_skip = True + + #--------------------------------------------------------------- + # reset warning filters & registry before each test + #--------------------------------------------------------------- + + # flag to reset all warning filters & ignore state + resetWarningState = True + + def setUp(self): + super(TestCase, self).setUp() + self.setUpWarnings() + # have uh.debug_only_repr() return real values for duration of test + self.patchAttr(exc, "ENABLE_DEBUG_ONLY_REPR", True) + + def setUpWarnings(self): + """helper to init warning filters before subclass setUp()""" + if self.resetWarningState: + ctx = reset_warnings() + ctx.__enter__() + self.addCleanup(ctx.__exit__) + + # ignore security warnings, tests may deliberately cause these + # TODO: may want to filter out a few of this, but not blanket filter... + # warnings.filterwarnings("ignore", category=exc.PasslibSecurityWarning) + + # ignore warnings about PasswordHash features deprecated in 1.7 + # TODO: should be cleaned in 2.0, when support will be dropped. + # should be kept until then, so we test the legacy paths. + warnings.filterwarnings("ignore", r"the method .*\.(encrypt|genconfig|genhash)\(\) is deprecated") + warnings.filterwarnings("ignore", r"the 'vary_rounds' option is deprecated") + warnings.filterwarnings("ignore", r"Support for `(py-bcrypt|bcryptor)` is deprecated") + + #--------------------------------------------------------------- + # tweak message formatting so longMessage mode is only enabled + # if msg ends with ":", and turn on longMessage by default. 
+ #--------------------------------------------------------------- + longMessage = True + + def _formatMessage(self, msg, std): + if self.longMessage and msg and msg.rstrip().endswith(":"): + return '%s %s' % (msg.rstrip(), std) + else: + return msg or std + + #--------------------------------------------------------------- + # override assertRaises() to support '__msg__' keyword, + # and to return the caught exception for further examination + #--------------------------------------------------------------- + def assertRaises(self, _exc_type, _callable=None, *args, **kwds): + msg = kwds.pop("__msg__", None) + if _callable is None: + # FIXME: this ignores 'msg' + return super(TestCase, self).assertRaises(_exc_type, None, + *args, **kwds) + try: + result = _callable(*args, **kwds) + except _exc_type as err: + return err + std = "function returned %r, expected it to raise %r" % (result, + _exc_type) + raise self.failureException(self._formatMessage(msg, std)) + + #--------------------------------------------------------------- + # forbid a bunch of deprecated aliases so I stop using them + #--------------------------------------------------------------- + def assertEquals(self, *a, **k): + raise AssertionError("this alias is deprecated by unittest2") + assertNotEquals = assertRegexMatches = assertEquals + + #=================================================================== + # custom methods for matching warnings + #=================================================================== + def assertWarning(self, warning, + message_re=None, message=None, + category=None, + filename_re=None, filename=None, + lineno=None, + msg=None, + ): + """check if warning matches specified parameters. + 'warning' is the instance of Warning to match against; + can also be instance of WarningMessage (as returned by catch_warnings). + """ + # check input type + if hasattr(warning, "category"): + # resolve WarningMessage -> Warning, but preserve original + wmsg = warning + warning = warning.message + else: + # no original WarningMessage, passed raw Warning + wmsg = None + + # tests that can use a warning instance or WarningMessage object + if message: + self.assertEqual(str(warning), message, msg) + if message_re: + self.assertRegex(str(warning), message_re, msg) + if category: + self.assertIsInstance(warning, category, msg) + + # tests that require a WarningMessage object + if filename or filename_re: + if not wmsg: + raise TypeError("matching on filename requires a " + "WarningMessage instance") + real = wmsg.filename + if real.endswith(".pyc") or real.endswith(".pyo"): + # FIXME: should use a stdlib call to resolve this back + # to module's original filename. 
+ real = real[:-1] + if filename: + self.assertEqual(real, filename, msg) + if filename_re: + self.assertRegex(real, filename_re, msg) + if lineno: + if not wmsg: + raise TypeError("matching on lineno requires a " + "WarningMessage instance") + self.assertEqual(wmsg.lineno, lineno, msg) + + class _AssertWarningList(warnings.catch_warnings): + """context manager for assertWarningList()""" + def __init__(self, case, **kwds): + self.case = case + self.kwds = kwds + self.__super = super(TestCase._AssertWarningList, self) + self.__super.__init__(record=True) + + def __enter__(self): + self.log = self.__super.__enter__() + + def __exit__(self, *exc_info): + self.__super.__exit__(*exc_info) + if exc_info[0] is None: + self.case.assertWarningList(self.log, **self.kwds) + + def assertWarningList(self, wlist=None, desc=None, msg=None): + """check that warning list (e.g. from catch_warnings) matches pattern""" + if desc is None: + assert wlist is not None + return self._AssertWarningList(self, desc=wlist, msg=msg) + # TODO: make this display better diff of *which* warnings did not match + assert desc is not None + if not isinstance(desc, (list,tuple)): + desc = [desc] + for idx, entry in enumerate(desc): + if isinstance(entry, str): + entry = dict(message_re=entry) + elif isinstance(entry, type) and issubclass(entry, Warning): + entry = dict(category=entry) + elif not isinstance(entry, dict): + raise TypeError("entry must be str, warning, or dict") + try: + data = wlist[idx] + except IndexError: + break + self.assertWarning(data, msg=msg, **entry) + else: + if len(wlist) == len(desc): + return + std = "expected %d warnings, found %d: wlist=%s desc=%r" % \ + (len(desc), len(wlist), self._formatWarningList(wlist), desc) + raise self.failureException(self._formatMessage(msg, std)) + + def consumeWarningList(self, wlist, desc=None, *args, **kwds): + """[deprecated] assertWarningList() variant that clears list afterwards""" + if desc is None: + desc = [] + self.assertWarningList(wlist, desc, *args, **kwds) + del wlist[:] + + def _formatWarning(self, entry): + tail = "" + if hasattr(entry, "message"): + # WarningMessage instance. 
+ tail = " filename=%r lineno=%r" % (entry.filename, entry.lineno) + if entry.line: + tail += " line=%r" % (entry.line,) + entry = entry.message + cls = type(entry) + return "<%s.%s message=%r%s>" % (cls.__module__, cls.__name__, + str(entry), tail) + + def _formatWarningList(self, wlist): + return "[%s]" % ", ".join(self._formatWarning(entry) for entry in wlist) + + #=================================================================== + # capability tests + #=================================================================== + def require_stringprep(self): + """helper to skip test if stringprep is missing""" + from passlib.utils import stringprep + if not stringprep: + from passlib.utils import _stringprep_missing_reason + raise self.skipTest("not available - stringprep module is " + + _stringprep_missing_reason) + + def require_TEST_MODE(self, level): + """skip test for all PASSLIB_TEST_MODE values below """ + if not TEST_MODE(level): + raise self.skipTest("requires >= %r test mode" % level) + + def require_writeable_filesystem(self): + """skip test if writeable FS not available""" + if GAE: + return self.skipTest("GAE doesn't offer read/write filesystem access") + + #=================================================================== + # reproducible random helpers + #=================================================================== + + #: global thread lock for random state + #: XXX: could split into global & per-instance locks if need be + _random_global_lock = threading.Lock() + + #: cache of global seed value, initialized on first call to getRandom() + _random_global_seed = None + + #: per-instance cache of name -> RNG + _random_cache = None + + def getRandom(self, name="default", seed=None): + """ + Return a :class:`random.Random` object for current test method to use. + Within an instance, multiple calls with the same name will return + the same object. + + When first created, each RNG will be seeded with value derived from + a global seed, the test class module & name, the current test method name, + and the **name** parameter. + + The global seed taken from the $RANDOM_TEST_SEED env var, + the $PYTHONHASHSEED env var, or a randomly generated the + first time this method is called. In all cases, the value + is logged for reproducibility. + + :param name: + name to uniquely identify separate RNGs w/in a test + (e.g. for threaded tests). + + :param seed: + override global seed when initialzing rng. + + :rtype: random.Random + """ + # check cache + cache = self._random_cache + if cache and name in cache: + return cache[name] + + with self._random_global_lock: + + # check cache again, and initialize it + cache = self._random_cache + if cache and name in cache: + return cache[name] + elif not cache: + cache = self._random_cache = {} + + # init global seed + global_seed = seed or TestCase._random_global_seed + if global_seed is None: + # NOTE: checking PYTHONHASHSEED, because if that's set, + # the test runner wants something reproducible. + global_seed = TestCase._random_global_seed = \ + int(os.environ.get("RANDOM_TEST_SEED") or + os.environ.get("PYTHONHASHSEED") or + sys_rng.getrandbits(32)) + # XXX: would it be better to print() this? 
+ log.info("using RANDOM_TEST_SEED=%d", global_seed) + + # create seed + cls = type(self) + source = "\n".join([str(global_seed), cls.__module__, cls.__name__, + self._testMethodName, name]) + digest = hashlib.sha256(source.encode("utf-8")).hexdigest() + seed = int(digest[:16], 16) + + # create rng + value = cache[name] = random.Random(seed) + return value + + #=================================================================== + # subtests + #=================================================================== + + has_real_subtest = hasattr(_TestCase, "subTest") + + @contextlib.contextmanager + def subTest(self, *args, **kwds): + """ + wrapper/backport for .subTest() which also traps SkipTest errors. + (see source for details) + """ + # this function works around two things: + # * TestCase.subTest() wasn't added until Py34; so for older python versions, + # we either need unittest2 installed, or provide stub of our own. + # this method provides a stub if needed (based on .has_real_subtest check) + # + # * as 2020-10-08, .subTest() doesn't play nicely w/ .skipTest(); + # and also makes it hard to debug which subtest had a failure. + # (see https://bugs.python.org/issue25894 and https://bugs.python.org/issue35327) + # this method traps skipTest exceptions, and adds some logging to help debug + # which subtest caused the issue. + + # setup way to log subtest info + # XXX: would like better way to inject messages into test output; + # but this at least gets us something for debugging... + # NOTE: this hack will miss parent params if called from nested .subTest() + def _render_title(_msg=None, **params): + out = ("[%s] " % _msg if _msg else "") + if params: + out += "(%s)" % " ".join("%s=%r" % tuple(item) for item in params.items()) + return out.strip() or "" + + test_log = self.getLogger() + title = _render_title(*args, **kwds) + + # use real subtest manager if available + if self.has_real_subtest: + ctx = super(TestCase, self).subTest(*args, **kwds) + else: + ctx = nullcontext() + + # run the subtest + with ctx: + test_log.info("running subtest: %s", title) + try: + yield + except SkipTest: + # silence "SkipTest" exceptions, want to keep running next subtest. + test_log.info("subtest skipped: %s", title) + pass + except Exception as err: + # log unhandled exception occurred + # (assuming traceback will be reported up higher, so not bothering here) + test_log.warning("subtest failed: %s: %s: %r", title, type(err).__name__, str(err)) + raise + + # XXX: check for "failed" state in ``self._outcome`` before writing this? 
+ test_log.info("subtest passed: %s", title) + + #=================================================================== + # other + #=================================================================== + _mktemp_queue = None + + def mktemp(self, *args, **kwds): + """create temp file that's cleaned up at end of test""" + self.require_writeable_filesystem() + fd, path = tempfile.mkstemp(*args, **kwds) + os.close(fd) + queue = self._mktemp_queue + if queue is None: + queue = self._mktemp_queue = [] + def cleaner(): + for path in queue: + if os.path.exists(path): + os.remove(path) + del queue[:] + self.addCleanup(cleaner) + queue.append(path) + return path + + def patchAttr(self, obj, attr, value, require_existing=True, wrap=False): + """monkeypatch object value, restoring original value on cleanup""" + try: + orig = getattr(obj, attr) + except AttributeError: + if require_existing: + raise + def cleanup(): + try: + delattr(obj, attr) + except AttributeError: + pass + self.addCleanup(cleanup) + else: + self.addCleanup(setattr, obj, attr, orig) + if wrap: + value = partial(value, orig) + wraps(orig)(value) + setattr(obj, attr, value) + + def getLogger(self): + """ + return logger named after current test. + """ + cls = type(self) + # NOTE: conditional on qualname for PY2 compat + path = cls.__module__ + "." + getattr(cls, "__qualname__", cls.__name__) + name = self._testMethodName + if name: + path = path + "." + name + return logging.getLogger(path) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# other unittest helpers +#============================================================================= + +RESERVED_BACKEND_NAMES = ["any", "default"] + + +def doesnt_require_backend(func): + """ + decorator for HandlerCase.create_backend_case() -- + used to decorate methods that should be run even if backend isn't present + (by default, full test suite is skipped when backend is missing) + + NOTE: tests decorated with this should not rely on handler have expected (or any!) backend. + """ + func._doesnt_require_backend = True + return func + + +class HandlerCase(TestCase): + """base class for testing password hash handlers (esp passlib.utils.handlers subclasses) + + In order to use this to test a handler, + create a subclass will all the appropriate attributes + filled as listed in the example below, + and run the subclass via unittest. + + .. todo:: + + Document all of the options HandlerCase offers. + + .. note:: + + This is subclass of :class:`unittest.TestCase` + (or :class:`unittest2.TestCase` if available). 
+ """ + #=================================================================== + # class attrs - should be filled in by subclass + #=================================================================== + + #--------------------------------------------------------------- + # handler setup + #--------------------------------------------------------------- + + # handler class to test [required] + handler = None + + # if set, run tests against specified backend + backend = None + + #--------------------------------------------------------------- + # test vectors + #--------------------------------------------------------------- + + # list of (secret, hash) tuples which are known to be correct + known_correct_hashes = [] + + # list of (config, secret, hash) tuples are known to be correct + known_correct_configs = [] + + # list of (alt_hash, secret, hash) tuples, where alt_hash is a hash + # using an alternate representation that should be recognized and verify + # correctly, but should be corrected to match hash when passed through + # genhash() + known_alternate_hashes = [] + + # hashes so malformed they aren't even identified properly + known_unidentified_hashes = [] + + # hashes which are identifiabled but malformed - they should identify() + # as True, but cause an error when passed to genhash/verify. + known_malformed_hashes = [] + + # list of (handler name, hash) pairs for other algorithm's hashes that + # handler shouldn't identify as belonging to it this list should generally + # be sufficient (if handler name in list, that entry will be skipped) + known_other_hashes = [ + ('des_crypt', '6f8c114b58f2c'), + ('md5_crypt', '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.'), + ('sha512_crypt', "$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywW" + "vt0RLE8uZ4oPwcelCjmw2kSYu.Ec6ycULevoBK25fs2xXgMNrCzIMVcgEJAstJeonj1"), + ] + + # passwords used to test basic hash behavior - generally + # don't need to be overidden. + stock_passwords = [ + u("test"), + u("\u20AC\u00A5$"), + b'\xe2\x82\xac\xc2\xa5$' + ] + + #--------------------------------------------------------------- + # option flags + #--------------------------------------------------------------- + + # whether hash is case insensitive + # True, False, or special value "verify-only" (which indicates + # hash contains case-sensitive portion, but verifies is case-insensitive) + secret_case_insensitive = False + + # flag if scheme accepts ALL hash strings (e.g. plaintext) + accepts_all_hashes = False + + # flag if scheme has "is_disabled" set, and contains 'salted' data + disabled_contains_salt = False + + # flag/hack to filter PasslibHashWarning issued by test_72_configs() + filter_config_warnings = False + + # forbid certain characters in passwords + @classproperty + def forbidden_characters(cls): + # anything that supports crypt() interface should forbid null chars, + # since crypt() uses null-terminated strings. 
+ if 'os_crypt' in getattr(cls.handler, "backends", ()): + return b"\x00" + return None + + #=================================================================== + # internal class attrs + #=================================================================== + __unittest_skip = True + + @property + def descriptionPrefix(self): + handler = self.handler + name = handler.name + if hasattr(handler, "get_backend"): + name += " (%s backend)" % (handler.get_backend(),) + return name + + #=================================================================== + # support methods + #=================================================================== + + #--------------------------------------------------------------- + # configuration helpers + #--------------------------------------------------------------- + @classmethod + def iter_known_hashes(cls): + """iterate through known (secret, hash) pairs""" + for secret, hash in cls.known_correct_hashes: + yield secret, hash + for config, secret, hash in cls.known_correct_configs: + yield secret, hash + for alt, secret, hash in cls.known_alternate_hashes: + yield secret, hash + + def get_sample_hash(self): + """test random sample secret/hash pair""" + known = list(self.iter_known_hashes()) + return self.getRandom().choice(known) + + #--------------------------------------------------------------- + # test helpers + #--------------------------------------------------------------- + def check_verify(self, secret, hash, msg=None, negate=False): + """helper to check verify() outcome, honoring is_disabled_handler""" + result = self.do_verify(secret, hash) + self.assertTrue(result is True or result is False, + "verify() returned non-boolean value: %r" % (result,)) + if self.handler.is_disabled or negate: + if not result: + return + if not msg: + msg = ("verify incorrectly returned True: secret=%r, hash=%r" % + (secret, hash)) + raise self.failureException(msg) + else: + if result: + return + if not msg: + msg = "verify failed: secret=%r, hash=%r" % (secret, hash) + raise self.failureException(msg) + + def check_returned_native_str(self, result, func_name): + self.assertIsInstance(result, str, + "%s() failed to return native string: %r" % (func_name, result,)) + + #--------------------------------------------------------------- + # PasswordHash helpers - wraps all calls to PasswordHash api, + # so that subclasses can fill in defaults and account for other specialized behavior + #--------------------------------------------------------------- + def populate_settings(self, kwds): + """subclassable method to populate default settings""" + # use lower rounds settings for certain test modes + handler = self.handler + if 'rounds' in handler.setting_kwds and 'rounds' not in kwds: + mn = handler.min_rounds + df = handler.default_rounds + if TEST_MODE(max="quick"): + # use minimum rounds for quick mode + kwds['rounds'] = max(3, mn) + else: + # use default/16 otherwise + factor = 3 + if getattr(handler, "rounds_cost", None) == "log2": + df -= factor + else: + df //= (1<= 1") + + # check min_salt_size + if cls.min_salt_size < 0: + raise AssertionError("min_salt_chars must be >= 0") + if mx_set and cls.min_salt_size > cls.max_salt_size: + raise AssertionError("min_salt_chars must be <= max_salt_chars") + + # check default_salt_size + if cls.default_salt_size < cls.min_salt_size: + raise AssertionError("default_salt_size must be >= min_salt_size") + if mx_set and cls.default_salt_size > cls.max_salt_size: + raise AssertionError("default_salt_size must be <= max_salt_size") + + # 
check for 'salt_size' keyword + # NOTE: skipping warning if default salt size is already maxed out + # (might change that in future) + if 'salt_size' not in cls.setting_kwds and (not mx_set or cls.default_salt_size < cls.max_salt_size): + warn('%s: hash handler supports range of salt sizes, ' + 'but doesn\'t offer \'salt_size\' setting' % (cls.name,)) + + # check salt_chars & default_salt_chars + if cls.salt_chars: + if not cls.default_salt_chars: + raise AssertionError("default_salt_chars must not be empty") + for c in cls.default_salt_chars: + if c not in cls.salt_chars: + raise AssertionError("default_salt_chars must be subset of salt_chars: %r not in salt_chars" % (c,)) + else: + if not cls.default_salt_chars: + raise AssertionError("default_salt_chars MUST be specified if salt_chars is empty") + + @property + def salt_bits(self): + """calculate number of salt bits in hash""" + # XXX: replace this with bitsize() method? + handler = self.handler + assert has_salt_info(handler), "need explicit bit-size for " + handler.name + from math import log + # FIXME: this may be off for case-insensitive hashes, but that accounts + # for ~1 bit difference, which is good enough for test_11() + return int(handler.default_salt_size * + log(len(handler.default_salt_chars), 2)) + + def test_11_unique_salt(self): + """test hash() / genconfig() creates new salt each time""" + self.require_salt() + # odds of picking 'n' identical salts at random is '(.5**salt_bits)**n'. + # we want to pick the smallest N needed s.t. odds are <1/10**d, just + # to eliminate false-positives. which works out to n>3.33+d-salt_bits. + # for 1/1e12 odds, n=1 is sufficient for most hashes, but a few border cases (e.g. + # cisco_type7) have < 16 bits of salt, requiring more. + samples = max(1, 4 + 12 - self.salt_bits) + + def sampler(func): + value1 = func() + for _ in irange(samples): + value2 = func() + if value1 != value2: + return + raise self.failureException("failed to find different salt after " + "%d samples" % (samples,)) + sampler(self.do_genconfig) + sampler(lambda: self.do_encrypt("stub")) + + def test_12_min_salt_size(self): + """test hash() / genconfig() honors min_salt_size""" + self.require_salt_info() + + handler = self.handler + salt_char = handler.salt_chars[0:1] + min_size = handler.min_salt_size + + # + # check min is accepted + # + s1 = salt_char * min_size + self.do_genconfig(salt=s1) + + self.do_encrypt('stub', salt_size=min_size) + + # + # check min-1 is rejected + # + if min_size > 0: + self.assertRaises(ValueError, self.do_genconfig, + salt=s1[:-1]) + + self.assertRaises(ValueError, self.do_encrypt, 'stub', + salt_size=min_size-1) + + def test_13_max_salt_size(self): + """test hash() / genconfig() honors max_salt_size""" + self.require_salt_info() + + handler = self.handler + max_size = handler.max_salt_size + salt_char = handler.salt_chars[0:1] + + # NOTE: skipping this for hashes like argon2 since max_salt_size takes WAY too much memory + if max_size is None or max_size > (1 << 20): + # + # if it's not set, salt should never be truncated; so test it + # with an unreasonably large salt. 
+ # + s1 = salt_char * 1024 + c1 = self.do_stub_encrypt(salt=s1) + c2 = self.do_stub_encrypt(salt=s1 + salt_char) + self.assertNotEqual(c1, c2) + + self.do_stub_encrypt(salt_size=1024) + + else: + # + # check max size is accepted + # + s1 = salt_char * max_size + c1 = self.do_stub_encrypt(salt=s1) + + self.do_stub_encrypt(salt_size=max_size) + + # + # check max size + 1 is rejected + # + s2 = s1 + salt_char + self.assertRaises(ValueError, self.do_stub_encrypt, salt=s2) + + self.assertRaises(ValueError, self.do_stub_encrypt, salt_size=max_size + 1) + + # + # should accept too-large salt in relaxed mode + # + if has_relaxed_setting(handler): + with warnings.catch_warnings(record=True): # issues passlibhandlerwarning + c2 = self.do_stub_encrypt(salt=s2, relaxed=True) + self.assertEqual(c2, c1) + + # + # if min_salt supports it, check smaller than mx is NOT truncated + # + if handler.min_salt_size < max_size: + c3 = self.do_stub_encrypt(salt=s1[:-1]) + self.assertNotEqual(c3, c1) + + # whether salt should be passed through bcrypt repair function + fuzz_salts_need_bcrypt_repair = False + + def prepare_salt(self, salt): + """prepare generated salt""" + if self.fuzz_salts_need_bcrypt_repair: + from passlib.utils.binary import bcrypt64 + salt = bcrypt64.repair_unused(salt) + return salt + + def test_14_salt_chars(self): + """test hash() honors salt_chars""" + self.require_salt_info() + + handler = self.handler + mx = handler.max_salt_size + mn = handler.min_salt_size + cs = handler.salt_chars + raw = isinstance(cs, bytes) + + # make sure all listed chars are accepted + for salt in batch(cs, mx or 32): + if len(salt) < mn: + salt = repeat_string(salt, mn) + salt = self.prepare_salt(salt) + self.do_stub_encrypt(salt=salt) + + # check some invalid salt chars, make sure they're rejected + source = u('\x00\xff') + if raw: + source = source.encode("latin-1") + chunk = max(mn, 1) + for c in source: + if c not in cs: + self.assertRaises(ValueError, self.do_stub_encrypt, salt=c*chunk, + __msg__="invalid salt char %r:" % (c,)) + + @property + def salt_type(self): + """hack to determine salt keyword's datatype""" + # NOTE: cisco_type7 uses 'int' + if getattr(self.handler, "_salt_is_bytes", False): + return bytes + else: + return unicode + + def test_15_salt_type(self): + """test non-string salt values""" + self.require_salt() + salt_type = self.salt_type + salt_size = getattr(self.handler, "min_salt_size", 0) or 8 + + # should always throw error for random class. + class fake(object): + pass + self.assertRaises(TypeError, self.do_encrypt, 'stub', salt=fake()) + + # unicode should be accepted only if salt_type is unicode. + if salt_type is not unicode: + self.assertRaises(TypeError, self.do_encrypt, 'stub', salt=u('x') * salt_size) + + # bytes should be accepted only if salt_type is bytes, + # OR if salt type is unicode and running PY2 - to allow native strings. 
+ if not (salt_type is bytes or (PY2 and salt_type is unicode)): + self.assertRaises(TypeError, self.do_encrypt, 'stub', salt=b'x' * salt_size) + + def test_using_salt_size(self): + """Handler.using() -- default_salt_size""" + self.require_salt_info() + + handler = self.handler + mn = handler.min_salt_size + mx = handler.max_salt_size + df = handler.default_salt_size + + # should prevent setting below handler limit + self.assertRaises(ValueError, handler.using, default_salt_size=-1) + with self.assertWarningList([PasslibHashWarning]): + temp = handler.using(default_salt_size=-1, relaxed=True) + self.assertEqual(temp.default_salt_size, mn) + + # should prevent setting above handler limit + if mx: + self.assertRaises(ValueError, handler.using, default_salt_size=mx+1) + with self.assertWarningList([PasslibHashWarning]): + temp = handler.using(default_salt_size=mx+1, relaxed=True) + self.assertEqual(temp.default_salt_size, mx) + + # try setting to explicit value + if mn != mx: + temp = handler.using(default_salt_size=mn+1) + self.assertEqual(temp.default_salt_size, mn+1) + self.assertEqual(handler.default_salt_size, df) + + temp = handler.using(default_salt_size=mn+2) + self.assertEqual(temp.default_salt_size, mn+2) + self.assertEqual(handler.default_salt_size, df) + + # accept strings + if mn == mx: + ref = mn + else: + ref = mn + 1 + temp = handler.using(default_salt_size=str(ref)) + self.assertEqual(temp.default_salt_size, ref) + + # reject invalid strings + self.assertRaises(ValueError, handler.using, default_salt_size=str(ref) + "xxx") + + # honor 'salt_size' alias + temp = handler.using(salt_size=ref) + self.assertEqual(temp.default_salt_size, ref) + + #=================================================================== + # rounds + #=================================================================== + def require_rounds_info(self): + if not has_rounds_info(self.handler): + raise self.skipTest("handler lacks rounds attributes") + + def test_20_optional_rounds_attributes(self): + """validate optional rounds attributes""" + self.require_rounds_info() + + cls = self.handler + AssertionError = self.failureException + + # check max_rounds + if cls.max_rounds is None: + raise AssertionError("max_rounds not specified") + if cls.max_rounds < 1: + raise AssertionError("max_rounds must be >= 1") + + # check min_rounds + if cls.min_rounds < 0: + raise AssertionError("min_rounds must be >= 0") + if cls.min_rounds > cls.max_rounds: + raise AssertionError("min_rounds must be <= max_rounds") + + # check default_rounds + if cls.default_rounds is not None: + if cls.default_rounds < cls.min_rounds: + raise AssertionError("default_rounds must be >= min_rounds") + if cls.default_rounds > cls.max_rounds: + raise AssertionError("default_rounds must be <= max_rounds") + + # check rounds_cost + if cls.rounds_cost not in rounds_cost_values: + raise AssertionError("unknown rounds cost constant: %r" % (cls.rounds_cost,)) + + def test_21_min_rounds(self): + """test hash() / genconfig() honors min_rounds""" + self.require_rounds_info() + handler = self.handler + min_rounds = handler.min_rounds + + # check min is accepted + self.do_genconfig(rounds=min_rounds) + self.do_encrypt('stub', rounds=min_rounds) + + # check min-1 is rejected + self.assertRaises(ValueError, self.do_genconfig, rounds=min_rounds-1) + self.assertRaises(ValueError, self.do_encrypt, 'stub', rounds=min_rounds-1) + + # TODO: check relaxed mode clips min-1 + + def test_21b_max_rounds(self): + """test hash() / genconfig() honors max_rounds""" + 
self.require_rounds_info() + handler = self.handler + max_rounds = handler.max_rounds + + if max_rounds is not None: + # check max+1 is rejected + self.assertRaises(ValueError, self.do_genconfig, rounds=max_rounds+1) + self.assertRaises(ValueError, self.do_encrypt, 'stub', rounds=max_rounds+1) + + # handle max rounds + if max_rounds is None: + self.do_stub_encrypt(rounds=(1 << 31) - 1) + else: + self.do_stub_encrypt(rounds=max_rounds) + + # TODO: check relaxed mode clips max+1 + + #-------------------------------------------------------------------------------------- + # HasRounds.using() / .needs_update() -- desired rounds limits + #-------------------------------------------------------------------------------------- + def _create_using_rounds_helper(self): + """ + setup test helpers for testing handler.using()'s rounds parameters. + """ + self.require_rounds_info() + handler = self.handler + + if handler.name == "bsdi_crypt": + # hack to bypass bsdi-crypt's "odd rounds only" behavior, messes up this test + orig_handler = handler + handler = handler.using() + handler._generate_rounds = classmethod(lambda cls: super(orig_handler, cls)._generate_rounds()) + + # create some fake values to test with + orig_min_rounds = handler.min_rounds + orig_max_rounds = handler.max_rounds + orig_default_rounds = handler.default_rounds + medium = ((orig_max_rounds or 9999) + orig_min_rounds) // 2 + if medium == orig_default_rounds: + medium += 1 + small = (orig_min_rounds + medium) // 2 + large = ((orig_max_rounds or 9999) + medium) // 2 + + if handler.name == "bsdi_crypt": + # hack to avoid even numbered rounds + small |= 1 + medium |= 1 + large |= 1 + adj = 2 + else: + adj = 1 + + # create a subclass with small/medium/large as new default desired values + with self.assertWarningList([]): + subcls = handler.using( + min_desired_rounds=small, + max_desired_rounds=large, + default_rounds=medium, + ) + + # return helpers + return handler, subcls, small, medium, large, adj + + def test_has_rounds_using_harness(self): + """ + HasRounds.using() -- sanity check test harness + """ + # setup helpers + self.require_rounds_info() + handler = self.handler + orig_min_rounds = handler.min_rounds + orig_max_rounds = handler.max_rounds + orig_default_rounds = handler.default_rounds + handler, subcls, small, medium, large, adj = self._create_using_rounds_helper() + + # shouldn't affect original handler at all + self.assertEqual(handler.min_rounds, orig_min_rounds) + self.assertEqual(handler.max_rounds, orig_max_rounds) + self.assertEqual(handler.min_desired_rounds, None) + self.assertEqual(handler.max_desired_rounds, None) + self.assertEqual(handler.default_rounds, orig_default_rounds) + + # should affect subcls' desired value, but not hard min/max + self.assertEqual(subcls.min_rounds, orig_min_rounds) + self.assertEqual(subcls.max_rounds, orig_max_rounds) + self.assertEqual(subcls.default_rounds, medium) + self.assertEqual(subcls.min_desired_rounds, small) + self.assertEqual(subcls.max_desired_rounds, large) + + def test_has_rounds_using_w_min_rounds(self): + """ + HasRounds.using() -- min_rounds / min_desired_rounds + """ + # setup helpers + handler, subcls, small, medium, large, adj = self._create_using_rounds_helper() + orig_min_rounds = handler.min_rounds + orig_max_rounds = handler.max_rounds + orig_default_rounds = handler.default_rounds + + # .using() should clip values below valid minimum, w/ warning + if orig_min_rounds > 0: + self.assertRaises(ValueError, handler.using, min_desired_rounds=orig_min_rounds - 
adj) + with self.assertWarningList([PasslibHashWarning]): + temp = handler.using(min_desired_rounds=orig_min_rounds - adj, relaxed=True) + self.assertEqual(temp.min_desired_rounds, orig_min_rounds) + + # .using() should clip values above valid maximum, w/ warning + if orig_max_rounds: + self.assertRaises(ValueError, handler.using, min_desired_rounds=orig_max_rounds + adj) + with self.assertWarningList([PasslibHashWarning]): + temp = handler.using(min_desired_rounds=orig_max_rounds + adj, relaxed=True) + self.assertEqual(temp.min_desired_rounds, orig_max_rounds) + + # .using() should allow values below previous desired minimum, w/o warning + with self.assertWarningList([]): + temp = subcls.using(min_desired_rounds=small - adj) + self.assertEqual(temp.min_desired_rounds, small - adj) + + # .using() should allow values w/in previous range + temp = subcls.using(min_desired_rounds=small + 2 * adj) + self.assertEqual(temp.min_desired_rounds, small + 2 * adj) + + # .using() should allow values above previous desired maximum, w/o warning + with self.assertWarningList([]): + temp = subcls.using(min_desired_rounds=large + adj) + self.assertEqual(temp.min_desired_rounds, large + adj) + + # hash() etc should allow explicit values below desired minimum + # NOTE: formerly issued a warning in passlib 1.6, now just a wrapper for .using() + self.assertEqual(get_effective_rounds(subcls, small + adj), small + adj) + self.assertEqual(get_effective_rounds(subcls, small), small) + with self.assertWarningList([]): + self.assertEqual(get_effective_rounds(subcls, small - adj), small - adj) + + # 'min_rounds' should be treated as alias for 'min_desired_rounds' + temp = handler.using(min_rounds=small) + self.assertEqual(temp.min_desired_rounds, small) + + # should be able to specify strings + temp = handler.using(min_rounds=str(small)) + self.assertEqual(temp.min_desired_rounds, small) + + # invalid strings should cause error + self.assertRaises(ValueError, handler.using, min_rounds=str(small) + "xxx") + + def test_has_rounds_replace_w_max_rounds(self): + """ + HasRounds.using() -- max_rounds / max_desired_rounds + """ + # setup helpers + handler, subcls, small, medium, large, adj = self._create_using_rounds_helper() + orig_min_rounds = handler.min_rounds + orig_max_rounds = handler.max_rounds + + # .using() should clip values below valid minimum w/ warning + if orig_min_rounds > 0: + self.assertRaises(ValueError, handler.using, max_desired_rounds=orig_min_rounds - adj) + with self.assertWarningList([PasslibHashWarning]): + temp = handler.using(max_desired_rounds=orig_min_rounds - adj, relaxed=True) + self.assertEqual(temp.max_desired_rounds, orig_min_rounds) + + # .using() should clip values above valid maximum, w/ warning + if orig_max_rounds: + self.assertRaises(ValueError, handler.using, max_desired_rounds=orig_max_rounds + adj) + with self.assertWarningList([PasslibHashWarning]): + temp = handler.using(max_desired_rounds=orig_max_rounds + adj, relaxed=True) + self.assertEqual(temp.max_desired_rounds, orig_max_rounds) + + # .using() should clip values below previous minimum, w/ warning + with self.assertWarningList([PasslibConfigWarning]): + temp = subcls.using(max_desired_rounds=small - adj) + self.assertEqual(temp.max_desired_rounds, small) + + # .using() should reject explicit min > max + self.assertRaises(ValueError, subcls.using, + min_desired_rounds=medium+adj, + max_desired_rounds=medium-adj) + + # .using() should allow values w/in previous range + temp = subcls.using(min_desired_rounds=large - 2 * adj) + 
self.assertEqual(temp.min_desired_rounds, large - 2 * adj) + + # .using() should allow values above previous desired maximum, w/o warning + with self.assertWarningList([]): + temp = subcls.using(max_desired_rounds=large + adj) + self.assertEqual(temp.max_desired_rounds, large + adj) + + # hash() etc should allow explicit values above desired minimum, w/o warning + # NOTE: formerly issued a warning in passlib 1.6, now just a wrapper for .using() + self.assertEqual(get_effective_rounds(subcls, large - adj), large - adj) + self.assertEqual(get_effective_rounds(subcls, large), large) + with self.assertWarningList([]): + self.assertEqual(get_effective_rounds(subcls, large + adj), large + adj) + + # 'max_rounds' should be treated as alias for 'max_desired_rounds' + temp = handler.using(max_rounds=large) + self.assertEqual(temp.max_desired_rounds, large) + + # should be able to specify strings + temp = handler.using(max_desired_rounds=str(large)) + self.assertEqual(temp.max_desired_rounds, large) + + # invalid strings should cause error + self.assertRaises(ValueError, handler.using, max_desired_rounds=str(large) + "xxx") + + def test_has_rounds_using_w_default_rounds(self): + """ + HasRounds.using() -- default_rounds + """ + # setup helpers + handler, subcls, small, medium, large, adj = self._create_using_rounds_helper() + orig_max_rounds = handler.max_rounds + + # XXX: are there any other cases that need testing? + + # implicit default rounds -- increase to min_rounds + temp = subcls.using(min_rounds=medium+adj) + self.assertEqual(temp.default_rounds, medium+adj) + + # implicit default rounds -- decrease to max_rounds + temp = subcls.using(max_rounds=medium-adj) + self.assertEqual(temp.default_rounds, medium-adj) + + # explicit default rounds below desired minimum + # XXX: make this a warning if min is implicit? + self.assertRaises(ValueError, subcls.using, default_rounds=small-adj) + + # explicit default rounds above desired maximum + # XXX: make this a warning if max is implicit? 
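+ # (illustrative values, not taken from any handler: with small/medium/large = 1000/2000/3000 and adj = 1, the check above rejects default_rounds=999, and the check below rejects default_rounds=3001)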
+ if orig_max_rounds: + self.assertRaises(ValueError, subcls.using, default_rounds=large+adj) + + # hash() etc should implicit default rounds, but get overridden + self.assertEqual(get_effective_rounds(subcls), medium) + self.assertEqual(get_effective_rounds(subcls, medium+adj), medium+adj) + + # should be able to specify strings + temp = handler.using(default_rounds=str(medium)) + self.assertEqual(temp.default_rounds, medium) + + # invalid strings should cause error + self.assertRaises(ValueError, handler.using, default_rounds=str(medium) + "xxx") + + def test_has_rounds_using_w_rounds(self): + """ + HasRounds.using() -- rounds + """ + # setup helpers + handler, subcls, small, medium, large, adj = self._create_using_rounds_helper() + orig_max_rounds = handler.max_rounds + + # 'rounds' should be treated as fallback for min, max, and default + temp = subcls.using(rounds=medium+adj) + self.assertEqual(temp.min_desired_rounds, medium+adj) + self.assertEqual(temp.default_rounds, medium+adj) + self.assertEqual(temp.max_desired_rounds, medium+adj) + + # 'rounds' should be treated as fallback for min, max, and default + temp = subcls.using(rounds=medium+1, min_rounds=small+adj, + default_rounds=medium, max_rounds=large-adj) + self.assertEqual(temp.min_desired_rounds, small+adj) + self.assertEqual(temp.default_rounds, medium) + self.assertEqual(temp.max_desired_rounds, large-adj) + + def test_has_rounds_using_w_vary_rounds_parsing(self): + """ + HasRounds.using() -- vary_rounds parsing + """ + # setup helpers + handler, subcls, small, medium, large, adj = self._create_using_rounds_helper() + + def parse(value): + return subcls.using(vary_rounds=value).vary_rounds + + # floats should be preserved + self.assertEqual(parse(0.1), 0.1) + self.assertEqual(parse('0.1'), 0.1) + + # 'xx%' should be converted to float + self.assertEqual(parse('10%'), 0.1) + + # ints should be preserved + self.assertEqual(parse(1000), 1000) + self.assertEqual(parse('1000'), 1000) + + # float bounds should be enforced + self.assertRaises(ValueError, parse, -0.1) + self.assertRaises(ValueError, parse, 1.1) + + def test_has_rounds_using_w_vary_rounds_generation(self): + """ + HasRounds.using() -- vary_rounds generation + """ + handler, subcls, small, medium, large, adj = self._create_using_rounds_helper() + + def get_effective_range(cls): + seen = set(get_effective_rounds(cls) for _ in irange(1000)) + return min(seen), max(seen) + + def assert_rounds_range(vary_rounds, lower, upper): + temp = subcls.using(vary_rounds=vary_rounds) + seen_lower, seen_upper = get_effective_range(temp) + self.assertEqual(seen_lower, lower, "vary_rounds had wrong lower limit:") + self.assertEqual(seen_upper, upper, "vary_rounds had wrong upper limit:") + + # test static + assert_rounds_range(0, medium, medium) + assert_rounds_range("0%", medium, medium) + + # test absolute + assert_rounds_range(adj, medium - adj, medium + adj) + assert_rounds_range(50, max(small, medium - 50), min(large, medium + 50)) + + # test relative - should shift over at 50% mark + if handler.rounds_cost == "log2": + # log rounds "50%" variance should only increase/decrease by 1 cost value + assert_rounds_range("1%", medium, medium) + assert_rounds_range("49%", medium, medium) + assert_rounds_range("50%", medium - adj, medium) + else: + # for linear rounds, range is frequently so huge, won't ever see ends. + # so we just check it's within an expected range. 
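+ # (illustrative: with default_rounds=10000 and vary_rounds="50%", samples are drawn from roughly 5000..15000, so across 1000 draws the observed lower bound should land in [5000, 8000] and the observed upper bound in [12000, 15000], clipped to the small/large desired limits)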
+ lower, upper = get_effective_range(subcls.using(vary_rounds="50%")) + + self.assertGreaterEqual(lower, max(small, medium * 0.5)) + self.assertLessEqual(lower, max(small, medium * 0.8)) + + self.assertGreaterEqual(upper, min(large, medium * 1.2)) + self.assertLessEqual(upper, min(large, medium * 1.5)) + + def test_has_rounds_using_and_needs_update(self): + """ + HasRounds.using() -- desired_rounds + needs_update() + """ + handler, subcls, small, medium, large, adj = self._create_using_rounds_helper() + + temp = subcls.using(min_desired_rounds=small+2, max_desired_rounds=large-2) + + # generate some sample hashes + small_hash = self.do_stub_encrypt(subcls, rounds=small) + medium_hash = self.do_stub_encrypt(subcls, rounds=medium) + large_hash = self.do_stub_encrypt(subcls, rounds=large) + + # everything should be w/in bounds for original handler + self.assertFalse(subcls.needs_update(small_hash)) + self.assertFalse(subcls.needs_update(medium_hash)) + self.assertFalse(subcls.needs_update(large_hash)) + + # small & large should require update for temp handler + self.assertTrue(temp.needs_update(small_hash)) + self.assertFalse(temp.needs_update(medium_hash)) + self.assertTrue(temp.needs_update(large_hash)) + + #=================================================================== + # idents + #=================================================================== + def require_many_idents(self): + handler = self.handler + if not isinstance(handler, type) or not issubclass(handler, uh.HasManyIdents): + raise self.skipTest("handler doesn't derive from HasManyIdents") + + def test_30_HasManyIdents(self): + """validate HasManyIdents configuration""" + cls = self.handler + self.require_many_idents() + + # check settings + self.assertTrue('ident' in cls.setting_kwds) + + # check ident_values list + for value in cls.ident_values: + self.assertIsInstance(value, unicode, + "cls.ident_values must be unicode:") + self.assertTrue(len(cls.ident_values)>1, + "cls.ident_values must have 2+ elements:") + + # check default_ident value + self.assertIsInstance(cls.default_ident, unicode, + "cls.default_ident must be unicode:") + self.assertTrue(cls.default_ident in cls.ident_values, + "cls.default_ident must specify member of cls.ident_values") + + # check optional aliases list + if cls.ident_aliases: + for alias, ident in iteritems(cls.ident_aliases): + self.assertIsInstance(alias, unicode, + "cls.ident_aliases keys must be unicode:") # XXX: allow ints? + self.assertIsInstance(ident, unicode, + "cls.ident_aliases values must be unicode:") + self.assertTrue(ident in cls.ident_values, + "cls.ident_aliases must map to cls.ident_values members: %r" % (ident,)) + + # check constructor validates ident correctly. + handler = cls + hash = self.get_sample_hash()[1] + kwds = handler.parsehash(hash) + del kwds['ident'] + + # ... accepts good ident + handler(ident=cls.default_ident, **kwds) + + # ... requires ident w/o defaults + self.assertRaises(TypeError, handler, **kwds) + + # ... supplies default ident + handler(use_defaults=True, **kwds) + + # ... 
rejects bad ident + self.assertRaises(ValueError, handler, ident='xXx', **kwds) + + # TODO: check various supported idents + + def test_has_many_idents_using(self): + """HasManyIdents.using() -- 'default_ident' and 'ident' keywords""" + self.require_many_idents() + + # pick alt ident to test with + handler = self.handler + orig_ident = handler.default_ident + for alt_ident in handler.ident_values: + if alt_ident != orig_ident: + break + else: + raise AssertionError("expected to find alternate ident: default=%r values=%r" % + (orig_ident, handler.ident_values)) + + def effective_ident(cls): + cls = unwrap_handler(cls) + return cls(use_defaults=True).ident + + # keep default if nothing else specified + subcls = handler.using() + self.assertEqual(subcls.default_ident, orig_ident) + + # accepts alt ident + subcls = handler.using(default_ident=alt_ident) + self.assertEqual(subcls.default_ident, alt_ident) + self.assertEqual(handler.default_ident, orig_ident) + + # check subcls actually *generates* default ident, + # and that we didn't affect orig handler + self.assertEqual(effective_ident(subcls), alt_ident) + self.assertEqual(effective_ident(handler), orig_ident) + + # rejects bad ident + self.assertRaises(ValueError, handler.using, default_ident='xXx') + + # honor 'ident' alias + subcls = handler.using(ident=alt_ident) + self.assertEqual(subcls.default_ident, alt_ident) + self.assertEqual(handler.default_ident, orig_ident) + + # forbid both at same time + self.assertRaises(TypeError, handler.using, default_ident=alt_ident, ident=alt_ident) + + # check ident aliases are being honored + if handler.ident_aliases: + for alias, ident in handler.ident_aliases.items(): + subcls = handler.using(ident=alias) + self.assertEqual(subcls.default_ident, ident, msg="alias %r:" % alias) + + #=================================================================== + # password size limits + #=================================================================== + def test_truncate_error_setting(self): + """ + validate 'truncate_error' setting & related attributes + """ + # If it doesn't have truncate_size set, + # it shouldn't support truncate_error + hasher = self.handler + if hasher.truncate_size is None: + self.assertNotIn("truncate_error", hasher.setting_kwds) + return + + # if hasher defaults to silently truncating, + # it MUST NOT use .truncate_verify_reject, + # because resulting hashes wouldn't verify! 
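+ # (e.g. a bcrypt-style handler that silently truncates secrets at 72 bytes must still verify longer secrets, since the stored digest only covers the truncated prefix)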
+ if not hasher.truncate_error: + self.assertFalse(hasher.truncate_verify_reject) + + # if hasher doesn't have configurable policy, + # it must throw error by default + if "truncate_error" not in hasher.setting_kwds: + self.assertTrue(hasher.truncate_error) + return + + # test value parsing + def parse_value(value): + return hasher.using(truncate_error=value).truncate_error + self.assertEqual(parse_value(None), hasher.truncate_error) + self.assertEqual(parse_value(True), True) + self.assertEqual(parse_value("true"), True) + self.assertEqual(parse_value(False), False) + self.assertEqual(parse_value("false"), False) + self.assertRaises(ValueError, parse_value, "xxx") + + def test_secret_wo_truncate_size(self): + """ + test no password size limits enforced (if truncate_size=None) + """ + # skip if hasher has a maximum password size + hasher = self.handler + if hasher.truncate_size is not None: + self.assertGreaterEqual(hasher.truncate_size, 1) + raise self.skipTest("truncate_size is set") + + # NOTE: this doesn't do an exhaustive search to verify algorithm + # doesn't have some cutoff point, it just tries + # 1024-character string, and alters the last char. + # as long as algorithm doesn't clip secret at point <1024, + # the new secret shouldn't verify. + + # hash a 1024-byte secret + secret = "too many secrets" * 16 + alt = "x" + hash = self.do_encrypt(secret) + + # check that verify doesn't silently reject secret + # (i.e. hasher mistakenly honors .truncate_verify_reject) + verify_success = not hasher.is_disabled + self.assertEqual(self.do_verify(secret, hash), verify_success, + msg="verify rejected correct secret") + + # alter last byte, should get different hash, which won't verify + alt_secret = secret[:-1] + alt + self.assertFalse(self.do_verify(alt_secret, hash), + "full password not used in digest") + + def test_secret_w_truncate_size(self): + """ + test password size limits raise truncate_error (if appropriate) + """ + #-------------------------------------------------- + # check if test is applicable + #-------------------------------------------------- + handler = self.handler + truncate_size = handler.truncate_size + if not truncate_size: + raise self.skipTest("truncate_size not set") + + #-------------------------------------------------- + # setup vars + #-------------------------------------------------- + # try to get versions w/ and w/o truncate_error set. + # set to None if policy isn't configurable + size_error_type = exc.PasswordSizeError + if "truncate_error" in handler.setting_kwds: + without_error = handler.using(truncate_error=False) + with_error = handler.using(truncate_error=True) + size_error_type = exc.PasswordTruncateError + elif handler.truncate_error: + without_error = None + with_error = handler + else: + # NOTE: this mode is currently an error in test_truncate_error_setting() + without_error = handler + with_error = None + + # create some test secrets + base = "too many secrets" + alt = "x" # char that's not in base, used to mutate test secrets + long_secret = repeat_string(base, truncate_size+1) + short_secret = long_secret[:-1] + alt_long_secret = long_secret[:-1] + alt + alt_short_secret = short_secret[:-1] + alt + + # init flags + short_verify_success = not handler.is_disabled + long_verify_success = short_verify_success and \ + not handler.truncate_verify_reject + + #-------------------------------------------------- + # do tests on truncate_size-length secret, and resulting hash. + # should pass regardless of truncate_error policy.
+ #-------------------------------------------------- + assert without_error or with_error + for cand_hasher in [without_error, with_error]: + + # create & hash string that's exactly truncate_size chars. + short_hash = self.do_encrypt(short_secret, handler=cand_hasher) + + # check hash verifies, regardless of .truncate_verify_reject + self.assertEqual(self.do_verify(short_secret, short_hash, + handler=cand_hasher), + short_verify_success) + + # changing the truncate_size'th char should invalidate hash + # if this fails, means (reported) truncate_size is too large. + self.assertFalse(self.do_verify(alt_short_secret, short_hash, + handler=with_error), + "truncate_size value is too large") + + # verify should truncate long secret before comparing + # (unless truncate_verify_reject is set) + self.assertEqual(self.do_verify(long_secret, short_hash, + handler=cand_hasher), + long_verify_success) + + #-------------------------------------------------- + # do tests on (truncate_size+1)-length secret, + # w/ truncate error disabled (should silently truncate) + #-------------------------------------------------- + if without_error: + + # create & hash string that's exactly truncate_size+1 chars + long_hash = self.do_encrypt(long_secret, handler=without_error) + + # check verifies against secret (unless truncate_verify_reject=True) + self.assertEqual(self.do_verify(long_secret, long_hash, + handler=without_error), + short_verify_success) + + # check mutating last char doesn't change outcome. + # if this fails, means (reported) truncate_size is too small. + self.assertEqual(self.do_verify(alt_long_secret, long_hash, + handler=without_error), + short_verify_success) + + # check short_secret verifies against this hash + # if this fails, means (reported) truncate_size is too large. + self.assertTrue(self.do_verify(short_secret, long_hash, + handler=without_error)) + + #-------------------------------------------------- + # do tests on (truncate_size+1)-length secret, + # w/ truncate error + #-------------------------------------------------- + if with_error: + + # with errors enabled, should forbid truncation. + err = self.assertRaises(size_error_type, self.do_encrypt, + long_secret, handler=with_error) + self.assertEqual(err.max_size, truncate_size) + + #=================================================================== + # password contents + #=================================================================== + def test_61_secret_case_sensitive(self): + """test password case sensitivity""" + hash_insensitive = self.secret_case_insensitive is True + verify_insensitive = self.secret_case_insensitive in [True, + "verify-only"] + + # test hashing lower-case verifies against lower & upper + lower = 'test' + upper = 'TEST' + h1 = self.do_encrypt(lower) + if verify_insensitive and not self.handler.is_disabled: + self.assertTrue(self.do_verify(upper, h1), + "verify() should not be case sensitive") + else: + self.assertFalse(self.do_verify(upper, h1), + "verify() should be case sensitive") + + # test hashing upper-case verifies against lower & upper + h2 = self.do_encrypt(upper) + if verify_insensitive and not self.handler.is_disabled: + self.assertTrue(self.do_verify(lower, h2), + "verify() should not be case sensitive") + else: + self.assertFalse(self.do_verify(lower, h2), + "verify() should be case sensitive") + + # test genhash + # XXX: 2.0: what about 'verify-only' hashes once genhash() is removed? + # won't have easy way to recreate w/ same config to see if hash differs.
+ # (though only hash this applies to is mssql2000) + h2 = self.do_genhash(upper, h1) + if hash_insensitive or (self.handler.is_disabled and not self.disabled_contains_salt): + self.assertEqual(h2, h1, + "genhash() should not be case sensitive") + else: + self.assertNotEqual(h2, h1, + "genhash() should be case sensitive") + + def test_62_secret_border(self): + """test non-string passwords are rejected""" + hash = self.get_sample_hash()[1] + + # secret=None + self.assertRaises(TypeError, self.do_encrypt, None) + self.assertRaises(TypeError, self.do_genhash, None, hash) + self.assertRaises(TypeError, self.do_verify, None, hash) + + # secret=int (picked as example of entirely wrong class) + self.assertRaises(TypeError, self.do_encrypt, 1) + self.assertRaises(TypeError, self.do_genhash, 1, hash) + self.assertRaises(TypeError, self.do_verify, 1, hash) + + # xxx: move to password size limits section, above? + def test_63_large_secret(self): + """test MAX_PASSWORD_SIZE is enforced""" + from passlib.exc import PasswordSizeError + from passlib.utils import MAX_PASSWORD_SIZE + secret = '.' * (1+MAX_PASSWORD_SIZE) + hash = self.get_sample_hash()[1] + err = self.assertRaises(PasswordSizeError, self.do_genhash, secret, hash) + self.assertEqual(err.max_size, MAX_PASSWORD_SIZE) + self.assertRaises(PasswordSizeError, self.do_encrypt, secret) + self.assertRaises(PasswordSizeError, self.do_verify, secret, hash) + + def test_64_forbidden_chars(self): + """test forbidden characters not allowed in password""" + chars = self.forbidden_characters + if not chars: + raise self.skipTest("none listed") + base = u('stub') + if isinstance(chars, bytes): + from passlib.utils.compat import iter_byte_chars + chars = iter_byte_chars(chars) + base = base.encode("ascii") + for c in chars: + self.assertRaises(ValueError, self.do_encrypt, base + c + base) + + #=================================================================== + # check identify(), verify(), genhash() against test vectors + #=================================================================== + def is_secret_8bit(self, secret): + secret = self.populate_context(secret, {}) + return not is_ascii_safe(secret) + + def expect_os_crypt_failure(self, secret): + """ + check if we're expecting potential verify failure due to crypt.crypt() encoding limitation + """ + if PY3 and self.backend == "os_crypt" and isinstance(secret, bytes): + try: + secret.decode("utf-8") + except UnicodeDecodeError: + return True + return False + + def test_70_hashes(self): + """test known hashes""" + + # sanity check + self.assertTrue(self.known_correct_hashes or self.known_correct_configs, + "test must set at least one of 'known_correct_hashes' " + "or 'known_correct_configs'") + + # run through known secret/hash pairs + saw8bit = False + for secret, hash in self.iter_known_hashes(): + if self.is_secret_8bit(secret): + saw8bit = True + + # hash should be positively identified by handler + self.assertTrue(self.do_identify(hash), + "identify() failed to identify hash: %r" % (hash,)) + + # check if what we're about to do is expected to fail due to crypt.crypt() limitation. 
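+ # (under PY3, crypt.crypt() only accepts text, so byte secrets that aren't valid utf-8 can't be passed to the os_crypt backend -- that's the case expect_os_crypt_failure() detects above)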
+ expect_os_crypt_failure = self.expect_os_crypt_failure(secret) + try: + + # secret should verify successfully against hash + self.check_verify(secret, hash, "verify() of known hash failed: " + "secret=%r, hash=%r" % (secret, hash)) + + # genhash() should reproduce same hash + result = self.do_genhash(secret, hash) + self.assertIsInstance(result, str, + "genhash() failed to return native string: %r" % (result,)) + if self.handler.is_disabled and self.disabled_contains_salt: + continue + self.assertEqual(result, hash, "genhash() failed to reproduce " + "known hash: secret=%r, hash=%r: result=%r" % + (secret, hash, result)) + + except MissingBackendError: + if not expect_os_crypt_failure: + raise + + # would really like all handlers to have at least one 8-bit test vector + if not saw8bit: + warn("%s: no 8-bit secrets tested" % self.__class__) + + def test_71_alternates(self): + """test known alternate hashes""" + if not self.known_alternate_hashes: + raise self.skipTest("no alternate hashes provided") + for alt, secret, hash in self.known_alternate_hashes: + + # hash should be positively identified by handler + self.assertTrue(self.do_identify(hash), + "identify() failed to identify alternate hash: %r" % + (hash,)) + + # secret should verify successfully against hash + self.check_verify(secret, alt, "verify() of known alternate hash " + "failed: secret=%r, hash=%r" % (secret, alt)) + + # genhash() should reproduce canonical hash + result = self.do_genhash(secret, alt) + self.assertIsInstance(result, str, + "genhash() failed to return native string: %r" % (result,)) + if self.handler.is_disabled and self.disabled_contains_salt: + continue + self.assertEqual(result, hash, "genhash() failed to normalize " + "known alternate hash: secret=%r, alt=%r, hash=%r: " + "result=%r" % (secret, alt, hash, result)) + + def test_72_configs(self): + """test known config strings""" + # special-case handlers without settings + if not self.handler.setting_kwds: + self.assertFalse(self.known_correct_configs, + "handler should not have config strings") + raise self.skipTest("hash has no settings") + + if not self.known_correct_configs: + # XXX: make this a requirement? + raise self.skipTest("no config strings provided") + + # make sure config strings work (hashes in list tested in test_70) + if self.filter_config_warnings: + warnings.filterwarnings("ignore", category=PasslibHashWarning) + for config, secret, hash in self.known_correct_configs: + + # config should be positively identified by handler + self.assertTrue(self.do_identify(config), + "identify() failed to identify known config string: %r" % + (config,)) + + # verify() should throw error for config strings. + self.assertRaises(ValueError, self.do_verify, secret, config, + __msg__="verify() failed to reject config string: %r" % + (config,)) + + # genhash() should reproduce hash from config. 
+ result = self.do_genhash(secret, config) + self.assertIsInstance(result, str, + "genhash() failed to return native string: %r" % (result,)) + self.assertEqual(result, hash, "genhash() failed to reproduce " + "known hash from config: secret=%r, config=%r, hash=%r: " + "result=%r" % (secret, config, hash, result)) + + def test_73_unidentified(self): + """test known unidentifiably-mangled strings""" + if not self.known_unidentified_hashes: + raise self.skipTest("no unidentified hashes provided") + for hash in self.known_unidentified_hashes: + + # identify() should reject these + self.assertFalse(self.do_identify(hash), + "identify() incorrectly identified known unidentifiable " + "hash: %r" % (hash,)) + + # verify() should throw error + self.assertRaises(ValueError, self.do_verify, 'stub', hash, + __msg__= "verify() failed to throw error for unidentifiable " + "hash: %r" % (hash,)) + + # genhash() should throw error + self.assertRaises(ValueError, self.do_genhash, 'stub', hash, + __msg__= "genhash() failed to throw error for unidentifiable " + "hash: %r" % (hash,)) + + def test_74_malformed(self): + """test known identifiable-but-malformed strings""" + if not self.known_malformed_hashes: + raise self.skipTest("no malformed hashes provided") + for hash in self.known_malformed_hashes: + + # identify() should accept these + self.assertTrue(self.do_identify(hash), + "identify() failed to identify known malformed " + "hash: %r" % (hash,)) + + # verify() should throw error + self.assertRaises(ValueError, self.do_verify, 'stub', hash, + __msg__= "verify() failed to throw error for malformed " + "hash: %r" % (hash,)) + + # genhash() should throw error + self.assertRaises(ValueError, self.do_genhash, 'stub', hash, + __msg__= "genhash() failed to throw error for malformed " + "hash: %r" % (hash,)) + + def test_75_foreign(self): + """test known foreign hashes""" + if self.accepts_all_hashes: + raise self.skipTest("not applicable") + if not self.known_other_hashes: + raise self.skipTest("no foreign hashes provided") + for name, hash in self.known_other_hashes: + # NOTE: most tests use default list of foreign hashes, + # so they may include ones belonging to that hash... + # hence the 'own' logic. + + if name == self.handler.name: + # identify should accept these + self.assertTrue(self.do_identify(hash), + "identify() failed to identify known hash: %r" % (hash,)) + + # verify & genhash should NOT throw error + self.do_verify('stub', hash) + result = self.do_genhash('stub', hash) + self.assertIsInstance(result, str, + "genhash() failed to return native string: %r" % (result,)) + + else: + # identify should reject these + self.assertFalse(self.do_identify(hash), + "identify() incorrectly identified hash belonging to " + "%s: %r" % (name, hash)) + + # verify should throw error + self.assertRaises(ValueError, self.do_verify, 'stub', hash, + __msg__= "verify() failed to throw error for hash " + "belonging to %s: %r" % (name, hash,)) + + # genhash() should throw error + self.assertRaises(ValueError, self.do_genhash, 'stub', hash, + __msg__= "genhash() failed to throw error for hash " + "belonging to %s: %r" % (name, hash)) + + def test_76_hash_border(self): + """test non-string hashes are rejected""" + # + # test hash=None is handled correctly + # + self.assertRaises(TypeError, self.do_identify, None) + self.assertRaises(TypeError, self.do_verify, 'stub', None) + + # NOTE: changed in 1.7 -- previously 'None' would be accepted when config strings not supported. 
+ self.assertRaises(TypeError, self.do_genhash, 'stub', None) + + # + # test hash=int is rejected (picked as example of entirely wrong type) + # + self.assertRaises(TypeError, self.do_identify, 1) + self.assertRaises(TypeError, self.do_verify, 'stub', 1) + self.assertRaises(TypeError, self.do_genhash, 'stub', 1) + + # + # test hash='' is rejected for all but the plaintext hashes + # + for hash in [u(''), b'']: + if self.accepts_all_hashes: + # then it accepts empty string as well. + self.assertTrue(self.do_identify(hash)) + self.do_verify('stub', hash) + result = self.do_genhash('stub', hash) + self.check_returned_native_str(result, "genhash") + else: + # otherwise it should reject them + self.assertFalse(self.do_identify(hash), + "identify() incorrectly identified empty hash") + self.assertRaises(ValueError, self.do_verify, 'stub', hash, + __msg__="verify() failed to reject empty hash") + self.assertRaises(ValueError, self.do_genhash, 'stub', hash, + __msg__="genhash() failed to reject empty hash") + + # + # test identify doesn't throw decoding errors on 8-bit input + # + self.do_identify('\xe2\x82\xac\xc2\xa5$') # utf-8 + self.do_identify('abc\x91\x00') # non-utf8 + + #=================================================================== + # test parsehash() + #=================================================================== + + #: optional list of known parse hash results for hasher + known_parsehash_results = [] + + def require_parsehash(self): + if not hasattr(self.handler, "parsehash"): + raise SkipTest("parsehash() not implemented") + + def test_70_parsehash(self): + """ + parsehash() + """ + # TODO: would like to enhance what this test covers + + self.require_parsehash() + handler = self.handler + + # calls should succeed, and return dict + hash = self.do_encrypt("stub") + result = handler.parsehash(hash) + self.assertIsInstance(result, dict) + # TODO: figure out what invariants we can reliably parse, + # or maybe make subclasses specify that? + + # w/ checksum=False, should omit that key + result2 = handler.parsehash(hash, checksum=False) + correct2 = result.copy() + correct2.pop("checksum", None) + self.assertEqual(result2, correct2) + + # w/ sanitize=True + # correct output should mask salt / checksum; + # but all else should be the same + result3 = handler.parsehash(hash, sanitize=True) + correct3 = result.copy() + if PY2: + # silence warning about bytes & unicode not comparing + # (sanitize may convert bytes into base64 text) + warnings.filterwarnings("ignore", ".*unequal comparison failed to convert.*", + category=UnicodeWarning) + for key in ("salt", "checksum"): + if key in result3: + self.assertNotEqual(result3[key], correct3[key]) + self.assert_is_masked(result3[key]) + correct3[key] = result3[key] + self.assertEqual(result3, correct3) + + def assert_is_masked(self, value): + """ + check value properly masked by :func:`passlib.utils.mask_value` + """ + if value is None: + return + self.assertIsInstance(value, unicode) + # assumes mask_value() defaults will never show more than chars (4); + # and show nothing if size less than 1/ (8). + ref = value if len(value) < 8 else value[4:] + if set(ref) == set(["*"]): + return True + raise self.fail("value not masked: %r" % value) + + def test_71_parsehash_results(self): + """ + parsehash() -- known outputs + """ + self.require_parsehash() + samples = self.known_parsehash_results + if not samples: + raise self.skipTest("no samples present") + # XXX: expand to test w/ checksum=False and/or sanitize=True? 
+ # or read "_unsafe_settings"? + for hash, correct in self.known_parsehash_results: + result = self.handler.parsehash(hash) + self.assertEqual(result, correct, "hash=%r:" % hash) + + #=================================================================== + # fuzz testing + #=================================================================== + def test_77_fuzz_input(self, threaded=False): + """fuzz testing -- random passwords and options + + This test attempts to perform some basic fuzz testing of the hash, + based on whatever information can be found about it. + It does as much as it can within a fixed amount of time + (defaults to 1 second, but can be overridden via $PASSLIB_TEST_FUZZ_TIME). + It tests the following: + + * randomly generated passwords including extended unicode chars + * randomly selected rounds values (if rounds supported) + * randomly selected salt sizes (if salts supported) + * randomly selected identifiers (if multiple found) + * runs output of selected backend against other available backends + (if any) to detect errors occurring between different backends. + * runs output against other "external" verifiers such as OS crypt() + + :param report_thread_state: + if true, writes state of loop to current_thread().passlib_fuzz_state. + used to help debug multi-threaded fuzz test issues (below) + """ + if self.handler.is_disabled: + raise self.skipTest("not applicable") + + # gather info + from passlib.utils import tick + max_time = self.max_fuzz_time + if max_time <= 0: + raise self.skipTest("disabled by test mode") + verifiers = self.get_fuzz_verifiers(threaded=threaded) + def vname(v): + return (v.__doc__ or v.__name__).splitlines()[0] + + # init rng -- using separate one for each thread + # so things are predictable for given RANDOM_TEST_SEED + # (relies on test_78_fuzz_threading() to give threads unique names) + if threaded: + thread_name = threading.current_thread().name + else: + thread_name = "fuzz test" + rng = self.getRandom(name=thread_name) + generator = self.FuzzHashGenerator(self, rng) + + # do as many tests as possible for max_time seconds + log.debug("%s: %s: started; max_time=%r verifiers=%d (%s)", + self.descriptionPrefix, thread_name, max_time, len(verifiers), + ", ".join(vname(v) for v in verifiers)) + start = tick() + stop = start + max_time + count = 0 + while tick() <= stop: + # generate random password & options + opts = generator.generate() + secret = opts['secret'] + other = opts['other'] + settings = opts['settings'] + ctx = opts['context'] + if ctx: + settings['context'] = ctx + + # create new hash + hash = self.do_encrypt(secret, **settings) + ##log.debug("fuzz test: hash=%r secret=%r other=%r", + ## hash, secret, other) + + # run through all verifiers we found. + for verify in verifiers: + name = vname(verify) + result = verify(secret, hash, **ctx) + if result == "skip": # let verifiers signal lack of support + continue + assert result is True or result is False + if not result: + raise self.failureException("failed to verify against %r verifier: " + "secret=%r config=%r hash=%r" % + (name, secret, settings, hash)) + # occasionally check that some other secrets WON'T verify + # against this hash. 
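+ # (this negative check runs on only ~10% of iterations, so most of the fixed time budget goes to generating and verifying fresh hashes)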
+ if rng.random() < .1: + result = verify(other, hash, **ctx) + if result and result != "skip": + raise self.failureException("was able to verify wrong " + "password using %s: wrong_secret=%r real_secret=%r " + "config=%r hash=%r" % (name, other, secret, settings, hash)) + count += 1 + + log.debug("%s: %s: done; elapsed=%r count=%r", + self.descriptionPrefix, thread_name, tick() - start, count) + + def test_78_fuzz_threading(self): + """multithreaded fuzz testing -- random password & options using multiple threads + + run test_77 simultaneously in multiple threads + in an attempt to detect any concurrency issues + (e.g. the bug fixed by pybcrypt 0.3) + """ + self.require_TEST_MODE("full") + import threading + + # check if this test should run + if self.handler.is_disabled: + raise self.skipTest("not applicable") + thread_count = self.fuzz_thread_count + if thread_count < 1 or self.max_fuzz_time <= 0: + raise self.skipTest("disabled by test mode") + + # buffer to hold errors thrown by threads + failed_lock = threading.Lock() + failed = [0] + + # launch threads, all of which run + # test_77_fuzz_input(), and see if any errors get thrown. + # if hash has concurrency issues, this should reveal it. + def wrapper(): + try: + self.test_77_fuzz_input(threaded=True) + except SkipTest: + pass + except: + with failed_lock: + failed[0] += 1 + raise + def launch(n): + cls = type(self) + name = "Fuzz-Thread-%d ('%s:%s.%s')" % (n, cls.__module__, cls.__name__, + self._testMethodName) + thread = threading.Thread(target=wrapper, name=name) + thread.setDaemon(True) + thread.start() + return thread + threads = [launch(n) for n in irange(thread_count)] + + # wait until all threads exit + timeout = self.max_fuzz_time * thread_count * 4 + stalled = 0 + for thread in threads: + thread.join(timeout) + if not thread.is_alive(): + continue + # XXX: not sure why this is happening, main one seems 1/4 times for sun_md5_crypt + log.error("%s timed out after %f seconds", thread.name, timeout) + stalled += 1 + + # if any thread threw an error, raise one ourselves. + if failed[0]: + raise self.fail("%d/%d threads failed concurrent fuzz testing " + "(see error log for details)" % (failed[0], thread_count)) + if stalled: + raise self.fail("%d/%d threads stalled during concurrent fuzz testing " + "(see error log for details)" % (stalled, thread_count)) + + #--------------------------------------------------------------- + # fuzz constants & helpers + #--------------------------------------------------------------- + + @property + def max_fuzz_time(self): + """amount of time to spend on fuzz testing""" + value = float(os.environ.get("PASSLIB_TEST_FUZZ_TIME") or 0) + if value: + return value + elif TEST_MODE(max="quick"): + return 0 + elif TEST_MODE(max="default"): + return 1 + else: + return 5 + + @property + def fuzz_thread_count(self): + """number of threads for threaded fuzz testing""" + value = int(os.environ.get("PASSLIB_TEST_FUZZ_THREADS") or 0) + if value: + return value + elif TEST_MODE(max="quick"): + return 0 + else: + return 10 + + #--------------------------------------------------------------- + # fuzz verifiers + #--------------------------------------------------------------- + + #: list of custom fuzz-test verifiers (in addition to hasher itself, + #: and backend-specific wrappers of hasher). each element is + #: name of method that will return None / a verifier callable. 
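+ #: (illustrative, not part of upstream passlib: a hash-specific subclass could extend this tuple, e.g. + #: fuzz_verifiers = HandlerCase.fuzz_verifiers + ("fuzz_verifier_myext",) + #: where fuzz_verifier_myext() would return either None or a callable(secret, hash) -> bool)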
+ fuzz_verifiers = ("fuzz_verifier_default",) + + def get_fuzz_verifiers(self, threaded=False): + """return list of password verifiers (including external libs) + + used by fuzz testing. + verifiers should be callable with signature + ``func(password: unicode, hash: ascii str) -> ok: bool``. + """ + handler = self.handler + verifiers = [] + + # call all methods starting with prefix in order to create + for method_name in self.fuzz_verifiers: + func = getattr(self, method_name)() + if func is not None: + verifiers.append(func) + + # create verifiers for any other available backends + # NOTE: skipping this under threading test, + # since backend switching isn't threadsafe (yet) + if hasattr(handler, "backends") and TEST_MODE("full") and not threaded: + def maker(backend): + def func(secret, hash): + orig_backend = handler.get_backend() + try: + handler.set_backend(backend) + return handler.verify(secret, hash) + finally: + handler.set_backend(orig_backend) + func.__name__ = "check_" + backend + "_backend" + func.__doc__ = backend + "-backend" + return func + for backend in iter_alt_backends(handler): + verifiers.append(maker(backend)) + + return verifiers + + def fuzz_verifier_default(self): + # test against self + def check_default(secret, hash, **ctx): + return self.do_verify(secret, hash, **ctx) + if self.backend: + check_default.__doc__ = self.backend + "-backend" + else: + check_default.__doc__ = "self" + return check_default + + #--------------------------------------------------------------- + # fuzz settings generation + #--------------------------------------------------------------- + class FuzzHashGenerator(object): + """ + helper which takes care of generating random + passwords & configuration options to test hash with. + separate from test class so we can create one per thread. + """ + #========================================================== + # class attrs + #========================================================== + + # alphabet for randomly generated passwords + password_alphabet = u('qwertyASDF1234<>.@*#! \u00E1\u0259\u0411\u2113') + + # encoding when testing bytes + password_encoding = "utf-8" + + # map of setting kwd -> method name. + # will ignore setting if method returns None. + # subclasses should make copy of dict. + settings_map = dict(rounds="random_rounds", + salt_size="random_salt_size", + ident="random_ident") + + # map of context kwd -> method name. + context_map = {} + + #========================================================== + # init / generation + #========================================================== + + def __init__(self, test, rng): + self.test = test + self.handler = test.handler + self.rng = rng + + def generate(self): + """ + generate random password and options for fuzz testing. 
+ :returns: + `(secret, other_secret, settings_kwds, context_kwds)` + """ + def gendict(map): + out = {} + for key, meth in map.items(): + value = getattr(self, meth)() + if value is not None: + out[key] = value + return out + secret, other = self.random_password_pair() + return dict(secret=secret, + other=other, + settings=gendict(self.settings_map), + context=gendict(self.context_map), + ) + + #========================================================== + # helpers + #========================================================== + def randintgauss(self, lower, upper, mu, sigma): + """generate random int w/ gauss distirbution""" + value = self.rng.normalvariate(mu, sigma) + return int(limit(value, lower, upper)) + + #========================================================== + # settings generation + #========================================================== + + def random_rounds(self): + handler = self.handler + if not has_rounds_info(handler): + return None + default = handler.default_rounds or handler.min_rounds + lower = handler.min_rounds + if handler.rounds_cost == "log2": + upper = default + else: + upper = min(default*2, handler.max_rounds) + return self.randintgauss(lower, upper, default, default*.5) + + def random_salt_size(self): + handler = self.handler + if not (has_salt_info(handler) and 'salt_size' in handler.setting_kwds): + return None + default = handler.default_salt_size + lower = handler.min_salt_size + upper = handler.max_salt_size or default*4 + return self.randintgauss(lower, upper, default, default*.5) + + def random_ident(self): + rng = self.rng + handler = self.handler + if 'ident' not in handler.setting_kwds or not hasattr(handler, "ident_values"): + return None + if rng.random() < .5: + return None + # resolve wrappers before reading values + handler = getattr(handler, "wrapped", handler) + return rng.choice(handler.ident_values) + + #========================================================== + # fuzz password generation + #========================================================== + def random_password_pair(self): + """generate random password, and non-matching alternate password""" + secret = self.random_password() + while True: + other = self.random_password() + if self.accept_password_pair(secret, other): + break + rng = self.rng + if rng.randint(0,1): + secret = secret.encode(self.password_encoding) + if rng.randint(0,1): + other = other.encode(self.password_encoding) + return secret, other + + def random_password(self): + """generate random passwords for fuzz testing""" + # occasionally try an empty password + rng = self.rng + if rng.random() < .0001: + return u('') + + # check if truncate size needs to be considered + handler = self.handler + truncate_size = handler.truncate_error and handler.truncate_size + max_size = truncate_size or 999999 + + # pick endpoint + if max_size < 50 or rng.random() < .5: + # chance of small password (~15 chars) + size = self.randintgauss(1, min(max_size, 50), 15, 15) + else: + # otherwise large password (~70 chars) + size = self.randintgauss(50, min(max_size, 99), 70, 20) + + # generate random password + result = getrandstr(rng, self.password_alphabet, size) + + # trim ones that encode past truncate point. 
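+ # (the password alphabet above includes multi-byte utf-8 chars, so a string that fits by char count can still exceed truncate_size once encoded; drop chars until the encoded length fits)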
+ if truncate_size and isinstance(result, unicode): + while len(result.encode("utf-8")) > truncate_size: + result = result[:-1] + + return result + + def accept_password_pair(self, secret, other): + """verify fuzz pair contains different passwords""" + return secret != other + + #========================================================== + # eoc FuzzGenerator + #========================================================== + + #=================================================================== + # "disabled hasher" api + #=================================================================== + + def test_disable_and_enable(self): + """.disable() / .enable() methods""" + # + # setup + # + handler = self.handler + if not handler.is_disabled: + self.assertFalse(hasattr(handler, "disable")) + self.assertFalse(hasattr(handler, "enable")) + self.assertFalse(self.disabled_contains_salt) + raise self.skipTest("not applicable") + + # + # disable() + # + + # w/o existing hash + disabled_default = handler.disable() + self.assertIsInstance(disabled_default, str, + msg="disable() must return native string") + self.assertTrue(handler.identify(disabled_default), + msg="identify() didn't recognize disable() result: %r" % (disabled_default)) + + # w/ existing hash + stub = self.getRandom().choice(self.known_other_hashes)[1] + disabled_stub = handler.disable(stub) + self.assertIsInstance(disabled_stub, str, + msg="disable() must return native string") + self.assertTrue(handler.identify(disabled_stub), + msg="identify() didn't recognize disable() result: %r" % (disabled_stub)) + + # + # enable() + # + + # w/o original hash + self.assertRaisesRegex(ValueError, "cannot restore original hash", + handler.enable, disabled_default) + + # w/ original hash + try: + result = handler.enable(disabled_stub) + error = None + except ValueError as e: + result = None + error = e + + if error is None: + # if supports recovery, should have returned stub (e.g. unix_disabled); + self.assertIsInstance(result, str, + msg="enable() must return native string") + self.assertEqual(result, stub) + else: + # if doesn't, should have thrown appropriate error + self.assertIsInstance(error, ValueError) + self.assertRegex("cannot restore original hash", str(error)) + + # + # test repeating disable() & salting state + # + + # repeating disabled + disabled_default2 = handler.disable() + if self.disabled_contains_salt: + # should return new salt for each call (e.g. django_disabled) + self.assertNotEqual(disabled_default2, disabled_default) + elif error is None: + # should return same result for each hash, but unique across hashes + self.assertEqual(disabled_default2, disabled_default) + + # repeating same hash ... + disabled_stub2 = handler.disable(stub) + if self.disabled_contains_salt: + # ... should return different string (if salted) + self.assertNotEqual(disabled_stub2, disabled_stub) + else: + # ... should return same string + self.assertEqual(disabled_stub2, disabled_stub) + + # using different hash ... + disabled_other = handler.disable(stub + 'xxx') + if self.disabled_contains_salt or error is None: + # ... should return different string (if salted or hash encoded) + self.assertNotEqual(disabled_other, disabled_stub) + else: + # ... 
should return same string + self.assertEqual(disabled_other, disabled_stub) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# HandlerCase mixins providing additional tests for certain hashes +#============================================================================= +class OsCryptMixin(HandlerCase): + """helper used by create_backend_case() which adds additional features + to test the os_crypt backend. + + * if crypt support is missing, inserts fake crypt support to simulate + a working safe_crypt, to test passlib's codepath as fully as possible. + + * extra tests to verify non-conformant crypt implementations are handled + correctly. + + * check that native crypt support is detected correctly for known platforms. + """ + #=================================================================== + # class attrs + #=================================================================== + + # platforms that are known to support / not support this hash natively. + # list of (platform_regex, True|False|None) entries. + platform_crypt_support = [] + + #=================================================================== + # instance attrs + #=================================================================== + __unittest_skip = True + + # force this backend + backend = "os_crypt" + + # flag read by HandlerCase to detect if fake os crypt is enabled. + using_patched_crypt = False + + #=================================================================== + # setup + #=================================================================== + def setUp(self): + assert self.backend == "os_crypt" + if not self.handler.has_backend("os_crypt"): + # XXX: currently, any tests that use this are skipped entirely! (see issue 120) + self._patch_safe_crypt() + super(OsCryptMixin, self).setUp() + + @classmethod + def _get_safe_crypt_handler_backend(cls): + """ + return (handler, backend) pair to use for faking crypt.crypt() support for hash. + backend will be None if none availabe. + """ + # find handler that generates safe_crypt() compatible hash + handler = unwrap_handler(cls.handler) + + # hack to prevent recursion issue when .has_backend() is called + handler.get_backend() + + # find backend which isn't os_crypt + alt_backend = get_alt_backend(handler, "os_crypt") + return handler, alt_backend + + @property + def has_os_crypt_fallback(self): + """ + test if there's a fallback handler to test against if os_crypt can't support + a specified secret (may be explicitly set to False for some subclasses) + """ + return self._get_safe_crypt_handler_backend()[0] is not None + + def _patch_safe_crypt(self): + """if crypt() doesn't support current hash alg, this patches + safe_crypt() so that it transparently uses another one of the handler's + backends, so that we can go ahead and test as much of code path + as possible. 
+ """ + # find handler & backend + handler, alt_backend = self._get_safe_crypt_handler_backend() + if not alt_backend: + raise AssertionError("handler has no available alternate backends!") + + # create subclass of handler, which we swap to an alternate backend + alt_handler = handler.using() + alt_handler.set_backend(alt_backend) + + def crypt_stub(secret, hash): + hash = alt_handler.genhash(secret, hash) + assert isinstance(hash, str) + return hash + + import passlib.utils as mod + self.patchAttr(mod, "_crypt", crypt_stub) + self.using_patched_crypt = True + + @classmethod + def _get_skip_backend_reason(cls, backend): + """ + make sure os_crypt backend is tested + when it's known os_crypt will be faked by _patch_safe_crypt() + """ + assert backend == "os_crypt" + reason = super(OsCryptMixin, cls)._get_skip_backend_reason(backend) + + from passlib.utils import has_crypt + if reason == cls._BACKEND_NOT_AVAILABLE and has_crypt: + if TEST_MODE("full") and cls._get_safe_crypt_handler_backend()[1]: + # in this case, _patch_safe_crypt() will monkeypatch os_crypt + # to use another backend, just so we can test os_crypt fully. + return None + else: + return "hash not supported by os crypt()" + + return reason + + #=================================================================== + # custom tests + #=================================================================== + + # TODO: turn into decorator, and use mock library. + def _use_mock_crypt(self): + """ + patch passlib.utils.safe_crypt() so it returns mock value for duration of test. + returns function whose .return_value controls what's returned. + this defaults to None. + """ + import passlib.utils as mod + + def mock_crypt(secret, config): + # let 'test' string through so _load_os_crypt_backend() will still work + if secret == "test": + return mock_crypt.__wrapped__(secret, config) + else: + return mock_crypt.return_value + + mock_crypt.__wrapped__ = mod._crypt + mock_crypt.return_value = None + + self.patchAttr(mod, "_crypt", mock_crypt) + + return mock_crypt + + def test_80_faulty_crypt(self): + """test with faulty crypt()""" + hash = self.get_sample_hash()[1] + exc_types = (exc.InternalBackendError,) + mock_crypt = self._use_mock_crypt() + + def test(value): + # set safe_crypt() to return specified value, and + # make sure assertion error is raised by handler. + mock_crypt.return_value = value + self.assertRaises(exc_types, self.do_genhash, "stub", hash) + self.assertRaises(exc_types, self.do_encrypt, "stub") + self.assertRaises(exc_types, self.do_verify, "stub", hash) + + test('$x' + hash[2:]) # detect wrong prefix + test(hash[:-1]) # detect too short + test(hash + 'x') # detect too long + + def test_81_crypt_fallback(self): + """test per-call crypt() fallback""" + + # mock up safe_crypt to return None + mock_crypt = self._use_mock_crypt() + mock_crypt.return_value = None + + if self.has_os_crypt_fallback: + # handler should have a fallback to use when os_crypt backend refuses to handle secret. 
+ h1 = self.do_encrypt("stub") + h2 = self.do_genhash("stub", h1) + self.assertEqual(h2, h1) + self.assertTrue(self.do_verify("stub", h1)) + else: + # handler should give up + from passlib.exc import InternalBackendError as err_type + hash = self.get_sample_hash()[1] + self.assertRaises(err_type, self.do_encrypt, 'stub') + self.assertRaises(err_type, self.do_genhash, 'stub', hash) + self.assertRaises(err_type, self.do_verify, 'stub', hash) + + @doesnt_require_backend + def test_82_crypt_support(self): + """ + test platform-specific crypt() support detection + + NOTE: this is mainly just a sanity check to ensure the runtime + detection is functioning correctly on some known platforms, + so that we can feel more confident it'll work right on unknown ones. + """ + + # skip wrapper handlers, won't ever have crypt support + if hasattr(self.handler, "orig_prefix"): + raise self.skipTest("not applicable to wrappers") + + # look for first entry that matches current system + # XXX: append "/" + platform.release() to string? + # XXX: probably should rework to support rows being dicts w/ "minver" / "maxver" keys, + # instead of hack where we add major # as part of platform regex. + using_backend = not self.using_patched_crypt + name = self.handler.name + platform = sys.platform + for pattern, expected in self.platform_crypt_support: + if re.match(pattern, platform): + break + else: + raise self.skipTest("no data for %r platform (current host support = %r)" % + (platform, using_backend)) + + # rules can use "state=None" to signal varied support; + # e.g. platform='freebsd8' ... sha256_crypt not added until 8.3 + if expected is None: + raise self.skipTest("varied support on %r platform (current host support = %r)" % + (platform, using_backend)) + + # compare expectation vs reality + if expected == using_backend: + pass + elif expected: + self.fail("expected %r platform would have native support for %r" % + (platform, name)) + else: + self.fail("did not expect %r platform would have native support for %r" % + (platform, name)) + + #=================================================================== + # fuzzy verified support -- add additional verifier that uses os crypt() + #=================================================================== + + def fuzz_verifier_crypt(self): + """test results against OS crypt()""" + + # don't use this if we're faking safe_crypt (pointless test), + # or if handler is a wrapper (only original handler will be supported by os) + handler = self.handler + if self.using_patched_crypt or hasattr(handler, "wrapped"): + return None + + # create a wrapper for fuzzy verified to use + from crypt import crypt + from passlib.utils import _safe_crypt_lock + encoding = self.FuzzHashGenerator.password_encoding + + def check_crypt(secret, hash): + """stdlib-crypt""" + if not self.crypt_supports_variant(hash): + return "skip" + # XXX: any reason not to use safe_crypt() here? or just want to test against bare metal? + secret = to_native_str(secret, encoding) + with _safe_crypt_lock: + return crypt(secret, hash) == hash + + return check_crypt + + def crypt_supports_variant(self, hash): + """ + fuzzy_verified_crypt() helper -- + used to determine if os crypt() supports a particular hash variant. 
+ """ + return True + + #=================================================================== + # eoc + #=================================================================== + +class UserHandlerMixin(HandlerCase): + """helper for handlers w/ 'user' context kwd; mixin for HandlerCase + + this overrides the HandlerCase test harness methods + so that a username is automatically inserted to hash/verify + calls. as well, passing in a pair of strings as the password + will be interpreted as (secret,user) + """ + #=================================================================== + # option flags + #=================================================================== + default_user = "user" + requires_user = True + user_case_insensitive = False + + #=================================================================== + # instance attrs + #=================================================================== + __unittest_skip = True + + #=================================================================== + # custom tests + #=================================================================== + def test_80_user(self): + """test user context keyword""" + handler = self.handler + password = 'stub' + hash = handler.hash(password, user=self.default_user) + + if self.requires_user: + self.assertRaises(TypeError, handler.hash, password) + self.assertRaises(TypeError, handler.genhash, password, hash) + self.assertRaises(TypeError, handler.verify, password, hash) + else: + # e.g. cisco_pix works with or without one. + handler.hash(password) + handler.genhash(password, hash) + handler.verify(password, hash) + + def test_81_user_case(self): + """test user case sensitivity""" + lower = self.default_user.lower() + upper = lower.upper() + hash = self.do_encrypt('stub', context=dict(user=lower)) + if self.user_case_insensitive: + self.assertTrue(self.do_verify('stub', hash, user=upper), + "user should not be case sensitive") + else: + self.assertFalse(self.do_verify('stub', hash, user=upper), + "user should be case sensitive") + + def test_82_user_salt(self): + """test user used as salt""" + config = self.do_stub_encrypt() + h1 = self.do_genhash('stub', config, user='admin') + h2 = self.do_genhash('stub', config, user='admin') + self.assertEqual(h2, h1) + h3 = self.do_genhash('stub', config, user='root') + self.assertNotEqual(h3, h1) + + # TODO: user size? kinda dicey, depends on algorithm. 
+ + #=================================================================== + # override test helpers + #=================================================================== + def populate_context(self, secret, kwds): + """insert username into kwds""" + if isinstance(secret, tuple): + secret, user = secret + elif not self.requires_user: + return secret + else: + user = self.default_user + if 'user' not in kwds: + kwds['user'] = user + return secret + + #=================================================================== + # modify fuzz testing + #=================================================================== + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + context_map = HandlerCase.FuzzHashGenerator.context_map.copy() + context_map.update(user="random_user") + + user_alphabet = u("asdQWE123") + + def random_user(self): + rng = self.rng + if not self.test.requires_user and rng.random() < .1: + return None + return getrandstr(rng, self.user_alphabet, rng.randint(2,10)) + + #=================================================================== + # eoc + #=================================================================== + +class EncodingHandlerMixin(HandlerCase): + """helper for handlers w/ 'encoding' context kwd; mixin for HandlerCase + + this overrides the HandlerCase test harness methods + so that an encoding can be inserted to hash/verify + calls by passing in a pair of strings as the password + will be interpreted as (secret,encoding) + """ + #=================================================================== + # instance attrs + #=================================================================== + __unittest_skip = True + + # restrict stock passwords & fuzz alphabet to latin-1, + # so different encodings can be tested safely. + stock_passwords = [ + u("test"), + b"test", + u("\u00AC\u00BA"), + ] + + class FuzzHashGenerator(HandlerCase.FuzzHashGenerator): + + password_alphabet = u('qwerty1234<>.@*#! \u00AC') + + def populate_context(self, secret, kwds): + """insert encoding into kwds""" + if isinstance(secret, tuple): + secret, encoding = secret + kwds.setdefault('encoding', encoding) + return secret + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# warnings helpers +#============================================================================= +class reset_warnings(warnings.catch_warnings): + """catch_warnings() wrapper which clears warning registry & filters""" + + def __init__(self, reset_filter="always", reset_registry=".*", **kwds): + super(reset_warnings, self).__init__(**kwds) + self._reset_filter = reset_filter + self._reset_registry = re.compile(reset_registry) if reset_registry else None + + def __enter__(self): + # let parent class archive filter state + ret = super(reset_warnings, self).__enter__() + + # reset the filter to list everything + if self._reset_filter: + warnings.resetwarnings() + warnings.simplefilter(self._reset_filter) + + # archive and clear the __warningregistry__ key for all modules + # that match the 'reset' pattern. 
+ pattern = self._reset_registry + if pattern: + backup = self._orig_registry = {} + for name, mod in list(sys.modules.items()): + if mod is None or not pattern.match(name): + continue + reg = getattr(mod, "__warningregistry__", None) + if reg: + backup[name] = reg.copy() + reg.clear() + return ret + + def __exit__(self, *exc_info): + # restore warning registry for all modules + pattern = self._reset_registry + if pattern: + # restore registry backup, clearing all registry entries that we didn't archive + backup = self._orig_registry + for name, mod in list(sys.modules.items()): + if mod is None or not pattern.match(name): + continue + reg = getattr(mod, "__warningregistry__", None) + if reg: + reg.clear() + orig = backup.get(name) + if orig: + if reg is None: + setattr(mod, "__warningregistry__", orig) + else: + reg.update(orig) + super(reset_warnings, self).__exit__(*exc_info) + +#============================================================================= +# eof +#============================================================================= diff --git a/ansible/lib/python3.11/site-packages/passlib/totp.py b/ansible/lib/python3.11/site-packages/passlib/totp.py new file mode 100644 index 000000000..9ad500087 --- /dev/null +++ b/ansible/lib/python3.11/site-packages/passlib/totp.py @@ -0,0 +1,1908 @@ +"""passlib.totp -- TOTP / RFC6238 / Google Authenticator utilities.""" +#============================================================================= +# imports +#============================================================================= +from __future__ import absolute_import, division, print_function +from passlib.utils.compat import PY3 +# core +import base64 +import calendar +import json +import logging; log = logging.getLogger(__name__) +import math +import struct +import sys +import time as _time +import re +if PY3: + from urllib.parse import urlparse, parse_qsl, quote, unquote +else: + from urllib import quote, unquote + from urlparse import urlparse, parse_qsl +from warnings import warn +# site +try: + # TOTP encrypted keys only supported if cryptography (https://cryptography.io) is installed + from cryptography.hazmat.backends import default_backend as _cg_default_backend + import cryptography.hazmat.primitives.ciphers.algorithms + import cryptography.hazmat.primitives.ciphers.modes + from cryptography.hazmat.primitives import ciphers as _cg_ciphers + del cryptography +except ImportError: + log.debug("can't import 'cryptography' package, totp encryption disabled") + _cg_ciphers = _cg_default_backend = None +# pkg +from passlib import exc +from passlib.exc import TokenError, MalformedTokenError, InvalidTokenError, UsedTokenError +from passlib.utils import (to_unicode, to_bytes, consteq, + getrandbytes, rng, SequenceMixin, xor_bytes, getrandstr) +from passlib.utils.binary import BASE64_CHARS, b32encode, b32decode +from passlib.utils.compat import (u, unicode, native_string_types, bascii_to_str, int_types, num_types, + irange, byte_elem_value, UnicodeIO, suppress_cause) +from passlib.utils.decor import hybrid_method, memoized_property +from passlib.crypto.digest import lookup_hash, compile_hmac, pbkdf2_hmac +from passlib.hash import pbkdf2_sha256 +# local +__all__ = [ + # frontend classes + "AppWallet", + "TOTP", + + # errors (defined in passlib.exc, but exposed here for convenience) + "TokenError", + "MalformedTokenError", + "InvalidTokenError", + "UsedTokenError", + + # internal helper classes + "TotpToken", + "TotpMatch", +] + 
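A minimal usage sketch of the public API this module defines, assuming it is importable as passlib.totp; the label and issuer strings below are placeholders:

from passlib.totp import TOTP

totp = TOTP.new()                                                # generate a fresh random key
uri = totp.to_uri(label="alice@example.org", issuer="Example")   # provisioning URI for a client app
token = totp.generate().token                                    # current token as a digit string
match = totp.match(token)                                        # TotpMatch on success, raises TokenError otherwise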
+#============================================================================= +# HACK: python < 2.7.4's urlparse() won't parse query strings unless the url scheme +# is one of the schemes in the urlparse.uses_query list. 2.7 abandoned +# this, and parses query if present, regardless of the scheme. +# as a workaround for older versions, we add "otpauth" to the known list. +# this was fixed by https://bugs.python.org/issue9374, in 2.7.4 release. +#============================================================================= +if sys.version_info < (2,7,4): + from urlparse import uses_query + if "otpauth" not in uses_query: + uses_query.append("otpauth") + log.debug("registered 'otpauth' scheme with urlparse.uses_query") + del uses_query + +#============================================================================= +# internal helpers +#============================================================================= + +#----------------------------------------------------------------------------- +# token parsing / rendering helpers +#----------------------------------------------------------------------------- + +#: regex used to clean whitespace from tokens & keys +_clean_re = re.compile(u(r"\s|[-=]"), re.U) + +_chunk_sizes = [4,6,5] + +def _get_group_size(klen): + """ + helper for group_string() -- + calculates optimal size of group for given string size. + """ + # look for exact divisor + for size in _chunk_sizes: + if not klen % size: + return size + # fallback to divisor with largest remainder + # (so chunks are as close to even as possible) + best = _chunk_sizes[0] + rem = 0 + for size in _chunk_sizes: + if klen % size > rem: + best = size + rem = klen % size + return best + +def group_string(value, sep="-"): + """ + reformat string into (roughly) evenly-sized groups, separated by **sep**. + useful for making tokens & keys easier to read by humans. + """ + klen = len(value) + size = _get_group_size(klen) + return sep.join(value[o:o+size] for o in irange(0, klen, size)) + +#----------------------------------------------------------------------------- +# encoding helpers +#----------------------------------------------------------------------------- + +def _decode_bytes(key, format): + """ + internal TOTP() helper -- + decodes key according to specified format. + """ + if format == "raw": + if not isinstance(key, bytes): + raise exc.ExpectedTypeError(key, "bytes", "key") + return key + # for encoded data, key must be either unicode or ascii-encoded bytes, + # and must contain a hex or base32 string. + key = to_unicode(key, param="key") + key = _clean_re.sub("", key).encode("utf-8") # strip whitespace & hypens + if format == "hex" or format == "base16": + return base64.b16decode(key.upper()) + elif format == "base32": + return b32decode(key) + # XXX: add base64 support? + else: + raise ValueError("unknown byte-encoding format: %r" % (format,)) + +#============================================================================= +# OTP management +#============================================================================= + +#: flag for detecting if encrypted totp support is present +AES_SUPPORT = bool(_cg_ciphers) + +#: regex for validating secret tags +_tag_re = re.compile("(?i)^[a-z0-9][a-z0-9_.-]*$") + +class AppWallet(object): + """ + This class stores application-wide secrets that can be used + to encrypt & decrypt TOTP keys for storage. + It's mostly an internal detail, applications usually just need + to pass ``secrets`` or ``secrets_path`` to :meth:`TOTP.using`. + + .. 
seealso:: + + :ref:`totp-storing-instances` for more details on this workflow. + + Arguments + ========= + :param secrets: + Dict of application secrets to use when encrypting/decrypting + stored TOTP keys. This should include a secret to use when encrypting + new keys, but may contain additional older secrets to decrypt + existing stored keys. + + The dict should map tags -> secrets, so that each secret is identified + by a unique tag. This tag will be stored along with the encrypted + key in order to determine which secret should be used for decryption. + Tag should be string that starts with regex range ``[a-z0-9]``, + and the remaining characters must be in ``[a-z0-9_.-]``. + + It is recommended to use something like a incremental counter + ("1", "2", ...), an ISO date ("2016-01-01", "2016-05-16", ...), + or a timestamp ("19803495", "19813495", ...) when assigning tags. + + This mapping be provided in three formats: + + * A python dict mapping tag -> secret + * A JSON-formatted string containing the dict + * A multiline string with the format ``"tag: value\\ntag: value\\n..."`` + + (This last format is mainly useful when loading from a text file via **secrets_path**) + + .. seealso:: :func:`generate_secret` to create a secret with sufficient entropy + + :param secrets_path: + Alternately, callers can specify a separate file where the + application-wide secrets are stored, using either of the string + formats described in **secrets**. + + :param default_tag: + Specifies which tag in **secrets** should be used as the default + for encrypting new keys. If omitted, the tags will be sorted, + and the largest tag used as the default. + + if all tags are numeric, they will be sorted numerically; + otherwise they will be sorted alphabetically. + this permits tags to be assigned numerically, + or e.g. using ``YYYY-MM-DD`` dates. + + :param encrypt_cost: + Optional time-cost factor for key encryption. + This value corresponds to log2() of the number of PBKDF2 + rounds used. + + .. warning:: + + The application secret(s) should be stored in a secure location by + your application, and each secret should contain a large amount + of entropy (to prevent brute-force attacks if the encrypted keys + are leaked). + + :func:`generate_secret` is provided as a convenience helper + to generate a new application secret of suitable size. + + Best practice is to load these values from a file via **secrets_path**, + and then have your application give up permission to read this file + once it's running. + + Public Methods + ============== + .. autoattribute:: has_secrets + .. autoattribute:: default_tag + + Semi-Private Methods + ==================== + The following methods are used internally by the :class:`TOTP` + class in order to encrypt & decrypt keys using the provided application + secrets. They will generally not be publically useful, and may have their + API changed periodically. + + .. automethod:: get_secret + .. automethod:: encrypt_key + .. automethod:: decrypt_key + """ + #======================================================================== + # instance attrs + #======================================================================== + + #: default salt size for encrypt_key() output + salt_size = 12 + + #: default cost (log2 of pbkdf2 rounds) for encrypt_key() output + #: NOTE: this is relatively low, since the majority of the security + #: relies on a high entropy secret to pass to AES. 
+ encrypt_cost = 14 + + #: map of secret tag -> secret bytes + _secrets = None + + #: tag for default secret + default_tag = None + + #======================================================================== + # init + #======================================================================== + def __init__(self, secrets=None, default_tag=None, encrypt_cost=None, + secrets_path=None): + + # TODO: allow a lot more things to be customized from here, + # e.g. setting default TOTP constructor options. + + # + # init cost + # + if encrypt_cost is not None: + if isinstance(encrypt_cost, native_string_types): + encrypt_cost = int(encrypt_cost) + assert encrypt_cost >= 0 + self.encrypt_cost = encrypt_cost + + # + # init secrets map + # + + # load secrets from file (if needed) + if secrets_path is not None: + if secrets is not None: + raise TypeError("'secrets' and 'secrets_path' are mutually exclusive") + secrets = open(secrets_path, "rt").read() + + # parse & store secrets + secrets = self._secrets = self._parse_secrets(secrets) + + # + # init default tag/secret + # + if secrets: + if default_tag is not None: + # verify that tag is present in map + self.get_secret(default_tag) + elif all(tag.isdigit() for tag in secrets): + default_tag = max(secrets, key=int) + else: + default_tag = max(secrets) + self.default_tag = default_tag + + def _parse_secrets(self, source): + """ + parse 'secrets' parameter + + :returns: + Dict[tag:str, secret:bytes] + """ + # parse string formats + # to make this easy to pass in configuration from a separate file, + # 'secrets' can be string using two formats -- json & "tag:value\n" + check_type = True + if isinstance(source, native_string_types): + if source.lstrip().startswith(("[", "{")): + # json list / dict + source = json.loads(source) + elif "\n" in source and ":" in source: + # multiline string containing series of "tag: value\n" rows; + # empty and "#\n" rows are ignored + def iter_pairs(source): + for line in source.splitlines(): + line = line.strip() + if line and not line.startswith("#"): + tag, secret = line.split(":", 1) + yield tag.strip(), secret.strip() + source = iter_pairs(source) + check_type = False + else: + raise ValueError("unrecognized secrets string format") + + # ensure we have iterable of (tag, value) pairs + if source is None: + return {} + elif isinstance(source, dict): + source = source.items() + # XXX: could support iterable of (tag,value) pairs, but not yet needed... 
+ # elif check_type and (isinstance(source, str) or not isinstance(source, Iterable)): + elif check_type: + raise TypeError("'secrets' must be mapping, or list of items") + + # parse into final dict, normalizing contents + return dict(self._parse_secret_pair(tag, value) + for tag, value in source) + + def _parse_secret_pair(self, tag, value): + if isinstance(tag, native_string_types): + pass + elif isinstance(tag, int): + tag = str(tag) + else: + raise TypeError("tag must be unicode/string: %r" % (tag,)) + if not _tag_re.match(tag): + raise ValueError("tag contains invalid characters: %r" % (tag,)) + if not isinstance(value, bytes): + value = to_bytes(value, param="secret %r" % (tag,)) + if not value: + raise ValueError("tag contains empty secret: %r" % (tag,)) + return tag, value + + #======================================================================== + # accessing secrets + #======================================================================== + + @property + def has_secrets(self): + """whether at least one application secret is present""" + return self.default_tag is not None + + def get_secret(self, tag): + """ + resolve a secret tag to the secret (as bytes). + throws a KeyError if not found. + """ + secrets = self._secrets + if not secrets: + raise KeyError("no application secrets configured") + try: + return secrets[tag] + except KeyError: + raise suppress_cause(KeyError("unknown secret tag: %r" % (tag,))) + + #======================================================================== + # encrypted key helpers -- used internally by TOTP + #======================================================================== + + @staticmethod + def _cipher_aes_key(value, secret, salt, cost, decrypt=False): + """ + Internal helper for :meth:`encrypt_key` -- + handles lowlevel encryption/decryption. + + Algorithm details: + + This function uses PBKDF2-HMAC-SHA256 to generate a 32-byte AES key + and a 16-byte IV from the application secret & random salt. + It then uses AES-256-CTR to encrypt/decrypt the TOTP key. + + CTR mode was chosen over CBC because the main attack scenario here + is that the attacker has stolen the database, and is trying to decrypt a TOTP key + (the plaintext value here). To make it hard for them, we want every password + to decrypt to a potentially valid key -- thus need to avoid any authentication + or padding oracle attacks. While some random padding construction could be devised + to make this work for CBC mode, a stream cipher mode is just plain simpler. + OFB/CFB modes would also work here, but seeing as they have malleability + and cyclic issues (though remote and barely relevant here), + CTR was picked as the best overall choice. + """ + # make sure backend AES support is available + if _cg_ciphers is None: + raise RuntimeError("TOTP encryption requires 'cryptography' package " + "(https://cryptography.io)") + + # use pbkdf2 to derive both key (32 bytes) & iv (16 bytes) + # NOTE: this requires 2 sha256 blocks to be calculated. + keyiv = pbkdf2_hmac("sha256", secret, salt=salt, rounds=(1 << cost), keylen=48) + + # use AES-256-CTR to encrypt/decrypt input value + cipher = _cg_ciphers.Cipher(_cg_ciphers.algorithms.AES(keyiv[:32]), + _cg_ciphers.modes.CTR(keyiv[32:]), + _cg_default_backend()) + ctx = cipher.decryptor() if decrypt else cipher.encryptor() + return ctx.update(value) + ctx.finalize() + + def encrypt_key(self, key): + """ + Helper used to encrypt TOTP keys for storage. + + :param key: + TOTP key to encrypt, as raw bytes. 
+ + :returns: + dict containing encrypted TOTP key & configuration parameters. + this format should be treated as opaque, and potentially subject + to change, though it is designed to be easily serialized/deserialized + (e.g. via JSON). + + .. note:: + + This function requires installation of the external + `cryptography `_ package. + + To give some algorithm details: This function uses AES-256-CTR to encrypt + the provided data. It takes the application secret and randomly generated salt, + and uses PBKDF2-HMAC-SHA256 to combine them and generate the AES key & IV. + """ + if not key: + raise ValueError("no key provided") + salt = getrandbytes(rng, self.salt_size) + cost = self.encrypt_cost + tag = self.default_tag + if not tag: + raise TypeError("no application secrets configured, can't encrypt OTP key") + ckey = self._cipher_aes_key(key, self.get_secret(tag), salt, cost) + # XXX: switch to base64? + return dict(v=1, c=cost, t=tag, s=b32encode(salt), k=b32encode(ckey)) + + def decrypt_key(self, enckey): + """ + Helper used to decrypt TOTP keys from storage format. + Consults configured secrets to decrypt key. + + :param source: + source object, as returned by :meth:`encrypt_key`. + + :returns: + ``(key, needs_recrypt)`` -- + + **key** will be the decrypted key, as bytes. + + **needs_recrypt** will be a boolean flag indicating + whether encryption cost or default tag is too old, + and henace that key needs re-encrypting before storing. + + .. note:: + + This function requires installation of the external + `cryptography `_ package. + """ + if not isinstance(enckey, dict): + raise TypeError("'enckey' must be dictionary") + version = enckey.get("v", None) + needs_recrypt = False + if version == 1: + _cipher_key = self._cipher_aes_key + else: + raise ValueError("missing / unrecognized 'enckey' version: %r" % (version,)) + tag = enckey['t'] + cost = enckey['c'] + key = _cipher_key( + value=b32decode(enckey['k']), + secret=self.get_secret(tag), + salt=b32decode(enckey['s']), + cost=cost, + ) + if cost != self.encrypt_cost or tag != self.default_tag: + needs_recrypt = True + return key, needs_recrypt + + #============================================================================= + # eoc + #============================================================================= + +#============================================================================= +# TOTP class +#============================================================================= + +#: helper to convert HOTP counter to bytes +_pack_uint64 = struct.Struct(">Q").pack + +#: helper to extract value from HOTP digest +_unpack_uint32 = struct.Struct(">I").unpack + +#: dummy bytes used as temp key for .using() method +_DUMMY_KEY = b"\x00" * 16 + +class TOTP(object): + """ + Helper for generating and verifying TOTP codes. + + Given a secret key and set of configuration options, this object + offers methods for token generation, token validation, and serialization. + It can also be used to track important persistent TOTP state, + such as the last counter used. + + This class accepts the following options + (only **key** and **format** may be specified as positional arguments). + + :arg str key: + The secret key to use. By default, should be encoded as + a base32 string (see **format** for other encodings). + + Exactly one of **key** or ``new=True`` must be specified. + + :arg str format: + The encoding used by the **key** parameter. 
May be one of: + ``"base32"`` (base32-encoded string), + ``"hex"`` (hexadecimal string), or ``"raw"`` (raw bytes). + Defaults to ``"base32"``. + + :param bool new: + If ``True``, a new key will be generated using :class:`random.SystemRandom`. + + Exactly one ``new=True`` or **key** must be specified. + + :param str label: + Label to associate with this token when generating a URI. + Displayed to user by most OTP client applications (e.g. Google Authenticator), + and typically has format such as ``"John Smith"`` or ``"jsmith@webservice.example.org"``. + Defaults to ``None``. + See :meth:`to_uri` for details. + + :param str issuer: + String identifying the token issuer (e.g. the domain name of your service). + Used internally by some OTP client applications (e.g. Google Authenticator) to distinguish entries + which otherwise have the same label. + Optional but strongly recommended if you're rendering to a URI. + Defaults to ``None``. + See :meth:`to_uri` for details. + + :param int size: + Number of bytes when generating new keys. Defaults to size of hash algorithm (e.g. 20 for SHA1). + + .. warning:: + + Overriding the default values for ``digits``, ``period``, or ``alg`` may + cause problems with some OTP client programs (such as Google Authenticator), + which may have these defaults hardcoded. + + :param int digits: + The number of digits in the generated / accepted tokens. Defaults to ``6``. + Must be in range [6 .. 10]. + + .. rst-class:: inline-title + .. caution:: + Due to a limitation of the HOTP algorithm, the 10th digit can only take on values 0 .. 2, + and thus offers very little extra security. + + :param str alg: + Name of hash algorithm to use. Defaults to ``"sha1"``. + ``"sha256"`` and ``"sha512"`` are also accepted, per :rfc:`6238`. + + :param int period: + The time-step period to use, in integer seconds. Defaults to ``30``. + + .. + See the passlib documentation for a full list of attributes & methods. + """ + #============================================================================= + # class attrs + #============================================================================= + + #: minimum number of bytes to allow in key, enforced by passlib. + # XXX: see if spec says anything relevant to this. + _min_key_size = 10 + + #: minimum & current serialization version (may be set independently by subclasses) + min_json_version = json_version = 1 + + #: AppWallet that this class will use for encrypting/decrypting keys. + #: (can be overwritten via the :meth:`TOTP.using()` constructor) + wallet = None + + #: function to get system time in seconds, as needed by :meth:`generate` and :meth:`verify`. + #: defaults to :func:`time.time`, but can be overridden on a per-instance basis. + now = _time.time + + #============================================================================= + # instance attrs + #============================================================================= + + #--------------------------------------------------------------------------- + # configuration attrs + #--------------------------------------------------------------------------- + + #: [private] secret key as raw :class:`!bytes` + #: see .key property for public access. + _key = None + + #: [private] cached copy of encrypted secret, + #: so .to_json() doesn't have to re-encrypt on each call. + _encrypted_key = None + + #: [private] cached copy of keyed HMAC function, + #: so ._generate() doesn't have to rebuild this each time + #: ._find_match() invokes it. 
+ _keyed_hmac = None + + #: number of digits in the generated tokens. + digits = 6 + + #: name of hash algorithm in use (e.g. ``"sha1"``) + alg = "sha1" + + #: default label for :meth:`to_uri` + label = None + + #: default issuer for :meth:`to_uri` + issuer = None + + #: number of seconds per counter step. + #: *(TOTP uses an internal time-derived counter which + #: increments by 1 every* :attr:`!period` *seconds)*. + period = 30 + + #--------------------------------------------------------------------------- + # state attrs + #--------------------------------------------------------------------------- + + #: Flag set by deserialization methods to indicate the object needs to be re-serialized. + #: This can be for a number of reasons -- encoded using deprecated format, + #: or encrypted using a deprecated key or too few rounds. + changed = False + + #============================================================================= + # prototype construction + #============================================================================= + @classmethod + def using(cls, digits=None, alg=None, period=None, + issuer=None, wallet=None, now=None, **kwds): + """ + Dynamically create subtype of :class:`!TOTP` class + which has the specified defaults set. + + :parameters: **digits, alg, period, issuer**: + + All these options are the same as in the :class:`TOTP` constructor, + and the resulting class will use any values you specify here + as the default for all TOTP instances it creates. + + :param wallet: + Optional :class:`AppWallet` that will be used for encrypting/decrypting keys. + + :param secrets, secrets_path, encrypt_cost: + + If specified, these options will be passed to the :class:`AppWallet` constructor, + allowing you to directly specify the secret keys that should be used + to encrypt & decrypt stored keys. + + :returns: + subclass of :class:`!TOTP`. + + This method is useful for creating a TOTP class configured + to use your application's secrets for encrypting & decrypting + keys, as well as create new keys using it's desired configuration defaults. + + As an example:: + + >>> # your application can create a custom class when it initializes + >>> from passlib.totp import TOTP, generate_secret + >>> TotpFactory = TOTP.using(secrets={"1": generate_secret()}) + + >>> # subsequent TOTP objects created from this factory + >>> # will use the specified secrets to encrypt their keys... + >>> totp = TotpFactory.new() + >>> totp.to_dict() + {'enckey': {'c': 14, + 'k': 'H77SYXWORDPGVOQTFRR2HFUB3C45XXI7', + 's': 'G5DOQPIHIBUM2OOHHADQ', + 't': '1', + 'v': 1}, + 'type': 'totp', + 'v': 1} + + .. seealso:: :ref:`totp-creation` and :ref:`totp-storing-instances` tutorials for a usage example + """ + # XXX: could add support for setting default match 'window' and 'reuse' policy + + # :param now: + # Optional callable that should return current time for generator to use. + # Default to :func:`time.time`. This optional is generally not needed, + # and is mainly present for examples & unit-testing. + + subcls = type("TOTP", (cls,), {}) + + def norm_param(attr, value): + """ + helper which uses constructor to validate parameter value. + it returns corresponding attribute, so we use normalized value. + """ + # NOTE: this creates *subclass* instance, + # so normalization takes into account any custom params + # already stored. 
+ kwds = dict(key=_DUMMY_KEY, format="raw") + kwds[attr] = value + obj = subcls(**kwds) + return getattr(obj, attr) + + if digits is not None: + subcls.digits = norm_param("digits", digits) + + if alg is not None: + subcls.alg = norm_param("alg", alg) + + if period is not None: + subcls.period = norm_param("period", period) + + # XXX: add default size as configurable parameter? + + if issuer is not None: + subcls.issuer = norm_param("issuer", issuer) + + if kwds: + subcls.wallet = AppWallet(**kwds) + if wallet: + raise TypeError("'wallet' and 'secrets' keywords are mutually exclusive") + elif wallet is not None: + if not isinstance(wallet, AppWallet): + raise exc.ExpectedTypeError(wallet, AppWallet, "wallet") + subcls.wallet = wallet + + if now is not None: + assert isinstance(now(), num_types) and now() >= 0, \ + "now() function must return non-negative int/float" + subcls.now = staticmethod(now) + + return subcls + + #============================================================================= + # init + #============================================================================= + + @classmethod + def new(cls, **kwds): + """ + convenience alias for creating new TOTP key, same as ``TOTP(new=True)`` + """ + return cls(new=True, **kwds) + + def __init__(self, key=None, format="base32", + # keyword only... + new=False, digits=None, alg=None, size=None, period=None, + label=None, issuer=None, changed=False, + **kwds): + super(TOTP, self).__init__(**kwds) + if changed: + self.changed = changed + + # validate & normalize alg + info = lookup_hash(alg or self.alg) + self.alg = info.name + digest_size = info.digest_size + if digest_size < 4: + raise RuntimeError("%r hash digest too small" % alg) + + # parse or generate new key + if new: + # generate new key + if key: + raise TypeError("'key' and 'new=True' are mutually exclusive") + if size is None: + # default to digest size, per RFC 6238 Section 5.1 + size = digest_size + elif size > digest_size: + # not forbidden by spec, but would just be wasted bytes. + # maybe just warn about this? + raise ValueError("'size' should be less than digest size " + "(%d)" % digest_size) + self.key = getrandbytes(rng, size) + elif not key: + raise TypeError("must specify either an existing 'key', or 'new=True'") + elif format == "encrypted": + # NOTE: this handles decrypting & setting '.key' + self.encrypted_key = key + elif key: + # use existing key, encoded using specified + self.key = _decode_bytes(key, format) + + # enforce min key size + if len(self.key) < self._min_key_size: + # only making this fatal for new=True, + # so that existing (but ridiculously small) keys can still be used. 
+ msg = "for security purposes, secret key must be >= %d bytes" % self._min_key_size + if new: + raise ValueError(msg) + else: + warn(msg, exc.PasslibSecurityWarning, stacklevel=1) + + # validate digits + if digits is None: + digits = self.digits + if not isinstance(digits, int_types): + raise TypeError("digits must be an integer, not a %r" % type(digits)) + if digits < 6 or digits > 10: + raise ValueError("digits must in range(6,11)") + self.digits = digits + + # validate label + if label: + self._check_label(label) + self.label = label + + # validate issuer + if issuer: + self._check_issuer(issuer) + self.issuer = issuer + + # init period + if period is not None: + self._check_serial(period, "period", minval=1) + self.period = period + + #============================================================================= + # helpers to verify value types & ranges + #============================================================================= + + @staticmethod + def _check_serial(value, param, minval=0): + """ + check that serial value (e.g. 'counter') is non-negative integer + """ + if not isinstance(value, int_types): + raise exc.ExpectedTypeError(value, "int", param) + if value < minval: + raise ValueError("%s must be >= %d" % (param, minval)) + + @staticmethod + def _check_label(label): + """ + check that label doesn't contain chars forbidden by KeyURI spec + """ + if label and ":" in label: + raise ValueError("label may not contain ':'") + + @staticmethod + def _check_issuer(issuer): + """ + check that issuer doesn't contain chars forbidden by KeyURI spec + """ + if issuer and ":" in issuer: + raise ValueError("issuer may not contain ':'") + + #============================================================================= + # key attributes + #============================================================================= + + #------------------------------------------------------------------ + # raw key + #------------------------------------------------------------------ + @property + def key(self): + """ + secret key as raw bytes + """ + return self._key + + @key.setter + def key(self, value): + # set key + if not isinstance(value, bytes): + raise exc.ExpectedTypeError(value, bytes, "key") + self._key = value + + # clear cached properties derived from key + self._encrypted_key = self._keyed_hmac = None + + #------------------------------------------------------------------ + # encrypted key + #------------------------------------------------------------------ + @property + def encrypted_key(self): + """ + secret key, encrypted using application secret. + this match the output of :meth:`AppWallet.encrypt_key`, + and should be treated as an opaque json serializable object. 
+ """ + enckey = self._encrypted_key + if enckey is None: + wallet = self.wallet + if not wallet: + raise TypeError("no application secrets present, can't encrypt TOTP key") + enckey = self._encrypted_key = wallet.encrypt_key(self.key) + return enckey + + @encrypted_key.setter + def encrypted_key(self, value): + wallet = self.wallet + if not wallet: + raise TypeError("no application secrets present, can't decrypt TOTP key") + self.key, needs_recrypt = wallet.decrypt_key(value) + if needs_recrypt: + # mark as changed so it gets re-encrypted & written to db + self.changed = True + else: + # cache encrypted key for re-use + self._encrypted_key = value + + #------------------------------------------------------------------ + # pretty-printed / encoded key helpers + #------------------------------------------------------------------ + + @property + def hex_key(self): + """ + secret key encoded as hexadecimal string + """ + return bascii_to_str(base64.b16encode(self.key)).lower() + + @property + def base32_key(self): + """ + secret key encoded as base32 string + """ + return b32encode(self.key) + + def pretty_key(self, format="base32", sep="-"): + """ + pretty-print the secret key. + + This is mainly useful for situations where the user cannot get the qrcode to work, + and must enter the key manually into their TOTP client. It tries to format + the key in a manner that is easier for humans to read. + + :param format: + format to output secret key. ``"hex"`` and ``"base32"`` are both accepted. + + :param sep: + separator to insert to break up key visually. + can be any of ``"-"`` (the default), ``" "``, or ``False`` (no separator). + + :return: + key as native string. + + Usage example:: + + >>> t = TOTP('s3jdvb7qd2r7jpxx') + >>> t.pretty_key() + 'S3JD-VB7Q-D2R7-JPXX' + """ + if format == "hex" or format == "base16": + key = self.hex_key + elif format == "base32": + key = self.base32_key + else: + raise ValueError("unknown byte-encoding format: %r" % (format,)) + if sep: + key = group_string(key, sep) + return key + + #============================================================================= + # time & token parsing + #============================================================================= + + @classmethod + def normalize_time(cls, time): + """ + Normalize time value to unix epoch seconds. + + :arg time: + Can be ``None``, :class:`!datetime`, + or unix epoch timestamp as :class:`!float` or :class:`!int`. + If ``None``, uses current system time. + Naive datetimes are treated as UTC. + + :returns: + unix epoch timestamp as :class:`int`. + """ + if isinstance(time, int_types): + return time + elif isinstance(time, float): + return int(time) + elif time is None: + return int(cls.now()) + elif hasattr(time, "utctimetuple"): + # coerce datetime to UTC timestamp + # NOTE: utctimetuple() assumes naive datetimes are in UTC + # NOTE: we explicitly *don't* want microseconds. + return calendar.timegm(time.utctimetuple()) + else: + raise exc.ExpectedTypeError(time, "int, float, or datetime", "time") + + def _time_to_counter(self, time): + """ + convert timestamp to HOTP counter using :attr:`period`. + """ + return time // self.period + + def _counter_to_time(self, counter): + """ + convert HOTP counter to timestamp using :attr:`period`. + """ + return counter * self.period + + @hybrid_method + def normalize_token(self_or_cls, token): + """ + Normalize OTP token representation: + strips whitespace, converts integers to a zero-padded string, + validates token content & number of digits. 
+ + This is a hybrid method -- it can be called at the class level, + as ``TOTP.normalize_token()``, or the instance level as ``TOTP().normalize_token()``. + It will normalize to the instance-specific number of :attr:`~TOTP.digits`, + or use the class default. + + :arg token: + token as ascii bytes, unicode, or an integer. + + :raises ValueError: + if token has wrong number of digits, or contains non-numeric characters. + + :returns: + token as :class:`!unicode` string, containing only digits 0-9. + """ + digits = self_or_cls.digits + if isinstance(token, int_types): + token = u("%0*d") % (digits, token) + else: + token = to_unicode(token, param="token") + token = _clean_re.sub(u(""), token) + if not token.isdigit(): + raise MalformedTokenError("Token must contain only the digits 0-9") + if len(token) != digits: + raise MalformedTokenError("Token must have exactly %d digits" % digits) + return token + + #============================================================================= + # token generation + #============================================================================= + +# # debug helper +# def generate_range(self, size, time=None): +# counter = self._time_to_counter(time) - (size + 1) // 2 +# end = counter + size +# while counter <= end: +# token = self._generate(counter) +# yield TotpToken(self, token, counter) +# counter += 1 + + def generate(self, time=None): + """ + Generate token for specified time + (uses current time if none specified). + + :arg time: + Can be ``None``, a :class:`!datetime`, + or class:`!float` / :class:`!int` unix epoch timestamp. + If ``None`` (the default), uses current system time. + Naive datetimes are treated as UTC. + + :returns: + + A :class:`TotpToken` instance, which can be treated + as a sequence of ``(token, expire_time)`` -- see that class + for more details. + + Usage example:: + + >>> # generate a new token, wrapped in a TotpToken instance... + >>> otp = TOTP('s3jdvb7qd2r7jpxx') + >>> otp.generate(1419622739) + + + >>> # when you just need the token... + >>> otp.generate(1419622739).token + '897212' + """ + time = self.normalize_time(time) + counter = self._time_to_counter(time) + if counter < 0: + raise ValueError("timestamp must be >= 0") + token = self._generate(counter) + return TotpToken(self, token, counter) + + def _generate(self, counter): + """ + base implementation of HOTP token generation algorithm. + + :arg counter: HOTP counter, as non-negative integer + :returns: token as unicode string + """ + # generate digest + assert isinstance(counter, int_types), "counter must be integer" + assert counter >= 0, "counter must be non-negative" + keyed_hmac = self._keyed_hmac + if keyed_hmac is None: + keyed_hmac = self._keyed_hmac = compile_hmac(self.alg, self.key) + digest = keyed_hmac(_pack_uint64(counter)) + digest_size = keyed_hmac.digest_info.digest_size + assert len(digest) == digest_size, "digest_size: sanity check failed" + + # derive 31-bit token value + assert digest_size >= 20, "digest_size: sanity check 2 failed" # otherwise 0xF+4 will run off end of hash. + offset = byte_elem_value(digest[-1]) & 0xF + value = _unpack_uint32(digest[offset:offset+4])[0] & 0x7fffffff + + # render to decimal string, return last chars + # NOTE: the 10'th digit is not as secure, as it can only take on values 0-2, not 0-9, + # due to 31-bit mask on int ">I". But some servers / clients use it :| + # if 31-bit mask removed (which breaks spec), would only get values 0-4. 
+ digits = self.digits + assert 0 < digits < 11, "digits: sanity check failed" + return (u("%0*d") % (digits, value))[-digits:] + + #============================================================================= + # token verification + #============================================================================= + + @classmethod + def verify(cls, token, source, **kwds): + r""" + Convenience wrapper around :meth:`TOTP.from_source` and :meth:`TOTP.match`. + + This parses a TOTP key & configuration from the specified source, + and tries and match the token. + It's designed to parallel the :meth:`passlib.ifc.PasswordHash.verify` method. + + :param token: + Token string to match. + + :param source: + Serialized TOTP key. + Can be anything accepted by :meth:`TOTP.from_source`. + + :param \\*\\*kwds: + All additional keywords passed to :meth:`TOTP.match`. + + :return: + A :class:`TotpMatch` instance, or raises a :exc:`TokenError`. + """ + return cls.from_source(source).match(token, **kwds) + + def match(self, token, time=None, window=30, skew=0, last_counter=None): + """ + Match TOTP token against specified timestamp. + Searches within a window before & after the provided time, + in order to account for transmission delay and small amounts of skew in the client's clock. + + :arg token: + Token to validate. + may be integer or string (whitespace and hyphens are ignored). + + :param time: + Unix epoch timestamp, can be any of :class:`!float`, :class:`!int`, or :class:`!datetime`. + if ``None`` (the default), uses current system time. + *this should correspond to the time the token was received from the client*. + + :param int window: + How far backward and forward in time to search for a match. + Measured in seconds. Defaults to ``30``. Typically only useful if set + to multiples of :attr:`period`. + + :param int skew: + Adjust timestamp by specified value, to account for excessive + client clock skew. Measured in seconds. Defaults to ``0``. + + Negative skew (the common case) indicates transmission delay, + and/or that the client clock is running behind the server. + + Positive skew indicates the client clock is running ahead of the server + (and by enough that it cancels out any negative skew added by + the transmission delay). + + You should ensure the server clock uses a reliable time source such as NTP, + so that only the client clock's inaccuracy needs to be accounted for. + + This is an advanced parameter that should usually be left at ``0``; + The **window** parameter is usually enough to account + for any observed transmission delay. + + :param last_counter: + Optional value of last counter value that was successfully used. + If specified, verify will never search earlier counters, + no matter how large the window is. + + Useful when client has previously authenticated, + and thus should never provide a token older than previously + verified value. + + :raises ~passlib.exc.TokenError: + + If the token is malformed, fails to match, or has already been used. + + :returns TotpMatch: + + Returns a :class:`TotpMatch` instance on successful match. + Can be treated as tuple of ``(counter, time)``. + Raises error if token is malformed / can't be verified. 
+ + Usage example:: + + >>> totp = TOTP('s3jdvb7qd2r7jpxx') + + >>> # valid token for this time period + >>> totp.match('897212', 1419622729) + + + >>> # token from counter step 30 sec ago (within allowed window) + >>> totp.match('000492', 1419622729) + + + >>> # invalid token -- token from 60 sec ago (outside of window) + >>> totp.match('760389', 1419622729) + Traceback: + ... + InvalidTokenError: Token did not match + """ + time = self.normalize_time(time) + self._check_serial(window, "window") + + client_time = time + skew + if last_counter is None: + last_counter = -1 + start = max(last_counter, self._time_to_counter(client_time - window)) + end = self._time_to_counter(client_time + window) + 1 + # XXX: could pass 'expected = _time_to_counter(client_time + TRANSMISSION_DELAY)' + # to the _find_match() method, would help if window set to very large value. + + counter = self._find_match(token, start, end) + assert counter >= last_counter, "sanity check failed: counter went backward" + + if counter == last_counter: + raise UsedTokenError(expire_time=(last_counter + 1) * self.period) + + # NOTE: By returning match tied to