From aefd1b0920150feff31922a6979affc005a6a7d4 Mon Sep 17 00:00:00 2001 From: Joe McDonnell Date: Sun, 2 Jul 2023 15:44:41 -0700 Subject: [PATCH] IMPALA-13551: Produce the shell tarball by pip installing impala-shell Currently, the shell tarball maintains its own packaging code and directory layout. This is very complicated and currently has several Python packages directly checked into our repository. To simplify it, this changes the shell tarball to be based on pip installing the pypi package. Specifically, the new directory structure for an unpacked shell tarball is: impala-shell-4.5.0-SNAPSHOT/ impala-shell install_py${PYTHON_VERSION}/ install_py${ANOTHER_PYTHON_VERSION}/ For example, install_py2.7 is the Python 2.7 pip install of impala-shell. install_py3.8 is a Python 3.8 pip install of impala-shell. This means that the impala-shell script simply picks the install for the specified version of python and uses that pip install directory. To make this more consistent across different Linux distributions, this upgrades pip in the virtualenv to the latest. With this, ext-py and pkg_resources.py can be removed. This requires rearranging the shell build code. Specifically, this splits out the code that generates impala_build_version.py so that it can run before generating the pypi package. The shell tarball now has a dependency on the pypi package and must run after it. This builds on Michael Smith's work from IMPALA-11399. 
Testing: - Ran shell tests locally - Built on Centos 7, Redhat 8 & 9, Ubuntu 20 & 22, SLES 15 Change-Id: Ifbb66ab2c5bc7180221f98d9bf5e38d62f4ac036 Reviewed-on: http://gerrit.cloudera.org:8080/20171 Reviewed-by: Impala Public Jenkins Tested-by: Impala Public Jenkins --- LICENSE.txt | 89 - bin/impala-config.sh | 2 +- bin/rat_exclude_files.txt | 8 - shell/.gitignore | 42 +- shell/CMakeLists.txt | 26 +- shell/ext-py/bitarray-2.3.0/CHANGE_LOG | 569 -- shell/ext-py/bitarray-2.3.0/LICENSE | 46 - shell/ext-py/bitarray-2.3.0/README.rst | 836 --- .../bitarray-2.3.0/bitarray/__init__.py | 83 - .../bitarray-2.3.0/bitarray/__init__.pyi | 126 - .../bitarray-2.3.0/bitarray/_bitarray.c | 3752 ---------- shell/ext-py/bitarray-2.3.0/bitarray/_util.c | 864 --- .../bitarray-2.3.0/bitarray/architecture.txt | 26 - .../ext-py/bitarray-2.3.0/bitarray/bitarray.h | 182 - .../ext-py/bitarray-2.3.0/bitarray/copy_n.txt | 105 - shell/ext-py/bitarray-2.3.0/bitarray/py.typed | 0 .../bitarray/pythoncapi_compat.h | 347 - .../bitarray-2.3.0/bitarray/test_bitarray.py | 4253 ----------- .../bitarray-2.3.0/bitarray/test_data.pickle | Bin 356 -> 0 bytes .../bitarray-2.3.0/bitarray/test_util.py | 1504 ---- shell/ext-py/bitarray-2.3.0/bitarray/util.py | 408 -- shell/ext-py/bitarray-2.3.0/bitarray/util.pyi | 49 - shell/ext-py/bitarray-2.3.0/contributing.md | 31 - shell/ext-py/bitarray-2.3.0/setup.py | 50 - shell/ext-py/bitarray-2.3.0/update_doc.py | 195 - shell/ext-py/kerberos-1.3.1/MANIFEST.in | 3 - shell/ext-py/kerberos-1.3.1/PKG-INFO | 138 - shell/ext-py/kerberos-1.3.1/README.md | 119 - shell/ext-py/kerberos-1.3.1/pysrc/kerberos.py | 461 -- shell/ext-py/kerberos-1.3.1/setup.cfg | 4 - shell/ext-py/kerberos-1.3.1/setup.py | 138 - shell/ext-py/kerberos-1.3.1/src/base64.c | 133 - shell/ext-py/kerberos-1.3.1/src/base64.h | 20 - shell/ext-py/kerberos-1.3.1/src/kerberos.c | 935 --- .../ext-py/kerberos-1.3.1/src/kerberosbasic.c | 171 - .../ext-py/kerberos-1.3.1/src/kerberosbasic.h | 26 - 
shell/ext-py/kerberos-1.3.1/src/kerberosgss.c | 1007 --- shell/ext-py/kerberos-1.3.1/src/kerberosgss.h | 91 - shell/ext-py/kerberos-1.3.1/src/kerberospw.c | 172 - shell/ext-py/kerberos-1.3.1/src/kerberospw.h | 26 - shell/ext-py/prettytable-0.7.2/CHANGELOG | 142 - shell/ext-py/prettytable-0.7.2/COPYING | 30 - shell/ext-py/prettytable-0.7.2/MANIFEST.in | 4 - shell/ext-py/prettytable-0.7.2/PKG-INFO | 18 - shell/ext-py/prettytable-0.7.2/README | 498 -- shell/ext-py/prettytable-0.7.2/prettytable.py | 1475 ---- shell/ext-py/prettytable-0.7.2/setup.cfg | 5 - shell/ext-py/prettytable-0.7.2/setup.py | 25 - shell/ext-py/sasl-0.3.1/LICENSE.txt | 202 - shell/ext-py/sasl-0.3.1/MANIFEST.in | 4 - shell/ext-py/sasl-0.3.1/PKG-INFO | 12 - shell/ext-py/sasl-0.3.1/sasl/__init__.py | 15 - shell/ext-py/sasl-0.3.1/sasl/saslwrapper.cpp | 6480 ----------------- shell/ext-py/sasl-0.3.1/sasl/saslwrapper.h | 483 -- shell/ext-py/sasl-0.3.1/sasl/saslwrapper.pyx | 96 - shell/ext-py/sasl-0.3.1/setup.cfg | 4 - shell/ext-py/sasl-0.3.1/setup.py | 60 - shell/ext-py/six-1.14.0/CHANGES | 329 - shell/ext-py/six-1.14.0/CONTRIBUTORS | 42 - shell/ext-py/six-1.14.0/LICENSE | 18 - shell/ext-py/six-1.14.0/MANIFEST.in | 6 - shell/ext-py/six-1.14.0/README.rst | 29 - shell/ext-py/six-1.14.0/setup.cfg | 20 - shell/ext-py/six-1.14.0/setup.py | 58 - shell/ext-py/six-1.14.0/six.py | 980 --- shell/ext-py/six-1.14.0/test_six.py | 1041 --- shell/ext-py/six-1.14.0/tox.ini | 11 - shell/ext-py/sqlparse-0.3.1/AUTHORS | 67 - shell/ext-py/sqlparse-0.3.1/CHANGELOG | 522 -- shell/ext-py/sqlparse-0.3.1/LICENSE | 25 - shell/ext-py/sqlparse-0.3.1/MANIFEST.in | 11 - shell/ext-py/sqlparse-0.3.1/README.rst | 77 - shell/ext-py/sqlparse-0.3.1/TODO | 5 - shell/ext-py/sqlparse-0.3.1/setup.cfg | 20 - shell/ext-py/sqlparse-0.3.1/setup.py | 111 - .../sqlparse-0.3.1/sqlparse/__init__.py | 72 - .../sqlparse-0.3.1/sqlparse/__main__.py | 23 - shell/ext-py/sqlparse-0.3.1/sqlparse/cli.py | 202 - .../ext-py/sqlparse-0.3.1/sqlparse/compat.py | 45 
- .../sqlparse/engine/__init__.py | 17 - .../sqlparse/engine/filter_stack.py | 45 - .../sqlparse/engine/grouping.py | 453 -- .../sqlparse/engine/statement_splitter.py | 108 - .../sqlparse-0.3.1/sqlparse/exceptions.py | 13 - .../sqlparse/filters/__init__.py | 41 - .../sqlparse/filters/aligned_indent.py | 138 - .../sqlparse-0.3.1/sqlparse/filters/others.py | 125 - .../sqlparse-0.3.1/sqlparse/filters/output.py | 124 - .../sqlparse/filters/reindent.py | 241 - .../sqlparse/filters/right_margin.py | 50 - .../sqlparse-0.3.1/sqlparse/filters/tokens.py | 61 - .../sqlparse-0.3.1/sqlparse/formatter.py | 199 - .../sqlparse-0.3.1/sqlparse/keywords.py | 955 --- shell/ext-py/sqlparse-0.3.1/sqlparse/lexer.py | 82 - shell/ext-py/sqlparse-0.3.1/sqlparse/sql.py | 650 -- .../ext-py/sqlparse-0.3.1/sqlparse/tokens.py | 69 - shell/ext-py/sqlparse-0.3.1/sqlparse/utils.py | 123 - shell/ext-py/sqlparse-0.3.1/tests/__init__.py | 0 shell/ext-py/sqlparse-0.3.1/tests/conftest.py | 49 - .../tests/files/_Make_DirEntry.sql | 6 - .../sqlparse-0.3.1/tests/files/begintag.sql | 4 - .../sqlparse-0.3.1/tests/files/begintag_2.sql | 13 - .../tests/files/dashcomment.sql | 5 - .../tests/files/encoding_gbk.sql | 3 - .../tests/files/encoding_utf8.sql | 3 - .../sqlparse-0.3.1/tests/files/function.sql | 13 - .../tests/files/function_psql.sql | 72 - .../tests/files/function_psql2.sql | 7 - .../tests/files/function_psql3.sql | 8 - .../tests/files/function_psql4.sql | 12 - .../tests/files/huge_select.sql | 1 - .../sqlparse-0.3.1/tests/files/stream.sql | 2 - .../tests/files/test_cp1251.sql | 1 - shell/ext-py/sqlparse-0.3.1/tests/test_cli.py | 145 - .../sqlparse-0.3.1/tests/test_format.py | 709 -- .../sqlparse-0.3.1/tests/test_grouping.py | 642 -- .../sqlparse-0.3.1/tests/test_keywords.py | 14 - .../ext-py/sqlparse-0.3.1/tests/test_parse.py | 474 -- .../sqlparse-0.3.1/tests/test_regressions.py | 408 -- .../ext-py/sqlparse-0.3.1/tests/test_split.py | 151 - .../sqlparse-0.3.1/tests/test_tokenize.py | 238 - 
shell/ext-py/sqlparse-0.3.1/tox.ini | 28 - shell/ext-py/thrift-0.16.0/MANIFEST.in | 1 - shell/ext-py/thrift-0.16.0/README.md | 35 - shell/ext-py/thrift-0.16.0/setup.cfg | 13 - shell/ext-py/thrift-0.16.0/setup.py | 156 - .../src/TMultiplexedProcessor.py | 82 - shell/ext-py/thrift-0.16.0/src/TRecursive.py | 83 - shell/ext-py/thrift-0.16.0/src/TSCons.py | 36 - .../thrift-0.16.0/src/TSerialization.py | 38 - shell/ext-py/thrift-0.16.0/src/TTornado.py | 188 - shell/ext-py/thrift-0.16.0/src/Thrift.py | 193 - shell/ext-py/thrift-0.16.0/src/__init__.py | 20 - shell/ext-py/thrift-0.16.0/src/compat.py | 46 - shell/ext-py/thrift-0.16.0/src/ext/binary.cpp | 39 - shell/ext-py/thrift-0.16.0/src/ext/binary.h | 217 - .../ext-py/thrift-0.16.0/src/ext/compact.cpp | 108 - shell/ext-py/thrift-0.16.0/src/ext/compact.h | 368 - shell/ext-py/thrift-0.16.0/src/ext/endian.h | 96 - shell/ext-py/thrift-0.16.0/src/ext/module.cpp | 203 - shell/ext-py/thrift-0.16.0/src/ext/protocol.h | 96 - .../ext-py/thrift-0.16.0/src/ext/protocol.tcc | 913 --- shell/ext-py/thrift-0.16.0/src/ext/types.cpp | 113 - shell/ext-py/thrift-0.16.0/src/ext/types.h | 192 - .../thrift-0.16.0/src/protocol/TBase.py | 86 - .../src/protocol/TBinaryProtocol.py | 301 - .../src/protocol/TCompactProtocol.py | 487 -- .../src/protocol/THeaderProtocol.py | 232 - .../src/protocol/TJSONProtocol.py | 677 -- .../src/protocol/TMultiplexedProtocol.py | 39 - .../thrift-0.16.0/src/protocol/TProtocol.py | 428 -- .../src/protocol/TProtocolDecorator.py | 26 - .../thrift-0.16.0/src/protocol/__init__.py | 21 - .../thrift-0.16.0/src/server/THttpServer.py | 131 - .../src/server/TNonblockingServer.py | 370 - .../src/server/TProcessPoolServer.py | 128 - .../thrift-0.16.0/src/server/TServer.py | 323 - .../thrift-0.16.0/src/server/__init__.py | 20 - .../src/transport/THeaderTransport.py | 352 - .../src/transport/THttpClient.py | 191 - .../thrift-0.16.0/src/transport/TSSLSocket.py | 408 -- .../thrift-0.16.0/src/transport/TSocket.py | 239 - 
.../thrift-0.16.0/src/transport/TTransport.py | 459 -- .../thrift-0.16.0/src/transport/TTwisted.py | 329 - .../src/transport/TZlibTransport.py | 248 - .../thrift-0.16.0/src/transport/__init__.py | 20 - .../thrift-0.16.0/src/transport/sslcompat.py | 100 - .../ext-py/thrift-0.16.0/test/test_socket.py | 57 - .../thrift-0.16.0/test/test_sslsocket.py | 353 - shell/ext-py/thrift_sasl-0.4.3/CHANGELOG.md | 34 - shell/ext-py/thrift_sasl-0.4.3/LICENSE | 202 - shell/ext-py/thrift_sasl-0.4.3/README.md | 4 - shell/ext-py/thrift_sasl-0.4.3/setup.py | 52 - .../thrift_sasl-0.4.3/thrift_sasl/__init__.py | 230 - shell/gen_impala_build_version.sh | 75 + shell/impala-shell | 34 +- shell/legacy/pkg_resources.py | 2698 ------- shell/make_shell_tarball.sh | 159 +- shell/packaging/make_python_package.sh | 4 +- 179 files changed, 137 insertions(+), 49422 deletions(-) delete mode 100644 shell/ext-py/bitarray-2.3.0/CHANGE_LOG delete mode 100644 shell/ext-py/bitarray-2.3.0/LICENSE delete mode 100644 shell/ext-py/bitarray-2.3.0/README.rst delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/__init__.py delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/__init__.pyi delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/_bitarray.c delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/_util.c delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/architecture.txt delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/bitarray.h delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/copy_n.txt delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/py.typed delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/pythoncapi_compat.h delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/test_bitarray.py delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/test_data.pickle delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/test_util.py delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/util.py delete mode 100644 shell/ext-py/bitarray-2.3.0/bitarray/util.pyi delete mode 
100644 shell/ext-py/bitarray-2.3.0/contributing.md delete mode 100644 shell/ext-py/bitarray-2.3.0/setup.py delete mode 100644 shell/ext-py/bitarray-2.3.0/update_doc.py delete mode 100644 shell/ext-py/kerberos-1.3.1/MANIFEST.in delete mode 100644 shell/ext-py/kerberos-1.3.1/PKG-INFO delete mode 100644 shell/ext-py/kerberos-1.3.1/README.md delete mode 100644 shell/ext-py/kerberos-1.3.1/pysrc/kerberos.py delete mode 100644 shell/ext-py/kerberos-1.3.1/setup.cfg delete mode 100644 shell/ext-py/kerberos-1.3.1/setup.py delete mode 100644 shell/ext-py/kerberos-1.3.1/src/base64.c delete mode 100644 shell/ext-py/kerberos-1.3.1/src/base64.h delete mode 100644 shell/ext-py/kerberos-1.3.1/src/kerberos.c delete mode 100644 shell/ext-py/kerberos-1.3.1/src/kerberosbasic.c delete mode 100644 shell/ext-py/kerberos-1.3.1/src/kerberosbasic.h delete mode 100644 shell/ext-py/kerberos-1.3.1/src/kerberosgss.c delete mode 100644 shell/ext-py/kerberos-1.3.1/src/kerberosgss.h delete mode 100644 shell/ext-py/kerberos-1.3.1/src/kerberospw.c delete mode 100644 shell/ext-py/kerberos-1.3.1/src/kerberospw.h delete mode 100644 shell/ext-py/prettytable-0.7.2/CHANGELOG delete mode 100644 shell/ext-py/prettytable-0.7.2/COPYING delete mode 100644 shell/ext-py/prettytable-0.7.2/MANIFEST.in delete mode 100644 shell/ext-py/prettytable-0.7.2/PKG-INFO delete mode 100644 shell/ext-py/prettytable-0.7.2/README delete mode 100644 shell/ext-py/prettytable-0.7.2/prettytable.py delete mode 100644 shell/ext-py/prettytable-0.7.2/setup.cfg delete mode 100644 shell/ext-py/prettytable-0.7.2/setup.py delete mode 100644 shell/ext-py/sasl-0.3.1/LICENSE.txt delete mode 100644 shell/ext-py/sasl-0.3.1/MANIFEST.in delete mode 100644 shell/ext-py/sasl-0.3.1/PKG-INFO delete mode 100644 shell/ext-py/sasl-0.3.1/sasl/__init__.py delete mode 100644 shell/ext-py/sasl-0.3.1/sasl/saslwrapper.cpp delete mode 100644 shell/ext-py/sasl-0.3.1/sasl/saslwrapper.h delete mode 100644 shell/ext-py/sasl-0.3.1/sasl/saslwrapper.pyx delete mode 
100644 shell/ext-py/sasl-0.3.1/setup.cfg delete mode 100644 shell/ext-py/sasl-0.3.1/setup.py delete mode 100644 shell/ext-py/six-1.14.0/CHANGES delete mode 100644 shell/ext-py/six-1.14.0/CONTRIBUTORS delete mode 100644 shell/ext-py/six-1.14.0/LICENSE delete mode 100644 shell/ext-py/six-1.14.0/MANIFEST.in delete mode 100644 shell/ext-py/six-1.14.0/README.rst delete mode 100644 shell/ext-py/six-1.14.0/setup.cfg delete mode 100644 shell/ext-py/six-1.14.0/setup.py delete mode 100644 shell/ext-py/six-1.14.0/six.py delete mode 100644 shell/ext-py/six-1.14.0/test_six.py delete mode 100644 shell/ext-py/six-1.14.0/tox.ini delete mode 100644 shell/ext-py/sqlparse-0.3.1/AUTHORS delete mode 100644 shell/ext-py/sqlparse-0.3.1/CHANGELOG delete mode 100644 shell/ext-py/sqlparse-0.3.1/LICENSE delete mode 100644 shell/ext-py/sqlparse-0.3.1/MANIFEST.in delete mode 100644 shell/ext-py/sqlparse-0.3.1/README.rst delete mode 100644 shell/ext-py/sqlparse-0.3.1/TODO delete mode 100644 shell/ext-py/sqlparse-0.3.1/setup.cfg delete mode 100644 shell/ext-py/sqlparse-0.3.1/setup.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/__init__.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/__main__.py delete mode 100755 shell/ext-py/sqlparse-0.3.1/sqlparse/cli.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/compat.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/engine/__init__.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/engine/filter_stack.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/engine/grouping.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/engine/statement_splitter.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/exceptions.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/filters/__init__.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/filters/aligned_indent.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/filters/others.py delete mode 100644 
shell/ext-py/sqlparse-0.3.1/sqlparse/filters/output.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/filters/reindent.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/filters/right_margin.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/filters/tokens.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/formatter.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/keywords.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/lexer.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/sql.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/tokens.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/sqlparse/utils.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/__init__.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/conftest.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/_Make_DirEntry.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/begintag.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/begintag_2.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/dashcomment.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/encoding_gbk.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/encoding_utf8.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/function.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/function_psql.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/function_psql2.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/function_psql3.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/function_psql4.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/huge_select.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/stream.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/files/test_cp1251.sql delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/test_cli.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/test_format.py 
delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/test_grouping.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/test_keywords.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/test_parse.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/test_regressions.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/test_split.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/tests/test_tokenize.py delete mode 100644 shell/ext-py/sqlparse-0.3.1/tox.ini delete mode 100644 shell/ext-py/thrift-0.16.0/MANIFEST.in delete mode 100644 shell/ext-py/thrift-0.16.0/README.md delete mode 100644 shell/ext-py/thrift-0.16.0/setup.cfg delete mode 100644 shell/ext-py/thrift-0.16.0/setup.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/TMultiplexedProcessor.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/TRecursive.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/TSCons.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/TSerialization.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/TTornado.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/Thrift.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/__init__.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/compat.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/ext/binary.cpp delete mode 100644 shell/ext-py/thrift-0.16.0/src/ext/binary.h delete mode 100644 shell/ext-py/thrift-0.16.0/src/ext/compact.cpp delete mode 100644 shell/ext-py/thrift-0.16.0/src/ext/compact.h delete mode 100644 shell/ext-py/thrift-0.16.0/src/ext/endian.h delete mode 100644 shell/ext-py/thrift-0.16.0/src/ext/module.cpp delete mode 100644 shell/ext-py/thrift-0.16.0/src/ext/protocol.h delete mode 100644 shell/ext-py/thrift-0.16.0/src/ext/protocol.tcc delete mode 100644 shell/ext-py/thrift-0.16.0/src/ext/types.cpp delete mode 100644 shell/ext-py/thrift-0.16.0/src/ext/types.h delete mode 100644 shell/ext-py/thrift-0.16.0/src/protocol/TBase.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/protocol/TBinaryProtocol.py 
delete mode 100644 shell/ext-py/thrift-0.16.0/src/protocol/TCompactProtocol.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/protocol/THeaderProtocol.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/protocol/TJSONProtocol.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/protocol/TMultiplexedProtocol.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/protocol/TProtocol.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/protocol/TProtocolDecorator.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/protocol/__init__.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/server/THttpServer.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/server/TNonblockingServer.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/server/TProcessPoolServer.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/server/TServer.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/server/__init__.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/transport/THeaderTransport.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/transport/THttpClient.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/transport/TSSLSocket.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/transport/TSocket.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/transport/TTransport.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/transport/TTwisted.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/transport/TZlibTransport.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/transport/__init__.py delete mode 100644 shell/ext-py/thrift-0.16.0/src/transport/sslcompat.py delete mode 100644 shell/ext-py/thrift-0.16.0/test/test_socket.py delete mode 100644 shell/ext-py/thrift-0.16.0/test/test_sslsocket.py delete mode 100644 shell/ext-py/thrift_sasl-0.4.3/CHANGELOG.md delete mode 100644 shell/ext-py/thrift_sasl-0.4.3/LICENSE delete mode 100644 shell/ext-py/thrift_sasl-0.4.3/README.md delete mode 100644 shell/ext-py/thrift_sasl-0.4.3/setup.py delete mode 100644 
shell/ext-py/thrift_sasl-0.4.3/thrift_sasl/__init__.py create mode 100755 shell/gen_impala_build_version.sh delete mode 100644 shell/legacy/pkg_resources.py diff --git a/LICENSE.txt b/LICENSE.txt index 9aa09f0a6..c635ed9d7 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -398,7 +398,6 @@ www/DataTables* and www/datatables*: MIT license Parts of be/src/runtime/string-search.h: Python Software License V2 Parts of shell/impala_shell.py: Python Software License V2 -shell/ext-py/bitarray*: Python Software License V2 Copyright (c) 2001 - 2016 Python Software Foundation; All Rights Reserved @@ -556,92 +555,6 @@ tests/comparison/leopard/static/js/highlight.pack.js: 3-clause BSD -------------------------------------------------------------------------------- -shell/ext-py/prettytable-0.7.1: 3-clause BSD - - Copyright (c) 2009-2013 Luke Maurits - All rights reserved. - With contributions from: - * Chris Clark - * Christoph Robbert - * Klein Stephane - * "maartendb" - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - * The name of the author may not be used to endorse or promote products - derived from this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY - EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL - THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT - OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - --------------------------------------------------------------------------------- - -shell/ext-py/six-1.14.0: MIT license - -Copyright (c) 2010-2020 Benjamin Peterson - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - --------------------------------------------------------------------------------- - -shell/ext-py/sqlparse-0.3.1: 3-clause BSD - -Copyright (c) 2016, Andi Albrecht -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - * Neither the name of the authors nor the names of its contributors may be - used to endorse or promote products derived from this software without - specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - --------------------------------------------------------------------------------- - be/src/thirdparty/llvm: LLVM Release License University of Illinois/NCSA @@ -715,8 +628,6 @@ THE SOFTWARE. 
be/src/thirdparty/mustache: Apache 2.0 license be/src/thirdparty/pcg-cpp-0.98: Apache 2.0 license be/src/expr/hll-bias.h: Apache 2.0 license -shell/ext-py/sasl-0.3.1: Apache 2.0 license -shell/ext-py/thrift_sasl-0.4.2: Apache 2.0 license docs/css/dita-ot-doc.css: Apache 2.0 license docs/shared/header.xml: Apache 2.0 license diff --git a/bin/impala-config.sh b/bin/impala-config.sh index 300cd1514..7acc4d734 100755 --- a/bin/impala-config.sh +++ b/bin/impala-config.sh @@ -433,7 +433,7 @@ fi # the generated Python code. The code that uses the generated Python code gets # the corresponding Thrift runtime library by pip installing thrift (and does not # respect this version). If upgrading IMPALA_THRIFT_PY_VERSION, also upgrade the -# thrift version in shell/ext-py, shell/packaging/requirements.txt, and +# thrift version in shell/packaging/requirements.txt and # infra/python/deps/requirements.txt. export IMPALA_THRIFT_CPP_VERSION=0.16.0-p7 unset IMPALA_THRIFT_CPP_URL diff --git a/bin/rat_exclude_files.txt b/bin/rat_exclude_files.txt index 45effbf96..d8230de6a 100644 --- a/bin/rat_exclude_files.txt +++ b/bin/rat_exclude_files.txt @@ -54,14 +54,6 @@ www/pako.min.js tests/comparison/leopard/static/css/bootstrap* tests/comparison/leopard/static/fonts/glyphicons-halflings* tests/comparison/leopard/static/js/bootstrap* -shell/ext-py/bitarray-2.3.0/* -shell/ext-py/kerberos-1.3.1/* -shell/ext-py/prettytable-0.7.2/* -shell/ext-py/sasl-0.3.1/* -shell/ext-py/six-1.14.0/* -shell/ext-py/sqlparse-0.3.1/* -shell/ext-py/thrift-0.16.0/* -shell/ext-py/thrift_sasl-0.4.3/* tests/comparison/leopard/static/css/hljs.css tests/comparison/leopard/static/js/highlight.pack.js common/protobuf/kudu diff --git a/shell/.gitignore b/shell/.gitignore index 40ec1a890..a76805e64 100644 --- a/shell/.gitignore +++ b/shell/.gitignore @@ -1,42 +1,6 @@ /build/ /gen-py/ -ext-py/bitarray-2.3.0/bitarray.egg-info/ -ext-py/bitarray-2.3.0/dist/ -ext-py/bitarray-2.3.0/build/ -ext-py/kerberos-1.3.1/dist/ 
-ext-py/kerberos-1.3.1/build/ -ext-py/kerberos-1.3.1/kerberos.egg-info/ -ext-py/prettytable-0.7.2/dist/ -ext-py/prettytable-0.7.2/build/ -ext-py/prettytable-0.7.2/prettytable.egg-info -ext-py/sasl-0.3.1/dist/ -ext-py/sasl-0.3.1/build/ -ext-py/sasl-0.3.1/sasl.egg-info/ -ext-py/six-1.14.0/dist/ -ext-py/six-1.14.0/build/ -ext-py/six-1.14.0/six.egg-info/ -ext-py/sqlparse-0.3.1/dist/ -ext-py/sqlparse-0.3.1/build/ -ext-py/sqlparse-0.3.1/sqlparse.egg-info/ -ext-py/thrift-0.16.0/dist/ -ext-py/thrift-0.16.0/build/ -ext-py/thrift-0.16.0/thrift.egg-info/ -ext-py/thrift_sasl-0.4.3/dist/ -ext-py/thrift_sasl-0.4.3/build/ -ext-py/thrift_sasl-0.4.3/thrift_sasl.egg-info/ +/ext-py/ -# This file is used by buildall.sh to find files that need to be removed during the -# clean phase. Previous version of deps should be kept here for cleaning otherwise they -# may cause a build failure. Also the ignore path must be changed to the root folder -# so "git clean -Xdf" will work. -ext-py/bitarray-0.9.0/ -ext-py/bitarray-1.2.1/ -ext-py/prettytable-0.7.1/ -ext-py/sasl-0.1.1/ -ext-py/sasl-0.2.1/ -ext-py/six-1.11.0/ -ext-py/sqlparse-0.1.7/ -ext-py/sqlparse-0.1.19/ -ext-py/thrift-0.14.2/ -ext-py/thrift_sasl-0.4.1/ -ext-py/thrift_sasl-0.4.2/ +# Ignore the build version python file +impala_shell/impala_build_version.py \ No newline at end of file diff --git a/shell/CMakeLists.txt b/shell/CMakeLists.txt index 513400e5a..028db016d 100644 --- a/shell/CMakeLists.txt +++ b/shell/CMakeLists.txt @@ -56,20 +56,26 @@ foreach(PYTHON_EXE IN LISTS PYTHON_EXES) set(PIP_CACHE "${PIP_LOC}/${PYTHON_NAME}") # Supports fallback to impala-virtualenv for older Python versions. 
- add_custom_command(OUTPUT "${VENV}" DEPENDS impala_python + # This upgrades pip in the virtualenv to make the behavior more consistent across + # different distributions + add_custom_target(${PYTHON_NAME}_venv + BYPRODUCTS "${VENV}" + DEPENDS impala_python COMMAND "${CMAKE_SOURCE_DIR}/bin/cmake_aux/create_virtualenv.sh" "${PYTHON_EXE}" "${VENV}" + COMMAND "${VENV}/bin/pip" install --cache-dir "${PIP_CACHE}" --upgrade pip COMMAND "${VENV}/bin/pip" install --cache-dir "${PIP_CACHE}" wheel ) - list(APPEND IMPALA_PYTHON_BUILD_VENVS "${VENV}") + list(APPEND IMPALA_PYTHON_BUILD_VENVS ${PYTHON_NAME}_venv) endforeach() -add_custom_target(shell_tarball DEPENDS gen-deps "${IMPALA_PYTHON_BUILD_VENVS}" - COMMAND "${CMAKE_SOURCE_DIR}/shell/make_shell_tarball.sh" ${PYTHON_EXES} +add_custom_target(shell_impala_build_version + DEPENDS gen-deps "${CMAKE_SOURCE_DIR}/bin/version.info" + COMMAND "${CMAKE_SOURCE_DIR}/shell/gen_impala_build_version.sh" ) -add_custom_target(shell_pypi_package DEPENDS shell_tarball impala_python +add_custom_target(shell_pypi_package DEPENDS gen-deps shell_impala_build_version impala_python COMMAND "${CMAKE_SOURCE_DIR}/shell/packaging/make_python_package.sh" ) @@ -82,18 +88,22 @@ set(SHELL_TEST_PKG "${CMAKE_SOURCE_DIR}/shell/build/dist/impala_shell-${PKG_VERSION}.tar.gz") get_filename_component(SHELL_TEST_PKG_DIR "${SHELL_TEST_PKG}" DIRECTORY) # Generates SHELL_TEST_PKG -add_custom_target(shell_pypi_test_package DEPENDS shell_tarball impala_python +add_custom_target(shell_pypi_test_package DEPENDS gen-deps shell_impala_build_version impala_python COMMAND env BUILD_VERSION=${PKG_VERSION} OFFICIAL=true DIST_DIR="${SHELL_TEST_PKG_DIR}" "${CMAKE_SOURCE_DIR}/shell/packaging/make_python_package.sh" ) # Tests expect to find venvs at 'python2_venv' and 'python3_venv' in tests/shell/util.py. 
set(PYTHON2_VENV "${VENV_LOC}/python2_venv") -add_custom_target(shell_python2_install DEPENDS "${PYTHON2_VENV}" shell_pypi_test_package +add_custom_target(shell_python2_install DEPENDS python2_venv shell_pypi_test_package COMMAND "${PYTHON2_VENV}/bin/pip" install --cache-dir "${PIP_LOC}/python2" "${SHELL_TEST_PKG}" ) set(PYTHON3_VENV "${VENV_LOC}/python3_venv") -add_custom_target(shell_python3_install DEPENDS "${PYTHON3_VENV}" shell_pypi_test_package +add_custom_target(shell_python3_install DEPENDS python3_venv shell_pypi_test_package COMMAND "${PYTHON3_VENV}/bin/pip" install --cache-dir "${PIP_LOC}/python3" "${SHELL_TEST_PKG}" ) + +add_custom_target(shell_tarball DEPENDS gen-deps shell_pypi_test_package "${IMPALA_PYTHON_BUILD_VENVS}" + COMMAND "${CMAKE_SOURCE_DIR}/shell/make_shell_tarball.sh" "${SHELL_TEST_PKG}" ${PYTHON_EXES} +) diff --git a/shell/ext-py/bitarray-2.3.0/CHANGE_LOG b/shell/ext-py/bitarray-2.3.0/CHANGE_LOG deleted file mode 100644 index efbf3ebdc..000000000 --- a/shell/ext-py/bitarray-2.3.0/CHANGE_LOG +++ /dev/null @@ -1,569 +0,0 @@ -2021-08-15 2.3.0: -------------------- - * add optional `buffer` argument to `bitarray()` to import the buffer of - another object, #141, #146, see also: [buffer protocol](buffer.rst) - * update `.buffer_info()` to include: a read-only flag, an imported buffer - flag, and the number of buffer exports - * add optional start and stop arguments to `util.rindex()` - * add [memory-mapped file](../examples/mmapped-file.py) example - * ignore underscore (`_`) in string input, e.g. 
`bitarray('1100_0111')` - * add missing type hinting for new `.bytereverse()` arguments - * fix `.extend()` type annotations, #145 - * avoid `.reverse()` using temporary memory - * make `.unpack()`, `util.serialize()`, `util.vl_encode()` - and `.__reduce__()` more memory efficient - * add and improve tests - - -2021-08-07 2.2.5: -------------------- - * speedup `find_bit()` and `find_last()` using uint64 checking, this means - a speedup for `.find()`, `.index()`, `.search()` and `util.rindex()` - * add optional start and stop arguments to `.bytereverse()` - * add example to illustrate how - [unaligned copying](../examples/copy_n.py) works internally - * add documentation - * add tests - - -2021-07-29 2.2.4: -------------------- - * use shift operations to speedup all unaligned copy operations, #142 - * expose functionality to Python level only in debug mode for testing - * add and improve tests - - -2021-07-22 2.2.3: -------------------- - * speedup `repeat()`, #136 - * speedup shift operations, #139 - * optimize slice assignment with negative step, e.g.: `a[::-1] = 1` - * add tests - - -2021-07-16 2.2.2: -------------------- - * speedup slice assignment, see #132 and #135 - * speedup bitwise operations, #133 - * optimize `getbit()` and `setbit()` in `bitarray.h` - * fix TypeError messages when bitarray or int (0, 1) are expected (bool - is a subclass of int) - * add and improve tests - - -2021-07-06 2.2.1: -------------------- - * improve documentation - * speedup `vl_encode()` - * `bitarray.h`: make `getbit()` always an (inline) function - * add assertions in C code - - -2021-07-03 2.2.0: -------------------- - * add `bitarray.util.vl_encode()` and `bitarray.util.vl_decode()` which - uses a [variable length bitarray format](variable_length.rst), #131 - - -2021-06-15 2.1.3: -------------------- - * Fix building with MSVC / Bullseye, #129 - - -2021-06-13 2.1.2: -------------------- - * support type hinting for all Python 3 versions (that bitarray supports, - 3.5 
and higher currently), fixed #128 - * add explicit endianness to two tests, fixes #127 - - -2021-06-11 2.1.1: -------------------- - * add type hinting (see PEP 484, 561) using stub (`.pyi`) files - * add tests - - -2021-05-05 2.1.0: -------------------- - * add `.find()` method, see #122 - * `.find()`, `.index()`, `.search()` and `.itersearch()` now all except - both (sub-) bitarray as well as bool items to be searched for - * improve encode/decode error messages - * add [lexicographical permutations example](../examples/lexico.py) - * add tests - - -2021-04-19 2.0.1: -------------------- - * update documentation - * improve some error messages - - -2021-04-14 2.0.0: -------------------- - * require more specific objects, int (0 or 1) or bool, see #119 - * items are always returned as int 0 or 1, #119 - * remove `.length()` method (deprecated since 1.5.1 - use `len()`) - * in `.unpack()` the `one` argument now defaults to 0x01 (was 0xff) - * `.tolist()` now always returns a list of integers (0 or 1) - * fix frozenbitarray hash function, see #121 - * fix frozenbitarray being mutable by `<<=` and `>>=` - * support sequence protocol in `.extend()` (and bitarray creation) - * improve OverflowError messages from `util.int2ba()` - * add [hexadecimal example](../examples/hexadecimal.py) - - -2021-04-10 1.9.2: -------------------- - * update pythoncapi_compat: Fix support with PyPy 3.7, #120 - * update readme - - -2021-04-05 1.9.1: -------------------- - * switch documentation from markdown to reStructuredText - * add tests - - -2021-04-03 1.9.0: -------------------- - * add shift operations (`<<`, `>>`, `<<=`, `>>=`), see #117 - * add `bitarray.util.ba2base()` and `bitarray.util.base2ba()`, - see last paragraph in [Bitarray representations](represent.rst) - * documentation and tests - - -2021-03-31 1.8.2: -------------------- - * fix crash caused by unsupported types in binary operations, #116 - * speedup initializing or extending a bitarray from another with different - 
bit endianness - * add formatting options to `bitarray.util.pprint()` - * add documentation on [bitarray representations](represent.rst) - * add and improve tests (all 291 tests run in less than half a second on - a modern machine) - - -2021-03-25 1.8.1: -------------------- - * moved implementation of and `hex2ba()` and `ba2hex()` to C-level - * add `bitarray.util.parity()` - - -2021-03-21 1.8.0: -------------------- - * add `bitarray.util.serialize()` and `bitarray.util.deserialize()` - * allow whitespace (ignore space and `\n\r\t\v`) in input strings, - e.g. `bitarray('01 11')` or `a += '10 00'` - * add `bitarray.util.pprint()` - * When initializing a bitarray from another with different bit endianness, - e.g. `a = bitarray('110', 'little')` and `b = bitarray(a, 'big')`, - the buffer used to be simply copied, with consequence that `a == b` would - result in `False`. This is fixed now, that is `a == b` will always - evaluate to `True`. - * add test for loading existing pickle file (created using bitarray 1.5.0) - * add example showing how to [jsonize bitarrays](../examples/extend_json.py) - * add tests - - -2021-03-12 1.7.1: -------------------- - * fix issue #114, raise TypeError when incorrect index is used during - assignment, e.g. `a[1.5] = 1` - * raise TypeError (not IndexError) when assigning slice to incorrect type, - e.g. `a[1:4] = 1.2` - * improve some docstrings and tests - - -2021-02-27 1.7.0: -------------------- - * add `bitarray.util.urandom()` - * raise TypeError when trying to extend bitarrays from bytes on Python 3, - ie. `bitarray(b'011')` and `.extend(b'110')`. (Deprecated since 1.4.1) - - -2021-01-20 1.6.3: -------------------- - * add missing .h files to sdist tarball, #113 - - -2021-01-20 1.6.2: -------------------- - * use `Py_SET_TYPE()` and `Py_SET_SIZE()` for Python 3.10, #109 - * add official Python 3.10 support - * fix slice assignment to same object, - e.g. 
`a[2::] = a` or `a[::-1] = a`, #112 - * add bitarray.h, #110 - - -2020-11-05 1.6.1: -------------------- - * use PyType_Ready for all types: bitarray, bitarrayiterator, - decodeiterator, decodetree, searchiterator - - -2020-10-17 1.6.0: -------------------- - * add `decodetree` object, for speeding up consecutive calls - to `.decode()` and `.iterdecode()`, in particular when dealing - with large prefix codes, see #103 - * add optional parameter to `.tolist()` which changes the items in the - returned list to integers (0 or 1), as opposed to Booleans - * remove deprecated `bitdiff()`, which has been deprecated since version - 1.2.0, use `bitarray.util.count_xor()` instead - * drop Python 2.6 support - * update license file, #104 - - -2020-08-24 1.5.3: -------------------- - * add optional index parameter to `.index()` to invert single bit - * fix `sys.getsizeof(bitarray)` by adding `.__sizeof__()`, see issue #100 - - -2020-08-16 1.5.2: -------------------- - * add PyType_Ready usage, issue #66 - * speedup search() for bitarrays with length 1 in sparse bitarrays, - see issue #67 - * add tests - - -2020-08-10 1.5.1: -------------------- - * support signed integers in `util.ba2int()` and `util.int2ba()`, - see issue #85 - * deprecate `.length()` in favor of `len()` - - -2020-08-05 1.5.0: -------------------- - * Use `Py_ssize_t` for bitarray index. This means that on 32bit - systems, the maximum number of elements in a bitarray is 2 GBits. - We used to have a special 64bit index type for all architectures, but - this prevented us from using Python's sequence, mapping and number - methods, and made those method lookups slow. - * speedup slice operations when step size = 1 (if alignment allows - copying whole bytes) - * Require equal endianness for operations: `&`, `|`, `^`, `&=`, `|=`, `^=`. - This should have always been the case but was overlooked in the past. 
- * raise TypeError when trying to create bitarray from boolean - * This will be last release to still support Python 2.6 (which was retired - in 2013). We do NOT plan to stop support for Python 2.7 anytime soon. - - -2020-07-15 1.4.2: -------------------- - * add more tests - * C-level: - - simplify pack/unpack code - - fix memory leak in `~` operation (bitarray_cpinvert) - - -2020-07-14 1.4.1: -------------------- - * add official Python 3.9 support - * improve many docstrings - * add DeprecationWarning for `bitdiff()` - * add DeprecationWarning when trying to extend bitarrays - from bytes on Python 3 (`bitarray(b'011')` and `.extend(b'110')`) - * C-level: - - Rewrote `.fromfile()` and `.tofile()` implementation, - such that now the same code is used for Python 2 and 3. - The new implementation is more memory efficient on - Python 3. - - use `memcmp()` in `richcompare()` to shortcut EQ/NE, when - comparing two very large bitarrays for equality the - speedup can easily be 100x - - simplify how unpacking is handled - * add more tests - - -2020-07-11 1.4.0: -------------------- - * add `.clear()` method (Python 3.3 added this method to lists) - * avoid over-allocation when bitarray objects are initially created - * raise BufferError when resizing bitarrays which is exporting buffers - * add example to study the resize() function - * improve some error messages - * add more tests - * raise `NotImplementedError` with (useful message) when trying to call - the `.fromstring()` or `.tostring()` methods, which have been removed - in the last release - - -2020-07-06 1.3.0: -------------------- - * add `bitarray.util.make_endian()` - * `util.ba2hex()` and `util.hex2ba()` now also support little-endian - * add `bitarray.get_default_endian()` - * made first argument of initializer a positional-only parameter - * remove `.fromstring()` and `.tostring()` methods, these have been - deprecated 8 years ago, since version 0.4.0 - * add `__all__` in `bitarray/__init__.py` - * drop 
Python 3.3 and 3.4 support - - -2020-05-18 1.2.2: -------------------- - * `util.ba2hex()` now always return a string object (instead of bytes - object for Python 3), see issue #94 - * `util.hex2ba` allows a unicode object as input on Python 2 - * Determine 64-bitness of interpreter in a cross-platform fashion #91, - in order to better support PyPy - - -2020-01-06 1.2.1: -------------------- - * simplify markdown of readme so PyPI renders better - * make tests for bitarray.util required (instead of warning when - they cannot be imported) - - -2019-12-06 1.2.0: -------------------- - * add bitarray.util module which provides useful utility functions - * deprecate `bitarray.bitdiff()` in favor of `bitarray.util.count_xor` - * use markdown for documentation - * fix bug in `.count()` on 32bit systems in special cases when array size - is 2^29 bits or larger - * simplified tests by using bytes syntax - * update smallints and sieve example to use new utility module - * simplified mandel example to use numba - * use file context managers in tests - - -2019-11-07 1.1.0: -------------------- - * add frozenbitarray object - * add optional start and stop arguments to `.count()` method - * add official Python 3.8 support - * optimize `setrange()` C-function by using `memset()` - * fix issue #74, bitarray is hashable on Python 2 - * fix issue #68, `unittest.TestCase.assert_` deprecated - * improved test suite - tests should run in about 1 second - * update documentation to use positional-only syntax in docstrings - * update readme to pass Python 3 doctest - * add utils module to examples - - -2019-07-19 1.0.1: -------------------- - * fix readme to pass `twine check` - - -2019-07-15 1.0.0: -------------------- - * fix bitarrays beings created from unicode in Python 2 - * use `PyBytes_*` in C code, treating the Py3k function names as default, - which also removes all redefinitions of `PyString_*` - * handle negative arguments of .index() method consistently with how - they are 
treated for lists - * add a few more comments to the C code - * move imports outside tests: pickle, io, etc. - * drop Python 2.5 support - - -2019-05-20 0.9.3: -------------------- - * refactor resize() - only shrink allocated memory if new size falls - lower than half the allocated size - * improve error message when trying to initialize from float or complex - - -2019-04-29 0.9.2: -------------------- - * fix to compile on Windows with VS 2015, issue #72 - - -2019-04-28 0.9.1: -------------------- - * fix types to actually be types, #29 - * check for ambiguous prefix codes when building binary tree for decoding - * remove Python level methods: encode, decode, iterdecode (in favor of - having these implemented on the C-level along with check_codedict) - * fix self tests for Python 2.5 and 2.6 - * move all Huffman code related example code into examples/huffman - * add code to generate graphviz .dot file of Huffman tree to examples - - -2019-04-22 0.9.0: -------------------- - * more efficient decode and iterdecode by using C-level binary tree - instead of a python one, #54 - * added buffer protocol support for Python 3, #55 - * fixed invalid pointer exceptions in pypy, #47 - * made all examples Py3k compatible - * add gene sequence example - * add official Python 3.7 support - * drop Python 2.4, 3.1 and 3.2 support - - -2018-07-06 0.8.3: -------------------- - * add exception to setup.py when README.rst cannot be opened - - -2018-05-30 0.8.2: -------------------- - * add official Python 3.6 support (although it was already working) - * fix description of `fill()`, #52 - * handle extending self correctly, #28 - * copy_n: fast copy with memmove fixed, #43 - * minor clarity/wording changes to README, #23 - - -2013-03-30 0.8.1: -------------------- - * fix issue #10, i.e. 
`int(bitarray())` segfault - * added tests for using a bitarray object as an argument to functions - like int, long (on Python 2), float, list, tuple, dict - - -2012-04-04 0.8.0: -------------------- - * add Python 2.4 support - * add (module level) function bitdiff for calculating the difference - between two bitarrays - - -2012-02-15 0.7.0: -------------------- - * add iterdecode method (C level), which returns an iterator but is - otherwise like the decode method - * improve memory efficiency and speed of pickling large bitarray objects - - -2012-02-06 0.6.0: -------------------- - * add buffer protocol to bitarray objects (Python 2.7 only) - * allow slice assignment to 0 or 1, e.g. `a[::3] = 0` (in addition to - booleans) - * moved implementation of itersearch method to C level (Lluis Pamies) - * search, itersearch now only except bitarray objects, - whereas `__contains__` excepts either booleans or bitarrays - * use a priority queue for Huffman tree example (thanks to Ushma Bhatt) - * improve documentation - - -2012-02-02 0.5.2: -------------------- - * fixed MSVC compile error on Python 3 (thanks to Chris Gohlke) - * add missing start and stop optional parameters to index() method - * add examples/compress.py - - -2012-01-31 0.5.1: -------------------- - * update documentation to use tobytes and frombytes, rather than tostring - and fromstring (which are now deprecated) - * simplified how tests are run - - -2012-01-23 0.5.0: -------------------- - * added itersearch method - * added Bloom filter example - * minor fixes in docstrings, added more tests - - -2011-12-29 0.4.0: -------------------- - * porting to Python 3.x (Roland Puntaier) - * introduced tobytes, frombytes (tostring, fromstring are now deprecated) - * updated development status - * added sieve prime number example - * moved project to github: https://github.com/ilanschnell/bitarray - - -2009-04-06 0.3.5: -------------------- - * fixed reference counts bugs - * added possibility to slice assign 
to True or False, e.g. a[::3] = True - will set every third element to True - - -2009-01-15 0.3.4: -------------------- - * Made C code less ambiguous, such that the package compiles on - Visual Studio, with all tests passing. - - -2008-12-14 0.3.3: -------------------- - * Made changes to the C code to allow compilation with more compilers. - Compiles on Visual Studio, although there are still a few tests failing. - - -2008-10-19 0.3.2: -------------------- - * Added sequential search method. - * The special method `__contains__` now also takes advantage of the - sequential search. - - -2008-10-12 0.3.1: -------------------- - * Simplified state information for pickling. Argument for count is now - optional, defaults to True. Fixed typos. - - -2008-09-30 0.3.0: -------------------- - * Fixed a severe bug for 64-bit machines. Implemented all methods in C, - improved tests. - * Removed deprecated methods from01 and fromlist. - - -2008-09-23 0.2.5: -------------------- - * Added section in README about prefix codes. Implemented _multiply method - for faster __mul__ and __imul__. Fixed some typos. - - -2008-09-22 0.2.4: -------------------- - * Implemented encode and decode method (in C) for variable-length prefix - codes. - * Added more examples, wrote README for the examples. - * Added more tests, fixed some typos. - - -2008-09-16 0.2.3: -------------------- - * Fixed a memory leak, implemented a number of methods in C. - These include __getitem__, __setitem__, __delitem__, pop, remove, - insert. The methods implemented on the Python level is very limit now. - * Implemented bitwise operations. - - -2008-09-09 0.2.2: -------------------- - * Rewrote parts of the README - * Implemented memory efficient algorithm for the reverse method - * Fixed typos, added a few tests, more C refactoring. - - -2008-09-07 0.2.1: -------------------- - * Improved tests, in particular added checking for memory leaks. - * Refactored many things on the C level. 
- * Implemented a few more methods. - - -2008-09-02 0.2.0: -------------------- - * Added bit endianness property to the bitarray object - * Added the examples to the release package. - - -2008-08-17 0.1.0: -------------------- - * First official release; put project to - http://pypi.python.org/pypi/bitarray/ - - -May 2008: ---------- -Wrote the initial code, and put it on my personal web-site: -http://ilan.schnell-web.net/prog/ diff --git a/shell/ext-py/bitarray-2.3.0/LICENSE b/shell/ext-py/bitarray-2.3.0/LICENSE deleted file mode 100644 index a82526bf1..000000000 --- a/shell/ext-py/bitarray-2.3.0/LICENSE +++ /dev/null @@ -1,46 +0,0 @@ -PYTHON SOFTWARE FOUNDATION LICENSE ----------------------------------- - -1. This LICENSE AGREEMENT is between Ilan Schnell, and the Individual or -Organization ("Licensee") accessing and otherwise using this software -("bitarray") in source or binary form and its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, Ilan Schnell -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use bitarray -alone or in any derivative version, provided, however, that Ilan Schnell's -License Agreement and Ilan Schnell's notice of copyright, i.e., "Copyright (c) -2008 - 2021 Ilan Schnell; All Rights Reserved" are retained in bitarray -alone or in any derivative version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates bitarray or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to bitarray. - -4. Ilan Schnell is making bitarray available to Licensee on an "AS IS" -basis. ILAN SCHNELL MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, ILAN SCHNELL MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF BITARRAY WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. ILAN SCHNELL SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF BITARRAY -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING BITARRAY, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between Ilan Schnell -and Licensee. This License Agreement does not grant permission to use Ilan -Schnell trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using bitarray, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. diff --git a/shell/ext-py/bitarray-2.3.0/README.rst b/shell/ext-py/bitarray-2.3.0/README.rst deleted file mode 100644 index c3ea77dce..000000000 --- a/shell/ext-py/bitarray-2.3.0/README.rst +++ /dev/null @@ -1,836 +0,0 @@ -bitarray: efficient arrays of booleans -====================================== - -This library provides an object type which efficiently represents an array -of booleans. Bitarrays are sequence types and behave very much like usual -lists. Eight bits are represented by one byte in a contiguous block of -memory. The user can select between two representations: little-endian -and big-endian. All of the functionality is implemented in C. -Methods for accessing the machine representation are provided. -This can be useful when bit level access to binary files is required, -such as portable bitmap image files (.pbm). 
Also, when dealing with -compressed data which uses variable bit length encoding, you may find -this module useful. - - -Key features ------------- - -* The bit endianness can be specified for each bitarray object, see below. -* Sequence methods: slicing (including slice assignment and deletion), - operations ``+``, ``*``, ``+=``, ``*=``, the ``in`` operator, ``len()`` -* Fast methods for encoding and decoding variable bit length prefix codes -* Bitwise operations: ``~``, ``&``, ``|``, ``^``, ``<<``, ``>>`` (as well as - their in-place versions ``&=``, ``|=``, ``^=``, ``<<=``, ``>>=``). -* Sequential search -* Packing and unpacking to other binary data formats, e.g. ``numpy.ndarray``. -* Pickling and unpickling of bitarray objects. -* Bitarray objects support the buffer protocol (both importing and - exporting buffers) -* ``frozenbitarray`` objects which are hashable -* Extensive test suite with round 400 unittests -* Utility module ``bitarray.util``: - - * conversion to hexadecimal string - * serialization - * pretty printing - * conversion to integers - * creating Huffman codes - * various count functions - * other helpful functions - - -Installation ------------- - -If you have a working C compiler, you can simply: - -.. code-block:: shell-session - - $ pip install bitarray - -If you rather want to use precompiled binaries, you can: - -* ``conda install bitarray`` (both the default Anaconda repository as well - as conda-forge support bitarray) -* download Windows wheels from - `Chris Gohlke `__ - -Once you have installed the package, you may want to test it: - -.. 
code-block:: shell-session - - $ python -c 'import bitarray; bitarray.test()' - bitarray is installed in: /Users/ilan/bitarray/bitarray - bitarray version: 2.3.0 - sys.version: 2.7.15 (default, Mar 5 2020, 14:58:04) [GCC Clang 9.0.1] - sys.prefix: /Users/ilan/Mini3/envs/py27 - pointer size: 64 bit - sizeof(size_t): 8 - PY_UINT64_T defined: 1 - DEBUG: 0 - ......................................................................... - ......................................................................... - ............................................................. - ---------------------------------------------------------------------- - Ran 398 tests in 0.476s - - OK - -You can always import the function test, -and ``test().wasSuccessful()`` will return ``True`` when the test went well. - - -Using the module ----------------- - -As mentioned above, bitarray objects behave very much like lists, so -there is not too much to learn. The biggest difference from list -objects (except that bitarray are obviously homogeneous) is the ability -to access the machine representation of the object. -When doing so, the bit endianness is of importance; this issue is -explained in detail in the section below. Here, we demonstrate the -basic usage of bitarray objects: - -.. code-block:: python - - >>> from bitarray import bitarray - >>> a = bitarray() # create empty bitarray - >>> a.append(1) - >>> a.extend([1, 0]) - >>> a - bitarray('110') - >>> x = bitarray(2 ** 20) # bitarray of length 1048576 (uninitialized) - >>> len(x) - 1048576 - >>> bitarray('1001 011') # initialize from string (whitespace is ignored) - bitarray('1001011') - >>> lst = [1, 0, False, True, True] - >>> a = bitarray(lst) # initialize from iterable - >>> a - bitarray('10011') - >>> a.count(1) - 3 - >>> a.remove(0) # removes first occurrence of 0 - >>> a - bitarray('1011') - -Like lists, bitarray objects support slice assignment and deletion: - -.. 
code-block:: python - - >>> a = bitarray(50) - >>> a.setall(0) # set all elements in a to 0 - >>> a[11:37:3] = 9 * bitarray('1') - >>> a - bitarray('00000000000100100100100100100100100100000000000000') - >>> del a[12::3] - >>> a - bitarray('0000000000010101010101010101000000000') - >>> a[-6:] = bitarray('10011') - >>> a - bitarray('000000000001010101010101010100010011') - >>> a += bitarray('000111') - >>> a[9:] - bitarray('001010101010101010100010011000111') - -In addition, slices can be assigned to booleans, which is easier (and -faster) than assigning to a bitarray in which all values are the same: - -.. code-block:: python - - >>> a = 20 * bitarray('0') - >>> a[1:15:3] = True - >>> a - bitarray('01001001001001000000') - -This is easier and faster than: - -.. code-block:: python - - >>> a = 20 * bitarray('0') - >>> a[1:15:3] = 5 * bitarray('1') - >>> a - bitarray('01001001001001000000') - -Note that in the latter we have to create a temporary bitarray whose length -must be known or calculated. Another example of assigning slices to Booleans, -is setting ranges: - -.. code-block:: python - - >>> a = bitarray(30) - >>> a[:] = 0 # set all elements to 0 - equivalent to a.setall(0) - >>> a[10:25] = 1 # set elements in range(10, 25) to 1 - >>> a - bitarray('000000000011111111111111100000') - - -Bitwise operators ------------------ - -Bitarray objects support the bitwise operators ``~``, ``&``, ``|``, ``^``, -``<<``, ``>>`` (as well as their in-place versions ``&=``, ``|=``, ``^=``, -``<<=``, ``>>=``). The behavior is very much what one would expect: - -.. 
code-block:: python - - >>> a = bitarray('101110001') - >>> ~a # invert - bitarray('010001110') - >>> b = bitarray('111001011') - >>> a ^ b - bitarray('010111010') - >>> a &= b - >>> a - bitarray('101000001') - >>> a <<= 2 - >>> a - bitarray('100000100') - >>> b >> 1 - bitarray('011100101') - -The C language does not specify the behavior of negative shifts and -of left shifts larger or equal than the width of the promoted left operand. -The exact behavior is compiler/machine specific. -This Python bitarray library specifies the behavior as follows: - -* the length of the bitarray is never changed by any shift operation -* blanks are filled by 0 -* negative shifts raise ``ValueError`` -* shifts larger or equal to the length of the bitarray result in - bitarrays with all values 0 - - -Bit endianness --------------- - -Unless explicitly converting to machine representation, using -the ``.tobytes()``, ``.frombytes()``, ``.tofile()`` and ``.fromfile()`` -methods, as well as using ``memoryview``, the bit endianness will have no -effect on any computation, and one can skip this section. - -Since bitarrays allows addressing individual bits, where the machine -represents 8 bits in one byte, there are two obvious choices for this -mapping: little-endian and big-endian. - -When dealing with the machine representation of bitarray objects, it is -recommended to always explicitly specify the endianness. - -By default, bitarrays use big-endian representation: - -.. code-block:: python - - >>> a = bitarray() - >>> a.endian() - 'big' - >>> a.frombytes(b'A') - >>> a - bitarray('01000001') - >>> a[6] = 1 - >>> a.tobytes() - b'C' - -Big-endian means that the most-significant bit comes first. -Here, ``a[0]`` is the lowest address (index) and most significant bit, -and ``a[7]`` is the highest address and least significant bit. - -When creating a new bitarray object, the endianness can always be -specified explicitly: - -.. 
code-block:: python - - >>> a = bitarray(endian='little') - >>> a.frombytes(b'A') - >>> a - bitarray('10000010') - >>> a.endian() - 'little' - -Here, the low-bit comes first because little-endian means that increasing -numeric significance corresponds to an increasing address. -So ``a[0]`` is the lowest address and least significant bit, -and ``a[7]`` is the highest address and most significant bit. - -The bit endianness is a property of the bitarray object. -The endianness cannot be changed once a bitarray object is created. -When comparing bitarray objects, the endianness (and hence the machine -representation) is irrelevant; what matters is the mapping from indices -to bits: - -.. code-block:: python - - >>> bitarray('11001', endian='big') == bitarray('11001', endian='little') - True - -Bitwise operations (``|``, ``^``, ``&=``, ``|=``, ``^=``, ``~``) are -implemented efficiently using the corresponding byte operations in C, i.e. the -operators act on the machine representation of the bitarray objects. -Therefore, it is not possible to perform bitwise operators on bitarrays -with different endianness. - -When converting to and from machine representation, using -the ``.tobytes()``, ``.frombytes()``, ``.tofile()`` and ``.fromfile()`` -methods, the endianness matters: - -.. code-block:: python - - >>> a = bitarray(endian='little') - >>> a.frombytes(b'\x01') - >>> a - bitarray('10000000') - >>> b = bitarray(endian='big') - >>> b.frombytes(b'\x80') - >>> b - bitarray('10000000') - >>> a == b - True - >>> a.tobytes() == b.tobytes() - False - -As mentioned above, the endianness can not be changed once an object is -created. However, you can create a new bitarray with different endianness: - -.. code-block:: python - - >>> a = bitarray('111000', endian='little') - >>> b = bitarray(a, endian='big') - >>> b - bitarray('111000') - >>> a == b - True - - -Buffer protocol ---------------- - -Bitarray objects support the buffer protocol. 
They can both export their -own buffer, as well as import another object's buffer. To learn more about -this topic, please read `buffer protocol `__. There is also an example that shows how -to memory-map a file to a bitarray: `mmapped-file.py `__ - - -Variable bit length prefix codes --------------------------------- - -The ``.encode()`` method takes a dictionary mapping symbols to bitarrays -and an iterable, and extends the bitarray object with the encoded symbols -found while iterating. For example: - -.. code-block:: python - - >>> d = {'H':bitarray('111'), 'e':bitarray('0'), - ... 'l':bitarray('110'), 'o':bitarray('10')} - ... - >>> a = bitarray() - >>> a.encode(d, 'Hello') - >>> a - bitarray('111011011010') - -Note that the string ``'Hello'`` is an iterable, but the symbols are not -limited to characters, in fact any immutable Python object can be a symbol. -Taking the same dictionary, we can apply the ``.decode()`` method which will -return a list of the symbols: - -.. code-block:: python - - >>> a.decode(d) - ['H', 'e', 'l', 'l', 'o'] - >>> ''.join(a.decode(d)) - 'Hello' - -Since symbols are not limited to being characters, it is necessary to return -them as elements of a list, rather than simply returning the joined string. -The above dictionary ``d`` can be efficiently constructed using the function -``bitarray.util.huffman_code()``. I also wrote `Huffman coding in Python -using bitarray `__ for more -background information. - -When the codes are large, and you have many decode calls, most time will -be spent creating the (same) internal decode tree objects. In this case, -it will be much faster to create a ``decodetree`` object, which can be -passed to bitarray's ``.decode()`` and ``.iterdecode()`` methods, instead -of passing the prefix code dictionary to those methods itself: - -.. 
code-block:: python - - >>> from bitarray import bitarray, decodetree - >>> t = decodetree({'a': bitarray('0'), 'b': bitarray('1')}) - >>> a = bitarray('0110') - >>> a.decode(t) - ['a', 'b', 'b', 'a'] - >>> ''.join(a.iterdecode(t)) - 'abba' - -The ``decodetree`` object is immutable and unhashable, and it's sole purpose -is to be passed to bitarray's `.decode()` and `.iterdecode()` methods. - - -Frozenbitarrays ---------------- - -A ``frozenbitarray`` object is very similar to the bitarray object. -The difference is that this a ``frozenbitarray`` is immutable, and hashable, -and can therefore be used as a dictionary key: - -.. code-block:: python - - >>> from bitarray import frozenbitarray - >>> key = frozenbitarray('1100011') - >>> {key: 'some value'} - {frozenbitarray('1100011'): 'some value'} - >>> key[3] = 1 - Traceback (most recent call last): - ... - TypeError: frozenbitarray is immutable - - -Reference -========= - -bitarray version: 2.3.0 -- `change log `__ - -In the following, ``item`` and ``value`` are usually a single bit - -an integer 0 or 1. - - -The bitarray object: --------------------- - -``bitarray(initializer=0, /, endian='big', buffer=None)`` -> bitarray - Return a new bitarray object whose items are bits initialized from - the optional initial object, and endianness. - The initializer may be of the following types: - - ``int``: Create a bitarray of given integer length. The initial values are - uninitialized. - - ``str``: Create bitarray from a string of ``0`` and ``1``. - - ``iterable``: Create bitarray from iterable or sequence or integers 0 or 1. - - Optional keyword arguments: - - ``endian``: Specifies the bit endianness of the created bitarray object. - Allowed values are ``big`` and ``little`` (the default is ``big``). - The bit endianness effects the buffer representation of the bitarray. - - ``buffer``: Any object which exposes a buffer. When provided, ``initializer`` - cannot be present (or has to be ``None``). 
The imported buffer may be - readonly or writable, depending on the object type. - - New in version 2.3: optional ``buffer`` argument. - - -**A bitarray object supports the following methods:** - -``all()`` -> bool - Return True when all bits in the array are True. - Note that ``a.all()`` is faster than ``all(a)``. - - -``any()`` -> bool - Return True when any bit in the array is True. - Note that ``a.any()`` is faster than ``any(a)``. - - -``append(item, /)`` - Append ``item`` to the end of the bitarray. - - -``buffer_info()`` -> tuple - Return a tuple containing: - - 0. memory address of buffer - 1. buffer size (in bytes) - 2. bit endianness as a string - 3. number of unused padding bits - 4. allocated memory for the buffer (in bytes) - 5. memory is read-only - 6. buffer is imported - 7. number of buffer exports - - -``bytereverse(start=0, stop=, /)`` - Reverse the bit order for the bytes in range(start, stop) in-place. - The start and stop indices are given in terms of bytes (not bits). - By default, all bytes in the buffer are reversed. - Note: This method only changes the buffer; it does not change the - endianness of the bitarray object. - - New in version 2.2.5: optional ``start`` and ``stop`` arguments. - - -``clear()`` - Remove all items from the bitarray. - - New in version 1.4. - - -``copy()`` -> bitarray - Return a copy of the bitarray. - - -``count(value=1, start=0, stop=, /)`` -> int - Count the number of occurrences of ``value`` in the bitarray. - - New in version 1.1.0: optional ``start`` and ``stop`` arguments. - - -``decode(code, /)`` -> list - Given a prefix code (a dict mapping symbols to bitarrays, or ``decodetree`` - object), decode the content of the bitarray and return it as a list of - symbols. - - -``encode(code, iterable, /)`` - Given a prefix code (a dict mapping symbols to bitarrays), - iterate over the iterable object with symbols, and extend the bitarray - with the corresponding bitarray for each symbol. 
- - -``endian()`` -> str - Return the bit endianness of the bitarray as a string (``little`` or ``big``). - - -``extend(iterable, /)`` - Append all the items from ``iterable`` to the end of the bitarray. - If the iterable is a string, each ``0`` and ``1`` are appended as - bits (ignoring whitespace and underscore). - - -``fill()`` -> int - Add zeros to the end of the bitarray, such that the length of the bitarray - will be a multiple of 8, and return the number of bits added (0..7). - - -``find(sub_bitarray, start=0, stop=, /)`` -> int - Return the lowest index where sub_bitarray is found, such that sub_bitarray - is contained within ``[start:stop]``. - Return -1 when sub_bitarray is not found. - - New in version 2.1. - - -``frombytes(bytes, /)`` - Extend bitarray with raw bytes. That is, each append byte will add eight - bits to the bitarray. - - -``fromfile(f, n=-1, /)`` - Extend bitarray with up to n bytes read from the file object f. - When n is omitted or negative, reads all data until EOF. - When n is provided and positive but exceeds the data available, - ``EOFError`` is raised (but the available data is still read and appended. - - -``index(sub_bitarray, start=0, stop=, /)`` -> int - Return the lowest index where sub_bitarray is found, such that sub_bitarray - is contained within ``[start:stop]``. - Raises ``ValueError`` when the sub_bitarray is not present. - - -``insert(index, value, /)`` - Insert ``value`` into the bitarray before ``index``. - - -``invert(index=, /)`` - Invert all bits in the array (in-place). - When the optional ``index`` is given, only invert the single bit at index. - - New in version 1.5.3: optional ``index`` argument. - - -``iterdecode(code, /)`` -> iterator - Given a prefix code (a dict mapping symbols to bitarrays, or ``decodetree`` - object), decode the content of the bitarray and return an iterator over - the symbols. 
- - -``itersearch(sub_bitarray, /)`` -> iterator - Searches for the given sub_bitarray in self, and return an iterator over - the start positions where bitarray matches self. - - -``pack(bytes, /)`` - Extend the bitarray from bytes, where each byte corresponds to a single - bit. The byte ``b'\x00'`` maps to bit 0 and all other characters map to - bit 1. - This method, as well as the unpack method, are meant for efficient - transfer of data between bitarray objects to other python objects - (for example NumPy's ndarray object) which have a different memory view. - - -``pop(index=-1, /)`` -> item - Return the i-th (default last) element and delete it from the bitarray. - Raises ``IndexError`` if bitarray is empty or index is out of range. - - -``remove(value, /)`` - Remove the first occurrence of ``value`` in the bitarray. - Raises ``ValueError`` if item is not present. - - -``reverse()`` - Reverse all bits in the array (in-place). - - -``search(sub_bitarray, limit=, /)`` -> list - Searches for the given sub_bitarray in self, and return the list of start - positions. - The optional argument limits the number of search results to the integer - specified. By default, all search results are returned. - - -``setall(value, /)`` - Set all elements in the bitarray to ``value``. - Note that ``a.setall(value)`` is equivalent to ``a[:] = value``. - - -``sort(reverse=False)`` - Sort the bits in the array (in-place). - - -``to01()`` -> str - Return a string containing '0's and '1's, representing the bits in the - bitarray. - - -``tobytes()`` -> bytes - Return the byte representation of the bitarray. - - -``tofile(f, /)`` - Write the byte representation of the bitarray to the file object f. - - -``tolist()`` -> list - Return a list with the items (0 or 1) in the bitarray. 
- Note that the list object being created will require 32 or 64 times more - memory (depending on the machine architecture) than the bitarray object, - which may cause a memory error if the bitarray is very large. - - -``unpack(zero=b'\x00', one=b'\x01')`` -> bytes - Return bytes containing one character for each bit in the bitarray, - using the specified mapping. - - -Other objects: --------------- - -``frozenbitarray(initializer=0, /, endian='big', buffer=None)`` -> frozenbitarray - Return a frozenbitarray object, which is initialized the same way a bitarray - object is initialized. A frozenbitarray is immutable and hashable. - Its contents cannot be altered after it is created; however, it can be used - as a dictionary key. - - New in version 1.1. - - -``decodetree(code, /)`` -> decodetree - Given a prefix code (a dict mapping symbols to bitarrays), - create a binary tree object to be passed to ``.decode()`` or ``.iterdecode()``. - - New in version 1.6. - - -Functions defined in the `bitarray` module: -------------------------------------------- - -``bits2bytes(n, /)`` -> int - Return the number of bytes necessary to store n bits. - - -``get_default_endian()`` -> string - Return the default endianness for new bitarray objects being created. - Unless ``_set_default_endian()`` is called, the return value is ``big``. - - New in version 1.3. - - -``test(verbosity=1, repeat=1)`` -> TextTestResult - Run self-test, and return unittest.runner.TextTestResult object. - - -Functions defined in `bitarray.util` module: --------------------------------------------- - -This sub-module was add in version 1.2. - -``zeros(length, /, endian=None)`` -> bitarray - Create a bitarray of length, with all values 0, and optional - endianness, which may be 'big', 'little'. - - -``urandom(length, /, endian=None)`` -> bitarray - Return a bitarray of ``length`` random bits (uses ``os.urandom``). - - New in version 1.7. 
- - -``pprint(bitarray, /, stream=None, group=8, indent=4, width=80)`` - Prints the formatted representation of object on ``stream``, followed by a - newline. If ``stream`` is ``None``, ``sys.stdout`` is used. By default, elements - are grouped in bytes (8 elements), and 8 bytes (64 elements) per line. - Non-bitarray objects are printed by the standard library - function ``pprint.pprint()``. - - New in version 1.8. - - -``make_endian(bitarray, endian, /)`` -> bitarray - When the endianness of the given bitarray is different from ``endian``, - return a new bitarray, with endianness ``endian`` and the same elements - as the original bitarray. - Otherwise (endianness is already ``endian``) the original bitarray is returned - unchanged. - - New in version 1.3. - - -``rindex(bitarray, value=1, start=0, stop=, /)`` -> int - Return the rightmost (highest) index of ``value`` in bitarray. - Raises ``ValueError`` if the value is not present. - - New in version 2.3.0: optional ``start`` and ``stop`` arguments. - - -``strip(bitarray, /, mode='right')`` -> bitarray - Return a new bitarray with zeros stripped from left, right or both ends. - Allowed values for mode are the strings: ``left``, ``right``, ``both`` - - -``count_n(a, n, /)`` -> int - Return lowest index ``i`` for which ``a[:i].count() == n``. - Raises ``ValueError``, when n exceeds total count (``a.count()``). - - -``parity(a, /)`` -> int - Return the parity of bitarray ``a``. - This is equivalent to ``a.count() % 2`` (but more efficient). - - New in version 1.9. - - -``count_and(a, b, /)`` -> int - Return ``(a & b).count()`` in a memory efficient manner, - as no intermediate bitarray object gets created. - - -``count_or(a, b, /)`` -> int - Return ``(a | b).count()`` in a memory efficient manner, - as no intermediate bitarray object gets created. - - -``count_xor(a, b, /)`` -> int - Return ``(a ^ b).count()`` in a memory efficient manner, - as no intermediate bitarray object gets created. 
- - -``subset(a, b, /)`` -> bool - Return ``True`` if bitarray ``a`` is a subset of bitarray ``b``. - ``subset(a, b)`` is equivalent to ``(a & b).count() == a.count()`` but is more - efficient since we can stop as soon as one mismatch is found, and no - intermediate bitarray object gets created. - - -``ba2hex(bitarray, /)`` -> hexstr - Return a string containing the hexadecimal representation of - the bitarray (which has to be multiple of 4 in length). - - -``hex2ba(hexstr, /, endian=None)`` -> bitarray - Bitarray of hexadecimal representation. hexstr may contain any number - (including odd numbers) of hex digits (upper or lower case). - - -``ba2base(n, bitarray, /)`` -> str - Return a string containing the base ``n`` ASCII representation of - the bitarray. Allowed values for ``n`` are 2, 4, 8, 16, 32 and 64. - The bitarray has to be multiple of length 1, 2, 3, 4, 5 or 6 respectively. - For ``n=16`` (hexadecimal), ``ba2hex()`` will be much faster, as ``ba2base()`` - does not take advantage of byte level operations. - For ``n=32`` the RFC 4648 Base32 alphabet is used, and for ``n=64`` the - standard base 64 alphabet is used. - - See also: `Bitarray representations `__ - - New in version 1.9. - - -``base2ba(n, asciistr, /, endian=None)`` -> bitarray - Bitarray of the base ``n`` ASCII representation. - Allowed values for ``n`` are 2, 4, 8, 16, 32 and 64. - For ``n=16`` (hexadecimal), ``hex2ba()`` will be much faster, as ``base2ba()`` - does not take advantage of byte level operations. - For ``n=32`` the RFC 4648 Base32 alphabet is used, and for ``n=64`` the - standard base 64 alphabet is used. - - See also: `Bitarray representations `__ - - New in version 1.9. - - -``ba2int(bitarray, /, signed=False)`` -> int - Convert the given bitarray into an integer. - The bit-endianness of the bitarray is respected. - ``signed`` indicates whether two's complement is used to represent the integer. 
- - -``int2ba(int, /, length=None, endian=None, signed=False)`` -> bitarray - Convert the given integer to a bitarray (with given endianness, - and no leading (big-endian) / trailing (little-endian) zeros), unless - the ``length`` of the bitarray is provided. An ``OverflowError`` is raised - if the integer is not representable with the given number of bits. - ``signed`` determines whether two's complement is used to represent the integer, - and requires ``length`` to be provided. - - -``serialize(bitarray, /)`` -> bytes - Return a serialized representation of the bitarray, which may be passed to - ``deserialize()``. It efficiently represents the bitarray object (including - its endianness) and is guaranteed not to change in future releases. - - See also: `Bitarray representations `__ - - New in version 1.8. - - -``deserialize(bytes, /)`` -> bitarray - Return a bitarray given the bytes representation returned by ``serialize()``. - - See also: `Bitarray representations `__ - - New in version 1.8. - - -``vl_encode(bitarray, /)`` -> bytes - Return variable length binary representation of bitarray. - This representation is useful for efficiently storing small bitarray - in a binary stream. Use ``vl_decode()`` for decoding. - - See also: `Variable length bitarray format `__ - - New in version 2.2. - - -``vl_decode(stream, /, endian=None)`` -> bitarray - Decode binary stream (an integer iterator, or bytes object), and return - the decoded bitarray. This function consumes only one bitarray and leaves - the remaining stream untouched. ``StopIteration`` is raised when no - terminating byte is found. - Use ``vl_encode()`` for encoding. - - See also: `Variable length bitarray format `__ - - New in version 2.2. - - -``huffman_code(dict, /, endian=None)`` -> dict - Given a frequency map, a dictionary mapping symbols to their frequency, - calculate the Huffman code, i.e. a dict mapping those symbols to - bitarrays (with given endianness). 
Note that the symbols are not limited - to being strings. Symbols may may be any hashable object (such as ``None``). - - diff --git a/shell/ext-py/bitarray-2.3.0/bitarray/__init__.py b/shell/ext-py/bitarray-2.3.0/bitarray/__init__.py deleted file mode 100644 index 79604f3fa..000000000 --- a/shell/ext-py/bitarray-2.3.0/bitarray/__init__.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright (c) 2008 - 2021, Ilan Schnell; All Rights Reserved -""" -This package defines an object type which can efficiently represent -a bitarray. Bitarrays are sequence types and behave very much like lists. - -Please find a description of this package at: - - https://github.com/ilanschnell/bitarray - -Author: Ilan Schnell -""" -from __future__ import absolute_import - -from bitarray._bitarray import (bitarray, decodetree, _sysinfo, - get_default_endian, _set_default_endian, - __version__) - - -__all__ = ['bitarray', 'frozenbitarray', 'decodetree', '__version__'] - - -class frozenbitarray(bitarray): - """frozenbitarray(initializer=0, /, endian='big', buffer=None) -> \ -frozenbitarray - -Return a frozenbitarray object, which is initialized the same way a bitarray -object is initialized. A frozenbitarray is immutable and hashable. -Its contents cannot be altered after it is created; however, it can be used -as a dictionary key. -""" - def __init__(self, *args, **kwargs): - if 'buffer' in kwargs: - if not self.buffer_info()[5]: # not readonly - raise TypeError("cannot import writable buffer into " - "frozenbitarray") - self._freeze() - - def __repr__(self): - return 'frozen' + bitarray.__repr__(self) - - def __hash__(self): - "Return hash(self)." 
- if getattr(self, '_hash', None) is None: - # ensure hash is independent of endianness, also the copy will be - # mutable such that .tobytes() can zero out the pad bits - a = bitarray(self, 'big') - self._hash = hash((len(a), a.tobytes())) - return self._hash - - # Technically the code below is not necessary, as all these methods will - # raise a TypeError on read-only memory. However, with a different error - # message. - def __delitem__(self, *args, **kwargs): - "" # no docstring - raise TypeError("frozenbitarray is immutable") - - append = bytereverse = clear = extend = encode = fill = __delitem__ - frombytes = fromfile = insert = invert = pack = pop = __delitem__ - remove = reverse = setall = sort = __setitem__ = __delitem__ - __iadd__ = __iand__ = __imul__ = __ior__ = __ixor__ = __delitem__ - __ilshift__ = __irshift__ = __delitem__ - - -def bits2bytes(__n): - """bits2bytes(n, /) -> int - -Return the number of bytes necessary to store n bits. -""" - import sys - if not isinstance(__n, (int, long) if sys.version_info[0] == 2 else int): - raise TypeError("integer expected") - if __n < 0: - raise ValueError("non-negative integer expected") - return (__n + 7) // 8 - - -def test(verbosity=1, repeat=1): - """test(verbosity=1, repeat=1) -> TextTestResult - -Run self-test, and return unittest.runner.TextTestResult object. -""" - from bitarray import test_bitarray - return test_bitarray.run(verbosity=verbosity, repeat=repeat) diff --git a/shell/ext-py/bitarray-2.3.0/bitarray/__init__.pyi b/shell/ext-py/bitarray-2.3.0/bitarray/__init__.pyi deleted file mode 100644 index e15c22f34..000000000 --- a/shell/ext-py/bitarray-2.3.0/bitarray/__init__.pyi +++ /dev/null @@ -1,126 +0,0 @@ -""" -This stub, as well as util.pyi, have been tested with all Python 3 versions -that bitarray supports and mypy 0.902. 
-""" -from collections.abc import Iterable, Iterator -from unittest.runner import TextTestResult - -from typing import Any, BinaryIO, Dict, Union, overload - - -Codedict = Dict[Any, bitarray] - - -class decodetree: - def __init__(self, code: Codedict) -> None: ... - def nodes(self) -> int: ... - def todict(self) -> Codedict: ... - - -class bitarray: - def __init__(self, - initializer: Union[int, str, Iterable[int], None] = ..., - endian: str = ..., - buffer: Any = ...) -> None: ... - - def all(self) -> bool: ... - def any(self) -> bool: ... - def append(self, value: int) -> None: ... - def buffer_info(self) -> tuple: ... - def bytereverse(self, - start: int = ..., - stop: int = ...) -> None: ... - - def clear(self) -> None: ... - def copy(self) -> bitarray: ... - def count(self, - value: int = ..., - start: int = ..., - stop: int = ...) -> int: ... - - def decode(self, code: Union[Codedict, decodetree]) -> list: ... - def encode(self, code: Codedict, x: Iterable) -> None: ... - def endian(self) -> str: ... - def extend(self, x: Union[str, Iterable[int]]) -> None: ... - def fill(self) -> int: ... - def find(self, - a: Union[bitarray, int], - start: int = ..., - stop: int = ...) -> int: ... - - def frombytes(self, a: bytes) -> None: ... - def fromfile(self, f: BinaryIO, n: int = ...) -> None: ... - def index(self, - a: Union[bitarray, int], - start: int = ..., - stop: int = ...) -> int: ... - - def insert(self, i: int, value: int) -> None: ... - def invert(self, i: int = ...) -> None: ... - def iterdecode(self, - code: Union[Codedict, decodetree]) -> Iterator: ... - - def itersearch(self, a: Union[bitarray, int]) -> Iterator[int]: ... - def pack(self, b: bytes) -> None: ... - def pop(self, i: int = ...) -> int: ... - def remove(self, value: int) -> None: ... - def reverse(self) -> None: ... - def search(self, a: Union[bitarray, int], - limit: int = ...) -> list[int]: ... - - def setall(self, value: int) -> None: ... - def sort(self, reverse: int) -> None: ... 
- def to01(self) -> str: ... - def tobytes(self) -> bytes: ... - def tofile(self, f: BinaryIO) -> None: ... - def tolist(self) -> list[int]: ... - def unpack(self, - zero: bytes = ..., - one: bytes = ...) -> bytes: ... - - def __len__(self) -> int: ... - def __iter__(self) -> Iterator[int]: ... - @overload - def __getitem__(self, i: int) -> int: ... - @overload - def __getitem__(self, s: slice) -> bitarray: ... - @overload - def __setitem__(self, i: Union[int, slice], o: int) -> None: ... - @overload - def __setitem__(self, s: slice, o: bitarray) -> None: ... - def __delitem__(self, i: Union[int, slice]) -> None: ... - - def __add__(self, other: bitarray) -> bitarray: ... - def __iadd__(self, other: bitarray) -> bitarray: ... - def __mul__(self, n: int) -> bitarray: ... - def __imul__(self, n: int) -> bitarray: ... - def __rmul__(self, n: int) -> bitarray: ... - - def __ge__(self, other: bitarray) -> bool: ... - def __gt__(self, other: bitarray) -> bool: ... - def __le__(self, other: bitarray) -> bool: ... - def __lt__(self, other: bitarray) -> bool: ... - - def __and__(self, other: bitarray) -> bitarray: ... - def __or__(self, other: bitarray) -> bitarray: ... - def __xor__(self, other: bitarray) -> bitarray: ... - def __iand__(self, other: bitarray) -> bitarray: ... - def __ior__(self, other: bitarray) -> bitarray: ... - def __ixor__(self, other: bitarray) -> bitarray: ... - def __invert__(self) -> bitarray: ... - def __lshift__(self, n: int) -> bitarray: ... - def __rshift__(self, n: int) -> bitarray: ... - def __ilshift__(self, n: int) -> bitarray: ... - def __irshift__(self, n: int) -> bitarray: ... - - -class frozenbitarray(bitarray): - def __hash__(self) -> int: ... - - -__version__: str -def bits2bytes(n: int) -> int: ... -def get_default_endian() -> str: ... -def test(verbosity: int = ..., repeat: int = ...) -> TextTestResult: ... -def _set_default_endian(endian: str) -> None: ... -def _sysinfo() -> tuple: ... 
diff --git a/shell/ext-py/bitarray-2.3.0/bitarray/_bitarray.c b/shell/ext-py/bitarray-2.3.0/bitarray/_bitarray.c deleted file mode 100644 index 2f874ece0..000000000 --- a/shell/ext-py/bitarray-2.3.0/bitarray/_bitarray.c +++ /dev/null @@ -1,3752 +0,0 @@ -/* - Copyright (c) 2008 - 2021, Ilan Schnell; All Rights Reserved - bitarray is published under the PSF license. - - This file is the C part of the bitarray package. - All functionality of the bitarray object is implemented here. - - Author: Ilan Schnell -*/ - -#define PY_SSIZE_T_CLEAN -#include "Python.h" -#include "pythoncapi_compat.h" -#include "bitarray.h" - -/* size used when reading / writing blocks from files (in bytes) */ -#define BLOCKSIZE 65536 - -#ifdef IS_PY3K -#define Py_TPFLAGS_HAVE_WEAKREFS 0 -#endif - -static int default_endian = ENDIAN_BIG; - -static PyTypeObject Bitarray_Type; - -#define bitarray_Check(obj) PyObject_TypeCheck((obj), &Bitarray_Type) - - -static int -resize(bitarrayobject *self, Py_ssize_t nbits) -{ - const Py_ssize_t allocated = self->allocated, size = Py_SIZE(self); - Py_ssize_t newsize; - size_t new_allocated; - - newsize = BYTES(nbits); - if (nbits < 0 || newsize < 0) { - PyErr_Format(PyExc_OverflowError, "bitarray resize %zd", nbits); - return -1; - } - - if (self->ob_exports > 0) { - PyErr_SetString(PyExc_BufferError, - "cannot resize bitarray that is exporting buffers"); - return -1; - } - - if (self->buffer) { - PyErr_SetString(PyExc_BufferError, "cannot resize imported buffer"); - return -1; - } - - assert(allocated >= size && size == BYTES(self->nbits)); - /* ob_item == NULL implies ob_size == allocated == 0 */ - assert(self->ob_item != NULL || (size == 0 && allocated == 0)); - /* allocated == 0 implies size == 0 */ - assert(allocated != 0 || size == 0); - /* resize() is never called on readonly memory */ - assert(self->readonly == 0); - - if (newsize == size) { - /* buffer size hasn't changed - bypass everything */ - self->nbits = nbits; - return 0; - } - - /* Bypass 
reallocation when a allocation is large enough to accommodate - the newsize. If the newsize falls lower than half the allocated size, - then proceed with the reallocation to shrink the bitarray. - */ - if (allocated >= newsize && newsize >= (allocated >> 1)) { - assert(self->ob_item != NULL || newsize == 0); - Py_SET_SIZE(self, newsize); - self->nbits = nbits; - return 0; - } - - if (newsize == 0) { - PyMem_Free(self->ob_item); - self->ob_item = NULL; - Py_SET_SIZE(self, 0); - self->allocated = 0; - self->nbits = 0; - return 0; - } - - new_allocated = (size_t) newsize; - if (size == 0 && newsize <= 4) - /* When resizing an empty bitarray, we want at least 4 bytes. */ - new_allocated = 4; - - /* Over-allocate when the (previous) size is non-zero (as we often - extend an empty array on creation) and the size is actually - increasing. */ - else if (size != 0 && newsize > size) - /* This over-allocates proportional to the bitarray size, making - room for additional growth. - The growth pattern is: 0, 4, 8, 16, 25, 34, 44, 54, 65, 77, ... - The pattern starts out the same as for lists but then - grows at a smaller rate so that larger bitarrays only overallocate - by about 1/16th -- this is done because bitarrays are assumed - to be memory critical. */ - new_allocated += (newsize >> 4) + (newsize < 8 ? 
3 : 7); - - assert(new_allocated >= (size_t) newsize); - self->ob_item = PyMem_Realloc(self->ob_item, new_allocated); - if (self->ob_item == NULL) { - PyErr_NoMemory(); - return -1; - } - Py_SET_SIZE(self, newsize); - self->allocated = new_allocated; - self->nbits = nbits; - return 0; -} - -/* create new bitarray object without initialization of buffer */ -static PyObject * -newbitarrayobject(PyTypeObject *type, Py_ssize_t nbits, int endian) -{ - const Py_ssize_t nbytes = BYTES(nbits); - bitarrayobject *obj; - - assert(nbits >= 0); - obj = (bitarrayobject *) type->tp_alloc(type, 0); - if (obj == NULL) - return NULL; - - Py_SET_SIZE(obj, nbytes); - if (nbytes == 0) { - obj->ob_item = NULL; - } - else { - obj->ob_item = (char *) PyMem_Malloc((size_t) nbytes); - if (obj->ob_item == NULL) { - PyObject_Del(obj); - return PyErr_NoMemory(); - } - } - obj->allocated = nbytes; - obj->nbits = nbits; - obj->endian = endian; - obj->ob_exports = 0; - obj->weakreflist = NULL; - obj->buffer = NULL; - obj->readonly = 0; - return (PyObject *) obj; -} - -static void -bitarray_dealloc(bitarrayobject *self) -{ - if (self->weakreflist != NULL) - PyObject_ClearWeakRefs((PyObject *) self); - - if (self->buffer) { - PyBuffer_Release(self->buffer); - PyMem_Free(self->buffer); - } - else if (self->ob_item != NULL) - PyMem_Free((void *) self->ob_item); - - Py_TYPE(self)->tp_free((PyObject *) self); -} - -/* reverse bytes range(a, b) in buffer */ -static void -bytereverse(bitarrayobject *self, Py_ssize_t a, Py_ssize_t b) -{ - static char trans[256]; - static int setup = 0; - Py_ssize_t i; - - assert(0 <= a && a <= Py_SIZE(self)); - assert(0 <= b && b <= Py_SIZE(self)); - - if (!setup) { - /* setup translation table, which maps each byte to it's reversed: - trans = {0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, ..., 0xff} */ - int j, k; - - for (k = 0; k < 256; k++) { - trans[k] = 0x00; - for (j = 0; j < 8; j++) - if (1 << (7 - j) & k) - trans[k] |= 1 << j; - } - setup = 1; - } - - for (i = a; i < b; 
i++) - self->ob_item[i] = trans[(unsigned char) self->ob_item[i]]; -} - -#ifdef PY_UINT64_T -#define UINT64_BUFFER(self) ((PY_UINT64_T *) (self)->ob_item) -#define UINT64_WORDS(bytes) ((bytes) >> 3) -#else -/* The UINT64_BUFFER macro only exists here in order to write code which - complies with and without PY_UINT64_T defined (in order to avoid - #ifdef'ing the code below). */ -#define UINT64_BUFFER(self) ((self)->ob_item) -#define UINT64_WORDS(bytes) 0 -#endif - -/* Shift bits in byte-range(a, b) by n bits to right (using uint64 shifts - when possible). - The parameter (bebr = big endian byte reverse) is used to allow this - function to call itself without calling bytereverse(). Elsewhere, this - function should always be called with bebr=1. */ -static void -shift_r8(bitarrayobject *self, Py_ssize_t a, Py_ssize_t b, int n, int bebr) -{ - Py_ssize_t i; - - assert(0 <= n && n < 8 && a <= b); - assert(0 <= a && a <= Py_SIZE(self)); - assert(0 <= b && b <= Py_SIZE(self)); - if (n == 0 || a == b) - return; - - /* as the big-endian representation has reversed bit order in each - byte, we reverse each byte, and (re-) reverse again below */ - if (bebr && self->endian == ENDIAN_BIG) - bytereverse(self, a, b); -#define ucb ((unsigned char *) (self)->ob_item) - -#ifdef PY_UINT64_T - if (b >= a + 8) { - const Py_ssize_t word_a = (a + 7) / 8; - const Py_ssize_t word_b = b / 8; - - assert(word_a <= word_b && b - 8 * word_b < 8 && 8 * word_a - a < 8); - - shift_r8(self, 8 * word_b, b, n, 0); - if (a < 8 * word_b && 8 * word_b < b) /* add byte from word below */ - ucb[8 * word_b] |= ucb[8 * word_b - 1] >> (8 - n); - - for (i = word_b - 1; i >= word_a; i--) { - assert_byte_in_range(self, 8 * i + 7); - UINT64_BUFFER(self)[i] <<= n; /* shift word */ - if (i != word_a) /* add shifted byte from next lower word */ - ucb[8 * i] |= ucb[8 * i - 1] >> (8 - n); - } - if (a < 8 * word_a && 8 * word_a < b) /* add byte from below */ - ucb[8 * word_a] |= ucb[8 * word_a - 1] >> (8 - n); - - 
shift_r8(self, a, 8 * word_a, n, 0); - } -#endif - if (UINT64_WORDS(8) == 0 || b < a + 8) { - for (i = b - 1; i >= a; i--) { - ucb[i] <<= n; /* shift byte (from highest to lowest) */ - if (i != a) /* add shifted next lower byte */ - ucb[i] |= ucb[i - 1] >> (8 - n); - } - } -#undef ucb - if (bebr && self->endian == ENDIAN_BIG) /* (re-) reverse bytes */ - bytereverse(self, a, b); -} - -/* copy n bits from other (starting at b) onto self (starting at a), - please find details about how this function works in copy_n.txt */ -static void -copy_n(bitarrayobject *self, Py_ssize_t a, - bitarrayobject *other, Py_ssize_t b, Py_ssize_t n) -{ - assert(0 <= n && n <= self->nbits && n <= other->nbits); - assert(0 <= a && a <= self->nbits - n); - assert(0 <= b && b <= other->nbits - n); - assert(self->readonly == 0); - if (n == 0 || (self == other && a == b)) - return; - - if (a % 8 == 0 && b % 8 == 0 && n >= 8) { /***** aligned case *****/ - const size_t m = n / 8; /* bytes copied using memmove() */ - - if (a > b) - copy_n(self, a + BITS(m), other, b + BITS(m), n % 8); - - memmove(self->ob_item + a / 8, other->ob_item + b / 8, m); - if (self->endian != other->endian) - bytereverse(self, a / 8, a / 8 + m); - - if (a <= b) - copy_n(self, a + BITS(m), other, b + BITS(m), n % 8); - } - else if (n < 24) { /***** small n case *****/ - Py_ssize_t i; - - if (a <= b) { /* loop forward (delete) */ - for (i = 0; i < n; i++) - setbit(self, i + a, getbit(other, i + b)); - } - else { /* loop backwards (insert) */ - for (i = n - 1; i >= 0; i--) - setbit(self, i + a, getbit(other, i + b)); - } - } - else { /***** general case *****/ - const Py_ssize_t p1 = a / 8; - const Py_ssize_t p2 = (a + n - 1) / 8; - const Py_ssize_t p3 = b / 8; - int sa = a % 8; - int sb = 8 - b % 8; - char t1, t2, t3; - Py_ssize_t i; - - assert(n >= 8); - assert(b + sb == 8 * (p3 + 1)); /* useful equations */ - assert(a - sa == 8 * p1); - assert(a + n > 8 * p2); - - assert_byte_in_range(self, p1); - 
assert_byte_in_range(self, p2); - assert_byte_in_range(other, p3); - t1 = self->ob_item[p1]; /* temporary bytes for later use */ - t2 = self->ob_item[p2]; - t3 = other->ob_item[p3]; - - if (sa + sb >= 8) - sb -= 8; - copy_n(self, 8 * p1, other, b + sb, n - sb); /* aligned copy */ - shift_r8(self, p1, p2 + 1, sa + sb, 1); /* right shift */ - - for (i = 8 * p1; i < a; i++) /* restore bits at p1 */ - setbit(self, i, t1 & BITMASK(self->endian, i)); - - if (sa + sb != 0) { /* if shifted, restore bits at p2 */ - for (i = a + n; i < 8 * p2 + 8 && i < self->nbits; i++) - setbit(self, i, t2 & BITMASK(self->endian, i)); - } - for (i = 0; i < sb; i++) /* copy first bits missed by copy_n() */ - setbit(self, i + a, t3 & BITMASK(other->endian, i + b)); - } -} - -/* starting at start, delete n bits from self */ -static int -delete_n(bitarrayobject *self, Py_ssize_t start, Py_ssize_t n) -{ - const Py_ssize_t nbits = self->nbits; - - assert(0 <= start && start <= nbits); - assert(0 <= n && n <= nbits - start); - /* start == nbits implies n == 0 */ - assert(start != nbits || n == 0); - - copy_n(self, start, self, start + n, nbits - start - n); - return resize(self, nbits - n); -} - -/* starting at start, insert n (uninitialized) bits into self */ -static int -insert_n(bitarrayobject *self, Py_ssize_t start, Py_ssize_t n) -{ - const Py_ssize_t nbits = self->nbits; - - assert(0 <= start && start <= nbits); - assert(n >= 0); - - if (resize(self, nbits + n) < 0) - return -1; - copy_n(self, start + n, self, start, nbits - start); - return 0; -} - -static void -invert(bitarrayobject *self) -{ - const Py_ssize_t nbytes = Py_SIZE(self); - const Py_ssize_t nwords = UINT64_WORDS(nbytes); - Py_ssize_t i; - - assert_nbits(self); - assert(self->readonly == 0); - for (i = 0; i < nwords; i++) - UINT64_BUFFER(self)[i] = ~UINT64_BUFFER(self)[i]; - for (i = nwords << 3; i < nbytes; i++) - self->ob_item[i] = ~self->ob_item[i]; -} - -/* repeat self m times (negative n is treated as 0) */ -static int 
-repeat(bitarrayobject *self, Py_ssize_t m) -{ - Py_ssize_t q, k = self->nbits; - - assert(self->readonly == 0); - if (k == 0 || m == 1) /* nothing to do */ - return 0; - - if (m <= 0) /* clear */ - return resize(self, 0); - - assert(m > 1 && k > 0); - if (k >= PY_SSIZE_T_MAX / m) { - PyErr_Format(PyExc_OverflowError, - "cannot repeat bitarray (of size %zd) %zd times", - k, m); - return -1; - } - /* k = self->nbits, the number of bits which have been copied */ - q = k * m; /* number of resulting bits */ - if (resize(self, q) < 0) - return -1; - - while (k <= q / 2) { /* double copies */ - copy_n(self, k, self, 0, k); - k *= 2; - } - assert(q / 2 < k && k <= q); - - if (k < q) /* copy remaining bits */ - copy_n(self, k, self, 0, self->nbits - k); - return 0; -} - -/* set bits in range(a, b) in self to vi */ -static void -setrange(bitarrayobject *self, Py_ssize_t a, Py_ssize_t b, int vi) -{ - Py_ssize_t i; - - assert(0 <= a && a <= self->nbits); - assert(0 <= b && b <= self->nbits); - assert(a <= b); - assert(self->readonly == 0); - - if (b >= a + 8) { - const Py_ssize_t byte_a = BYTES(a); - const Py_ssize_t byte_b = b / 8; - - assert(a + 8 > BITS(byte_a) && BITS(byte_b) + 8 > b); - - setrange(self, a, BITS(byte_a), vi); - memset(self->ob_item + byte_a, vi ? 
0xff : 0x00, - (size_t) (byte_b - byte_a)); - setrange(self, BITS(byte_b), b, vi); - } - else { - for (i = a; i < b; i++) - setbit(self, i, vi); - } -} - -/* return number of bits with value vi in range(a, b) */ -static Py_ssize_t -count(bitarrayobject *self, int vi, Py_ssize_t a, Py_ssize_t b) -{ - Py_ssize_t res = 0, i; - - assert(0 <= a && a <= self->nbits); - assert(0 <= b && b <= self->nbits); - if (a >= b) - return 0; - - if (b >= a + 8) { - const Py_ssize_t byte_a = BYTES(a); - const Py_ssize_t byte_b = b / 8; - - assert(a + 8 > BITS(byte_a) && BITS(byte_b) + 8 > b); - - res += count(self, 1, a, BITS(byte_a)); - for (i = byte_a; i < byte_b; i++) - res += bitcount_lookup[(unsigned char) self->ob_item[i]]; - res += count(self, 1, BITS(byte_b), b); - } - else { - for (i = a; i < b; i++) - res += getbit(self, i); - } - return vi ? res : b - a - res; -} - -/* return index of first occurrence of vi in self[a:b], -1 when not found */ -static Py_ssize_t -find_bit(bitarrayobject *self, int vi, Py_ssize_t a, Py_ssize_t b) -{ - const Py_ssize_t n = b - a; - Py_ssize_t res, i; - - assert(0 <= a && a <= self->nbits); - assert(0 <= b && b <= self->nbits); - assert(0 <= vi && vi <= 1); - if (n <= 0) - return -1; - -#ifdef PY_UINT64_T - /* When the search range is greater than 64 bits, we skip uint64 words. - Note that we cannot check for n >= 64 here as the function could then - go into an infinite recursive loop when a word is found. */ - if (n > 64) { - const Py_ssize_t word_a = (a + 63) / 64; - const Py_ssize_t word_b = b / 64; - const PY_UINT64_T w = vi ? 0 : ~0; - - if ((res = find_bit(self, vi, a, 64 * word_a)) >= 0) - return res; - - for (i = word_a; i < word_b; i++) { /* skip uint64 words */ - assert_byte_in_range(self, 8 * i + 7); - if (w ^ UINT64_BUFFER(self)[i]) - return find_bit(self, vi, 64 * i, 64 * i + 64); - } - return find_bit(self, vi, 64 * word_b, b); - } -#endif - /* For the same reason as above, we cannot check for n >= 8 here. 
*/ - if (n > 8) { - const Py_ssize_t byte_a = BYTES(a); - const Py_ssize_t byte_b = b / 8; - const char c = vi ? 0 : ~0; - - assert(UINT64_WORDS(8) == 0 || n <= 64); - if ((res = find_bit(self, vi, a, BITS(byte_a))) >= 0) - return res; - - for (i = byte_a; i < byte_b; i++) { /* skip bytes */ - assert_byte_in_range(self, i); - if (c ^ self->ob_item[i]) - return find_bit(self, vi, BITS(i), BITS(i) + 8); - } - return find_bit(self, vi, BITS(byte_b), b); - } - assert(n <= 8); - for (i = a; i < b; i++) { - if (getbit(self, i) == vi) - return i; - } - return -1; -} - -/* Return first occurrence of bitarray xa (in self), such that xa is contained - within self[start:stop], or -1 when xa is not found */ -static Py_ssize_t -find(bitarrayobject *self, bitarrayobject *xa, - Py_ssize_t start, Py_ssize_t stop) -{ - Py_ssize_t i; - - assert(0 <= start && start <= self->nbits); - assert(0 <= stop && stop <= self->nbits); - - if (xa->nbits == 1) /* faster for sparse bitarrays */ - return find_bit(self, getbit(xa, 0), start, stop); - - while (start <= stop - xa->nbits) { - for (i = 0; i < xa->nbits; i++) - if (getbit(self, start + i) != getbit(xa, i)) - goto next; - - return start; - next: - start++; - } - return -1; -} - -/* place self->nbits characters ('0', '1' corresponding to self) into str */ -static void -setstr01(bitarrayobject *self, char *str) -{ - Py_ssize_t i; - - for (i = 0; i < self->nbits; i++) - str[i] = getbit(self, i) ? '1' : '0'; -} - -/* set item i in self to given value */ -static int -set_item(bitarrayobject *self, Py_ssize_t i, PyObject *value) -{ - int vi; - - assert(0 <= i && i < self->nbits); - assert(self->readonly == 0); - if ((vi = pybit_as_int(value)) < 0) - return -1; - setbit(self, i, vi); - return 0; -} - -static int -extend_bitarray(bitarrayobject *self, bitarrayobject *other) -{ - /* We have to store the sizes before we resize, and since - other may be self, we also need to store other->nbits. 
*/ - const Py_ssize_t self_nbits = self->nbits; - const Py_ssize_t other_nbits = other->nbits; - - if (resize(self, self_nbits + other_nbits) < 0) - return -1; - - copy_n(self, self_nbits, other, 0, other_nbits); - return 0; -} - -static int -extend_iter(bitarrayobject *self, PyObject *iter) -{ - const Py_ssize_t original_nbits = self->nbits; - PyObject *item; - - assert(PyIter_Check(iter)); - while ((item = PyIter_Next(iter))) { - if (resize(self, self->nbits + 1) < 0) - goto error; - if (set_item(self, self->nbits - 1, item) < 0) - goto error; - Py_DECREF(item); - } - if (PyErr_Occurred()) - return -1; - - return 0; - error: - Py_DECREF(item); - resize(self, original_nbits); - return -1; -} - -static int -extend_sequence(bitarrayobject *self, PyObject *sequence) -{ - const Py_ssize_t original_nbits = self->nbits; - PyObject *item; - Py_ssize_t n, i; - - assert(PySequence_Check(sequence)); - n = PySequence_Size(sequence); - - if (resize(self, self->nbits + n) < 0) - return -1; - - for (i = 0; i < n; i++) { - item = PySequence_GetItem(sequence, i); - if (item == NULL || set_item(self, self->nbits - n + i, item) < 0) { - Py_XDECREF(item); - resize(self, original_nbits); - return -1; - } - Py_DECREF(item); - } - return 0; -} - -static int -extend_bytes01(bitarrayobject *self, PyObject *bytes) -{ - const Py_ssize_t original_nbits = self->nbits; - unsigned char c; - char *data; - int vi = 0; /* to avoid uninitialized warning for some compilers */ - - assert(PyBytes_Check(bytes)); - data = PyBytes_AS_STRING(bytes); - - while ((c = *data++)) { - switch (c) { - case '0': vi = 0; break; - case '1': vi = 1; break; - case '_': - case ' ': - case '\n': - case '\r': - case '\t': - case '\v': - continue; - default: - PyErr_Format(PyExc_ValueError, "expected '0' or '1' " - "(or whitespace, or underscore), got '%c' (0x%02x)", - c, c); - resize(self, original_nbits); /* no bits added on error */ - return -1; - } - if (resize(self, self->nbits + 1) < 0) - return -1; - setbit(self, 
self->nbits - 1, vi); - } - return 0; -} - -static int -extend_unicode01(bitarrayobject *self, PyObject *unicode) -{ - PyObject *bytes; - int res; - - assert(PyUnicode_Check(unicode)); - bytes = PyUnicode_AsASCIIString(unicode); - if (bytes == NULL) - return -1; - - assert(PyBytes_Check(bytes)); - res = extend_bytes01(self, bytes); - Py_DECREF(bytes); /* drop bytes */ - return res; -} - -static int -extend_dispatch(bitarrayobject *self, PyObject *obj) -{ - PyObject *iter; - - /* dispatch on type */ - if (bitarray_Check(obj)) /* bitarray */ - return extend_bitarray(self, (bitarrayobject *) obj); - - if (PyBytes_Check(obj)) { /* bytes 01 */ -#ifdef IS_PY3K - PyErr_SetString(PyExc_TypeError, - "cannot extend bitarray with 'bytes', " - "use .pack() or .frombytes() instead"); - return -1; -#else - return extend_bytes01(self, obj); -#endif - } - - if (PyUnicode_Check(obj)) /* unicode 01 */ - return extend_unicode01(self, obj); - - if (PySequence_Check(obj)) /* sequence */ - return extend_sequence(self, obj); - - if (PyIter_Check(obj)) /* iter */ - return extend_iter(self, obj); - - /* finally, try to get the iterator of the object */ - iter = PyObject_GetIter(obj); - if (iter) { - int res; - - res = extend_iter(self, iter); - Py_DECREF(iter); - return res; - } - - PyErr_Format(PyExc_TypeError, - "'%s' object is not iterable", Py_TYPE(obj)->tp_name); - return -1; -} - -/************************************************************************** - Implementation of bitarray methods - **************************************************************************/ - -/* - All methods which modify the buffer need to raise an exception when the - buffer is read-only. This is necessary because the buffer may be imported - from another object which has a read-only buffer. - - We decided to do this check at the top level here, by adding the - RAISE_IF_READONLY macro to all methods which modify the buffer. - We could have done it at the low level (in setbit(), etc. 
also), but - because most of these low level functions have no return value. - - The situation is different from how resize() raises an exception when - called on an imported buffer. There, it is easy to raise the exception - in resize() itself, as there only one function which resizes the buffer, - and this function (resize()) needs to report failures anyway. -*/ - -/* raise when buffer is readonly */ -#define RAISE_IF_READONLY(self, ret_value) \ - if (((bitarrayobject *) self)->readonly) { \ - PyErr_SetString(PyExc_TypeError, "cannot modify read-only memory"); \ - return ret_value; \ - } - -static PyObject * -bitarray_all(bitarrayobject *self) -{ - return PyBool_FromLong(find_bit(self, 0, 0, self->nbits) == -1); -} - -PyDoc_STRVAR(all_doc, -"all() -> bool\n\ -\n\ -Return True when all bits in the array are True.\n\ -Note that `a.all()` is faster than `all(a)`."); - - -static PyObject * -bitarray_any(bitarrayobject *self) -{ - return PyBool_FromLong(find_bit(self, 1, 0, self->nbits) >= 0); -} - -PyDoc_STRVAR(any_doc, -"any() -> bool\n\ -\n\ -Return True when any bit in the array is True.\n\ -Note that `a.any()` is faster than `any(a)`."); - - -static PyObject * -bitarray_append(bitarrayobject *self, PyObject *value) -{ - int vi; - - RAISE_IF_READONLY(self, NULL); - if ((vi = pybit_as_int(value)) < 0) - return NULL; - if (resize(self, self->nbits + 1) < 0) - return NULL; - setbit(self, self->nbits - 1, vi); - Py_RETURN_NONE; -} - -PyDoc_STRVAR(append_doc, -"append(item, /)\n\ -\n\ -Append `item` to the end of the bitarray."); - - -static PyObject * -bitarray_bytereverse(bitarrayobject *self, PyObject *args) -{ - const Py_ssize_t nbytes = Py_SIZE(self); - Py_ssize_t start = 0, stop = nbytes; - - RAISE_IF_READONLY(self, NULL); - if (!PyArg_ParseTuple(args, "|nn:bytereverse", &start, &stop)) - return NULL; - - if (start < 0 || start > nbytes || stop < 0 || stop > nbytes) { - PyErr_SetString(PyExc_IndexError, "byte index out of range"); - return NULL; - } - 
setunused(self); - bytereverse(self, start, stop); - Py_RETURN_NONE; -} - -PyDoc_STRVAR(bytereverse_doc, -"bytereverse(start=0, stop=, /)\n\ -\n\ -Reverse the bit order for the bytes in range(start, stop) in-place.\n\ -The start and stop indices are given in terms of bytes (not bits).\n\ -By default, all bytes in the buffer are reversed.\n\ -Note: This method only changes the buffer; it does not change the\n\ -endianness of the bitarray object."); - - -static PyObject * -bitarray_buffer_info(bitarrayobject *self) -{ - PyObject *res, *ptr; - Py_ssize_t size = Py_SIZE(self); - - ptr = PyLong_FromVoidPtr(self->ob_item); - if (ptr == NULL) - return NULL; - - res = Py_BuildValue("Onsnniii", - ptr, - size, - ENDIAN_STR(self->endian), - BITS(size) - self->nbits, - self->allocated, - self->readonly, - self->buffer ? 1 : 0, - self->ob_exports); - Py_DECREF(ptr); - return res; -} - -PyDoc_STRVAR(buffer_info_doc, -"buffer_info() -> tuple\n\ -\n\ -Return a tuple containing:\n\ -\n\ -0. memory address of buffer\n\ -1. buffer size (in bytes)\n\ -2. bit endianness as a string\n\ -3. number of unused padding bits\n\ -4. allocated memory for the buffer (in bytes)\n\ -5. memory is read-only\n\ -6. buffer is imported\n\ -7. 
number of buffer exports"); - - -static PyObject * -bitarray_clear(bitarrayobject *self) -{ - RAISE_IF_READONLY(self, NULL); - if (resize(self, 0) < 0) - return NULL; - Py_RETURN_NONE; -} - -PyDoc_STRVAR(clear_doc, -"clear()\n\ -\n\ -Remove all items from the bitarray."); - - -static PyObject * -bitarray_copy(bitarrayobject *self) -{ - PyObject *res; - - res = newbitarrayobject(Py_TYPE(self), self->nbits, self->endian); - if (res == NULL) - return NULL; - - memcpy(((bitarrayobject *) res)->ob_item, self->ob_item, - (size_t) Py_SIZE(self)); - return res; -} - -PyDoc_STRVAR(copy_doc, -"copy() -> bitarray\n\ -\n\ -Return a copy of the bitarray."); - - -static PyObject * -bitarray_count(bitarrayobject *self, PyObject *args) -{ - PyObject *value = Py_True; - Py_ssize_t start = 0, stop = self->nbits; - int vi; - - if (!PyArg_ParseTuple(args, "|Onn:count", &value, &start, &stop)) - return NULL; - if ((vi = pybit_as_int(value)) < 0) - return NULL; - - normalize_index(self->nbits, &start); - normalize_index(self->nbits, &stop); - - return PyLong_FromSsize_t(count(self, vi, start, stop)); -} - -PyDoc_STRVAR(count_doc, -"count(value=1, start=0, stop=, /) -> int\n\ -\n\ -Count the number of occurrences of `value` in the bitarray."); - - -static PyObject * -bitarray_endian(bitarrayobject *self) -{ - return Py_BuildValue("s", ENDIAN_STR(self->endian)); -} - -PyDoc_STRVAR(endian_doc, -"endian() -> str\n\ -\n\ -Return the bit endianness of the bitarray as a string (`little` or `big`)."); - - -static PyObject * -bitarray_extend(bitarrayobject *self, PyObject *obj) -{ - RAISE_IF_READONLY(self, NULL); - if (extend_dispatch(self, obj) < 0) - return NULL; - Py_RETURN_NONE; -} - -PyDoc_STRVAR(extend_doc, -"extend(iterable, /)\n\ -\n\ -Append all the items from `iterable` to the end of the bitarray.\n\ -If the iterable is a string, each `0` and `1` are appended as\n\ -bits (ignoring whitespace and underscore)."); - - -static PyObject * -bitarray_fill(bitarrayobject *self) -{ - long p; - 
- RAISE_IF_READONLY(self, NULL); - p = setunused(self); - if (resize(self, self->nbits + p) < 0) - return NULL; - - assert(self->nbits % 8 == 0); - assert_nbits(self); - return PyLong_FromLong(p); -} - -PyDoc_STRVAR(fill_doc, -"fill() -> int\n\ -\n\ -Add zeros to the end of the bitarray, such that the length of the bitarray\n\ -will be a multiple of 8, and return the number of bits added (0..7)."); - - -static PyObject * -bitarray_find(bitarrayobject *self, PyObject *args) -{ - Py_ssize_t start = 0, stop = self->nbits; - PyObject *x; - - if (!PyArg_ParseTuple(args, "O|nn", &x, &start, &stop)) - return NULL; - - normalize_index(self->nbits, &start); - normalize_index(self->nbits, &stop); - - if (PyIndex_Check(x)) { - int vi; - - if ((vi = pybit_as_int(x)) < 0) - return NULL; - return PyLong_FromSsize_t(find_bit(self, vi, start, stop)); - } - - if (bitarray_Check(x)) - return PyLong_FromSsize_t( - find(self, (bitarrayobject *) x, start, stop)); - - PyErr_SetString(PyExc_TypeError, "bitarray or int expected"); - return NULL; -} - -PyDoc_STRVAR(find_doc, -"find(sub_bitarray, start=0, stop=, /) -> int\n\ -\n\ -Return the lowest index where sub_bitarray is found, such that sub_bitarray\n\ -is contained within `[start:stop]`.\n\ -Return -1 when sub_bitarray is not found."); - - -static PyObject * -bitarray_index(bitarrayobject *self, PyObject *args) -{ - PyObject *ret; - - if ((ret = bitarray_find(self, args)) == NULL) - return NULL; - - assert(PyLong_Check(ret)); - if (PyLong_AsSsize_t(ret) < 0) { - Py_DECREF(ret); -#ifdef IS_PY3K - return PyErr_Format(PyExc_ValueError, "%A not in bitarray", - PyTuple_GET_ITEM(args, 0)); -#else - PyErr_SetString(PyExc_ValueError, "item not in bitarray"); - return NULL; -#endif - } - return ret; -} - -PyDoc_STRVAR(index_doc, -"index(sub_bitarray, start=0, stop=, /) -> int\n\ -\n\ -Return the lowest index where sub_bitarray is found, such that sub_bitarray\n\ -is contained within `[start:stop]`.\n\ -Raises `ValueError` when the 
sub_bitarray is not present."); - - -static PyObject * -bitarray_insert(bitarrayobject *self, PyObject *args) -{ - Py_ssize_t i; - PyObject *value; - int vi; - - RAISE_IF_READONLY(self, NULL); - if (!PyArg_ParseTuple(args, "nO:insert", &i, &value)) - return NULL; - - normalize_index(self->nbits, &i); - - if ((vi = pybit_as_int(value)) < 0) - return NULL; - if (insert_n(self, i, 1) < 0) - return NULL; - setbit(self, i, vi); - Py_RETURN_NONE; -} - -PyDoc_STRVAR(insert_doc, -"insert(index, value, /)\n\ -\n\ -Insert `value` into the bitarray before `index`."); - - -static PyObject * -bitarray_invert(bitarrayobject *self, PyObject *args) -{ - Py_ssize_t i = PY_SSIZE_T_MAX; - - RAISE_IF_READONLY(self, NULL); - if (!PyArg_ParseTuple(args, "|n:invert", &i)) - return NULL; - - if (i == PY_SSIZE_T_MAX) { /* default - invert all bits */ - invert(self); - Py_RETURN_NONE; - } - - if (i < 0) - i += self->nbits; - - if (i < 0 || i >= self->nbits) { - PyErr_SetString(PyExc_IndexError, "index out of range"); - return NULL; - } - self->ob_item[i / 8] ^= BITMASK(self->endian, i % 8); - Py_RETURN_NONE; -} - -PyDoc_STRVAR(invert_doc, -"invert(index=, /)\n\ -\n\ -Invert all bits in the array (in-place).\n\ -When the optional `index` is given, only invert the single bit at index."); - - -static PyObject * -bitarray_reduce(bitarrayobject *self) -{ - const Py_ssize_t nbytes = Py_SIZE(self); - PyObject *dict, *repr = NULL, *result = NULL; - char *str; - - dict = PyObject_GetAttrString((PyObject *) self, "__dict__"); - if (dict == NULL) { - PyErr_Clear(); - dict = Py_None; - Py_INCREF(dict); - } - - repr = PyBytes_FromStringAndSize(NULL, nbytes + 1); - if (repr == NULL) { - PyErr_NoMemory(); - goto error; - } - str = PyBytes_AsString(repr); - /* first byte contains the number of unused bits */ - *str = (char) setunused(self); - /* remaining bytes contain buffer */ - memcpy(str + 1, self->ob_item, (size_t) nbytes); - - result = Py_BuildValue("O(Os)O", Py_TYPE(self), - repr, 
ENDIAN_STR(self->endian), dict); - error: - Py_DECREF(dict); - Py_XDECREF(repr); - return result; -} - -PyDoc_STRVAR(reduce_doc, "state information for pickling"); - - -static PyObject * -bitarray_repr(bitarrayobject *self) -{ - PyObject *result; - size_t strsize; - char *str; - - if (self->nbits == 0) - return Py_BuildValue("s", "bitarray()"); - - strsize = self->nbits + 12; /* 12 is the length of "bitarray('')" */ - if (strsize > PY_SSIZE_T_MAX) { - PyErr_SetString(PyExc_OverflowError, - "bitarray too large to represent"); - return NULL; - } - - if ((str = (char *) PyMem_Malloc(strsize)) == NULL) - return PyErr_NoMemory(); - - strcpy(str, "bitarray('"); /* has length 10 */ - setstr01(self, str + 10); - str[strsize - 2] = '\''; - str[strsize - 1] = ')'; /* no terminating '\0' */ - - result = Py_BuildValue("s#", str, (Py_ssize_t) strsize); - PyMem_Free((void *) str); - return result; -} - - -static PyObject * -bitarray_reverse(bitarrayobject *self) -{ - Py_ssize_t i, j; - - RAISE_IF_READONLY(self, NULL); - for (i = 0, j = self->nbits - 1; i < j; i++, j--) { - int t = getbit(self, i); - setbit(self, i, getbit(self, j)); - setbit(self, j, t); - } - Py_RETURN_NONE; -} - -PyDoc_STRVAR(reverse_doc, -"reverse()\n\ -\n\ -Reverse all bits in the array (in-place)."); - - -static PyObject * -bitarray_search(bitarrayobject *self, PyObject *args) -{ - PyObject *list = NULL, *item = NULL, *t = NULL, *x; - Py_ssize_t limit = PY_SSIZE_T_MAX, p = 0; - - if (!PyArg_ParseTuple(args, "O|n:search", &x, &limit)) - return NULL; - -#define tt ((bitarrayobject *) t) - if (PyIndex_Check(x)) { - int vi; - - if ((vi = pybit_as_int(x)) < 0) - return NULL; - if ((t = newbitarrayobject(Py_TYPE(self), 1, self->endian)) == NULL) - return NULL; - setbit(tt, 0, vi); - } - else if (bitarray_Check(x)) { - t = x; - Py_INCREF(t); - } - else { - PyErr_SetString(PyExc_TypeError, "bitarray or int expected"); - return NULL; - } - - if (tt->nbits == 0) { - PyErr_SetString(PyExc_ValueError, "can't search for 
empty bitarray"); - goto error; - } - if ((list = PyList_New(0)) == NULL) - goto error; - - while ((p = find(self, tt, p, self->nbits)) >= 0) { - if (PyList_Size(list) >= limit) - break; - item = PyLong_FromSsize_t(p++); - if (item == NULL || PyList_Append(list, item) < 0) - goto error; - Py_DECREF(item); - } -#undef tt - Py_DECREF(t); - return list; - - error: - Py_XDECREF(item); - Py_XDECREF(list); - Py_DECREF(t); - return NULL; -} - -PyDoc_STRVAR(search_doc, -"search(sub_bitarray, limit=, /) -> list\n\ -\n\ -Searches for the given sub_bitarray in self, and return the list of start\n\ -positions.\n\ -The optional argument limits the number of search results to the integer\n\ -specified. By default, all search results are returned."); - - -static PyObject * -bitarray_setall(bitarrayobject *self, PyObject *value) -{ - int vi; - - RAISE_IF_READONLY(self, NULL); - if ((vi = pybit_as_int(value)) < 0) - return NULL; - memset(self->ob_item, vi ? 0xff : 0x00, (size_t) Py_SIZE(self)); - Py_RETURN_NONE; -} - -PyDoc_STRVAR(setall_doc, -"setall(value, /)\n\ -\n\ -Set all elements in the bitarray to `value`.\n\ -Note that `a.setall(value)` is equivalent to `a[:] = value`."); - - -static PyObject * -bitarray_sort(bitarrayobject *self, PyObject *args, PyObject *kwds) -{ - static char *kwlist[] = {"reverse", NULL}; - Py_ssize_t cnt; - int reverse = 0; - - RAISE_IF_READONLY(self, NULL); - if (!PyArg_ParseTupleAndKeywords(args, kwds, "|i:sort", kwlist, &reverse)) - return NULL; - - cnt = count(self, reverse, 0, self->nbits); - setrange(self, 0, cnt, reverse); - setrange(self, cnt, self->nbits, !reverse); - Py_RETURN_NONE; -} - -PyDoc_STRVAR(sort_doc, -"sort(reverse=False)\n\ -\n\ -Sort the bits in the array (in-place)."); - - -static PyObject * -bitarray_tolist(bitarrayobject *self) -{ - PyObject *list, *item; - Py_ssize_t i; - - list = PyList_New(self->nbits); - if (list == NULL) - return NULL; - - for (i = 0; i < self->nbits; i++) { - item = PyLong_FromLong(getbit(self, i)); - 
if (item == NULL) - return NULL; - if (PyList_SetItem(list, i, item) < 0) - return NULL; - } - return list; -} - -PyDoc_STRVAR(tolist_doc, -"tolist() -> list\n\ -\n\ -Return a list with the items (0 or 1) in the bitarray.\n\ -Note that the list object being created will require 32 or 64 times more\n\ -memory (depending on the machine architecture) than the bitarray object,\n\ -which may cause a memory error if the bitarray is very large."); - - -static PyObject * -bitarray_frombytes(bitarrayobject *self, PyObject *bytes) -{ - Py_ssize_t nbytes; /* number of bytes we add to self */ - Py_ssize_t t, p; - - RAISE_IF_READONLY(self, NULL); - if (!PyBytes_Check(bytes)) - return PyErr_Format(PyExc_TypeError, "bytes expected, not %s", - Py_TYPE(bytes)->tp_name); - - nbytes = PyBytes_GET_SIZE(bytes); - if (nbytes == 0) - Py_RETURN_NONE; - - /* Before we extend the raw bytes with the new data, we need to store - the current size and padding, as the bitarray size might not be - a multiple of 8. After extending, we remove the padding bits again. - */ - t = self->nbits; /* number of bits before extending */ - p = BITS(BYTES(t)) - t; /* number of pad bits */ - assert(0 <= p && p < 8); - if (resize(self, t + p) < 0) - return NULL; - - assert(self->nbits % 8 == 0); - assert_nbits(self); - if (resize(self, self->nbits + BITS(nbytes)) < 0) - return NULL; - - memcpy(self->ob_item + (Py_SIZE(self) - nbytes), - PyBytes_AS_STRING(bytes), (size_t) nbytes); - - if (delete_n(self, t, p) < 0) /* remove padding bits */ - return NULL; - assert(self->nbits == t + BITS(nbytes)); - Py_RETURN_NONE; -} - -PyDoc_STRVAR(frombytes_doc, -"frombytes(bytes, /)\n\ -\n\ -Extend bitarray with raw bytes. 
That is, each append byte will add eight\n\ -bits to the bitarray."); - - -static PyObject * -bitarray_tobytes(bitarrayobject *self) -{ - setunused(self); - return PyBytes_FromStringAndSize(self->ob_item, Py_SIZE(self)); -} - -PyDoc_STRVAR(tobytes_doc, -"tobytes() -> bytes\n\ -\n\ -Return the byte representation of the bitarray."); - - -static PyObject * -bitarray_fromfile(bitarrayobject *self, PyObject *args) -{ - PyObject *bytes, *f, *res; - Py_ssize_t nblock, nread = 0, nbytes = -1; - int not_enough_bytes; - - RAISE_IF_READONLY(self, NULL); - if (!PyArg_ParseTuple(args, "O|n:fromfile", &f, &nbytes)) - return NULL; - - if (nbytes < 0) /* read till EOF */ - nbytes = PY_SSIZE_T_MAX; - - while (nread < nbytes) { - nblock = Py_MIN(nbytes - nread, BLOCKSIZE); - bytes = PyObject_CallMethod(f, "read", "n", nblock); - if (bytes == NULL) - return NULL; - if (!PyBytes_Check(bytes)) { - Py_DECREF(bytes); - PyErr_SetString(PyExc_TypeError, "read() didn't return bytes"); - return NULL; - } - not_enough_bytes = (PyBytes_GET_SIZE(bytes) < nblock); - nread += PyBytes_GET_SIZE(bytes); - assert(nread >= 0 && nread <= nbytes); - - res = bitarray_frombytes(self, bytes); - Py_DECREF(bytes); - if (res == NULL) - return NULL; - Py_DECREF(res); /* drop frombytes result */ - - if (not_enough_bytes) { - if (nbytes == PY_SSIZE_T_MAX) /* read till EOF */ - break; - PyErr_SetString(PyExc_EOFError, "not enough bytes to read"); - return NULL; - } - } - Py_RETURN_NONE; -} - -PyDoc_STRVAR(fromfile_doc, -"fromfile(f, n=-1, /)\n\ -\n\ -Extend bitarray with up to n bytes read from the file object f.\n\ -When n is omitted or negative, reads all data until EOF.\n\ -When n is provided and positive but exceeds the data available,\n\ -`EOFError` is raised (but the available data is still read and appended."); - - -static PyObject * -bitarray_tofile(bitarrayobject *self, PyObject *f) -{ - const Py_ssize_t nbytes = Py_SIZE(self); - Py_ssize_t size, offset; - PyObject *res; - - setunused(self); - for 
(offset = 0; offset < nbytes; offset += BLOCKSIZE) { - size = Py_MIN(nbytes - offset, BLOCKSIZE); - assert(size >= 0 && offset + size <= nbytes); - /* basically: f.write(memoryview(self)[offset:offset + size] */ - res = PyObject_CallMethod(f, "write", BYTES_SIZE_FMT, - self->ob_item + offset, size); - if (res == NULL) - return NULL; - Py_DECREF(res); /* drop write result */ - } - Py_RETURN_NONE; -} - -PyDoc_STRVAR(tofile_doc, -"tofile(f, /)\n\ -\n\ -Write the byte representation of the bitarray to the file object f."); - - -static PyObject * -bitarray_to01(bitarrayobject *self) -{ - PyObject *result; - char *str; - - if ((str = (char *) PyMem_Malloc((size_t) self->nbits)) == NULL) - return PyErr_NoMemory(); - setstr01(self, str); - result = Py_BuildValue("s#", str, self->nbits); - PyMem_Free((void *) str); - return result; -} - -PyDoc_STRVAR(to01_doc, -"to01() -> str\n\ -\n\ -Return a string containing '0's and '1's, representing the bits in the\n\ -bitarray."); - - -static PyObject * -bitarray_unpack(bitarrayobject *self, PyObject *args, PyObject *kwds) -{ - static char *kwlist[] = {"zero", "one", NULL}; - PyObject *res; - char zero = 0x00, one = 0x01, *str; - Py_ssize_t i; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "|cc:unpack", kwlist, - &zero, &one)) - return NULL; - - if ((res = PyBytes_FromStringAndSize(NULL, self->nbits)) == NULL) - return PyErr_NoMemory(); - assert(PyBytes_Check(res)); - - str = PyBytes_AsString(res); - for (i = 0; i < self->nbits; i++) - str[i] = getbit(self, i) ? 
one : zero; - return res; -} - -PyDoc_STRVAR(unpack_doc, -"unpack(zero=b'\\x00', one=b'\\x01') -> bytes\n\ -\n\ -Return bytes containing one character for each bit in the bitarray,\n\ -using the specified mapping."); - - -static PyObject * -bitarray_pack(bitarrayobject *self, PyObject *bytes) -{ - Py_ssize_t nbytes, i; - char *data; - - RAISE_IF_READONLY(self, NULL); - if (!PyBytes_Check(bytes)) - return PyErr_Format(PyExc_TypeError, "bytes expected, not %s", - Py_TYPE(bytes)->tp_name); - - nbytes = PyBytes_GET_SIZE(bytes); - - if (resize(self, self->nbits + nbytes) < 0) - return NULL; - - data = PyBytes_AS_STRING(bytes); - for (i = 0; i < nbytes; i++) - setbit(self, self->nbits - nbytes + i, data[i] ? 1 : 0); - - Py_RETURN_NONE; -} - -PyDoc_STRVAR(pack_doc, -"pack(bytes, /)\n\ -\n\ -Extend the bitarray from bytes, where each byte corresponds to a single\n\ -bit. The byte `b'\\x00'` maps to bit 0 and all other characters map to\n\ -bit 1.\n\ -This method, as well as the unpack method, are meant for efficient\n\ -transfer of data between bitarray objects to other python objects\n\ -(for example NumPy's ndarray object) which have a different memory view."); - - -static PyObject * -bitarray_pop(bitarrayobject *self, PyObject *args) -{ - Py_ssize_t i = -1; - long vi; - - RAISE_IF_READONLY(self, NULL); - if (!PyArg_ParseTuple(args, "|n:pop", &i)) - return NULL; - - if (self->nbits == 0) { - /* special case -- most common failure cause */ - PyErr_SetString(PyExc_IndexError, "pop from empty bitarray"); - return NULL; - } - if (i < 0) - i += self->nbits; - - if (i < 0 || i >= self->nbits) { - PyErr_SetString(PyExc_IndexError, "pop index out of range"); - return NULL; - } - vi = getbit(self, i); - if (delete_n(self, i, 1) < 0) - return NULL; - return PyLong_FromLong(vi); -} - -PyDoc_STRVAR(pop_doc, -"pop(index=-1, /) -> item\n\ -\n\ -Return the i-th (default last) element and delete it from the bitarray.\n\ -Raises `IndexError` if bitarray is empty or index is out of 
range."); - - -static PyObject * -bitarray_remove(bitarrayobject *self, PyObject *value) -{ - Py_ssize_t i; - int vi; - - RAISE_IF_READONLY(self, NULL); - if ((vi = pybit_as_int(value)) < 0) - return NULL; - - if ((i = find_bit(self, vi, 0, self->nbits)) < 0) - return PyErr_Format(PyExc_ValueError, "%d not in bitarray", vi); - - if (delete_n(self, i, 1) < 0) - return NULL; - Py_RETURN_NONE; -} - -PyDoc_STRVAR(remove_doc, -"remove(value, /)\n\ -\n\ -Remove the first occurrence of `value` in the bitarray.\n\ -Raises `ValueError` if item is not present."); - - -static PyObject * -bitarray_sizeof(bitarrayobject *self) -{ - Py_ssize_t res; - - res = sizeof(bitarrayobject) + self->allocated; - if (self->buffer) - res += sizeof(Py_buffer); - return PyLong_FromSsize_t(res); -} - -PyDoc_STRVAR(sizeof_doc, -"Return the size of the bitarray in memory, in bytes."); - - -static PyObject * -bitarray_freeze(bitarrayobject *self) -{ - self->readonly = 1; - Py_RETURN_NONE; -} - -/* ---------- functionality exposed in debug mode for testing ---------- */ - -#ifndef NDEBUG - -static PyObject * -bitarray_shift_r8(bitarrayobject *self, PyObject *args) -{ - Py_ssize_t a, b; - int n; - - if (!PyArg_ParseTuple(args, "nni", &a, &b, &n)) - return NULL; - - shift_r8(self, a, b, n, 1); - Py_RETURN_NONE; -} - -static PyObject * -bitarray_copy_n(bitarrayobject *self, PyObject *args) -{ - PyObject *other; - Py_ssize_t a, b, n; - - if (!PyArg_ParseTuple(args, "nOnn", &a, &other, &b, &n)) - return NULL; - if (!bitarray_Check(other)) { - PyErr_SetString(PyExc_TypeError, "bitarray expected"); - return NULL; - } - copy_n(self, a, (bitarrayobject *) other, b, n); - Py_RETURN_NONE; -} -#endif /* NDEBUG */ - -/* ----------------------- bitarray_as_sequence ------------------------ */ - -static Py_ssize_t -bitarray_len(bitarrayobject *self) -{ - return self->nbits; -} - -static PyObject * -bitarray_concat(bitarrayobject *self, PyObject *other) -{ - PyObject *res; - - if ((res = bitarray_copy(self)) == 
NULL) - return NULL; - - if (extend_dispatch((bitarrayobject *) res, other) < 0) { - Py_DECREF(res); - return NULL; - } - return res; -} - -static PyObject * -bitarray_repeat(bitarrayobject *self, Py_ssize_t n) -{ - PyObject *res; - - if ((res = bitarray_copy(self)) == NULL) - return NULL; - - if (repeat((bitarrayobject *) res, n) < 0) { - Py_DECREF(res); - return NULL; - } - return res; -} - -static PyObject * -bitarray_item(bitarrayobject *self, Py_ssize_t i) -{ - if (i < 0 || i >= self->nbits) { - PyErr_SetString(PyExc_IndexError, "bitarray index out of range"); - return NULL; - } - return PyLong_FromLong(getbit(self, i)); -} - -static int -bitarray_ass_item(bitarrayobject *self, Py_ssize_t i, PyObject *value) -{ - RAISE_IF_READONLY(self, -1); - if (i < 0 || i >= self->nbits) { - PyErr_SetString(PyExc_IndexError, - "bitarray assignment index out of range"); - return -1; - } - if (value == NULL) - return delete_n(self, i, 1); - else - return set_item(self, i, value); -} - -/* return 1 if value (which can be an int or bitarray) is in self, - 0 otherwise, and -1 on error */ -static int -bitarray_contains(bitarrayobject *self, PyObject *value) -{ - if (PyIndex_Check(value)) { - int vi; - - vi = pybit_as_int(value); - if (vi < 0) - return -1; - return find_bit(self, vi, 0, self->nbits) >= 0; - } - - if (bitarray_Check(value)) - return find(self, (bitarrayobject *) value, 0, self->nbits) >= 0; - - PyErr_Format(PyExc_TypeError, "bitarray or int expected, got %s", - Py_TYPE(value)->tp_name); - return -1; -} - -static PyObject * -bitarray_inplace_concat(bitarrayobject *self, PyObject *other) -{ - RAISE_IF_READONLY(self, NULL); - if (extend_dispatch(self, other) < 0) - return NULL; - Py_INCREF(self); - return (PyObject *) self; -} - -static PyObject * -bitarray_inplace_repeat(bitarrayobject *self, Py_ssize_t n) -{ - RAISE_IF_READONLY(self, NULL); - if (repeat(self, n) < 0) - return NULL; - Py_INCREF(self); - return (PyObject *) self; -} - -static PySequenceMethods 
bitarray_as_sequence = { - (lenfunc) bitarray_len, /* sq_length */ - (binaryfunc) bitarray_concat, /* sq_concat */ - (ssizeargfunc) bitarray_repeat, /* sq_repeat */ - (ssizeargfunc) bitarray_item, /* sq_item */ - 0, /* sq_slice */ - (ssizeobjargproc) bitarray_ass_item, /* sq_ass_item */ - 0, /* sq_ass_slice */ - (objobjproc) bitarray_contains, /* sq_contains */ - (binaryfunc) bitarray_inplace_concat, /* sq_inplace_concat */ - (ssizeargfunc) bitarray_inplace_repeat, /* sq_inplace_repeat */ -}; - -/* ----------------------- bitarray_as_mapping ------------------------- */ - -static PyObject * -bitarray_subscr(bitarrayobject *self, PyObject *item) -{ - if (PyIndex_Check(item)) { - Py_ssize_t i; - - i = PyNumber_AsSsize_t(item, PyExc_IndexError); - if (i == -1 && PyErr_Occurred()) - return NULL; - if (i < 0) - i += self->nbits; - return bitarray_item(self, i); - } - - if (PySlice_Check(item)) { - Py_ssize_t start, stop, step, slicelength; - PyObject *res; - - if (PySlice_GetIndicesEx(item, self->nbits, - &start, &stop, &step, &slicelength) < 0) { - return NULL; - } - res = newbitarrayobject(Py_TYPE(self), slicelength, self->endian); - if (res == NULL) - return NULL; - -#define rr ((bitarrayobject *) res) - if (step == 1) { - copy_n(rr, 0, self, start, slicelength); - } - else { - Py_ssize_t i, j; - - for (i = 0, j = start; i < slicelength; i++, j += step) - setbit(rr, i, getbit(self, j)); - } -#undef rr - return res; - } - - return PyErr_Format(PyExc_TypeError, - "bitarray indices must be integers or slices, not %s", - Py_TYPE(item)->tp_name); -} - -/* The following functions (setslice_bitarray, setslice_bool and delslice) - are called from bitarray_ass_subscr. Having this functionality inside - bitarray_ass_subscr would make the function incomprehensibly long. 
*/ - -/* set the elements in self, specified by slice, to bitarray */ -static int -setslice_bitarray(bitarrayobject *self, PyObject *slice, PyObject *array) -{ - Py_ssize_t start, stop, step, slicelength, increase, i, j; - int copy_self = 0, res = -1; - - assert(PySlice_Check(slice) && bitarray_Check(array)); - if (PySlice_GetIndicesEx(slice, self->nbits, - &start, &stop, &step, &slicelength) < 0) - return -1; - -#define aa ((bitarrayobject *) array) - /* number of bits by which self has to be increased (decreased) */ - increase = aa->nbits - slicelength; - - if (aa == self) { /* covers cases like a[2::] = a and a[::-1] = a */ - if ((array = bitarray_copy(self)) == NULL) - return -1; - copy_self = 1; - } - - if (step == 1) { - if (increase > 0) { /* increase self */ - if (insert_n(self, start + slicelength, increase) < 0) - goto error; - } - if (increase < 0) { /* decrease self */ - if (delete_n(self, start + aa->nbits, -increase) < 0) - goto error; - } - /* copy the new values into self */ - copy_n(self, start, aa, 0, aa->nbits); - } - else { /* step != 1 */ - if (increase != 0) { - PyErr_Format(PyExc_ValueError, - "attempt to assign sequence of size %zd " - "to extended slice of size %zd", - aa->nbits, slicelength); - goto error; - } - assert(increase == 0); - for (i = 0, j = start; i < slicelength; i++, j += step) - setbit(self, j, getbit(aa, i)); - } -#undef aa - - res = 0; - error: - if (copy_self) - Py_DECREF(array); - return res; -} - -/* like PySlice_GetIndicesEx(), but step index will always be positive */ -static int -slice_get_indices(PyObject *slice, Py_ssize_t length, - Py_ssize_t *start, Py_ssize_t *stop, Py_ssize_t *step, - Py_ssize_t *slicelength) -{ - assert(PySlice_Check(slice) && length >= 0); - if (PySlice_GetIndicesEx(slice, length, - start, stop, step, slicelength) < 0) - return -1; - - if (*slicelength == 0) - return 0; - - if (*step < 0) { - *stop = *start + 1; - *start = *stop + *step * (*slicelength - 1) - 1; - *step *= -1; - } - 
assert(*step > 0 && *start < *stop && *slicelength > 0); - assert(0 <= *start && *start < length); - assert(0 <= *stop && *stop <= length); - assert(*step != 1 || *start + *slicelength == *stop); - assert(*start + ((*slicelength - 1) * *step) < *stop); - return 0; -} - -/* set the elements in self, specified by slice, to value */ -static int -setslice_bool(bitarrayobject *self, PyObject *slice, PyObject *value) -{ - Py_ssize_t start, stop, step, slicelength, i; - int vi; - - assert(PySlice_Check(slice) && PyIndex_Check(value)); - if ((vi = pybit_as_int(value)) < 0) - return -1; - - if (slice_get_indices(slice, self->nbits, - &start, &stop, &step, &slicelength) < 0) - return -1; - if (slicelength == 0) - return 0; - - if (step == 1) { - setrange(self, start, stop, vi); - } - else { /* step > 1 */ - if (slicelength < 8) { - for (i = start; i < stop; i += step) - setbit(self, i, vi); - } - else { - char bitmask_table[8]; - - for (i = 0; i < 8; i++) /* set up bitmask table */ - bitmask_table[i] = BITMASK(self->endian, i); - - if (vi) { - for (i = start; i < stop; i += step) - self->ob_item[i >> 3] |= bitmask_table[i & 7]; - } - else { - for (i = start; i < stop; i += step) - self->ob_item[i >> 3] &= ~bitmask_table[i & 7]; - } - } - } - return 0; -} - -/* delete the elements in self, specified by slice */ -static int -delslice(bitarrayobject *self, PyObject *slice) -{ - Py_ssize_t start, stop, step, slicelength; - - assert(PySlice_Check(slice)); - if (slice_get_indices(slice, self->nbits, - &start, &stop, &step, &slicelength) < 0) - return -1; - if (slicelength == 0) - return 0; - - if (step == 1) { - return delete_n(self, start, slicelength); - } - else { /* step > 1 */ - Py_ssize_t i, j; - - /* set the items not to be removed */ - for (i = j = start; i < self->nbits; i++) { - if ((i - start) % step != 0 || i >= stop) - setbit(self, j++, getbit(self, i)); - } - return resize(self, self->nbits - slicelength); - } -} - -static int -bitarray_ass_subscr(bitarrayobject 
*self, PyObject* item, PyObject* value) -{ - RAISE_IF_READONLY(self, -1); - - if (PyIndex_Check(item)) { - Py_ssize_t i; - - i = PyNumber_AsSsize_t(item, PyExc_IndexError); - if (i == -1 && PyErr_Occurred()) - return -1; - if (i < 0) - i += self->nbits; - return bitarray_ass_item(self, i, value); - } - - if (PySlice_Check(item)) { - if (value == NULL) - return delslice(self, item); - - if (bitarray_Check(value)) - return setslice_bitarray(self, item, value); - - if (PyIndex_Check(value)) - return setslice_bool(self, item, value); - - PyErr_Format(PyExc_TypeError, - "bitarray or int expected for slice assignment, not %s", - Py_TYPE(value)->tp_name); - return -1; - } - - PyErr_Format(PyExc_TypeError, - "bitarray indices must be integers or slices, not %s", - Py_TYPE(item)->tp_name); - return -1; -} - -static PyMappingMethods bitarray_as_mapping = { - (lenfunc) bitarray_len, - (binaryfunc) bitarray_subscr, - (objobjargproc) bitarray_ass_subscr, -}; - -/* --------------------------- bitarray_as_number ---------------------- */ - -static PyObject * -bitarray_cpinvert(bitarrayobject *self) -{ - PyObject *result; - - if ((result = bitarray_copy(self)) == NULL) - return NULL; - - invert((bitarrayobject *) result); - return result; -} - -enum op_type { - OP_and, - OP_or, - OP_xor, -}; - -/* perform bitwise in-place operation */ -static void -bitwise(bitarrayobject *self, bitarrayobject *other, enum op_type oper) -{ - const Py_ssize_t nbytes = Py_SIZE(self); - const Py_ssize_t nwords = UINT64_WORDS(nbytes); - Py_ssize_t i; - - assert(self->nbits == other->nbits); - assert(self->endian == other->endian); - assert_nbits(self); - switch (oper) { - case OP_and: - for (i = 0; i < nwords; i++) - UINT64_BUFFER(self)[i] &= UINT64_BUFFER(other)[i]; - for (i = nwords << 3; i < nbytes; i++) - self->ob_item[i] &= other->ob_item[i]; - break; - - case OP_or: - for (i = 0; i < nwords; i++) - UINT64_BUFFER(self)[i] |= UINT64_BUFFER(other)[i]; - for (i = nwords << 3; i < nbytes; i++) - 
self->ob_item[i] |= other->ob_item[i]; - break; - - case OP_xor: - for (i = 0; i < nwords; i++) - UINT64_BUFFER(self)[i] ^= UINT64_BUFFER(other)[i]; - for (i = nwords << 3; i < nbytes; i++) - self->ob_item[i] ^= other->ob_item[i]; - break; - - default: /* cannot happen */ - Py_FatalError("unknown bitwise operation"); - } -} - -/* return 0 if both a and b are bitarray objects, -1 on error */ -static int -bitwise_check(PyObject *a, PyObject *b, const char *ostr) -{ - if (!bitarray_Check(a) || !bitarray_Check(b)) { - PyErr_Format(PyExc_TypeError, - "unsupported operand type(s) for %s: '%s' and '%s'", - ostr, Py_TYPE(a)->tp_name, Py_TYPE(b)->tp_name); - return -1; - } -#define aa ((bitarrayobject *) a) -#define bb ((bitarrayobject *) b) - if (aa->nbits != bb->nbits) { - PyErr_Format(PyExc_ValueError, - "bitarrays of equal length expected for '%s'", ostr); - return -1; - } - if (aa->endian != bb->endian) { - PyErr_Format(PyExc_ValueError, - "bitarrays of equal endianness expected for '%s'", ostr); - return -1; - } -#undef aa -#undef bb - return 0; -} - -#define BITWISE_FUNC(oper, ostr) \ -static PyObject * \ -bitarray_ ## oper (PyObject *self, PyObject *other) \ -{ \ - PyObject *res; \ - \ - if (bitwise_check(self, other, ostr) < 0) \ - return NULL; \ - res = bitarray_copy((bitarrayobject *) self); \ - if (res == NULL) \ - return NULL; \ - bitwise((bitarrayobject *) res, \ - (bitarrayobject *) other, OP_ ## oper); \ - return res; \ -} - -BITWISE_FUNC(and, "&") /* bitarray_and */ -BITWISE_FUNC(or, "|") /* bitarray_or */ -BITWISE_FUNC(xor, "^") /* bitarray_xor */ - - -#define BITWISE_IFUNC(oper, ostr) \ -static PyObject * \ -bitarray_i ## oper (PyObject *self, PyObject *other) \ -{ \ - RAISE_IF_READONLY(self, NULL); \ - if (bitwise_check(self, other, ostr) < 0) \ - return NULL; \ - bitwise((bitarrayobject *) self, \ - (bitarrayobject *) other, OP_ ## oper); \ - Py_INCREF(self); \ - return self; \ -} - -BITWISE_IFUNC(and, "&=") /* bitarray_iand */ -BITWISE_IFUNC(or, "|=") 
/* bitarray_ior */ -BITWISE_IFUNC(xor, "^=") /* bitarray_ixor */ - - -/* shift bitarray n positions to left (right=0) or right (right=1) */ -static void -shift(bitarrayobject *self, Py_ssize_t n, int right) -{ - Py_ssize_t nbits = self->nbits; - - if (n == 0) - return; - if (n >= nbits) { - memset(self->ob_item, 0x00, (size_t) Py_SIZE(self)); - return; - } - - assert(0 < n && n < nbits); - if (right) { /* rshift */ - copy_n(self, n, self, 0, nbits - n); - setrange(self, 0, n, 0); - } - else { /* lshift */ - copy_n(self, 0, self, n, nbits - n); - setrange(self, nbits - n, nbits, 0); - } -} - -/* check shift arguments and return the shift count, -1 on error */ -static Py_ssize_t -shift_check(PyObject *a, PyObject *b, const char *ostr) -{ - Py_ssize_t n; - - if (!bitarray_Check(a) || !PyIndex_Check(b)) { - PyErr_Format(PyExc_TypeError, - "unsupported operand type(s) for %s: '%s' and '%s'", - ostr, Py_TYPE(a)->tp_name, Py_TYPE(b)->tp_name); - return -1; - } - n = PyNumber_AsSsize_t(b, PyExc_OverflowError); - if (n == -1 && PyErr_Occurred()) - return -1; - - if (n < 0) { - PyErr_SetString(PyExc_ValueError, "negative shift count"); - return -1; - } - return n; -} - -#define SHIFT_FUNC(name, inplace, right, ostr) \ -static PyObject * \ -bitarray_ ## name (PyObject *self, PyObject *other) \ -{ \ - PyObject *res; \ - Py_ssize_t n; \ - \ - if ((n = shift_check(self, other, ostr)) < 0) \ - return NULL; \ - if (inplace) { \ - RAISE_IF_READONLY(self, NULL); \ - res = self; \ - Py_INCREF(res); \ - } \ - else { \ - res = bitarray_copy((bitarrayobject *) self); \ - if (res == NULL) \ - return NULL; \ - } \ - shift((bitarrayobject *) res, n, right); \ - return res; \ -} - -SHIFT_FUNC(lshift, 0, 0, "<<") /* bitarray_lshift */ -SHIFT_FUNC(rshift, 0, 1, ">>") /* bitarray_rshift */ -SHIFT_FUNC(ilshift, 1, 0, "<<=") /* bitarray_ilshift */ -SHIFT_FUNC(irshift, 1, 1, ">>=") /* bitarray_irshift */ - - -static PyNumberMethods bitarray_as_number = { - 0, /* nb_add */ - 0, /* nb_subtract */ - 
0, /* nb_multiply */ -#if PY_MAJOR_VERSION == 2 - 0, /* nb_divide */ -#endif - 0, /* nb_remainder */ - 0, /* nb_divmod */ - 0, /* nb_power */ - 0, /* nb_negative */ - 0, /* nb_positive */ - 0, /* nb_absolute */ - 0, /* nb_bool (was nb_nonzero) */ - (unaryfunc) bitarray_cpinvert, /* nb_invert */ - (binaryfunc) bitarray_lshift, /* nb_lshift */ - (binaryfunc) bitarray_rshift, /* nb_rshift */ - (binaryfunc) bitarray_and, /* nb_and */ - (binaryfunc) bitarray_xor, /* nb_xor */ - (binaryfunc) bitarray_or, /* nb_or */ -#if PY_MAJOR_VERSION == 2 - 0, /* nb_coerce */ -#endif - 0, /* nb_int */ - 0, /* nb_reserved (was nb_long) */ - 0, /* nb_float */ -#if PY_MAJOR_VERSION == 2 - 0, /* nb_oct */ - 0, /* nb_hex */ -#endif - 0, /* nb_inplace_add */ - 0, /* nb_inplace_subtract */ - 0, /* nb_inplace_multiply */ -#if PY_MAJOR_VERSION == 2 - 0, /* nb_inplace_divide */ -#endif - 0, /* nb_inplace_remainder */ - 0, /* nb_inplace_power */ - (binaryfunc) bitarray_ilshift, /* nb_inplace_lshift */ - (binaryfunc) bitarray_irshift, /* nb_inplace_rshift */ - (binaryfunc) bitarray_iand, /* nb_inplace_and */ - (binaryfunc) bitarray_ixor, /* nb_inplace_xor */ - (binaryfunc) bitarray_ior, /* nb_inplace_or */ - 0, /* nb_floor_divide */ - 0, /* nb_true_divide */ - 0, /* nb_inplace_floor_divide */ - 0, /* nb_inplace_true_divide */ -#if PY_MAJOR_VERSION == 3 - 0, /* nb_index */ -#endif -}; - -/************************************************************************** - variable length encoding and decoding - **************************************************************************/ - -static int -check_codedict(PyObject *codedict) -{ - if (!PyDict_Check(codedict)) { - PyErr_Format(PyExc_TypeError, "dict expected, got %s", - Py_TYPE(codedict)->tp_name); - return -1; - } - if (PyDict_Size(codedict) == 0) { - PyErr_SetString(PyExc_ValueError, "non-empty dict expected"); - return -1; - } - return 0; -} - -static int -check_value(PyObject *value) -{ - if (!bitarray_Check(value)) { - 
PyErr_SetString(PyExc_TypeError, - "bitarray expected for dict value"); - return -1; - } - if (((bitarrayobject *) value)->nbits == 0) { - PyErr_SetString(PyExc_ValueError, "non-empty bitarray expected"); - return -1; - } - return 0; -} - -static PyObject * -bitarray_encode(bitarrayobject *self, PyObject *args) -{ - PyObject *codedict, *iterable, *iter, *symbol, *value; - - RAISE_IF_READONLY(self, NULL); - if (!PyArg_ParseTuple(args, "OO:encode", &codedict, &iterable)) - return NULL; - - if (check_codedict(codedict) < 0) - return NULL; - - iter = PyObject_GetIter(iterable); - if (iter == NULL) - return PyErr_Format(PyExc_TypeError, "'%s' object is not iterable", - Py_TYPE(iterable)->tp_name); - - /* extend self with the bitarrays from codedict */ - while ((symbol = PyIter_Next(iter))) { - value = PyDict_GetItem(codedict, symbol); - Py_DECREF(symbol); - if (value == NULL) { -#ifdef IS_PY3K - PyErr_Format(PyExc_ValueError, - "symbol not defined in prefix code: %A", symbol); -#else - PyErr_SetString(PyExc_ValueError, - "symbol not defined in prefix code"); -#endif - goto error; - } - if (check_value(value) < 0 || - extend_bitarray(self, (bitarrayobject *) value) < 0) - goto error; - } - Py_DECREF(iter); - if (PyErr_Occurred()) - return NULL; - Py_RETURN_NONE; - - error: - Py_DECREF(iter); - return NULL; -} - -PyDoc_STRVAR(encode_doc, -"encode(code, iterable, /)\n\ -\n\ -Given a prefix code (a dict mapping symbols to bitarrays),\n\ -iterate over the iterable object with symbols, and extend the bitarray\n\ -with the corresponding bitarray for each symbol."); - -/* ----------------------- binary tree (C-level) ----------------------- */ - -/* a node has either children or a symbol, NEVER both */ -typedef struct _bin_node -{ - struct _bin_node *child[2]; - PyObject *symbol; -} binode; - - -static binode * -binode_new(void) -{ - binode *nd; - - if ((nd = (binode *) PyMem_Malloc(sizeof(binode))) == NULL) { - PyErr_NoMemory(); - return NULL; - } - nd->child[0] = NULL; - 
nd->child[1] = NULL; - nd->symbol = NULL; - return nd; -} - -static void -binode_delete(binode *nd) -{ - if (nd == NULL) - return; - - binode_delete(nd->child[0]); - binode_delete(nd->child[1]); - Py_XDECREF(nd->symbol); - PyMem_Free((void *) nd); -} - -/* insert symbol (mapping to ba) into the tree */ -static int -binode_insert_symbol(binode *tree, bitarrayobject *ba, PyObject *symbol) -{ - binode *nd = tree, *prev; - Py_ssize_t i; - int k; - - for (i = 0; i < ba->nbits; i++) { - k = getbit(ba, i); - prev = nd; - nd = nd->child[k]; - - if (nd) { - if (nd->symbol) /* we cannot have already a symbol */ - goto ambiguity; - } - else { /* if node does not exist, create new one */ - nd = binode_new(); - if (nd == NULL) - return -1; - prev->child[k] = nd; - } - } - /* the new leaf node cannot already have a symbol or children */ - if (nd->symbol || nd->child[0] || nd->child[1]) - goto ambiguity; - - nd->symbol = symbol; - Py_INCREF(symbol); - return 0; - - ambiguity: -#ifdef IS_PY3K - PyErr_Format(PyExc_ValueError, "prefix code ambiguous: %A", symbol); -#else - PyErr_SetString(PyExc_ValueError, "prefix code ambiguous"); -#endif - return -1; -} - -/* return a binary tree from a codedict, which is created by inserting - all symbols mapping to bitarrays */ -static binode * -binode_make_tree(PyObject *codedict) -{ - binode *tree; - PyObject *symbol, *value; - Py_ssize_t pos = 0; - - tree = binode_new(); - if (tree == NULL) - return NULL; - - while (PyDict_Next(codedict, &pos, &symbol, &value)) { - if (check_value(value) < 0 || - binode_insert_symbol(tree, (bitarrayobject *) value, symbol) < 0) - { - binode_delete(tree); - return NULL; - } - } - /* as we require the codedict to be non-empty the tree cannot be empty */ - assert(tree); - return tree; -} - -/* Traverse using the branches corresponding to bits in `ba`, starting - at *indexp. Return the symbol at the leaf node, or NULL when the end - of the bitarray has been reached. 
On error, NULL is also returned, - and the appropriate exception is set. -*/ -static PyObject * -binode_traverse(binode *tree, bitarrayobject *ba, Py_ssize_t *indexp) -{ - binode *nd = tree; - Py_ssize_t start = *indexp; - - while (*indexp < ba->nbits) { - assert(nd); - nd = nd->child[getbit(ba, *indexp)]; - if (nd == NULL) - return PyErr_Format(PyExc_ValueError, - "prefix code unrecognized in bitarray " - "at position %zd .. %zd", start, *indexp); - (*indexp)++; - if (nd->symbol) { /* leaf */ - assert(nd->child[0] == NULL && nd->child[1] == NULL); - return nd->symbol; - } - } - if (nd != tree) - PyErr_Format(PyExc_ValueError, - "incomplete prefix code at position %zd", start); - return NULL; -} - -/* add the node's symbol to given dict */ -static int -binode_to_dict(binode *nd, PyObject *dict, bitarrayobject *prefix) -{ - bitarrayobject *t; /* prefix of the two child nodes */ - int k, ret; - - if (nd == NULL) - return 0; - - if (nd->symbol) { - assert(nd->child[0] == NULL && nd->child[1] == NULL); - if (PyDict_SetItem(dict, nd->symbol, (PyObject *) prefix) < 0) - return -1; - return 0; - } - - for (k = 0; k < 2; k++) { - t = (bitarrayobject *) bitarray_copy(prefix); - if (t == NULL) - return -1; - if (resize(t, t->nbits + 1) < 0) - return -1; - setbit(t, t->nbits - 1, k); - ret = binode_to_dict(nd->child[k], dict, t); - Py_DECREF((PyObject *) t); - if (ret < 0) - return -1; - } - return 0; -} - -/* return the number of nodes */ -static Py_ssize_t -binode_nodes(binode *nd) -{ - Py_ssize_t res; - - if (nd == NULL) - return 0; - - /* a node cannot have a symbol and children */ - assert(!(nd->symbol && (nd->child[0] || nd->child[1]))); - /* a node must have a symbol or children */ - assert(nd->symbol || nd->child[0] || nd->child[1]); - - res = 1; - res += binode_nodes(nd->child[0]); - res += binode_nodes(nd->child[1]); - return res; -} - -/******************************** decodetree ******************************/ - -typedef struct { - PyObject_HEAD - binode *tree; -} 
decodetreeobject; - - -static PyObject * -decodetree_new(PyTypeObject *type, PyObject *args, PyObject *kwds) -{ - binode *tree; - PyObject *codedict; - decodetreeobject *self; - - if (!PyArg_ParseTuple(args, "O:decodetree", &codedict)) - return NULL; - - if (check_codedict(codedict) < 0) - return NULL; - - tree = binode_make_tree(codedict); - if (tree == NULL) - return NULL; - - self = (decodetreeobject *) type->tp_alloc(type, 0); - if (self == NULL) { - binode_delete(tree); - return NULL; - } - self->tree = tree; - - return (PyObject *) self; -} - -/* Return a dict mapping the symbols to bitarrays. This dict is a - reconstruction of the code dict the decodetree was created with. */ -static PyObject * -decodetree_todict(decodetreeobject *self) -{ - PyObject *dict, *prefix; - - if ((dict = PyDict_New()) == NULL) - return NULL; - - prefix = newbitarrayobject(&Bitarray_Type, 0, default_endian); - if (prefix == NULL) - goto error; - - if (binode_to_dict(self->tree, dict, (bitarrayobject *) prefix) < 0) - goto error; - - Py_DECREF(prefix); - return dict; - - error: - Py_DECREF(dict); - Py_XDECREF(prefix); - return NULL; -} - -/* Return the number of nodes in the tree (not just symbols) */ -static PyObject * -decodetree_nodes(decodetreeobject *self) -{ - return PyLong_FromSsize_t(binode_nodes(self->tree)); -} - -static PyObject * -decodetree_sizeof(decodetreeobject *self) -{ - Py_ssize_t res; - - res = sizeof(decodetreeobject); - res += sizeof(binode) * binode_nodes(self->tree); - return PyLong_FromSsize_t(res); -} - -static void -decodetree_dealloc(decodetreeobject *self) -{ - binode_delete(self->tree); - Py_TYPE(self)->tp_free((PyObject *) self); -} - -/* as these methods are only useful for debugging and testing, - they are only documented within this file */ -static PyMethodDef decodetree_methods[] = { - {"nodes", (PyCFunction) decodetree_nodes, METH_NOARGS, 0}, - {"todict", (PyCFunction) decodetree_todict, METH_NOARGS, 0}, - {"__sizeof__", (PyCFunction) 
decodetree_sizeof, METH_NOARGS, 0}, - {NULL, NULL} /* sentinel */ -}; - -PyDoc_STRVAR(decodetree_doc, -"decodetree(code, /) -> decodetree\n\ -\n\ -Given a prefix code (a dict mapping symbols to bitarrays),\n\ -create a binary tree object to be passed to `.decode()` or `.iterdecode()`."); - -static PyTypeObject DecodeTree_Type = { -#ifdef IS_PY3K - PyVarObject_HEAD_INIT(NULL, 0) -#else - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ -#endif - "bitarray.decodetree", /* tp_name */ - sizeof(decodetreeobject), /* tp_basicsize */ - 0, /* tp_itemsize */ - /* methods */ - (destructor) decodetree_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number*/ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - PyObject_HashNotImplemented, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT, /* tp_flags */ - decodetree_doc, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - decodetree_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - PyType_GenericAlloc, /* tp_alloc */ - decodetree_new, /* tp_new */ - PyObject_Del, /* tp_free */ -}; - -#define DecodeTree_Check(op) PyObject_TypeCheck(op, &DecodeTree_Type) - -/* -------------------------- END decodetree --------------------------- */ - -static PyObject * -bitarray_decode(bitarrayobject *self, PyObject *obj) -{ - binode *tree; - PyObject *list = NULL, *symbol; - Py_ssize_t index = 0; - - if (DecodeTree_Check(obj)) { - tree = ((decodetreeobject *) obj)->tree; - } - else { - if (check_codedict(obj) < 0) - return NULL; - - if ((tree = binode_make_tree(obj)) == NULL) - return NULL; - 
} - - if ((list = PyList_New(0)) == NULL) - goto error; - - while ((symbol = binode_traverse(tree, self, &index))) { - if (PyList_Append(list, symbol) < 0) - goto error; - } - if (PyErr_Occurred()) - goto error; - if (!DecodeTree_Check(obj)) - binode_delete(tree); - return list; - - error: - if (!DecodeTree_Check(obj)) - binode_delete(tree); - Py_XDECREF(list); - return NULL; -} - -PyDoc_STRVAR(decode_doc, -"decode(code, /) -> list\n\ -\n\ -Given a prefix code (a dict mapping symbols to bitarrays, or `decodetree`\n\ -object), decode the content of the bitarray and return it as a list of\n\ -symbols."); - -/*********************** (bitarray) Decode Iterator ***********************/ - -typedef struct { - PyObject_HEAD - bitarrayobject *bao; /* bitarray we're decoding */ - binode *tree; /* prefix tree containing symbols */ - Py_ssize_t index; /* current index in bitarray */ - PyObject *decodetree; /* decodetree or NULL */ -} decodeiterobject; - -static PyTypeObject DecodeIter_Type; - -/* create a new initialized bitarray decode iterator object */ -static PyObject * -bitarray_iterdecode(bitarrayobject *self, PyObject *obj) -{ - decodeiterobject *it; /* iterator to be returned */ - binode *tree; - - if (DecodeTree_Check(obj)) { - tree = ((decodetreeobject *) obj)->tree; - } - else { - if (check_codedict(obj) < 0) - return NULL; - - if ((tree = binode_make_tree(obj)) == NULL) - return NULL; - } - - it = PyObject_GC_New(decodeiterobject, &DecodeIter_Type); - if (it == NULL) { - if (!DecodeTree_Check(obj)) - binode_delete(tree); - return NULL; - } - - Py_INCREF(self); - it->bao = self; - it->tree = tree; - it->index = 0; - it->decodetree = DecodeTree_Check(obj) ? 
obj : NULL; - Py_XINCREF(it->decodetree); - PyObject_GC_Track(it); - return (PyObject *) it; -} - -PyDoc_STRVAR(iterdecode_doc, -"iterdecode(code, /) -> iterator\n\ -\n\ -Given a prefix code (a dict mapping symbols to bitarrays, or `decodetree`\n\ -object), decode the content of the bitarray and return an iterator over\n\ -the symbols."); - -static PyObject * -decodeiter_next(decodeiterobject *it) -{ - PyObject *symbol; - - symbol = binode_traverse(it->tree, it->bao, &(it->index)); - if (symbol == NULL) /* stop iteration OR error occured */ - return NULL; - Py_INCREF(symbol); - return symbol; -} - -static void -decodeiter_dealloc(decodeiterobject *it) -{ - if (it->decodetree) - Py_DECREF(it->decodetree); - else /* when decodeiter was created from dict - free tree */ - binode_delete(it->tree); - - PyObject_GC_UnTrack(it); - Py_DECREF(it->bao); - PyObject_GC_Del(it); -} - -static int -decodeiter_traverse(decodeiterobject *it, visitproc visit, void *arg) -{ - Py_VISIT(it->bao); - return 0; -} - -static PyTypeObject DecodeIter_Type = { -#ifdef IS_PY3K - PyVarObject_HEAD_INIT(NULL, 0) -#else - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ -#endif - "bitarray.decodeiterator", /* tp_name */ - sizeof(decodeiterobject), /* tp_basicsize */ - 0, /* tp_itemsize */ - /* methods */ - (destructor) decodeiter_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - 0, /* tp_doc */ - (traverseproc) decodeiter_traverse, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - PyObject_SelfIter, /* tp_iter */ - (iternextfunc) decodeiter_next, /* tp_iternext */ - 0, /* tp_methods */ -}; - 
-/*********************** (Bitarray) Search Iterator ***********************/ - -typedef struct { - PyObject_HEAD - bitarrayobject *bao; /* bitarray we're searching in */ - bitarrayobject *xa; /* bitarray being searched for */ - Py_ssize_t p; /* current search position */ -} searchiterobject; - -static PyTypeObject SearchIter_Type; - -/* create a new initialized bitarray search iterator object */ -static PyObject * -bitarray_itersearch(bitarrayobject *self, PyObject *x) -{ - searchiterobject *it; /* iterator to be returned */ - bitarrayobject *xa; - - if (PyIndex_Check(x)) { - int vi; - - if ((vi = pybit_as_int(x)) < 0) - return NULL; - xa = (bitarrayobject *) newbitarrayobject(Py_TYPE(self), 1, - self->endian); - if (xa == NULL) - return NULL; - setbit(xa, 0, vi); - } - else if (bitarray_Check(x)) { - xa = (bitarrayobject *) x; - } - else { - PyErr_SetString(PyExc_TypeError, "bitarray or int expected"); - return NULL; - } - - if (xa->nbits == 0) { - PyErr_SetString(PyExc_ValueError, "can't search for empty bitarray"); - return NULL; - } - - it = PyObject_GC_New(searchiterobject, &SearchIter_Type); - if (it == NULL) - return NULL; - - it->bao = self; - Py_INCREF(self); - it->xa = xa; - if (bitarray_Check(x)) - Py_INCREF(xa); - it->p = 0; /* start search at position 0 */ - PyObject_GC_Track(it); - return (PyObject *) it; -} - -PyDoc_STRVAR(itersearch_doc, -"itersearch(sub_bitarray, /) -> iterator\n\ -\n\ -Searches for the given sub_bitarray in self, and return an iterator over\n\ -the start positions where bitarray matches self."); - -static PyObject * -searchiter_next(searchiterobject *it) -{ - Py_ssize_t p; - - p = find(it->bao, it->xa, it->p, it->bao->nbits); - if (p < 0) /* no more positions -- stop iteration */ - return NULL; - it->p = p + 1; /* next search position */ - return PyLong_FromSsize_t(p); -} - -static void -searchiter_dealloc(searchiterobject *it) -{ - PyObject_GC_UnTrack(it); - Py_DECREF(it->bao); - Py_DECREF(it->xa); - PyObject_GC_Del(it); -} - 
-static int -searchiter_traverse(searchiterobject *it, visitproc visit, void *arg) -{ - Py_VISIT(it->bao); - return 0; -} - -static PyTypeObject SearchIter_Type = { -#ifdef IS_PY3K - PyVarObject_HEAD_INIT(NULL, 0) -#else - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ -#endif - "bitarray.searchiterator", /* tp_name */ - sizeof(searchiterobject), /* tp_basicsize */ - 0, /* tp_itemsize */ - /* methods */ - (destructor) searchiter_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - 0, /* tp_doc */ - (traverseproc) searchiter_traverse, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - PyObject_SelfIter, /* tp_iter */ - (iternextfunc) searchiter_next, /* tp_iternext */ - 0, /* tp_methods */ -}; - -/*********************** bitarray method definitions **********************/ - -static PyMethodDef bitarray_methods[] = { - {"all", (PyCFunction) bitarray_all, METH_NOARGS, - all_doc}, - {"any", (PyCFunction) bitarray_any, METH_NOARGS, - any_doc}, - {"append", (PyCFunction) bitarray_append, METH_O, - append_doc}, - {"buffer_info", (PyCFunction) bitarray_buffer_info, METH_NOARGS, - buffer_info_doc}, - {"bytereverse", (PyCFunction) bitarray_bytereverse, METH_VARARGS, - bytereverse_doc}, - {"clear", (PyCFunction) bitarray_clear, METH_NOARGS, - clear_doc}, - {"copy", (PyCFunction) bitarray_copy, METH_NOARGS, - copy_doc}, - {"count", (PyCFunction) bitarray_count, METH_VARARGS, - count_doc}, - {"decode", (PyCFunction) bitarray_decode, METH_O, - decode_doc}, - {"iterdecode", (PyCFunction) bitarray_iterdecode, METH_O, - iterdecode_doc}, - {"encode", (PyCFunction) bitarray_encode, 
METH_VARARGS, - encode_doc}, - {"endian", (PyCFunction) bitarray_endian, METH_NOARGS, - endian_doc}, - {"extend", (PyCFunction) bitarray_extend, METH_O, - extend_doc}, - {"fill", (PyCFunction) bitarray_fill, METH_NOARGS, - fill_doc}, - {"find", (PyCFunction) bitarray_find, METH_VARARGS, - find_doc}, - {"frombytes", (PyCFunction) bitarray_frombytes, METH_O, - frombytes_doc}, - {"fromfile", (PyCFunction) bitarray_fromfile, METH_VARARGS, - fromfile_doc}, - {"index", (PyCFunction) bitarray_index, METH_VARARGS, - index_doc}, - {"insert", (PyCFunction) bitarray_insert, METH_VARARGS, - insert_doc}, - {"invert", (PyCFunction) bitarray_invert, METH_VARARGS, - invert_doc}, - {"pack", (PyCFunction) bitarray_pack, METH_O, - pack_doc}, - {"pop", (PyCFunction) bitarray_pop, METH_VARARGS, - pop_doc}, - {"remove", (PyCFunction) bitarray_remove, METH_O, - remove_doc}, - {"reverse", (PyCFunction) bitarray_reverse, METH_NOARGS, - reverse_doc}, - {"search", (PyCFunction) bitarray_search, METH_VARARGS, - search_doc}, - {"itersearch", (PyCFunction) bitarray_itersearch, METH_O, - itersearch_doc}, - {"setall", (PyCFunction) bitarray_setall, METH_O, - setall_doc}, - {"sort", (PyCFunction) bitarray_sort, METH_VARARGS | - METH_KEYWORDS, - sort_doc}, - {"to01", (PyCFunction) bitarray_to01, METH_NOARGS, - to01_doc}, - {"tobytes", (PyCFunction) bitarray_tobytes, METH_NOARGS, - tobytes_doc}, - {"tofile", (PyCFunction) bitarray_tofile, METH_O, - tofile_doc}, - {"tolist", (PyCFunction) bitarray_tolist, METH_NOARGS, - tolist_doc}, - {"unpack", (PyCFunction) bitarray_unpack, METH_VARARGS | - METH_KEYWORDS, - unpack_doc}, - - {"__copy__", (PyCFunction) bitarray_copy, METH_NOARGS, - copy_doc}, - {"__deepcopy__", (PyCFunction) bitarray_copy, METH_O, - copy_doc}, - {"__reduce__", (PyCFunction) bitarray_reduce, METH_NOARGS, - reduce_doc}, - {"__sizeof__", (PyCFunction) bitarray_sizeof, METH_NOARGS, - sizeof_doc}, - {"_freeze", (PyCFunction) bitarray_freeze, METH_NOARGS, 0}, - -#ifndef NDEBUG - /* 
functionality exposed in debug mode for testing */ - {"_shift_r8", (PyCFunction) bitarray_shift_r8, METH_VARARGS, 0}, - {"_copy_n", (PyCFunction) bitarray_copy_n, METH_VARARGS, 0}, -#endif - - {NULL, NULL} /* sentinel */ -}; - -/* ------------------------ bitarray initialization -------------------- */ - -/* Given a string, return an integer representing the endianness. - If the string is invalid, set a Python exception and return -1. */ -static int -endian_from_string(const char* string) -{ - assert(default_endian == ENDIAN_LITTLE || default_endian == ENDIAN_BIG); - - if (string == NULL) - return default_endian; - - if (strcmp(string, "little") == 0) - return ENDIAN_LITTLE; - - if (strcmp(string, "big") == 0) - return ENDIAN_BIG; - - PyErr_Format(PyExc_ValueError, "bit endianness must be either " - "'little' or 'big', got: '%s'", string); - return -1; -} - -/* create a new bitarray object whose buffer is imported from another object - which exposes the buffer protocol */ -static PyObject* -newbitarray_from_buffer(PyTypeObject *type, PyObject *buffer, int endian) -{ - Py_buffer view; - bitarrayobject *obj; - - if (!PyObject_CheckBuffer(buffer)) - return PyErr_Format(PyExc_TypeError, "cannot use '%s' as buffer", - Py_TYPE(buffer)->tp_name); - - if (PyObject_GetBuffer(buffer, &view, PyBUF_SIMPLE) < 0) - return NULL; - - obj = (bitarrayobject *) type->tp_alloc(type, 0); - if (obj == NULL) - return NULL; - - Py_SET_SIZE(obj, view.len); - obj->ob_item = (char *) view.buf; - obj->allocated = 0; /* no buffer allocated (in this object) */ - obj->nbits = BITS(view.len); - obj->endian = endian; - obj->ob_exports = 0; - obj->weakreflist = NULL; - obj->readonly = view.readonly; - - obj->buffer = (Py_buffer *) PyMem_Malloc(sizeof(Py_buffer)); - if (obj->buffer == NULL) - return NULL; - memcpy(obj->buffer, &view, sizeof(Py_buffer)); - - return (PyObject *) obj; -} - -static PyObject * -newbitarray_from_index(PyTypeObject *type, PyObject *index, int endian) -{ - Py_ssize_t nbits; 
- - assert(PyIndex_Check(index)); - nbits = PyNumber_AsSsize_t(index, PyExc_OverflowError); - if (nbits == -1 && PyErr_Occurred()) - return NULL; - - if (nbits < 0) { - PyErr_SetString(PyExc_ValueError, "bitarray length must be >= 0"); - return NULL; - } - if (BYTES(nbits) < 0) - return PyErr_Format(PyExc_OverflowError, "new bitarray %zd", nbits); - - return newbitarrayobject(type, nbits, endian); -} - -/* The head byte % 8 specifies the number of unused bits (in last buffer - byte), the remaining bytes consist of the buffer itself */ -static PyObject * -newbitarray_from_pickle(PyTypeObject *type, PyObject *bytes, int endian) -{ - PyObject *res; - Py_ssize_t nbytes; - unsigned char head; - char *data; - - assert(PyBytes_Check(bytes)); - nbytes = PyBytes_GET_SIZE(bytes); - assert(nbytes > 0); - data = PyBytes_AS_STRING(bytes); - head = *data; - - if (nbytes == 1 && head % 8) - return PyErr_Format(PyExc_ValueError, - "invalid header byte: 0x%02x", head); - - res = newbitarrayobject(type, - BITS(nbytes - 1) - ((Py_ssize_t) (head % 8)), - endian); - if (res == NULL) - return NULL; - memcpy(((bitarrayobject *) res)->ob_item, data + 1, (size_t) nbytes - 1); - return res; -} - -static PyObject * -bitarray_new(PyTypeObject *type, PyObject *args, PyObject *kwds) -{ - PyObject *res; /* to be returned in some cases */ - PyObject *initial = Py_None, *buffer = Py_None; - char *endian_str = NULL; - int endian; - static char *kwlist[] = {"", "endian", "buffer", NULL}; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OsO:bitarray", - kwlist, &initial, &endian_str, &buffer)) - return NULL; - - endian = endian_from_string(endian_str); - if (endian < 0) - return NULL; - - if (buffer != Py_None) { - if (initial != Py_None) { - PyErr_SetString(PyExc_TypeError, - "buffer requires no initial argument"); - return NULL; - } - return newbitarray_from_buffer(type, buffer, endian); - } - - /* no arg / None */ - if (initial == Py_None) - return newbitarrayobject(type, 0, endian); - - /* bool 
*/ - if (PyBool_Check(initial)) { - PyErr_SetString(PyExc_TypeError, "cannot create bitarray from bool"); - return NULL; - } - - /* index (a number) */ - if (PyIndex_Check(initial)) - return newbitarray_from_index(type, initial, endian); - - /* bytes (for pickling) */ - if (PyBytes_Check(initial) && PyBytes_GET_SIZE(initial) > 0) { - unsigned char head = *PyBytes_AS_STRING(initial); - - if (head < 32 && head % 16 < 8) { - if (endian_str == NULL) /* no endianness provided */ - endian = head / 16 ? ENDIAN_BIG : ENDIAN_LITTLE; - return newbitarray_from_pickle(type, initial, endian); - } - } - - if (bitarray_Check(initial) && endian_str == NULL) - endian = ((bitarrayobject *) initial)->endian; - - /* leave remaining type dispatch to extend method */ - res = newbitarrayobject(type, 0, endian); - if (res == NULL) - return NULL; - if (extend_dispatch((bitarrayobject *) res, initial) < 0) { - Py_DECREF(res); - return NULL; - } - return res; -} - -static PyObject * -richcompare(PyObject *v, PyObject *w, int op) -{ - int cmp; - Py_ssize_t i, vs, ws; - - if (!bitarray_Check(v) || !bitarray_Check(w)) { - Py_INCREF(Py_NotImplemented); - return Py_NotImplemented; - } -#define va ((bitarrayobject *) v) -#define wa ((bitarrayobject *) w) - vs = va->nbits; - ws = wa->nbits; - if (op == Py_EQ || op == Py_NE) { - /* shortcuts for EQ/NE */ - if (vs != ws) { - /* if sizes differ, the bitarrays differ */ - return PyBool_FromLong(op == Py_NE); - } - else if (va->endian == wa->endian) { - /* sizes and endianness are the same - use memcmp() */ - size_t s = vs / 8; /* bytes to whole bytes in buffer */ - - assert(vs == ws); - cmp = memcmp(va->ob_item, wa->ob_item, s); - if (cmp == 0 && vs % 8) /* if equal, compare remaining bits */ - cmp = zeroed_last_byte(va) != zeroed_last_byte(wa); - - return PyBool_FromLong((cmp == 0) ^ (op == Py_NE)); - } - } - - /* search for the first index where items are different */ - for (i = 0; i < vs && i < ws; i++) { - int vi = getbit(va, i); - int wi = 
getbit(wa, i); - - if (vi != wi) { - /* we have an item that differs */ - switch (op) { - case Py_LT: cmp = vi < wi; break; - case Py_LE: cmp = vi <= wi; break; - case Py_EQ: cmp = 0; break; - case Py_NE: cmp = 1; break; - case Py_GT: cmp = vi > wi; break; - case Py_GE: cmp = vi >= wi; break; - default: return NULL; /* cannot happen */ - } - return PyBool_FromLong((long) cmp); - } - } -#undef va -#undef wa - - /* no more items to compare -- compare sizes */ - switch (op) { - case Py_LT: cmp = vs < ws; break; - case Py_LE: cmp = vs <= ws; break; - case Py_EQ: cmp = vs == ws; break; - case Py_NE: cmp = vs != ws; break; - case Py_GT: cmp = vs > ws; break; - case Py_GE: cmp = vs >= ws; break; - default: return NULL; /* cannot happen */ - } - return PyBool_FromLong((long) cmp); -} - -/***************************** bitarray iterator **************************/ - -typedef struct { - PyObject_HEAD - bitarrayobject *bao; /* bitarray we're iterating over */ - Py_ssize_t index; /* current index in bitarray */ -} bitarrayiterobject; - -static PyTypeObject BitarrayIter_Type; - -/* create a new initialized bitarray iterator object, this object is - returned when calling iter(a) */ -static PyObject * -bitarray_iter(bitarrayobject *self) -{ - bitarrayiterobject *it; - - it = PyObject_GC_New(bitarrayiterobject, &BitarrayIter_Type); - if (it == NULL) - return NULL; - - Py_INCREF(self); - it->bao = self; - it->index = 0; - PyObject_GC_Track(it); - return (PyObject *) it; -} - -static PyObject * -bitarrayiter_next(bitarrayiterobject *it) -{ - if (it->index < it->bao->nbits) - return PyLong_FromLong(getbit(it->bao, it->index++)); - - return NULL; /* stop iteration */ -} - -static void -bitarrayiter_dealloc(bitarrayiterobject *it) -{ - PyObject_GC_UnTrack(it); - Py_DECREF(it->bao); - PyObject_GC_Del(it); -} - -static int -bitarrayiter_traverse(bitarrayiterobject *it, visitproc visit, void *arg) -{ - Py_VISIT(it->bao); - return 0; -} - -static PyTypeObject BitarrayIter_Type = { -#ifdef 
IS_PY3K - PyVarObject_HEAD_INIT(NULL, 0) -#else - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ -#endif - "bitarray.bitarrayiterator", /* tp_name */ - sizeof(bitarrayiterobject), /* tp_basicsize */ - 0, /* tp_itemsize */ - /* methods */ - (destructor) bitarrayiter_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - 0, /* tp_doc */ - (traverseproc) bitarrayiter_traverse, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - PyObject_SelfIter, /* tp_iter */ - (iternextfunc) bitarrayiter_next, /* tp_iternext */ - 0, /* tp_methods */ -}; - -/******************** bitarray buffer export interface ********************/ - -#if PY_MAJOR_VERSION == 2 /* old buffer protocol */ -static Py_ssize_t -bitarray_buffer_getreadbuf(bitarrayobject *self, - Py_ssize_t index, const void **ptr) -{ - if (index != 0) { - PyErr_SetString(PyExc_SystemError, "accessing non-existent segment"); - return -1; - } - *ptr = (void *) self->ob_item; - return Py_SIZE(self); -} - -static Py_ssize_t -bitarray_buffer_getwritebuf(bitarrayobject *self, - Py_ssize_t index, const void **ptr) -{ - if (index != 0) { - PyErr_SetString(PyExc_SystemError, "accessing non-existent segment"); - return -1; - } - *ptr = (void *) self->ob_item; - return Py_SIZE(self); -} - -static Py_ssize_t -bitarray_buffer_getsegcount(bitarrayobject *self, Py_ssize_t *lenp) -{ - if (lenp) - *lenp = Py_SIZE(self); - return 1; -} - -static Py_ssize_t -bitarray_buffer_getcharbuf(bitarrayobject *self, - Py_ssize_t index, const char **ptr) -{ - if (index != 0) { - PyErr_SetString(PyExc_SystemError, "accessing non-existent segment"); - return -1; 
- } - *ptr = self->ob_item; - return Py_SIZE(self); -} - -#endif /* old buffer protocol */ - -static int -bitarray_getbuffer(bitarrayobject *self, Py_buffer *view, int flags) -{ - int ret; - - if (view == NULL) { - self->ob_exports++; - return 0; - } - ret = PyBuffer_FillInfo(view, - (PyObject *) self, /* exporter */ - (void *) self->ob_item, - Py_SIZE(self), - self->readonly, - flags); - if (ret >= 0) - self->ob_exports++; - - return ret; -} - -static void -bitarray_releasebuffer(bitarrayobject *self, Py_buffer *view) -{ - self->ob_exports--; -} - -static PyBufferProcs bitarray_as_buffer = { -#if PY_MAJOR_VERSION == 2 /* old buffer protocol */ - (readbufferproc) bitarray_buffer_getreadbuf, - (writebufferproc) bitarray_buffer_getwritebuf, - (segcountproc) bitarray_buffer_getsegcount, - (charbufferproc) bitarray_buffer_getcharbuf, -#endif - (getbufferproc) bitarray_getbuffer, - (releasebufferproc) bitarray_releasebuffer, -}; - -/***************************** Bitarray Type ******************************/ - -PyDoc_STRVAR(bitarraytype_doc, -"bitarray(initializer=0, /, endian='big', buffer=None) -> bitarray\n\ -\n\ -Return a new bitarray object whose items are bits initialized from\n\ -the optional initial object, and endianness.\n\ -The initializer may be of the following types:\n\ -\n\ -`int`: Create a bitarray of given integer length. The initial values are\n\ -uninitialized.\n\ -\n\ -`str`: Create bitarray from a string of `0` and `1`.\n\ -\n\ -`iterable`: Create bitarray from iterable or sequence or integers 0 or 1.\n\ -\n\ -Optional keyword arguments:\n\ -\n\ -`endian`: Specifies the bit endianness of the created bitarray object.\n\ -Allowed values are `big` and `little` (the default is `big`).\n\ -The bit endianness effects the buffer representation of the bitarray.\n\ -\n\ -`buffer`: Any object which exposes a buffer. When provided, `initializer`\n\ -cannot be present (or has to be `None`). 
The imported buffer may be\n\ -readonly or writable, depending on the object type."); - - -static PyTypeObject Bitarray_Type = { -#ifdef IS_PY3K - PyVarObject_HEAD_INIT(NULL, 0) -#else - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ -#endif - "bitarray.bitarray", /* tp_name */ - sizeof(bitarrayobject), /* tp_basicsize */ - 0, /* tp_itemsize */ - /* methods */ - (destructor) bitarray_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - (reprfunc) bitarray_repr, /* tp_repr */ - &bitarray_as_number, /* tp_as_number*/ - &bitarray_as_sequence, /* tp_as_sequence */ - &bitarray_as_mapping, /* tp_as_mapping */ - PyObject_HashNotImplemented, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - 0, /* tp_setattro */ - &bitarray_as_buffer, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_WEAKREFS -#if PY_MAJOR_VERSION == 2 - | Py_TPFLAGS_HAVE_NEWBUFFER | Py_TPFLAGS_CHECKTYPES -#endif - , /* tp_flags */ - bitarraytype_doc, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - richcompare, /* tp_richcompare */ - offsetof(bitarrayobject, weakreflist), /* tp_weaklistoffset */ - (getiterfunc) bitarray_iter, /* tp_iter */ - 0, /* tp_iternext */ - bitarray_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - PyType_GenericAlloc, /* tp_alloc */ - bitarray_new, /* tp_new */ - PyObject_Del, /* tp_free */ -}; - -/***************************** Module functions ***************************/ - -static PyObject * -get_default_endian(PyObject *module) -{ - return Py_BuildValue("s", ENDIAN_STR(default_endian)); -} - -PyDoc_STRVAR(get_default_endian_doc, -"get_default_endian() -> string\n\ -\n\ -Return the default endianness for new bitarray objects being created.\n\ -Unless `_set_default_endian()` is called, the 
return value is `big`."); - - -static PyObject * -set_default_endian(PyObject *module, PyObject *args) -{ - char *endian_str; - int tmp; - - if (!PyArg_ParseTuple(args, "s:_set_default_endian", &endian_str)) - return NULL; - - /* As endian_from_string() might return -1, we have to store its value - in a temporary variable BEFORE setting default_endian. */ - tmp = endian_from_string(endian_str); - if (tmp < 0) - return NULL; - default_endian = tmp; - - Py_RETURN_NONE; -} - -PyDoc_STRVAR(set_default_endian_doc, -"_set_default_endian(endian, /)\n\ -\n\ -Set the default bit endianness for new bitarray objects being created."); - - -static PyObject * -sysinfo(void) -{ - return Py_BuildValue("iiiiiii", - (int) sizeof(void *), - (int) sizeof(size_t), - (int) sizeof(bitarrayobject), - (int) sizeof(decodetreeobject), - (int) sizeof(binode), -#ifdef PY_UINT64_T - 1, -#else - 0, -#endif -#ifndef NDEBUG - 1 -#else - 0 -#endif - ); -} - -PyDoc_STRVAR(sysinfo_doc, -"_sysinfo() -> tuple\n\ -\n\ -Return tuple containing:\n\ -\n\ -0. sizeof(void *)\n\ -1. sizeof(size_t)\n\ -2. sizeof(bitarrayobject)\n\ -3. sizeof(decodetreeobject)\n\ -4. sizeof(binode)\n\ -5. PY_UINT64_T defined\n\ -6. 
NDEBUG not defined"); - - -static PyMethodDef module_functions[] = { - {"get_default_endian", (PyCFunction) get_default_endian, - METH_NOARGS, get_default_endian_doc}, - {"_set_default_endian", (PyCFunction) set_default_endian, - METH_VARARGS, set_default_endian_doc}, - {"_sysinfo", (PyCFunction) sysinfo, - METH_NOARGS, sysinfo_doc }, - {NULL, NULL} /* sentinel */ -}; - -/******************************* Install Module ***************************/ - -#ifdef IS_PY3K -static PyModuleDef moduledef = { - PyModuleDef_HEAD_INIT, "_bitarray", 0, -1, module_functions, -}; -#endif - -PyMODINIT_FUNC -#ifdef IS_PY3K -PyInit__bitarray(void) -#else -init_bitarray(void) -#endif -{ - PyObject *m; - -#ifdef IS_PY3K - m = PyModule_Create(&moduledef); -#else - m = Py_InitModule3("_bitarray", module_functions, 0); -#endif - if (m == NULL) - goto error; - - if (PyType_Ready(&Bitarray_Type) < 0) - goto error; - Py_SET_TYPE(&Bitarray_Type, &PyType_Type); - Py_INCREF((PyObject *) &Bitarray_Type); - PyModule_AddObject(m, "bitarray", (PyObject *) &Bitarray_Type); - - if (PyType_Ready(&DecodeTree_Type) < 0) - goto error; - Py_SET_TYPE(&DecodeTree_Type, &PyType_Type); - Py_INCREF((PyObject *) &DecodeTree_Type); - PyModule_AddObject(m, "decodetree", (PyObject *) &DecodeTree_Type); - - if (PyType_Ready(&DecodeIter_Type) < 0) - goto error; - Py_SET_TYPE(&DecodeIter_Type, &PyType_Type); - - if (PyType_Ready(&BitarrayIter_Type) < 0) - goto error; - Py_SET_TYPE(&BitarrayIter_Type, &PyType_Type); - - if (PyType_Ready(&SearchIter_Type) < 0) - goto error; - Py_SET_TYPE(&SearchIter_Type, &PyType_Type); - - PyModule_AddObject(m, "__version__", - Py_BuildValue("s", BITARRAY_VERSION)); -#ifdef IS_PY3K - return m; - error: - return NULL; -#else - error: - return; -#endif -} diff --git a/shell/ext-py/bitarray-2.3.0/bitarray/_util.c b/shell/ext-py/bitarray-2.3.0/bitarray/_util.c deleted file mode 100644 index 56066a7ae..000000000 --- a/shell/ext-py/bitarray-2.3.0/bitarray/_util.c +++ /dev/null @@ -1,864 +0,0 
@@ -/* - Copyright (c) 2019 - 2021, Ilan Schnell; All Rights Reserved - bitarray is published under the PSF license. - - This file contains the C implementation of some useful utility functions. - - Author: Ilan Schnell -*/ - -#define PY_SSIZE_T_CLEAN -#include "Python.h" -#include "pythoncapi_compat.h" -#include "bitarray.h" - -#define IS_LE(a) ((a)->endian == ENDIAN_LITTLE) -#define IS_BE(a) ((a)->endian == ENDIAN_BIG) - -/* set using the Python module function _set_bato() */ -static PyObject *bitarray_type_obj = NULL; - -/* Return 0 if obj is bitarray. If not, return -1 and set an exception. */ -static int -ensure_bitarray(PyObject *obj) -{ - int t; - - if (bitarray_type_obj == NULL) - Py_FatalError("bitarray_type_obj not set"); - t = PyObject_IsInstance(obj, bitarray_type_obj); - if (t < 0) - return -1; - if (t == 0) { - PyErr_Format(PyExc_TypeError, "bitarray expected, not %s", - Py_TYPE(obj)->tp_name); - return -1; - } - return 0; -} - -/* ensure object is a bitarray of given length */ -static int -ensure_ba_of_length(PyObject *a, const Py_ssize_t n) -{ - if (ensure_bitarray(a) < 0) - return -1; - if (((bitarrayobject *) a)->nbits != n) { - PyErr_SetString(PyExc_ValueError, "size mismatch"); - return -1; - } - return 0; -} - -/* ------------------------------- count_n ----------------------------- */ - -/* return the smallest index i for which a.count(1, 0, i) == n, or when - n exceeds the total count return -1 */ -static Py_ssize_t -count_to_n(bitarrayobject *a, Py_ssize_t n) -{ - const Py_ssize_t nbits = a->nbits; - Py_ssize_t i = 0; /* index */ - Py_ssize_t j = 0; /* total count up to index */ - Py_ssize_t block_start, block_stop, k, m; - - assert(0 <= n && n <= nbits); - if (n == 0) - return 0; - -#define BLOCK_BITS 8192 - /* by counting big blocks we save comparisons */ - while (i + BLOCK_BITS < nbits) { - m = 0; - assert(i % 8 == 0); - block_start = i >> 3; - block_stop = block_start + (BLOCK_BITS >> 3); - assert(block_stop <= Py_SIZE(a)); - for (k = 
block_start; k < block_stop; k++) - m += bitcount_lookup[(unsigned char) a->ob_item[k]]; - if (j + m >= n) - break; - j += m; - i += BLOCK_BITS; - } -#undef BLOCK_BITS - - while (i + 8 < nbits) { - k = i >> 3; - assert(k < Py_SIZE(a)); - m = bitcount_lookup[(unsigned char) a->ob_item[k]]; - if (j + m >= n) - break; - j += m; - i += 8; - } - - while (j < n && i < nbits ) { - j += getbit(a, i); - i++; - } - if (j < n) - return -1; - - return i; -} - -static PyObject * -count_n(PyObject *module, PyObject *args) -{ - PyObject *a; - Py_ssize_t n, i; - - if (!PyArg_ParseTuple(args, "On:count_n", &a, &n)) - return NULL; - if (ensure_bitarray(a) < 0) - return NULL; - - if (n < 0) { - PyErr_SetString(PyExc_ValueError, "non-negative integer expected"); - return NULL; - } -#define aa ((bitarrayobject *) a) - if (n > aa->nbits) { - PyErr_SetString(PyExc_ValueError, "n larger than bitarray size"); - return NULL; - } - i = count_to_n(aa, n); /* do actual work here */ -#undef aa - if (i < 0) { - PyErr_SetString(PyExc_ValueError, "n exceeds total count"); - return NULL; - } - return PyLong_FromSsize_t(i); -} - -PyDoc_STRVAR(count_n_doc, -"count_n(a, n, /) -> int\n\ -\n\ -Return lowest index `i` for which `a[:i].count() == n`.\n\ -Raises `ValueError`, when n exceeds total count (`a.count()`)."); - -/* ----------------------------- right index --------------------------- */ - -/* return index of highest occurrence of vi in self[a:b], -1 when not found */ -static Py_ssize_t -find_last(bitarrayobject *self, int vi, Py_ssize_t a, Py_ssize_t b) -{ - const Py_ssize_t n = b - a; - Py_ssize_t res, i; - - assert(0 <= a && a <= self->nbits); - assert(0 <= b && b <= self->nbits); - assert(0 <= vi && vi <= 1); - if (n <= 0) - return -1; - - /* the logic here is the same as in find_bit() in _bitarray.c */ -#ifdef PY_UINT64_T - if (n > 64) { - const Py_ssize_t word_a = (a + 63) / 64; - const Py_ssize_t word_b = b / 64; - const PY_UINT64_T w = vi ? 
0 : ~0; - - if ((res = find_last(self, vi, 64 * word_b, b)) >= 0) - return res; - - for (i = word_b - 1; i >= word_a; i--) { /* skip uint64 words */ - if (w ^ ((PY_UINT64_T *) self->ob_item)[i]) - return find_last(self, vi, 64 * i, 64 * i + 64); - } - return find_last(self, vi, a, 64 * word_a); - } -#endif - if (n > 8) { - const Py_ssize_t byte_a = BYTES(a); - const Py_ssize_t byte_b = b / 8; - const char c = vi ? 0 : ~0; - - if ((res = find_last(self, vi, BITS(byte_b), b)) >= 0) - return res; - - for (i = byte_b - 1; i >= byte_a; i--) { /* skip bytes */ - assert_byte_in_range(self, i); - if (c ^ self->ob_item[i]) - return find_last(self, vi, BITS(i), BITS(i) + 8); - } - return find_last(self, vi, a, BITS(byte_a)); - } - assert(n <= 8); - for (i = b - 1; i >= a; i--) { - if (getbit(self, i) == vi) - return i; - } - return -1; -} - -static PyObject * -r_index(PyObject *module, PyObject *args) -{ - PyObject *value = Py_True, *a; - Py_ssize_t start = 0, stop = PY_SSIZE_T_MAX, res; - int vi; - - if (!PyArg_ParseTuple(args, "O|Onn:rindex", &a, &value, &start, &stop)) - return NULL; - if (ensure_bitarray(a) < 0) - return NULL; - if ((vi = pybit_as_int(value)) < 0) - return NULL; - -#define aa ((bitarrayobject *) a) - normalize_index(aa->nbits, &start); - normalize_index(aa->nbits, &stop); - res = find_last(aa, vi, start, stop); -#undef aa - if (res < 0) - return PyErr_Format(PyExc_ValueError, "%d not in bitarray", vi); - - return PyLong_FromSsize_t(res); -} - -PyDoc_STRVAR(rindex_doc, -"rindex(bitarray, value=1, start=0, stop=, /) -> int\n\ -\n\ -Return the rightmost (highest) index of `value` in bitarray.\n\ -Raises `ValueError` if the value is not present."); - -/* --------------------------- unary functions ------------------------- */ - -static PyObject * -parity(PyObject *module, PyObject *a) -{ - Py_ssize_t i; - unsigned char par = 0; - - if (ensure_bitarray(a) < 0) - return NULL; - -#define aa ((bitarrayobject *) a) - for (i = 0; i < aa->nbits / 8; i++) - par ^= 
aa->ob_item[i]; - if (aa->nbits % 8) - par ^= zeroed_last_byte(aa); -#undef aa - - return PyLong_FromLong((long) bitcount_lookup[par] % 2); -} - -PyDoc_STRVAR(parity_doc, -"parity(a, /) -> int\n\ -\n\ -Return the parity of bitarray `a`.\n\ -This is equivalent to `a.count() % 2` (but more efficient)."); - -/* --------------------------- binary functions ------------------------ */ - -enum kernel_type { - KERN_cand, /* count bitwise and -> int */ - KERN_cor, /* count bitwise or -> int */ - KERN_cxor, /* count bitwise xor -> int */ - KERN_subset, /* is subset -> bool */ -}; - -static PyObject * -binary_function(PyObject *args, enum kernel_type kern, const char *format) -{ - Py_ssize_t res = 0, s, i; - PyObject *a, *b; - unsigned char c; - int r; - - if (!PyArg_ParseTuple(args, format, &a, &b)) - return NULL; - if (ensure_bitarray(a) < 0 || ensure_bitarray(b) < 0) - return NULL; - -#define aa ((bitarrayobject *) a) -#define bb ((bitarrayobject *) b) - if (aa->nbits != bb->nbits) { - PyErr_SetString(PyExc_ValueError, - "bitarrays of equal length expected"); - return NULL; - } - if (aa->endian != bb->endian) { - PyErr_SetString(PyExc_ValueError, - "bitarrays of equal endianness expected"); - return NULL; - } - s = aa->nbits / 8; /* number of whole bytes in buffer */ - r = aa->nbits % 8; /* remaining bits */ - - switch (kern) { - case KERN_cand: - for (i = 0; i < s; i++) { - c = aa->ob_item[i] & bb->ob_item[i]; - res += bitcount_lookup[c]; - } - if (r) { - c = zeroed_last_byte(aa) & zeroed_last_byte(bb); - res += bitcount_lookup[c]; - } - break; - - case KERN_cor: - for (i = 0; i < s; i++) { - c = aa->ob_item[i] | bb->ob_item[i]; - res += bitcount_lookup[c]; - } - if (r) { - c = zeroed_last_byte(aa) | zeroed_last_byte(bb); - res += bitcount_lookup[c]; - } - break; - - case KERN_cxor: - for (i = 0; i < s; i++) { - c = aa->ob_item[i] ^ bb->ob_item[i]; - res += bitcount_lookup[c]; - } - if (r) { - c = zeroed_last_byte(aa) ^ zeroed_last_byte(bb); - res += bitcount_lookup[c]; 
- } - break; - - case KERN_subset: - for (i = 0; i < s; i++) { - if ((aa->ob_item[i] & bb->ob_item[i]) != aa->ob_item[i]) - Py_RETURN_FALSE; - } - if (r) { - if ((zeroed_last_byte(aa) & zeroed_last_byte(bb)) != - zeroed_last_byte(aa)) - Py_RETURN_FALSE; - } - Py_RETURN_TRUE; - - default: /* cannot happen */ - return NULL; - } -#undef aa -#undef bb - return PyLong_FromSsize_t(res); -} - -#define COUNT_FUNC(oper, ochar) \ -static PyObject * \ -count_ ## oper (bitarrayobject *module, PyObject *args) \ -{ \ - return binary_function(args, KERN_c ## oper, "OO:count_" #oper); \ -} \ -PyDoc_STRVAR(count_ ## oper ## _doc, \ -"count_" #oper "(a, b, /) -> int\n\ -\n\ -Return `(a " ochar " b).count()` in a memory efficient manner,\n\ -as no intermediate bitarray object gets created.") - -COUNT_FUNC(and, "&"); /* count_and */ -COUNT_FUNC(or, "|"); /* count_or */ -COUNT_FUNC(xor, "^"); /* count_xor */ - - -static PyObject * -subset(PyObject *module, PyObject *args) -{ - return binary_function(args, KERN_subset, "OO:subset"); -} - -PyDoc_STRVAR(subset_doc, -"subset(a, b, /) -> bool\n\ -\n\ -Return `True` if bitarray `a` is a subset of bitarray `b`.\n\ -`subset(a, b)` is equivalent to `(a & b).count() == a.count()` but is more\n\ -efficient since we can stop as soon as one mismatch is found, and no\n\ -intermediate bitarray object gets created."); - -/* ---------------------------- serialization -------------------------- */ - -static PyObject * -serialize(PyObject *module, PyObject *a) -{ - PyObject *result; - Py_ssize_t nbytes; - char *str; - - if (ensure_bitarray(a) < 0) - return NULL; - - nbytes = Py_SIZE(a); - if ((result = PyBytes_FromStringAndSize(NULL, nbytes + 1)) == NULL) - return PyErr_NoMemory(); - - str = PyBytes_AsString(result); -#define aa ((bitarrayobject *) a) - *str = (char) (16 * IS_BE(aa) + setunused(aa)); - memcpy(str + 1, aa->ob_item, (size_t) nbytes); -#undef aa - return result; -} - -PyDoc_STRVAR(serialize_doc, -"serialize(bitarray, /) -> bytes\n\ -\n\ 
-Return a serialized representation of the bitarray, which may be passed to\n\ -`deserialize()`. It efficiently represents the bitarray object (including\n\ -its endianness) and is guaranteed not to change in future releases."); - -/* ----------------------------- hexadecimal --------------------------- */ - -static const char hexdigits[] = "0123456789abcdef"; - -static int -hex_to_int(char c) -{ - if ('0' <= c && c <= '9') - return c - '0'; - if ('a' <= c && c <= 'f') - return c - 'a' + 10; - if ('A' <= c && c <= 'F') - return c - 'A' + 10; - return -1; -} - -static PyObject * -ba2hex(PyObject *module, PyObject *a) -{ - PyObject *result; - size_t i, strsize; - char *str; - int le, be; - - if (ensure_bitarray(a) < 0) - return NULL; - -#define aa ((bitarrayobject *) a) - if (aa->nbits % 4) { - PyErr_SetString(PyExc_ValueError, "bitarray length not multiple of 4"); - return NULL; - } - - /* strsize = aa->nbits / 4; could make strsize odd */ - strsize = 2 * Py_SIZE(a); - str = (char *) PyMem_Malloc(strsize); - if (str == NULL) - return PyErr_NoMemory(); - - le = IS_LE(aa); - be = IS_BE(aa); - for (i = 0; i < strsize; i += 2) { - unsigned char c = aa->ob_item[i / 2]; - str[i + le] = hexdigits[c >> 4]; - str[i + be] = hexdigits[0x0f & c]; - } - result = Py_BuildValue("s#", str, aa->nbits / 4); -#undef aa - PyMem_Free((void *) str); - return result; -} - -PyDoc_STRVAR(ba2hex_doc, -"ba2hex(bitarray, /) -> hexstr\n\ -\n\ -Return a string containing the hexadecimal representation of\n\ -the bitarray (which has to be multiple of 4 in length)."); - - -/* Translate hexadecimal digits into the bitarray's buffer. - Each digit corresponds to 4 bits in the bitarray. - The number of digits may be odd. 
*/ -static PyObject * -hex2ba(PyObject *module, PyObject *args) -{ - PyObject *a; - char *str; - Py_ssize_t i, strsize; - int le, be, x, y; - - if (!PyArg_ParseTuple(args, "Os#", &a, &str, &strsize)) - return NULL; - if (ensure_ba_of_length(a, 4 * strsize) < 0) - return NULL; - -#define aa ((bitarrayobject *) a) - le = IS_LE(aa); - be = IS_BE(aa); - assert(le + be == 1 && str[strsize] == 0); - for (i = 0; i < strsize; i += 2) { - x = hex_to_int(str[i + le]); - y = hex_to_int(str[i + be]); - if (x < 0 || y < 0) { - /* ignore the terminating NUL - happends when strsize is odd */ - if (i + le == strsize) /* str[i+le] is NUL */ - x = 0; - if (i + be == strsize) /* str[i+be] is NUL */ - y = 0; - /* there is an invalid byte - or (non-terminating) NUL */ - if (x < 0 || y < 0) { - PyErr_SetString(PyExc_ValueError, - "Non-hexadecimal digit found"); - return NULL; - } - } - assert(x < 16 && y < 16); - aa->ob_item[i / 2] = x << 4 | y; - } -#undef aa - Py_RETURN_NONE; -} - -/* ----------------------- base 2, 4, 8, 16, 32, 64 -------------------- */ - -/* RFC 4648 Base32 alphabet */ -static const char base32_alphabet[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567"; - -/* standard base 64 alphabet */ -static const char base64_alphabet[] = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; - -static int -digit_to_int(char c, int n) -{ - if (n <= 16) { /* base 2, 4, 8, 16 */ - int i = hex_to_int(c); - if (0 <= i && i < n) - return i; - } - if (n == 32) { /* base 32 */ - if ('A' <= c && c <= 'Z') - return c - 'A'; - if ('2' <= c && c <= '7') - return c - '2' + 26; - } - if (n == 64) { /* base 64 */ - if ('A' <= c && c <= 'Z') - return c - 'A'; - if ('a' <= c && c <= 'z') - return c - 'a' + 26; - if ('0' <= c && c <= '9') - return c - '0' + 52; - if (c == '+') - return 62; - if (c == '/') - return 63; - } - return -1; -} - -/* return m = log2(n) for m = 1..6 */ -static int -base_to_length(int n) -{ - int m; - - for (m = 1; m < 7; m++) { - if (n == (1 << m)) - return m; - } 
- PyErr_SetString(PyExc_ValueError, "base must be 2, 4, 8, 16, 32 or 64"); - return -1; -} - -static PyObject * -ba2base(PyObject *module, PyObject *args) -{ - const char *alphabet; - PyObject *result, *a; - size_t i, strsize; - char *str; - int n, m, x, k, le; - - if (!PyArg_ParseTuple(args, "iO:ba2base", &n, &a)) - return NULL; - if ((m = base_to_length(n)) < 0) - return NULL; - if (ensure_bitarray(a) < 0) - return NULL; - - switch (n) { - case 32: alphabet = base32_alphabet; break; - case 64: alphabet = base64_alphabet; break; - default: alphabet = hexdigits; - } - -#define aa ((bitarrayobject *) a) - if (aa->nbits % m) - return PyErr_Format(PyExc_ValueError, - "bitarray length must be multiple of %d", m); - - strsize = aa->nbits / m; - if ((str = (char *) PyMem_Malloc(strsize)) == NULL) - return PyErr_NoMemory(); - - le = IS_LE(aa); - for (i = 0; i < strsize; i++) { - x = 0; - for (k = 0; k < m; k++) - x |= getbit(aa, m * i + (le ? k : (m - k - 1))) << k; - str[i] = alphabet[x]; - } - result = Py_BuildValue("s#", str, strsize); -#undef aa - PyMem_Free((void *) str); - return result; -} - -PyDoc_STRVAR(ba2base_doc, -"ba2base(n, bitarray, /) -> str\n\ -\n\ -Return a string containing the base `n` ASCII representation of\n\ -the bitarray. Allowed values for `n` are 2, 4, 8, 16, 32 and 64.\n\ -The bitarray has to be multiple of length 1, 2, 3, 4, 5 or 6 respectively.\n\ -For `n=16` (hexadecimal), `ba2hex()` will be much faster, as `ba2base()`\n\ -does not take advantage of byte level operations.\n\ -For `n=32` the RFC 4648 Base32 alphabet is used, and for `n=64` the\n\ -standard base 64 alphabet is used."); - - -/* Translate ASCII digits into the bitarray's buffer. 
- The (Python) arguments to this functions are: - - base n, one of 2, 4, 8, 16, 32, 64 (n=2^m where m bits per digit) - - bitarray (of length m * len(s)) whose buffer is written into - - byte object s containing the ASCII digits -*/ -static PyObject * -base2ba(PyObject *module, PyObject *args) -{ - PyObject *a; - Py_ssize_t i, strsize; - char *str; - int n, m, d, k, le; - - if (!PyArg_ParseTuple(args, "iOs#", &n, &a, &str, &strsize)) - return NULL; - if ((m = base_to_length(n)) < 0) - return NULL; - if (ensure_ba_of_length(a, m * strsize) < 0) - return NULL; - -#define aa ((bitarrayobject *) a) - memset(aa->ob_item, 0x00, (size_t) Py_SIZE(a)); - - le = IS_LE(aa); - for (i = 0; i < strsize; i++) { - d = digit_to_int(str[i], n); - if (d < 0) { - PyErr_SetString(PyExc_ValueError, "invalid digit found"); - return NULL; - } - for (k = 0; k < m; k++) - setbit(aa, m * i + (le ? k : (m - k - 1)), d & (1 << k)); - } -#undef aa - Py_RETURN_NONE; -} - -/* ------------------- variable length bitarray format ----------------- */ - -/* PADBITS is always 3 - the number of bits that represent the number of - padding bits. The actual number of padding bits is called 'padding' - below, and is in range(0, 7). - Also note that 'padding' refers to the pad bits within the variable - length format, which is not the same as the pad bits of the actual - bitarray. For example, b'\x10' has padding = 1, and decodes to - bitarray('000'), which has 5 pad bits. */ -#define PADBITS 3 - -/* Consume iterator while decoding bytes into bitarray. - As we don't have access to bitarrays resize() C function, we give this - function a bitarray (large enough in many cases). We manipulate .nbits - and .ob_size (using Py_SET_SIZE) directly without having to call resize(). - Whenever we need a larger bitarray, we call .frombytes() with a multiple - of 7 dummy bytes, such that the added bytes are aligned for the next time - we call .frombytes() (to avoid expensive bit shifts). 
- We drop the over-allocated bitarray on the Python side after this function - is called. -*/ -static PyObject * -vl_decode(PyObject *module, PyObject *args) -{ - const Py_ssize_t ibits = 256; /* initial number of bits in a */ - PyObject *iter, *item, *res, *a; - Py_ssize_t padding = 0; /* number of pad bits read from header byte */ - Py_ssize_t i = 0; /* bit counter */ - unsigned char b = 0x80; /* empty stream will raise StopIteration */ - Py_ssize_t k; - - /* Ensure that bits will be aligned when gowing memory below. - Possible values for ibits are: 32, 88, 144, 200, 256, 312, ... */ - assert((ibits + PADBITS) % 7 == 0 && ibits % 8 == 0); - - if (!PyArg_ParseTuple(args, "OO", &iter, &a)) - return NULL; - if (!PyIter_Check(iter)) - return PyErr_Format(PyExc_TypeError, "iterator or bytes expected, " - "got '%s'", Py_TYPE(iter)->tp_name); - if (ensure_ba_of_length(a, ibits) < 0) - return NULL; - -#define aa ((bitarrayobject *) a) - while ((item = PyIter_Next(iter))) { -#ifdef IS_PY3K - if (PyLong_Check(item)) - b = (unsigned char) PyLong_AsLong(item); -#else - if (PyBytes_Check(item)) - b = (unsigned char) *PyBytes_AS_STRING(item); -#endif - else { - PyErr_Format(PyExc_TypeError, "int (byte) iterator expected, " - "got '%s' element", Py_TYPE(item)->tp_name); - Py_DECREF(item); - return NULL; - } - Py_DECREF(item); - - assert(i == 0 || (i + PADBITS) % 7 == 0); - if (i == aa->nbits) { - /* grow memory - see above */ - assert(i % 8 == 0); /* added dummy bytes are aligned */ - /* 63 is a multiple of 7 - bytes will be aligned for next call */ - res = PyObject_CallMethod(a, "frombytes", BYTES_SIZE_FMT, - base64_alphabet, (Py_ssize_t) 63); - if (res == NULL) - return NULL; - Py_DECREF(res); /* drop frombytes result */ - } - assert(i + 6 < aa->nbits); - - if (i == 0) { - padding = (b & 0x70) >> 4; - if (padding >= 7 || ((b & 0x80) == 0 && padding > 4)) - return PyErr_Format(PyExc_ValueError, - "invalid header byte: 0x%02x", b); - for (k = 0; k < 4; k++) - setbit(aa, i++, 
(0x08 >> k) & b); - } - else { - for (k = 0; k < 7; k++) - setbit(aa, i++, (0x40 >> k) & b); - } - if ((b & 0x80) == 0) - break; - } - /* set final length of bitarray */ - aa->nbits = i - padding; - Py_SET_SIZE(a, BYTES(aa->nbits)); - assert_nbits(aa); -#undef aa - - if (PyErr_Occurred()) /* from PyIter_Next() */ - return NULL; - - if (b & 0x80) { - k = (i + PADBITS) / 7; - return PyErr_Format(PyExc_StopIteration, - "no terminating byte found, bytes read: %zd", k); - } - Py_RETURN_NONE; -} - -static PyObject * -vl_encode(PyObject *module, PyObject *a) -{ - PyObject *result; - Py_ssize_t padding, n, m, i, k; - Py_ssize_t j = 0; /* byte conter */ - char *str; - - if (ensure_bitarray(a) < 0) - return NULL; - -#define aa ((bitarrayobject *) a) - n = (aa->nbits + PADBITS + 6) / 7; /* number of resulting bytes */ - m = 7 * n - PADBITS; /* number of bits resulting bytes can hold */ - padding = m - aa->nbits; /* number of pad bits */ - assert(0 <= padding && padding < 7); - - if ((result = PyBytes_FromStringAndSize(NULL, n)) == NULL) - return PyErr_NoMemory(); - - str = PyBytes_AsString(result); - str[0] = aa->nbits > 4 ? 0x80 : 0x00; /* leading bit */ - str[0] |= padding << 4; /* encode padding */ - for (i = 0; i < 4 && i < aa->nbits; i++) - str[0] |= (0x08 >> i) * getbit(aa, i); - - for (i = 4; i < aa->nbits; i++) { - k = (i - 4) % 7; - if (k == 0) { - j++; - str[j] = j < n - 1 ? 0x80 : 0x00; /* leading bit */ - } - str[j] |= (0x40 >> k) * getbit(aa, i); - } -#undef aa - assert(j == n - 1); - - return result; -} - -PyDoc_STRVAR(vl_encode_doc, -"vl_encode(bitarray, /) -> bytes\n\ -\n\ -Return variable length binary representation of bitarray.\n\ -This representation is useful for efficiently storing small bitarray\n\ -in a binary stream. Use `vl_decode()` for decoding."); - -/* --------------------------------------------------------------------- */ - -/* Set bitarray_type_obj (bato). This function must be called before any - other Python function in this module. 
*/ -static PyObject * -set_bato(PyObject *module, PyObject *obj) -{ - bitarray_type_obj = obj; - Py_RETURN_NONE; -} - -static PyMethodDef module_functions[] = { - {"count_n", (PyCFunction) count_n, METH_VARARGS, count_n_doc}, - {"rindex", (PyCFunction) r_index, METH_VARARGS, rindex_doc}, - {"parity", (PyCFunction) parity, METH_O, parity_doc}, - {"count_and", (PyCFunction) count_and, METH_VARARGS, count_and_doc}, - {"count_or", (PyCFunction) count_or, METH_VARARGS, count_or_doc}, - {"count_xor", (PyCFunction) count_xor, METH_VARARGS, count_xor_doc}, - {"subset", (PyCFunction) subset, METH_VARARGS, subset_doc}, - {"serialize", (PyCFunction) serialize, METH_O, serialize_doc}, - {"ba2hex", (PyCFunction) ba2hex, METH_O, ba2hex_doc}, - {"_hex2ba", (PyCFunction) hex2ba, METH_VARARGS, 0}, - {"ba2base", (PyCFunction) ba2base, METH_VARARGS, ba2base_doc}, - {"_base2ba", (PyCFunction) base2ba, METH_VARARGS, 0}, - {"vl_encode", (PyCFunction) vl_encode, METH_O, vl_encode_doc}, - {"_vl_decode",(PyCFunction) vl_decode, METH_VARARGS, 0}, - {"_set_bato", (PyCFunction) set_bato, METH_O, 0}, - {NULL, NULL} /* sentinel */ -}; - -/******************************* Install Module ***************************/ - -#ifdef IS_PY3K -static PyModuleDef moduledef = { - PyModuleDef_HEAD_INIT, "_util", 0, -1, module_functions, -}; -#endif - -PyMODINIT_FUNC -#ifdef IS_PY3K -PyInit__util(void) -#else -init_util(void) -#endif -{ - PyObject *m; - -#ifdef IS_PY3K - m = PyModule_Create(&moduledef); - if (m == NULL) - return NULL; - return m; -#else - m = Py_InitModule3("_util", module_functions, 0); - if (m == NULL) - return; -#endif -} diff --git a/shell/ext-py/bitarray-2.3.0/bitarray/architecture.txt b/shell/ext-py/bitarray-2.3.0/bitarray/architecture.txt deleted file mode 100644 index e246c6a01..000000000 --- a/shell/ext-py/bitarray-2.3.0/bitarray/architecture.txt +++ /dev/null @@ -1,26 +0,0 @@ -Dependency of files -------------------- - -A depends on B A --------> B - - -A imports B in a function A - 
- - - > B - - - - +------------+ +------------+ - | util.py |------->| _util.c | - +------------+ +------------+ - | ^ - | | - | +--------------------------------------------------+ - | | | - V v | - +-------------+ - - - > +------------------+ - - - > +--------------+ - | __init__.py | | test_bitarray.py | | test_util.py | - +-------------+ <------ +------------------+ <------ +--------------+ - | - V - +-------------+ - | _bitarray.c | - +-------------+ diff --git a/shell/ext-py/bitarray-2.3.0/bitarray/bitarray.h b/shell/ext-py/bitarray-2.3.0/bitarray/bitarray.h deleted file mode 100644 index 16b7b401f..000000000 --- a/shell/ext-py/bitarray-2.3.0/bitarray/bitarray.h +++ /dev/null @@ -1,182 +0,0 @@ -/* - Copyright (c) 2008 - 2021, Ilan Schnell; All Rights Reserved - bitarray is published under the PSF license. - - Author: Ilan Schnell -*/ -#define BITARRAY_VERSION "2.3.0" - -/* .ob_size is buffer size (in bytes), not the number of elements. - The number of elements (bits) is .nbits. */ -typedef struct { - PyObject_VAR_HEAD - char *ob_item; /* buffer */ - Py_ssize_t allocated; /* allocated buffer size (in bytes) */ - Py_ssize_t nbits; /* length of bitarray, i.e. elements */ - int endian; /* bit endianness of bitarray */ - int ob_exports; /* how many buffer exports */ - PyObject *weakreflist; /* list of weak references */ - Py_buffer *buffer; /* used when importing a buffer */ - int readonly; /* buffer is readonly */ -} bitarrayobject; - -/* --- bit endianness --- */ -#define ENDIAN_LITTLE 0 -#define ENDIAN_BIG 1 - -/* the endianness string */ -#define ENDIAN_STR(endian) ((endian) == ENDIAN_LITTLE ? "little" : "big") - -/* number of bits that can be stored in given bytes */ -#define BITS(bytes) ((bytes) << 3) - -/* number of bytes necessary to store given bits */ -#define BYTES(bits) (((bits) + 7) >> 3) - -#define BITMASK(endian, i) \ - (((char) 1) << ((endian) == ENDIAN_LITTLE ? 
((i) & 7) : (7 - ((i) & 7)))) - -/* assert that .nbits is in agreement with .ob_size */ -#define assert_nbits(self) assert(BYTES((self)->nbits) == Py_SIZE(self)) - -/* assert byte index is in range */ -#define assert_byte_in_range(self, j) \ - assert(self->ob_item && 0 <= (j) && (j) < Py_SIZE(self)) - -/* --------------- definitions not specific to bitarray ---------------- */ - -#ifdef STDC_HEADERS -#include -#else /* !STDC_HEADERS */ -#ifdef HAVE_SYS_TYPES_H -#include /* For size_t */ -#endif /* HAVE_SYS_TYPES_H */ -#endif /* !STDC_HEADERS */ - -/* Compatibility with Visual Studio 2013 and older which don't support - the inline keyword in C (only in C++): use __inline instead. - (copied from pythoncapi_compat.h) */ -#if (defined(_MSC_VER) && _MSC_VER < 1900 \ - && !defined(__cplusplus) && !defined(inline)) -#define inline __inline -#endif - -/* --- definitions specific to Python --- */ - -#if PY_MAJOR_VERSION >= 3 -#define IS_PY3K -#define BYTES_SIZE_FMT "y#" -#else -/* the Py_MIN and Py_MAX macros were introduced in Python 3.3 */ -#define Py_MIN(x, y) (((x) > (y)) ? (y) : (x)) -#define Py_MAX(x, y) (((x) > (y)) ? (x) : (y)) -#define PySlice_GetIndicesEx(slice, len, start, stop, step, slicelength) \ - PySlice_GetIndicesEx(((PySliceObject *) slice), \ - (len), (start), (stop), (step), (slicelength)) -#define PyLong_FromLong PyInt_FromLong -#define BYTES_SIZE_FMT "s#" -#endif - -/* ------------ low level access to bits in bitarrayobject ------------- */ - -static inline int -getbit(bitarrayobject *self, Py_ssize_t i) -{ - assert_nbits(self); - assert_byte_in_range(self, i >> 3); - assert(0 <= i && i < self->nbits); - return (self->ob_item[i >> 3] & BITMASK(self->endian, i) ? 
1 : 0); -} - -static inline void -setbit(bitarrayobject *self, Py_ssize_t i, int vi) -{ - char *cp, mask; - - assert_nbits(self); - assert_byte_in_range(self, i >> 3); - assert(0 <= i && i < self->nbits); - assert(self->readonly == 0); - - mask = BITMASK(self->endian, i); - cp = self->ob_item + (i >> 3); - if (vi) - *cp |= mask; - else - *cp &= ~mask; -} - -/* Return the (padded with zeros) last byte of the buffer. When called with - a bitarray whose number of bits is a multiple of 8, return a NUL byte. */ -static inline char -zeroed_last_byte(bitarrayobject *self) -{ - const char mask_table[16] = { - 0x00, 0x01, 0x03, 0x07, 0x0f, 0x1f, 0x3f, 0x7f, /* little endian */ - 0x00, 0x80, 0xc0, 0xe0, 0xf0, 0xf8, 0xfc, 0xfe, /* big endian */ - }; - Py_ssize_t t = Py_SIZE(self) - 1; /* index of last byte in buffer */ - int r = self->nbits % 8; /* index into mask table (minus offset) */ - int be = self->endian == ENDIAN_BIG; /* is big endian */ - - if (r == 0) - return 0x00; - assert_nbits(self); - assert_byte_in_range(self, t); - return mask_table[r + 8 * be] & self->ob_item[t]; -} - -/* Unless the buffer is readonly, zero out pad bits. 
- Always return the number of pad bits - leave self->nbits unchanged */ -static inline int -setunused(bitarrayobject *self) -{ - int r = self->nbits % 8; - - if (r == 0) - return 0; - if (self->readonly == 0) - self->ob_item[Py_SIZE(self) - 1] = zeroed_last_byte(self); - return 8 - r; -} - -static const unsigned char bitcount_lookup[256] = { -#define B2(n) n, n + 1, n + 1, n + 2 -#define B4(n) B2(n), B2(n + 1), B2(n + 1), B2(n + 2) -#define B6(n) B4(n), B4(n + 1), B4(n + 1), B4(n + 2) - B6(0), B6(1), B6(1), B6(2) -#undef B2 -#undef B4 -#undef B6 -}; - -/* normalize index (which may be negative), such that 0 <= i <= n */ -static inline void -normalize_index(Py_ssize_t n, Py_ssize_t *i) -{ - if (*i < 0) { - *i += n; - if (*i < 0) - *i = 0; - } - if (*i > n) - *i = n; -} - -/* Interpret a PyObject (usually PyLong or PyBool) as a bit, return 0 or 1. - On error, return -1 and set error message. */ -static inline int -pybit_as_int(PyObject *value) -{ - Py_ssize_t x; - - x = PyNumber_AsSsize_t(value, NULL); - if (x == -1 && PyErr_Occurred()) - return -1; - - if (x < 0 || x > 1) { - PyErr_Format(PyExc_ValueError, "bit must be 0 or 1, got %zd", x); - return -1; - } - return (int) x; -} diff --git a/shell/ext-py/bitarray-2.3.0/bitarray/copy_n.txt b/shell/ext-py/bitarray-2.3.0/bitarray/copy_n.txt deleted file mode 100644 index 01c59fbac..000000000 --- a/shell/ext-py/bitarray-2.3.0/bitarray/copy_n.txt +++ /dev/null @@ -1,105 +0,0 @@ -copy_n() in _bitarray.c -======================= - -The following variable names are used to in this document, as well as in -the source code: - - self bitarray object bits are copies onto - a start bit (in self) bits are copied onto - other bitarray object bits are copied from - b start bit (in other) bits are copied from - n number of bits being copied - -There are 3 cases handled by this function: (i) aligned case (ii) small n case -(iii) general case. For all cases, it is important to handle self == other. 
-We will briefly discuss the first two cases, and then go into more detail -about the general case. - - -Aligned case ------------- - -In the aligned case, i.e. when both start positions (a and b) are a -multiple of 8, we use memmove() on the first 8 * n bits, and call copy_n() -on itself to handle the few remaining bits. Note that the order of these -two operations (memmove() and copy_n()) matters when copying self to self. - - -Small n case ------------- - -For n smaller than a low limit (like 24 bits), we use a sequence of getbit() -and setbit() calls which is quite slow. We could choose the low limit to be -as low as 8. However, as the general case has some overhead, we don't see -a speedup over this case until we copy at least several bytes. -As other might be self, we need to either loop forward or backwards in order -to not copy from bits already changed. - - -General case ------------- - -We choose an aligned region to be copied using copy_n() itself, and use -shift_r8() and a few fixes to create the correct copy. The is done in the -following steps: - -1.) Calculate the following byte positions: - p1: start (in self) memory is copied to, i.e. a / 8 - p2: last (in self) memory is copied to, i.e. (a + n - 1) / 8 - p3: first (in other) memory is copied from, i.e. b / 8 - -2.) Store temporary bytes corresponding to p1, p2, p3 in t1, t2, t3. - We need these bytes later to restore bits which got overwritten or - shifted away. Note that we have to store t3 (the byte with b in other), - as other can be self. - -3.) Calculate the total right shift of bytes p1..p2 (once copied into self). - This shift depends on a % 8 and b % 8, and has to be a right shift - below 8. - -4.) Using copy_n(), copy the byte region from other at p3 or p3 + 1 into - self at p1. Because in the latter case we miss the beginning bits in - other, we need t3 and copy those bits later. - -5.) Right shift self in byte range p1..p2. 
This region includes the - bit-range(a, a + n), but is generally larger. This is why we need t1 - and t2 to restore the bytes at p1 and p2 in self later. - -6.) Restore bits in self at, see step 5: - - p1 using t1 (those got overwritten and shifted) - - p2 using t2 (those got shifted) - -7.) Copy the first few bits from other to self (using t3, see step 4). - - -Here is an example with the following parameters (created using -examples/copy_n.py): - -a = 21 -b = 6 -n = 31 -p1 = 2 -p2 = 6 -p3 = 0 - -other -bitarray('00101110 11111001 01011101 11001011 10110000 01011110 011') -b..b+n ^^ ^^^^^^^^ ^^^^^^^^ ^^^^^^^^ ^^^^^ - ======== ======== ======== ===== - 33 -self -bitarray('01011101 11100101 01110101 01011001 01110100 10001010 01111011') -a..a+n ^^^ ^^^^^^^^ ^^^^^^^^ ^^^^^^^^ ^^^^ - 11111 - 2222 -copy_n - ======== ======== ======== ===== -bitarray('01011101 11100101 11111001 01011101 11001011 10110010 01111011') -rshift 7 - >>>>>>>> >>>>>>>> >>>>>>>> >>>>>>>> >>>>>>>> -bitarray('01011101 11100101 00000001 11110010 10111011 10010111 01100100') -a..a+n = ======== ======== ======== ==== - 11111 - 2222 - 33 -bitarray('01011101 11100101 01110101 11110010 10111011 10010111 01101011') diff --git a/shell/ext-py/bitarray-2.3.0/bitarray/py.typed b/shell/ext-py/bitarray-2.3.0/bitarray/py.typed deleted file mode 100644 index e69de29bb..000000000 diff --git a/shell/ext-py/bitarray-2.3.0/bitarray/pythoncapi_compat.h b/shell/ext-py/bitarray-2.3.0/bitarray/pythoncapi_compat.h deleted file mode 100644 index d9de1818b..000000000 --- a/shell/ext-py/bitarray-2.3.0/bitarray/pythoncapi_compat.h +++ /dev/null @@ -1,347 +0,0 @@ -// Header file providing new functions of the Python C API to old Python -// versions. -// -// File distributed under the MIT license. 
-// -// Homepage: -// https://github.com/pythoncapi/pythoncapi_compat -// -// Latest version: -// https://raw.githubusercontent.com/pythoncapi/pythoncapi_compat/master/pythoncapi_compat.h -// -// SPDX-License-Identifier: MIT - -#ifndef PYTHONCAPI_COMPAT -#define PYTHONCAPI_COMPAT - -#ifdef __cplusplus -extern "C" { -#endif - -#include -#include "frameobject.h" // PyFrameObject, PyFrame_GetBack() - - -// Compatibility with Visual Studio 2013 and older which don't support -// the inline keyword in C (only in C++): use __inline instead. -#if (defined(_MSC_VER) && _MSC_VER < 1900 \ - && !defined(__cplusplus) && !defined(inline)) -# define inline __inline -# define PYTHONCAPI_COMPAT_MSC_INLINE - // These two macros are undefined at the end of this file -#endif - - -// Cast argument to PyObject* type. -#ifndef _PyObject_CAST -# define _PyObject_CAST(op) ((PyObject*)(op)) -#endif -#ifndef _PyObject_CAST_CONST -# define _PyObject_CAST_CONST(op) ((const PyObject*)(op)) -#endif - - -// bpo-42262 added Py_NewRef() to Python 3.10.0a3 -#if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_NewRef) -static inline PyObject* _Py_NewRef(PyObject *obj) -{ - Py_INCREF(obj); - return obj; -} -#define Py_NewRef(obj) _Py_NewRef(_PyObject_CAST(obj)) -#endif - - -// bpo-42262 added Py_XNewRef() to Python 3.10.0a3 -#if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_XNewRef) -static inline PyObject* _Py_XNewRef(PyObject *obj) -{ - Py_XINCREF(obj); - return obj; -} -#define Py_XNewRef(obj) _Py_XNewRef(_PyObject_CAST(obj)) -#endif - - -// See https://bugs.python.org/issue42522 -#if !defined(_Py_StealRef) -static inline PyObject* __Py_StealRef(PyObject *obj) -{ - Py_DECREF(obj); - return obj; -} -#define _Py_StealRef(obj) __Py_StealRef(_PyObject_CAST(obj)) -#endif - - -// See https://bugs.python.org/issue42522 -#if !defined(_Py_XStealRef) -static inline PyObject* __Py_XStealRef(PyObject *obj) -{ - Py_XDECREF(obj); - return obj; -} -#define _Py_XStealRef(obj) __Py_XStealRef(_PyObject_CAST(obj)) -#endif - - 
-// bpo-39573 added Py_SET_REFCNT() to Python 3.9.0a4 -#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_REFCNT) -static inline void _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) -{ - ob->ob_refcnt = refcnt; -} -#define Py_SET_REFCNT(ob, refcnt) _Py_SET_REFCNT(_PyObject_CAST(ob), refcnt) -#endif - - -// Py_SETREF() and Py_XSETREF() were added to Python 3.5.2. -// It is excluded from the limited C API. -#if (PY_VERSION_HEX < 0x03050200 && !defined(Py_SETREF)) && !defined(Py_LIMITED_API) -#define Py_SETREF(op, op2) \ - do { \ - PyObject *_py_tmp = _PyObject_CAST(op); \ - (op) = (op2); \ - Py_DECREF(_py_tmp); \ - } while (0) - -#define Py_XSETREF(op, op2) \ - do { \ - PyObject *_py_tmp = _PyObject_CAST(op); \ - (op) = (op2); \ - Py_XDECREF(_py_tmp); \ - } while (0) -#endif - -// bpo-39573 added Py_SET_TYPE() to Python 3.9.0a4 -#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_TYPE) -static inline void -_Py_SET_TYPE(PyObject *ob, PyTypeObject *type) -{ - ob->ob_type = type; -} -#define Py_SET_TYPE(ob, type) _Py_SET_TYPE(_PyObject_CAST(ob), type) -#endif - - -// bpo-39573 added Py_SET_SIZE() to Python 3.9.0a4 -#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_SET_SIZE) -static inline void -_Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) -{ - ob->ob_size = size; -} -#define Py_SET_SIZE(ob, size) _Py_SET_SIZE((PyVarObject*)(ob), size) -#endif - - -// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1 -#if PY_VERSION_HEX < 0x030900B1 -static inline PyCodeObject* -PyFrame_GetCode(PyFrameObject *frame) -{ - assert(frame != NULL); - assert(frame->f_code != NULL); - return (PyCodeObject*)Py_NewRef(frame->f_code); -} -#endif - -static inline PyCodeObject* -_PyFrame_GetCodeBorrow(PyFrameObject *frame) -{ - return (PyCodeObject *)_Py_StealRef(PyFrame_GetCode(frame)); -} - - -// bpo-40421 added PyFrame_GetCode() to Python 3.9.0b1 -#if PY_VERSION_HEX < 0x030900B1 && !defined(PYPY_VERSION) -static inline PyFrameObject* -PyFrame_GetBack(PyFrameObject *frame) -{ - assert(frame != 
NULL); - return (PyFrameObject*)Py_XNewRef(frame->f_back); -} -#endif - -#if !defined(PYPY_VERSION) -static inline PyFrameObject* -_PyFrame_GetBackBorrow(PyFrameObject *frame) -{ - return (PyFrameObject *)_Py_XStealRef(PyFrame_GetBack(frame)); -} -#endif - - -// bpo-39947 added PyThreadState_GetInterpreter() to Python 3.9.0a5 -#if PY_VERSION_HEX < 0x030900A5 -static inline PyInterpreterState * -PyThreadState_GetInterpreter(PyThreadState *tstate) -{ - assert(tstate != NULL); - return tstate->interp; -} -#endif - - -// bpo-40429 added PyThreadState_GetFrame() to Python 3.9.0b1 -#if PY_VERSION_HEX < 0x030900B1 && !defined(PYPY_VERSION) -static inline PyFrameObject* -PyThreadState_GetFrame(PyThreadState *tstate) -{ - assert(tstate != NULL); - return (PyFrameObject *)Py_XNewRef(tstate->frame); -} -#endif - -#if !defined(PYPY_VERSION) -static inline PyFrameObject* -_PyThreadState_GetFrameBorrow(PyThreadState *tstate) -{ - return (PyFrameObject *)_Py_XStealRef(PyThreadState_GetFrame(tstate)); -} -#endif - - -// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a5 -#if PY_VERSION_HEX < 0x030900A5 -static inline PyInterpreterState * -PyInterpreterState_Get(void) -{ - PyThreadState *tstate; - PyInterpreterState *interp; - - tstate = PyThreadState_GET(); - if (tstate == NULL) { - Py_FatalError("GIL released (tstate is NULL)"); - } - interp = tstate->interp; - if (interp == NULL) { - Py_FatalError("no current interpreter"); - } - return interp; -} -#endif - - -// bpo-39947 added PyInterpreterState_Get() to Python 3.9.0a6 -#if 0x030700A1 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x030900A6 && !defined(PYPY_VERSION) -static inline uint64_t -PyThreadState_GetID(PyThreadState *tstate) -{ - assert(tstate != NULL); - return tstate->id; -} -#endif - - -// bpo-37194 added PyObject_CallNoArgs() to Python 3.9.0a1 -#if PY_VERSION_HEX < 0x030900A1 -static inline PyObject* -PyObject_CallNoArgs(PyObject *func) -{ - return PyObject_CallFunctionObjArgs(func, NULL); -} -#endif - - -// 
bpo-39245 made PyObject_CallOneArg() public (previously called -// _PyObject_CallOneArg) in Python 3.9.0a4 -#if PY_VERSION_HEX < 0x030900A4 -static inline PyObject* -PyObject_CallOneArg(PyObject *func, PyObject *arg) -{ - return PyObject_CallFunctionObjArgs(func, arg, NULL); -} -#endif - - -// bpo-1635741 added PyModule_AddObjectRef() to Python 3.10.0a3 -#if PY_VERSION_HEX < 0x030A00A3 -static inline int -PyModule_AddObjectRef(PyObject *module, const char *name, PyObject *value) -{ - int res; - Py_XINCREF(value); - res = PyModule_AddObject(module, name, value); - if (res < 0) { - Py_XDECREF(value); - } - return res; -} -#endif - - -// bpo-40024 added PyModule_AddType() to Python 3.9.0a5 -#if PY_VERSION_HEX < 0x030900A5 -static inline int -PyModule_AddType(PyObject *module, PyTypeObject *type) -{ - const char *name, *dot; - - if (PyType_Ready(type) < 0) { - return -1; - } - - // inline _PyType_Name() - name = type->tp_name; - assert(name != NULL); - dot = strrchr(name, '.'); - if (dot != NULL) { - name = dot + 1; - } - - return PyModule_AddObjectRef(module, name, (PyObject *)type); -} -#endif - - -// bpo-40241 added PyObject_GC_IsTracked() to Python 3.9.0a6. -// bpo-4688 added _PyObject_GC_IS_TRACKED() to Python 2.7.0a2. -#if PY_VERSION_HEX < 0x030900A6 && !defined(PYPY_VERSION) -static inline int -PyObject_GC_IsTracked(PyObject* obj) -{ - return (PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj)); -} -#endif - -// bpo-40241 added PyObject_GC_IsFinalized() to Python 3.9.0a6. -// bpo-18112 added _PyGCHead_FINALIZED() to Python 3.4.0 final. 
-#if PY_VERSION_HEX < 0x030900A6 && PY_VERSION_HEX >= 0x030400F0 && !defined(PYPY_VERSION) -static inline int -PyObject_GC_IsFinalized(PyObject *obj) -{ - return (PyObject_IS_GC(obj) && _PyGCHead_FINALIZED((PyGC_Head *)(obj)-1)); -} -#endif - - -// bpo-39573 added Py_IS_TYPE() to Python 3.9.0a4 -#if PY_VERSION_HEX < 0x030900A4 && !defined(Py_IS_TYPE) -static inline int -_Py_IS_TYPE(const PyObject *ob, const PyTypeObject *type) { - return ob->ob_type == type; -} -#define Py_IS_TYPE(ob, type) _Py_IS_TYPE(_PyObject_CAST_CONST(ob), type) -#endif - - -// Py_UNUSED() was added to Python 3.4.0b2. -#if PY_VERSION_HEX < 0x030400B2 && !defined(Py_UNUSED) -# if defined(__GNUC__) || defined(__clang__) -# define Py_UNUSED(name) _unused_ ## name __attribute__((unused)) -# else -# define Py_UNUSED(name) _unused_ ## name -# endif -#endif - - -#ifdef PYTHONCAPI_COMPAT_MSC_INLINE -# undef inline -# undef PYTHONCAPI_COMPAT_MSC_INLINE -#endif - -#ifdef __cplusplus -} -#endif -#endif // PYTHONCAPI_COMPAT diff --git a/shell/ext-py/bitarray-2.3.0/bitarray/test_bitarray.py b/shell/ext-py/bitarray-2.3.0/bitarray/test_bitarray.py deleted file mode 100644 index b9695e1d3..000000000 --- a/shell/ext-py/bitarray-2.3.0/bitarray/test_bitarray.py +++ /dev/null @@ -1,4253 +0,0 @@ -""" -Tests for bitarray - -Author: Ilan Schnell -""" -from __future__ import absolute_import - -import re -import os -import sys -import unittest -import tempfile -import shutil -from random import randint - -# imports needed inside tests -import copy -import pickle -import itertools -import shelve - - -is_py3k = bool(sys.version_info[0] == 3) - -if is_py3k: - from io import BytesIO -else: - from cStringIO import StringIO as BytesIO # type: ignore - range = xrange # type: ignore - - -from bitarray import (bitarray, frozenbitarray, bits2bytes, decodetree, - get_default_endian, _set_default_endian, - _sysinfo, __version__) - -SYSINFO = _sysinfo() -DEBUG = SYSINFO[6] - -def buffer_info(a, key=None): - fields = ( - "address", 
# 0. address of byte buffer - "size", # 1. buffer size in bytes - "endian", # 2. bit endianness - "padding", # 3. number of pad bits - "allocated", # 4. allocated memory - "readonly", # 5. memory is read-only - "imported", # 6. buffer is imported - "exports", # 7. number of buffer exports - ) - info = a.buffer_info() - res = dict(zip(fields, info)) - return res if key is None else res[key] - - -# avoid importing from bitarray.util -def zeros(n, endian=None): - a = bitarray(n, endian or get_default_endian()) - a.setall(0) - return a - -def urandom(n, endian=None): - a = bitarray(0, endian or get_default_endian()) - a.frombytes(os.urandom(bits2bytes(n))) - del a[n:] - return a - -tests = [] # type: list - -class Util(object): - - @staticmethod - def random_endian(): - return ['little', 'big'][randint(0, 1)] - - @staticmethod - def randombitarrays(start=0): - for n in list(range(start, 25)) + [randint(1000, 2000)]: - a = bitarray(endian=['little', 'big'][randint(0, 1)]) - a.frombytes(os.urandom(bits2bytes(n))) - del a[n:] - yield a - - def randomlists(self): - for a in self.randombitarrays(): - yield a.tolist() - - @staticmethod - def rndsliceidx(length): - if randint(0, 1): - return None - else: - return randint(-length - 5, length + 5) - - @staticmethod - def other_endian(endian): - t = {'little': 'big', - 'big': 'little'} - return t[endian] - - @staticmethod - def slicelen(s, length): - assert isinstance(s, slice) - start, stop, step = s.indices(length) - slicelength = (stop - start + (1 if step < 0 else -1)) // step + 1 - if slicelength < 0: - slicelength = 0 - return slicelength - - def check_obj(self, a): - self.assertIsInstance(a, bitarray) - - ptr, size, endian, unused, alloc, readonly, buf, exports = \ - a.buffer_info() - - self.assertEqual(size, bits2bytes(len(a))) - self.assertEqual(unused, 8 * size - len(a)) - self.assertTrue(0 <= unused < 8) - self.assertEqual(endian, a.endian()) - self.assertTrue(endian in ('little', 'big')) - - if buf: - # imported 
buffer implies that no extra memory is allocated - self.assertEqual(alloc, 0) - # an imported buffer will always have a multiple of 8 bits - self.assertEqual(len(a) % 8, 0) - self.assertEqual(len(a), 8 * size) - self.assertEqual(unused, 0) - else: - # the allocated memory is always larger than the buffer size - self.assertTrue(alloc >= size) - - if ptr == 0: - # the buffer being a NULL pointer implies that the buffer size - # and the allocated memory size are 0 - self.assertEqual(size, 0) - self.assertEqual(alloc, 0) - - if type(a).__name__ == 'frozenbitarray': - # frozenbitarray have read-only memory - self.assertEqual(readonly, 1) - elif not buf: - # otherwise, unless the buffer is imported, it is writable - self.assertEqual(readonly, 0) - - def assertEQUAL(self, a, b): - self.assertEqual(a, b) - self.assertEqual(a.endian(), b.endian()) - - def assertIsType(self, a, b): - self.assertEqual(type(a).__name__, b) - self.assertEqual( - repr(type(a)), "<%s 'bitarray.%s'>" % - ('class' if is_py3k or b == 'frozenbitarray' else 'type', b)) - - def assertBitEqual(self, x, y): - for z in x, y: - self.assertEqual('01'[z], repr(z)) - self.assertEqual(x, y) - - def assertStopIteration(self, it): - self.assertRaises(StopIteration, next, it) - - def assertRaisesMessage(self, excClass, msg, callable, *args, **kwargs): - try: - callable(*args, **kwargs) - raise AssertionError("%s not raised" % excClass.__name__) - except excClass as e: - if msg != str(e): - raise AssertionError("message: %s\n got: %s" % (msg, e)) - -# --------------------------------------------------------------------------- - -class TestsModuleFunctions(unittest.TestCase, Util): - - def test_version_string(self): - # the version string is not a function, but test it here anyway - self.assertIsInstance(__version__, str) - - def test_sysinfo(self): - info = _sysinfo() - self.assertIsInstance(info, tuple) - for x in info: - self.assertIsInstance(x, int) - - def test_set_default_endian(self): - 
self.assertRaises(TypeError, _set_default_endian, 0) - self.assertRaises(TypeError, _set_default_endian, 'little', 0) - self.assertRaises(ValueError, _set_default_endian, 'foo') - for default_endian in 'big', 'little', u'big', u'little': - _set_default_endian(default_endian) - a = bitarray() - self.assertEqual(a.endian(), default_endian) - for x in None, 0, 64, '10111', [1, 0]: - a = bitarray(x) - self.assertEqual(a.endian(), default_endian) - - for endian in 'big', 'little': - a = bitarray(endian=endian) - self.assertEqual(a.endian(), endian) - - # make sure that calling _set_default_endian wrong does not - # change the default endianness - self.assertRaises(ValueError, _set_default_endian, 'foobar') - self.assertEqual(bitarray().endian(), default_endian) - - def test_get_default_endian(self): - # takes no arguments - self.assertRaises(TypeError, get_default_endian, 'big') - for default_endian in 'big', 'little': - _set_default_endian(default_endian) - endian = get_default_endian() - self.assertEqual(endian, default_endian) - self.assertIsInstance(endian, str) - - def test_bits2bytes(self): - for arg in 'foo', [], None, {}, 187.0, -4.0: - self.assertRaises(TypeError, bits2bytes, arg) - - self.assertRaises(TypeError, bits2bytes) - self.assertRaises(TypeError, bits2bytes, 1, 2) - - self.assertRaises(ValueError, bits2bytes, -1) - self.assertRaises(ValueError, bits2bytes, -924) - - self.assertEqual(bits2bytes(0), 0) - for n in range(1, 100): - m = bits2bytes(n) - self.assertEqual(m, (n - 1) // 8 + 1) - self.assertIsInstance(m, int) - - for n, m in [(0, 0), (1, 1), (2, 1), (7, 1), (8, 1), (9, 2), - (10, 2), (15, 2), (16, 2), (64, 8), (65, 9), - (2**31, 2**28), (2**32, 2**29), (2**34, 2**31), - (2**34+793, 2**31+100), (2**35-8, 2**32-1), - (2**62, 2**59), (2**63-8, 2**60-1)]: - self.assertEqual(bits2bytes(n), m) - -tests.append(TestsModuleFunctions) - -# --------------------------------------------------------------------------- - -class 
CreateObjectTests(unittest.TestCase, Util): - - def test_noInitializer(self): - a = bitarray() - self.assertEqual(len(a), 0) - self.assertEqual(a.tolist(), []) - self.assertIsType(a, 'bitarray') - self.check_obj(a) - - def test_endian(self): - a = bitarray(endian='little') - a.frombytes(b'ABC') - self.assertEqual(a.endian(), 'little') - self.assertIsInstance(a.endian(), str) - self.check_obj(a) - - b = bitarray(endian='big') - b.frombytes(b'ABC') - self.assertEqual(b.endian(), 'big') - self.assertIsInstance(a.endian(), str) - self.check_obj(b) - - self.assertNotEqual(a, b) - self.assertEqual(a.tobytes(), b.tobytes()) - - def test_endian_default(self): - _set_default_endian('big') - a_big = bitarray() - _set_default_endian('little') - a_little = bitarray() - _set_default_endian('big') - - self.assertEqual(a_big.endian(), 'big') - self.assertEqual(a_little.endian(), 'little') - - def test_endian_wrong(self): - self.assertRaises(TypeError, bitarray, endian=0) - self.assertRaises(ValueError, bitarray, endian='') - self.assertRaisesMessage( - ValueError, - "bit endianness must be either 'little' or 'big', got: 'foo'", - bitarray, endian='foo') - self.assertRaisesMessage(TypeError, - "'ellipsis' object is not iterable", - bitarray, Ellipsis) - - def test_buffer(self): - # buffer requires no initial argument - self.assertRaises(TypeError, bitarray, 5, buffer=b'DATA\0') - - for endian in 'big', 'little': - a = bitarray(buffer=b'', endian=endian) - self.assertEQUAL(a, bitarray(0, endian)) - - _set_default_endian(endian) - a = bitarray(buffer=b'A') - self.assertEqual(a.endian(), endian) - self.assertEqual(len(a), 8) - - a = bitarray(buffer=b'\xf0', endian='little') - self.assertRaises(TypeError, a.clear) - self.assertRaises(TypeError, a.__setitem__, 3, 1) - self.assertEQUAL(a, bitarray('00001111', 'little')) - self.check_obj(a) - - # positinal arguments - a = bitarray(None, 'big', bytearray([15])) - self.assertEQUAL(a, bitarray('00001111', 'big')) - a = bitarray(None, 
'little', None) - self.assertEQUAL(a, bitarray(0, 'little')) - - def test_integers(self): - for n in range(50): - a = bitarray(n) - self.assertEqual(len(a), n) - self.check_obj(a) - - a = bitarray(int(n)) - self.assertEqual(len(a), n) - self.check_obj(a) - - if not is_py3k: - a = bitarray(long(29)) - self.assertEqual(len(a), 29) - - self.assertRaises(ValueError, bitarray, -1) - self.assertRaises(ValueError, bitarray, -924) - - def test_list(self): - lst = [0, 1, False, True] - a = bitarray(lst) - self.assertEqual(a, bitarray('0101')) - self.check_obj(a) - - if not is_py3k: - a = bitarray([long(1), long(0)]) - self.assertEqual(a, bitarray('10')) - - self.assertRaises(ValueError, bitarray, [0, 1, 2]) - self.assertRaises(TypeError, bitarray, [0, 1, None]) - - for n in range(50): - lst = [bool(randint(0, 1)) for d in range(n)] - a = bitarray(lst) - self.assertEqual(a.tolist(), lst) - self.check_obj(a) - - def test_tuple(self): - tup = (0, True, False, 1) - a = bitarray(tup) - self.assertEqual(a, bitarray('0101')) - self.check_obj(a) - - self.assertRaises(ValueError, bitarray, (0, 1, 2)) - self.assertRaises(TypeError, bitarray, (0, 1, None)) - - for n in range(50): - lst = [bool(randint(0, 1)) for d in range(n)] - a = bitarray(tuple(lst)) - self.assertEqual(a.tolist(), lst) - self.check_obj(a) - - def test_iter1(self): - for n in range(50): - lst = [bool(randint(0, 1)) for d in range(n)] - a = bitarray(iter(lst)) - self.assertEqual(a.tolist(), lst) - self.check_obj(a) - - def test_iter2(self): - for lst in self.randomlists(): - def foo(): - for x in lst: - yield x - a = bitarray(foo()) - self.assertEqual(a, bitarray(lst)) - self.check_obj(a) - - def test_iter3(self): - a = bitarray(itertools.repeat(False, 10)) - self.assertEqual(a, zeros(10)) - a = bitarray(itertools.repeat(1, 10)) - self.assertEqual(a, bitarray(10 * '1')) - - def test_range(self): - self.assertEqual(bitarray(range(2)), bitarray('01')) - self.assertRaises(ValueError, bitarray, range(0, 3)) - - def 
test_string01(self): - for s in ('0010111', u'0010111', '0010 111', u'0010 111', - '0010_111', u'0010_111'): - a = bitarray(s) - self.assertEqual(a.tolist(), [0, 0, 1, 0, 1, 1, 1]) - self.check_obj(a) - - for n in range(50): - lst = [bool(randint(0, 1)) for d in range(n)] - s = ''.join([['0', '1'][x] for x in lst]) - a = bitarray(s) - self.assertEqual(a.tolist(), lst) - self.check_obj(a) - - self.assertRaises(ValueError, bitarray, '01021') - self.assertRaises(UnicodeEncodeError, bitarray, u'1\u26050') - - def test_string01_whitespace(self): - whitespace = ' \n\r\t\v' - a = bitarray(whitespace) - self.assertEqual(a, bitarray()) - - # For Python 2 (where strings are bytes), we are in the lucky - # position that none of the valid characters ('\t'=9, '\n'=10, - # '\v'=11, '\r'=13, ' '=32, '0'=48 and '1'=49) are valid header - # bytes for deserialization 0..7, 16..23. Therefore a string of - # '0's and '1'a can start with any whitespace character, as well - # as '0' or '1' obviously. - for c in whitespace: - a = bitarray(c + '1101110001') - self.assertEqual(a, bitarray('1101110001')) - - a = bitarray(' 0\n1\r0\t1\v0 ') - self.assertEqual(a, bitarray('01010')) - - def test_rawbytes(self): - self.assertEqual(bitarray(b'\x00').endian(), 'little') - self.assertEqual(bitarray(b'\x10').endian(), 'big') - - # this representation is used for pickling - for s, r in [(b'\x00', ''), (b'\x07\xff', '1'), (b'\x03\xff', '11111'), - (b'\x01\x87\xda', '10000111 1101101')]: - self.assertEqual(bitarray(s, endian='big'), bitarray(r)) - - self.assertEQUAL(bitarray(b'\x12\x0f', 'little'), - bitarray('111100', 'little')) - self.assertEQUAL(bitarray(b'\x02\x0f', 'big'), - bitarray('000011', 'big')) - - for a, s in [ - (bitarray(0, 'little'), b'\x00'), - (bitarray(0, 'big'), b'\x10'), - (bitarray('1', 'little'), b'\x07\x01'), - (bitarray('1', 'big'), b'\x17\x80'), - (bitarray('11110000', 'little'), b'\x00\x0f'), - (bitarray('11110000', 'big'), b'\x10\xf0'), - ]: - self.assertEQUAL(bitarray(s), 
a) - - def test_rawbytes_invalid(self): - for s in b'\x01', b'\x04', b'\x07', b'\x11', b'\x15', b'\x17': - # this error is raised in newbitarray_from_pickle() (C function) - if is_py3k: - self.assertRaisesMessage(ValueError, - "invalid header byte: 0x%02x" % s[0], - bitarray, s) - else: - # Python 2: PyErr_Format() seems to handle "0x%02x" - # incorrectly. Oh well... - self.assertRaises(ValueError, bitarray, s) - - a = bitarray(s + b'\x00') - head = s[0] if is_py3k else ord(s[0]) - endian, unused = divmod(head, 16) - self.assertEqual(a.endian(), ['little', 'big'][endian]) - self.assertEqual(len(a), 8 - unused) - self.assertFalse(a.any()) - - s = b'\x21' - if is_py3k: - # on Python 3, we don't allow bitarrays being created from bytes - error = TypeError - msg = ("cannot extend bitarray with 'bytes', use .pack() or " - ".frombytes() instead") - else: - # on Python 2, we have an invalid character in the string - error = ValueError - msg = ("expected '0' or '1' (or whitespace, or underscore), " - "got '!' (0x21)") - self.assertRaisesMessage(error, msg, bitarray, s) - - def test_bitarray_simple(self): - for n in range(10): - a = bitarray(n) - b = bitarray(a) - self.assertFalse(a is b) - self.assertEQUAL(a, b) - - def test_bitarray_endian(self): - # Test creating a new bitarray with different endianness from an - # existing bitarray. 
- for endian in 'little', 'big': - a = bitarray(endian=endian) - b = bitarray(a) - self.assertFalse(a is b) - self.assertEQUAL(a, b) - - endian2 = self.other_endian(endian) - b = bitarray(a, endian2) - self.assertEqual(b.endian(), endian2) - self.assertEqual(a, b) - - for a in self.randombitarrays(): - endian2 = self.other_endian(a.endian()) - b = bitarray(a, endian2) - self.assertEqual(a, b) - self.assertEqual(b.endian(), endian2) - self.assertNotEqual(a.endian(), b.endian()) - - def test_bitarray_endianness(self): - a = bitarray('11100001', endian='little') - b = bitarray(a, endian='big') - self.assertEqual(a, b) - self.assertNotEqual(a.tobytes(), b.tobytes()) - - b.bytereverse() - self.assertNotEqual(a, b) - self.assertEqual(a.tobytes(), b.tobytes()) - - c = bitarray('11100001', endian='big') - self.assertEqual(a, c) - - def test_frozenbitarray(self): - a = bitarray(frozenbitarray()) - self.assertEQUAL(a, bitarray()) - self.assertIsType(a, 'bitarray') - - for endian in 'little', 'big': - a = bitarray(frozenbitarray('011', endian=endian)) - self.assertEQUAL(a, bitarray('011', endian)) - self.assertIsType(a, 'bitarray') - - def test_create_empty(self): - for x in (None, 0, '', list(), tuple(), set(), dict(), u'', - bitarray(), frozenbitarray()): - a = bitarray(x) - self.assertEqual(len(a), 0) - self.assertEQUAL(a, bitarray()) - - if is_py3k: - self.assertRaises(TypeError, bitarray, b'') - else: - self.assertEqual(bitarray(b''), bitarray()) - - def test_wrong_args(self): - # wrong types - for x in False, True, Ellipsis, slice(0), 0.0, 0 + 0j: - self.assertRaises(TypeError, bitarray, x) - if is_py3k: - self.assertRaises(TypeError, bitarray, b'10') - else: - self.assertEQUAL(bitarray(b'10'), bitarray('10')) - # wrong values - for x in -1, 'A': - self.assertRaises(ValueError, bitarray, x) - # test second (endian) argument - self.assertRaises(TypeError, bitarray, 0, None) - self.assertRaises(TypeError, bitarray, 0, 0) - self.assertRaises(ValueError, bitarray, 0, 'foo') 
- # too many args - self.assertRaises(TypeError, bitarray, 0, 'big', 0) - -tests.append(CreateObjectTests) - -# --------------------------------------------------------------------------- - -class ToObjectsTests(unittest.TestCase, Util): - - def test_numeric(self): - a = bitarray() - self.assertRaises(Exception, int, a) - self.assertRaises(Exception, float, a) - self.assertRaises(Exception, complex, a) - - def test_list(self): - for a in self.randombitarrays(): - self.assertEqual(list(a), a.tolist()) - - def test_tuple(self): - for a in self.randombitarrays(): - self.assertEqual(tuple(a), tuple(a.tolist())) - -tests.append(ToObjectsTests) - -# --------------------------------------------------------------------------- - -class MetaDataTests(unittest.TestCase): - - def test_buffer_info(self): - a = bitarray(13, endian='little') - self.assertEqual(a.buffer_info()[1:4], (2, 'little', 3)) - - info = a.buffer_info() - self.assertIsInstance(info, tuple) - self.assertEqual(len(info), 8) - for i, item in enumerate(info): - if i == 2: - self.assertIsInstance(item, str) - continue - self.assertIsInstance(item, int) - - def test_endian(self): - for endian in 'big', 'little': - a = bitarray(endian=endian) - self.assertEqual(a.endian(), endian) - - def test_len(self): - for n in range(100): - a = bitarray(n) - self.assertEqual(len(a), n) - -tests.append(MetaDataTests) - -# --------------------------------------------------------------------------- - -class InternalTests(unittest.TestCase, Util): - - # Internal functionality exposed for the purpose of testing. - # This class will only be part of the test suite in debug mode. 
- - def test_shift_r8_empty(self): - a = bitarray() - a._shift_r8(0, 0, 3) - self.assertEqual(a, bitarray()) - - a = urandom(80) - for i in range(11): - b = a.copy() - a._shift_r8(i, i, 5) - self.assertEqual(a, b) - - def test_shift_r8_explicit(self): - x = bitarray('11000100 11111111 11100111 11111111 00001000') - y = bitarray('11000100 00000111 11111111 00111111 00001000') - x._shift_r8(1, 4, 5) - self.assertEqual(x, y) - - x = bitarray('11000100 11110') - y = bitarray('00011000 10011') - x._shift_r8(0, 2, 3) - self.assertEqual(x, y) - - def test_shift_r8_random_bytes(self): - for N in range(100): - a = randint(0, N) - b = randint(a, N) - n = randint(0, 7) - x = urandom(8 * N, self.random_endian()) - y = x.copy() - x._shift_r8(a, b, n) - y[8 * a : 8 * b] >>= n - self.assertEQUAL(x, y) - self.assertEqual(len(x), 8 * N) - - def test_copy_n_explicit(self): - x = bitarray('11000100 11110') - # ^^^^ ^ - y = bitarray('0101110001') - # ^^^^^ - x._copy_n(4, y, 1, 5) - self.assertEqual(x, bitarray('11001011 11110')) - # ^^^^ ^ - x = bitarray('10110111 101', 'little') - y = x.copy() - x._copy_n(3, x, 3, 7) # copy region of x onto x - self.assertEqual(x, y) - x._copy_n(3, bitarray(x, 'big'), 3, 7) # as before but other endian - self.assertEqual(x, y) - x._copy_n(5, bitarray(), 0, 0) # copy empty bitarray onto x - self.assertEqual(x, y) - - def test_copy_n_example(self): - # example givin in bitarray/copy_n.txt - y = bitarray( - '00101110 11111001 01011101 11001011 10110000 01011110 011') - x = bitarray( - '01011101 11100101 01110101 01011001 01110100 10001010 01111011') - x._copy_n(21, y, 6, 31) - self.assertEqual(x, bitarray( - '01011101 11100101 01110101 11110010 10111011 10010111 01101011')) - - def check_copy_n(self, N, M, a, b, n): - x = urandom(N, self.random_endian()) - x_lst = x.tolist() - y = x if M < 0 else urandom(M, self.random_endian()) - x_lst[a:a + n] = y.tolist()[b:b + n] - x._copy_n(a, y, b, n) - self.assertEqual(x, bitarray(x_lst)) - 
self.assertEqual(len(x), N) - self.check_obj(x) - - def test_copy_n_range(self): - for a in range(8): - for b in range(8): - for n in range(90): - self.check_copy_n(100, -1, a, b, n) - self.check_copy_n(100, 100, a, b, n) - - def test_copy_n_random_self(self): - for N in range(500): - n = randint(0, N) - a = randint(0, N - n) - b = randint(0, N - n) - self.check_copy_n(N, -1, a, b, n) - - def test_copy_n_random_other(self): - for N in range(500): - M = randint(0, 5 + 2 * N) - n = randint(0, min(N, M)) - a = randint(0, N - n) - b = randint(0, M - n) - self.check_copy_n(N, M, a, b, n) - - @staticmethod - def getslice(a, start, slicelength): - # this is the Python eqivalent of __getitem__ for slices with step=1 - b = bitarray(slicelength, a.endian()) - b._copy_n(0, a, start, slicelength) - return b - - def test_getslice(self): - for a in self.randombitarrays(): - a_lst = a.tolist() - n = len(a) - i = randint(0, n) - j = randint(i, n) - b = self.getslice(a, i, j - i) - self.assertEqual(b.tolist(), a_lst[i:j]) - self.assertEQUAL(b, a[i:j]) - -if DEBUG: - tests.append(InternalTests) - -# --------------------------------------------------------------------------- - -class SliceTests(unittest.TestCase, Util): - - def test_getitem_1(self): - a = bitarray() - self.assertRaises(IndexError, a.__getitem__, 0) - a.append(True) - self.assertBitEqual(a[0], 1) - self.assertBitEqual(a[-1], 1) - self.assertRaises(IndexError, a.__getitem__, 1) - self.assertRaises(IndexError, a.__getitem__, -2) - a.append(False) - self.assertBitEqual(a[1], 0) - self.assertBitEqual(a[-1], 0) - self.assertRaises(IndexError, a.__getitem__, 2) - self.assertRaises(IndexError, a.__getitem__, -3) - self.assertRaises(TypeError, a.__getitem__, 1.5) - self.assertRaises(TypeError, a.__getitem__, None) - self.assertRaises(TypeError, a.__getitem__, 'A') - - def test_getitem_2(self): - a = bitarray('1100010') - for i, b in enumerate(a): - self.assertBitEqual(a[i], b) - self.assertBitEqual(a[i - 7], b) - 
self.assertRaises(IndexError, a.__getitem__, 7) - self.assertRaises(IndexError, a.__getitem__, -8) - - def test_getslice(self): - a = bitarray('01001111 00001') - self.assertEQUAL(a[:], a) - self.assertFalse(a[:] is a) - self.assertEQUAL(a[13:2:-3], bitarray('1010')) - self.assertEQUAL(a[2:-1:4], bitarray('010')) - self.assertEQUAL(a[::2], bitarray('0011001')) - self.assertEQUAL(a[8:], bitarray('00001')) - self.assertEQUAL(a[7:], bitarray('100001')) - self.assertEQUAL(a[:8], bitarray('01001111')) - self.assertEQUAL(a[::-1], bitarray('10000111 10010')) - self.assertEQUAL(a[:8:-1], bitarray('1000')) - - self.assertRaises(ValueError, a.__getitem__, slice(None, None, 0)) - self.assertRaises(TypeError, a.__getitem__, (1, 2)) - - def test_getslice_random(self): - for a in self.randombitarrays(start=1): - aa = a.tolist() - la = len(a) - for _ in range(10): - step = self.rndsliceidx(la) or None - s = slice(self.rndsliceidx(la), self.rndsliceidx(la), step) - self.assertEQUAL(a[s], bitarray(aa[s], endian=a.endian())) - - def test_getslice_random2(self): - n = randint(1000, 2000) - a = urandom(n, self.random_endian()) - sa = a.to01() - for _ in range(50): - i = randint(0, n) - j = randint(i, n) - b = a[i:j] - self.assertEqual(b.to01(), sa[i:j]) - self.assertEqual(len(b), j - i) - self.assertEqual(b.endian(), a.endian()) - - def test_setitem_simple(self): - a = bitarray('0') - a[0] = 1 - self.assertEqual(a, bitarray('1')) - - a = bitarray(2) - a[0] = 0 - a[1] = 1 - self.assertEqual(a, bitarray('01')) - a[-1] = 0 - a[-2] = 1 - self.assertEqual(a, bitarray('10')) - - self.assertRaises(ValueError, a.__setitem__, 0, -1) - self.assertRaises(TypeError, a.__setitem__, 1, None) - - self.assertRaises(IndexError, a.__setitem__, 2, True) - self.assertRaises(IndexError, a.__setitem__, -3, False) - self.assertRaises(TypeError, a.__setitem__, 1.5, 1) # see issue 114 - self.assertRaises(TypeError, a.__setitem__, None, 0) - self.assertRaises(TypeError, a.__setitem__, 'a', True) - 
self.assertEqual(a, bitarray('10')) - - def test_setitem_random(self): - for a in self.randombitarrays(start=1): - i = randint(0, len(a) - 1) - aa = a.tolist() - val = bool(randint(0, 1)) - a[i] = val - aa[i] = val - self.assertEqual(a.tolist(), aa) - self.check_obj(a) - - def test_setslice_simple(self): - for a in self.randombitarrays(start=1): - la = len(a) - b = bitarray(la) - b[0:la] = bitarray(a) - self.assertEqual(a, b) - self.assertFalse(a is b) - - b = bitarray(la) - b[:] = bitarray(a) - self.assertEqual(a, b) - self.assertFalse(a is b) - - b = bitarray(la) - b[::-1] = bitarray(a) - self.assertEqual(a.tolist()[::-1], b.tolist()) - - def test_setslice_random(self): - for a in self.randombitarrays(start=1): - la = len(a) - for _ in range(10): - step = self.rndsliceidx(la) or None - s = slice(self.rndsliceidx(la), self.rndsliceidx(la), step) - lb = randint(0, 10) if step is None else self.slicelen(s, la) - b = bitarray(lb) - c = bitarray(a) - c[s] = b - self.check_obj(c) - cc = a.tolist() - cc[s] = b.tolist() - self.assertEqual(c, bitarray(cc)) - - def test_setslice_self_random(self): - for a in self.randombitarrays(): - for step in -1, 1: - s = slice(None, None, step) - aa = a.tolist() - a[s] = a - aa[s] = aa - self.assertEqual(a, bitarray(aa)) - - def test_setslice_special(self): - for n in 0, 1, 10, 87: - a = urandom(n) - for m in 0, 1, 10, 99: - x = urandom(m) - b = a.copy() - b[n:n] = x # insert at end - extend - self.assertEqual(b, a + x) - self.assertEqual(len(b), len(a) + len(x)) - b[0:0] = x # insert at 0 - prepend - self.assertEqual(b, x + a + x) - self.check_obj(b) - self.assertEqual(len(b), len(a) + 2 * len(x)) - - def test_setslice_range(self): - # tests C function insert_n() - for endian in 'big', 'little': - for n in range(500): - a = urandom(n, endian) - p = randint(0, n) - m = randint(0, 500) - - x = urandom(m, self.random_endian()) - b = a.copy() - b[p:p] = x - self.assertEQUAL(b, a[:p] + x + a[p:]) - self.assertEqual(len(b), len(a) + m) - 
self.check_obj(b) - - def test_setslice_resize(self): - N, M = 200, 300 - for endian in 'big', 'little': - for n in 0, randint(0, N), N: - a = urandom(n, endian) - for p1 in 0, randint(0, n), n: - for p2 in 0, randint(0, p1), p1, randint(0, n), n: - for m in 0, randint(0, M), M: - x = urandom(m, self.random_endian()) - b = a.copy() - b[p1:p2] = x - b_lst = a.tolist() - b_lst[p1:p2] = x.tolist() - self.assertEqual(b.tolist(), b_lst) - if p1 <= p2: - self.assertEQUAL(b, a[:p1] + x + a[p2:]) - self.assertEqual(len(b), n + p1 - p2 + len(x)) - else: - self.assertEqual(b, a[:p1] + x + a[p1:]) - self.assertEqual(len(b), n + len(x)) - self.check_obj(b) - - def test_setslice_self(self): - a = bitarray('1100111') - a[::-1] = a - self.assertEqual(a, bitarray('1110011')) - a[4:] = a - self.assertEqual(a, bitarray('11101110011')) - a[:-5] = a - self.assertEqual(a, bitarray('1110111001110011')) - - a = bitarray('01001') - a[:-1] = a - self.assertEqual(a, bitarray('010011')) - a[2::] = a - self.assertEqual(a, bitarray('01010011')) - a[2:-2:1] = a - self.assertEqual(a, bitarray('010101001111')) - - a = bitarray('011') - a[2:2] = a - self.assertEqual(a, bitarray('010111')) - a[:] = a - self.assertEqual(a, bitarray('010111')) - - def test_setslice_bitarray(self): - a = bitarray('11111111 1111') - a[2:6] = bitarray('0010') - self.assertEqual(a, bitarray('11001011 1111')) - a.setall(0) - a[::2] = bitarray('111001') - self.assertEqual(a, bitarray('10101000 0010')) - a.setall(0) - a[3:] = bitarray('111') - self.assertEqual(a, bitarray('000111')) - - a = bitarray(12) - a.setall(0) - a[1:11:2] = bitarray('11101') - self.assertEqual(a, bitarray('01010100 0100')) - - a = bitarray(12) - a.setall(0) - a[:-6:-1] = bitarray('10111') - self.assertEqual(a, bitarray('00000001 1101')) - - def test_setslice_bitarray_2(self): - a = bitarray('1111') - a[3:3] = bitarray('000') # insert - self.assertEqual(a, bitarray('1110001')) - a[2:5] = bitarray() # remove - self.assertEqual(a, bitarray('1101')) - - 
a = bitarray('1111') - a[1:3] = bitarray('0000') - self.assertEqual(a, bitarray('100001')) - a[:] = bitarray('010') # replace all values - self.assertEqual(a, bitarray('010')) - - # assign slice to bitarray with different length - a = bitarray('111111') - a[3:4] = bitarray('00') - self.assertEqual(a, bitarray('1110011')) - a[2:5] = bitarray('0') # remove - self.assertEqual(a, bitarray('11011')) - - def test_setslice_bitarray_random_same_length(self): - for endian in 'little', 'big': - for _ in range(100): - n = randint(0, 200) - a = urandom(n, endian) - lst_a = a.tolist() - b = urandom(randint(0, n), self.random_endian()) - lst_b = b.tolist() - i = randint(0, n - len(b)) - j = i + len(b) - self.assertEqual(j - i, len(b)) - a[i:j] = b - lst_a[i:j] = lst_b - self.assertEqual(a.tolist(), lst_a) - # a didn't change length - self.assertEqual(len(a), n) - self.assertEqual(a.endian(), endian) - self.check_obj(a) - - def test_setslice_bitarray_random_step_1(self): - for _ in range(50): - n = randint(0, 300) - a = urandom(n, self.random_endian()) - lst_a = a.tolist() - b = urandom(randint(0, 100), self.random_endian()) - lst_b = b.tolist() - s = slice(self.rndsliceidx(n), self.rndsliceidx(n), None) - a[s] = b - lst_a[s] = lst_b - self.assertEqual(a.tolist(), lst_a) - self.check_obj(a) - - def test_setslice_bool_explicit(self): - a = bitarray('11111111') - a[::2] = False - self.assertEqual(a, bitarray('01010101')) - a[4::] = True # ^^^^ - self.assertEqual(a, bitarray('01011111')) - a[-2:] = False # ^^ - self.assertEqual(a, bitarray('01011100')) - a[:2:] = True # ^^ - self.assertEqual(a, bitarray('11011100')) - a[:] = True # ^^^^^^^^ - self.assertEqual(a, bitarray('11111111')) - a[2:5] = False # ^^^ - self.assertEqual(a, bitarray('11000111')) - a[1::3] = False # ^ ^ ^ - self.assertEqual(a, bitarray('10000110')) - a[1:6:2] = True # ^ ^ ^ - self.assertEqual(a, bitarray('11010110')) - a[3:3] = False # zero slicelength - self.assertEqual(a, bitarray('11010110')) - a[:] = False # 
^^^^^^^^ - self.assertEqual(a, bitarray('00000000')) - a[-2:2:-1] = 1 # ^^^^ - self.assertEqual(a, bitarray('00011110')) - - def test_setslice_bool_simple(self): - for _ in range(100): - N = randint(100, 2000) - s = slice(randint(0, 20), randint(N - 20, N), randint(1, 20)) - a = zeros(N) - a[s] = 1 - b = zeros(N) - for i in range(s.start, s.stop, s.step): - b[i] = 1 - self.assertEqual(a, b) - - def test_setslice_bool_range(self): - N = 200 - a = bitarray(N) - b = bitarray(N) - for step in range(-N - 1, N): - if step == 0: - continue - v = randint(0, 1) - a.setall(not v) - a[::step] = v - - b.setall(not v) - for i in range(0, N, abs(step)): - b[i] = v - if step < 0: - b.reverse() - self.assertEqual(a, b) - - def test_setslice_bool_random(self): - N = 100 - a = bitarray(N) - for _ in range(100): - a.setall(0) - aa = a.tolist() - step = self.rndsliceidx(N) or None - s = slice(self.rndsliceidx(N), self.rndsliceidx(N), step) - a[s] = 1 - aa[s] = self.slicelen(s, N) * [1] - self.assertEqual(a.tolist(), aa) - - def test_setslice_bool_random2(self): - for a in self.randombitarrays(): - n = len(a) - aa = a.tolist() - step = self.rndsliceidx(n) or None - s = slice(self.rndsliceidx(n), self.rndsliceidx(n), step) - v = randint(0, 1) - a[s] = v - aa[s] = self.slicelen(s, n) * [v] - self.assertEqual(a.tolist(), aa) - - def test_setslice_to_int(self): - a = bitarray('11111111') - a[::2] = 0 # ^ ^ ^ ^ - self.assertEqual(a, bitarray('01010101')) - a[4::] = 1 # ^^^^ - self.assertEqual(a, bitarray('01011111')) - a.__setitem__(slice(-2, None, None), 0) - self.assertEqual(a, bitarray('01011100')) - self.assertRaises(ValueError, a.__setitem__, slice(None, None, 2), 3) - self.assertRaises(ValueError, a.__setitem__, slice(None, 2, None), -1) - # a[:2:] = '0' - self.assertRaises(TypeError, a.__setitem__, slice(None, 2, None), '0') - - def test_setslice_to_invalid(self): - a = bitarray('11111111') - s = slice(2, 6, None) - self.assertRaises(TypeError, a.__setitem__, s, 1.2) - 
self.assertRaises(TypeError, a.__setitem__, s, None) - self.assertRaises(TypeError, a.__setitem__, s, "0110") - a[s] = False - self.assertEqual(a, bitarray('11000011')) - # step != 1 and slicelen != length of assigned bitarray - self.assertRaisesMessage( - ValueError, - "attempt to assign sequence of size 3 to extended slice of size 4", - a.__setitem__, slice(None, None, 2), bitarray('000')) - self.assertRaisesMessage( - ValueError, - "attempt to assign sequence of size 3 to extended slice of size 2", - a.__setitem__, slice(None, None, 4), bitarray('000')) - self.assertRaisesMessage( - ValueError, - "attempt to assign sequence of size 7 to extended slice of size 8", - a.__setitem__, slice(None, None, -1), bitarray('0001000')) - self.assertEqual(a, bitarray('11000011')) - - def test_sieve(self): # Sieve of Eratosthenes - a = bitarray(50) - a.setall(1) - a[0:2] = 0 - for i in range(2, 8): - if a[i]: - a[i * i::i] = 0 - primes = a.search(1) - self.assertEqual(primes, [2, 3, 5, 7, 11, 13, 17, 19, - 23, 29, 31, 37, 41, 43, 47]) - - def test_delitem_simple(self): - a = bitarray('100110') - del a[1] - self.assertEqual(len(a), 5) - del a[3], a[-2] - self.assertEqual(a, bitarray('100')) - self.assertRaises(IndexError, a.__delitem__, 3) - self.assertRaises(IndexError, a.__delitem__, -4) - - def test_delitem_random(self): - for a in self.randombitarrays(start=1): - n = len(a) - b = a.copy() - i = randint(0, n - 1) - del b[i] - self.assertEQUAL(b, a[:i] + a[i + 1:]) - self.assertEqual(len(b), n - 1) - self.check_obj(b) - - def test_delslice_explicit(self): - a = bitarray('10101100 10110') - del a[3:9] # ^^^^^ ^ - self.assertEqual(a, bitarray('1010110')) - del a[::3] # ^ ^ ^ - self.assertEqual(a, bitarray('0111')) - a = bitarray('10101100 101101111') - del a[5:-3:3] # ^ ^ ^ - self.assertEqual(a, bitarray('1010100 0101111')) - a = bitarray('10101100 1011011') - del a[:-9:-2] # ^ ^ ^ ^ - self.assertEqual(a, bitarray('10101100 011')) - del a[3:3] # zero slicelength - 
self.assertEqual(a, bitarray('10101100 011')) - self.assertRaises(ValueError, a.__delitem__, slice(None, None, 0)) - self.assertEqual(len(a), 11) - del a[:] - self.assertEqual(a, bitarray()) - - def test_delslice_special(self): - for n in 0, 1, 10, 73: - a = urandom(n) - b = a.copy() - del b[:0] - del b[n:] - self.assertEqual(b, a) - del b[10:] # delete at end - self.assertEqual(b, a[:10]) - del b[:] # clear - self.assertEqual(len(b), 0) - self.check_obj(b) - - def test_delslice_random(self): - for a in self.randombitarrays(): - la = len(a) - for _ in range(10): - step = self.rndsliceidx(la) or None - s = slice(self.rndsliceidx(la), self.rndsliceidx(la), step) - c = a.copy() - del c[s] - self.check_obj(c) - c_lst = a.tolist() - del c_lst[s] - self.assertEQUAL(c, bitarray(c_lst, endian=c.endian())) - - def test_delslice_range(self): - # tests C function delete_n() - for endian in 'little', 'big': - for n in range(500): - a = urandom(n, endian) - p = randint(0, n) - m = randint(0, 500) - - b = a.copy() - del b[p:p + m] - self.assertEQUAL(b, a[:p] + a[p + m:]) - self.check_obj(b) - -tests.append(SliceTests) - -# --------------------------------------------------------------------------- - -class MiscTests(unittest.TestCase, Util): - - def test_instancecheck(self): - a = bitarray('011') - self.assertIsInstance(a, bitarray) - self.assertFalse(isinstance(a, str)) - - def test_booleanness(self): - self.assertEqual(bool(bitarray('')), False) - self.assertEqual(bool(bitarray('0')), True) - self.assertEqual(bool(bitarray('1')), True) - - def test_to01(self): - a = bitarray() - self.assertEqual(a.to01(), '') - self.assertIsInstance(a.to01(), str) - - a = bitarray('101') - self.assertEqual(a.to01(), '101') - self.assertIsInstance(a.to01(), str) - - def test_iterate(self): - for lst in self.randomlists(): - acc = [] - for b in bitarray(lst): - acc.append(b) - self.assertEqual(acc, lst) - - def test_iter1(self): - it = iter(bitarray('011')) - self.assertIsType(it, 
'bitarrayiterator') - self.assertBitEqual(next(it), 0) - self.assertBitEqual(next(it), 1) - self.assertBitEqual(next(it), 1) - self.assertStopIteration(it) - - def test_iter2(self): - for a in self.randombitarrays(): - aa = a.tolist() - self.assertEqual(list(a), aa) - self.assertEqual(list(iter(a)), aa) - - def test_assignment(self): - a = bitarray('00110111001') - a[1:3] = a[7:9] - a[-1:] = a[:1] - b = bitarray('01010111000') - self.assertEqual(a, b) - - def test_subclassing(self): - class ExaggeratingBitarray(bitarray): - - def __new__(cls, data, offset): - return bitarray.__new__(cls, data) - - def __init__(self, data, offset): - self.offset = offset - - def __getitem__(self, i): - return bitarray.__getitem__(self, i - self.offset) - - for a in self.randombitarrays(): - b = ExaggeratingBitarray(a, 1234) - for i in range(len(a)): - self.assertEqual(a[i], b[i + 1234]) - - def test_endianness1(self): - a = bitarray(endian='little') - a.frombytes(b'\x01') - self.assertEqual(a.to01(), '10000000') - - b = bitarray(endian='little') - b.frombytes(b'\x80') - self.assertEqual(b.to01(), '00000001') - - c = bitarray(endian='big') - c.frombytes(b'\x80') - self.assertEqual(c.to01(), '10000000') - - d = bitarray(endian='big') - d.frombytes(b'\x01') - self.assertEqual(d.to01(), '00000001') - - self.assertEqual(a, c) - self.assertEqual(b, d) - - def test_endianness2(self): - a = bitarray(8, endian='little') - a.setall(False) - a[0] = True - self.assertEqual(a.tobytes(), b'\x01') - a[1] = True - self.assertEqual(a.tobytes(), b'\x03') - a.frombytes(b' ') - self.assertEqual(a.tobytes(), b'\x03 ') - self.assertEqual(a.to01(), '1100000000000100') - - def test_endianness3(self): - a = bitarray(8, endian='big') - a.setall(False) - a[7] = True - self.assertEqual(a.tobytes(), b'\x01') - a[6] = True - self.assertEqual(a.tobytes(), b'\x03') - a.frombytes(b' ') - self.assertEqual(a.tobytes(), b'\x03 ') - self.assertEqual(a.to01(), '0000001100100000') - - def test_endianness4(self): - a = 
bitarray('00100000', endian='big') - self.assertEqual(a.tobytes(), b' ') - b = bitarray('00000100', endian='little') - self.assertEqual(b.tobytes(), b' ') - self.assertNotEqual(a, b) - - def test_pickle(self): - for a in self.randombitarrays(): - b = pickle.loads(pickle.dumps(a)) - self.assertFalse(b is a) - self.assertEQUAL(a, b) - - def test_overflow(self): - self.assertRaises(OverflowError, bitarray, 2 ** 63) - a = bitarray(1) - for i in -7, -1, 0, 1: - self.assertRaises(OverflowError, a.__imul__, 2 ** 63 + i) - a = bitarray(2 ** 10) - self.assertRaises(OverflowError, a.__imul__, 2 ** 53) - - if SYSINFO[0] == 8: - return - - a = bitarray(10 ** 6) - self.assertRaises(OverflowError, a.__imul__, 17180) - for i in -7, -1, 0, 1: - self.assertRaises(OverflowError, bitarray, 2 ** 31 + i) - try: - a = bitarray(2 ** 31 - 8); - except MemoryError: - return - self.assertRaises(OverflowError, bitarray.append, a, True) - - def test_unicode_create(self): - a = bitarray(u'') - self.assertEqual(a, bitarray()) - - a = bitarray(u'111001') - self.assertEqual(a, bitarray('111001')) - - def test_unhashable(self): - a = bitarray() - self.assertRaises(TypeError, hash, a) - self.assertRaises(TypeError, dict, [(a, 'foo')]) - -tests.append(MiscTests) - -# --------------------------------------------------------------------------- - -class RichCompareTests(unittest.TestCase, Util): - - def test_wrong_types(self): - a = bitarray() - for x in None, 7, 'A': - self.assertEqual(a.__eq__(x), NotImplemented) - self.assertEqual(a.__ne__(x), NotImplemented) - self.assertEqual(a.__ge__(x), NotImplemented) - self.assertEqual(a.__gt__(x), NotImplemented) - self.assertEqual(a.__le__(x), NotImplemented) - self.assertEqual(a.__lt__(x), NotImplemented) - - def test_explicit(self): - for sa, sb, res in [ - ('', '', '101010'), - ('1', '', '011100'), - ('11', '10', '011100'), - ('0', '1', '010011'), - ]: - a = bitarray(sa, self.random_endian()) - b = bitarray(sb, self.random_endian()) - self.assertEqual(a 
== b, int(res[0])) - self.assertEqual(a != b, int(res[1])) - self.assertEqual(a >= b, int(res[2])) - self.assertEqual(a > b, int(res[3])) - self.assertEqual(a <= b, int(res[4])) - self.assertEqual(a < b, int(res[5])) - - def test_eq_ne(self): - for _ in range(10): - self.assertTrue(bitarray(0, self.random_endian()) == - bitarray(0, self.random_endian())) - self.assertFalse(bitarray(0, self.random_endian()) != - bitarray(0, self.random_endian())) - - for n in range(1, 20): - a = bitarray(n, self.random_endian()) - a.setall(1) - b = bitarray(a, self.random_endian()) - self.assertTrue(a == b) - self.assertFalse(a != b) - b[n - 1] = 0 - self.assertTrue(a != b) - self.assertFalse(a == b) - - def test_eq_ne_random(self): - for a in self.randombitarrays(start=1): - b = bitarray(a, self.random_endian()) - self.assertTrue(a == b) - self.assertFalse(a != b) - b.invert(randint(0, len(a) - 1)) - self.assertTrue(a != b) - self.assertFalse(a == b) - - def check(self, a, b, c, d): - self.assertEqual(a == b, c == d) - self.assertEqual(a != b, c != d) - self.assertEqual(a <= b, c <= d) - self.assertEqual(a < b, c < d) - self.assertEqual(a >= b, c >= d) - self.assertEqual(a > b, c > d) - - def test_invert_random_element(self): - for a in self.randombitarrays(start=1): - n = len(a) - b = bitarray(a, self.random_endian()) - i = randint(0, n - 1) - b.invert(i) - self.check(a, b, a[i], b[i]) - - def test_size(self): - for _ in range(100): - a = zeros(randint(1, 20), self.random_endian()) - b = zeros(randint(1, 20), self.random_endian()) - self.check(a, b, len(a), len(b)) - - def test_random(self): - for a in self.randombitarrays(): - aa = a.tolist() - if randint(0, 1): - a = frozenbitarray(a) - for b in self.randombitarrays(): - bb = b.tolist() - if randint(0, 1): - b = frozenbitarray(b) - self.check(a, b, aa, bb) - self.check(a, b, aa, bb) - -tests.append(RichCompareTests) - -# --------------------------------------------------------------------------- - -class 
SpecialMethodTests(unittest.TestCase, Util): - - def test_all(self): - a = bitarray() - self.assertTrue(a.all()) - for s, r in ('0', False), ('1', True), ('01', False): - self.assertTrue(bitarray(s).all() is r) - - for a in self.randombitarrays(): - self.assertTrue(a.all() is all(a)) - - N = randint(1000, 2000) - a = bitarray(N) - a.setall(1) - self.assertTrue(a.all()) - a[N - 1] = 0 - self.assertFalse(a.all()) - - def test_any(self): - a = bitarray() - self.assertFalse(a.any()) - for s, r in ('0', False), ('1', True), ('01', True): - self.assertTrue(bitarray(s).any() is r) - - for a in self.randombitarrays(): - self.assertTrue(a.any() is any(a)) - - N = randint(1000, 2000) - a = bitarray(N) - a.setall(0) - self.assertFalse(a.any()) - a[N - 1] = 1 - self.assertTrue(a.any()) - - def test_repr(self): - r = repr(bitarray()) - self.assertEqual(r, "bitarray()") - self.assertIsInstance(r, str) - - r = repr(bitarray('10111')) - self.assertEqual(r, "bitarray('10111')") - self.assertIsInstance(r, str) - - for a in self.randombitarrays(): - b = eval(repr(a)) - self.assertFalse(b is a) - self.assertEqual(a, b) - self.check_obj(b) - - def test_copy(self): - for a in self.randombitarrays(): - b = a.copy() - self.assertFalse(b is a) - self.assertEQUAL(a, b) - - b = copy.copy(a) - self.assertFalse(b is a) - self.assertEQUAL(a, b) - - b = copy.deepcopy(a) - self.assertFalse(b is a) - self.assertEQUAL(a, b) - - def assertReallyEqual(self, a, b): - # assertEqual first, because it will have a good message if the - # assertion fails. - self.assertEqual(a, b) - self.assertEqual(b, a) - self.assertTrue(a == b) - self.assertTrue(b == a) - self.assertFalse(a != b) - self.assertFalse(b != a) - if not is_py3k: - self.assertEqual(0, cmp(a, b)) - self.assertEqual(0, cmp(b, a)) - - def assertReallyNotEqual(self, a, b): - # assertNotEqual first, because it will have a good message if the - # assertion fails. 
- self.assertNotEqual(a, b) - self.assertNotEqual(b, a) - self.assertFalse(a == b) - self.assertFalse(b == a) - self.assertTrue(a != b) - self.assertTrue(b != a) - if not is_py3k: - self.assertNotEqual(0, cmp(a, b)) - self.assertNotEqual(0, cmp(b, a)) - - def test_equality(self): - self.assertReallyEqual(bitarray(''), bitarray('')) - self.assertReallyEqual(bitarray('0'), bitarray('0')) - self.assertReallyEqual(bitarray('1'), bitarray('1')) - - def test_not_equality(self): - self.assertReallyNotEqual(bitarray(''), bitarray('1')) - self.assertReallyNotEqual(bitarray(''), bitarray('0')) - self.assertReallyNotEqual(bitarray('0'), bitarray('1')) - - def test_equality_random(self): - for a in self.randombitarrays(start=1): - b = a.copy() - self.assertReallyEqual(a, b) - n = len(a) - b.invert(n - 1) # flip last bit - self.assertReallyNotEqual(a, b) - - def test_sizeof(self): - a = bitarray() - size = sys.getsizeof(a) - self.assertEqual(size, a.__sizeof__()) - self.assertIsInstance(size, int if is_py3k else (int, long)) - self.assertTrue(size < 200) - a = bitarray(8000) - self.assertTrue(sys.getsizeof(a) > 1000) - -tests.append(SpecialMethodTests) - -# --------------------------------------------------------------------------- - -class SequenceMethodsTests(unittest.TestCase, Util): - - def test_concat(self): - a = bitarray('001') - b = a + bitarray('110') - self.assertEQUAL(b, bitarray('001110')) - b = a + [0, 1, True] - self.assertEQUAL(b, bitarray('001011')) - b = a + '100' - self.assertEQUAL(b, bitarray('001100')) - b = a + (1, 0, True) - self.assertEQUAL(b, bitarray('001101')) - self.assertRaises(ValueError, a.__add__, (0, 1, 2)) - self.assertEQUAL(a, bitarray('001')) - - self.assertRaises(TypeError, a.__add__, 42) - if is_py3k: - self.assertRaises(TypeError, a.__add__, b'1101') - else: - self.assertEqual(a + b'10', bitarray('00110')) - - for a in self.randombitarrays(): - aa = a.copy() - for b in self.randombitarrays(): - bb = b.copy() - c = a + b - 
self.assertEqual(c, bitarray(a.tolist() + b.tolist())) - self.assertEqual(c.endian(), a.endian()) - self.check_obj(c) - - self.assertEQUAL(a, aa) - self.assertEQUAL(b, bb) - - def test_inplace_concat(self): - a = bitarray('001') - a += bitarray('110') - self.assertEqual(a, bitarray('001110')) - a += [0, 1, True] - self.assertEqual(a, bitarray('001110011')) - a += '100' - self.assertEqual(a, bitarray('001110011100')) - a += (1, 0, True) - self.assertEqual(a, bitarray('001110011100101')) - - a = bitarray('110') - self.assertRaises(ValueError, a.__iadd__, [0, 1, 2]) - self.assertEqual(a, bitarray('110')) - - self.assertRaises(TypeError, a.__iadd__, 42) - b = b'101' - if is_py3k: - self.assertRaises(TypeError, a.__iadd__, b) - else: - a += b - self.assertEqual(a, bitarray('110101')) - - for a in self.randombitarrays(): - for b in self.randombitarrays(): - c = bitarray(a) - d = c - d += b - self.assertEqual(d, a + b) - self.assertTrue(c is d) - self.assertEQUAL(c, d) - self.assertEqual(d.endian(), a.endian()) - self.check_obj(d) - - def test_repeat_explicit(self): - for m, s, r in [ - ( 0, '', ''), - ( 0, '1001111', ''), - (-1, '100110', ''), - (11, '', ''), - ( 1, '110', '110'), - ( 2, '01', '0101'), - ( 5, '1', '11111'), - ]: - a = bitarray(s) - self.assertEqual(a * m, bitarray(r)) - self.assertEqual(m * a, bitarray(r)) - c = a.copy() - c *= m - self.assertEqual(c, bitarray(r)) - - def test_repeat_wrong_args(self): - a = bitarray() - self.assertRaises(TypeError, a.__mul__, None) - self.assertRaises(TypeError, a.__mul__, 2.0) - self.assertRaises(TypeError, a.__imul__, None) - self.assertRaises(TypeError, a.__imul__, 3.0) - - def test_repeat_random(self): - for a in self.randombitarrays(): - b = a.copy() - for m in list(range(-3, 5)) + [randint(100, 200)]: - res = bitarray(m * a.to01(), endian=a.endian()) - self.assertEqual(len(res), len(a) * max(0, m)) - - c = a * m - self.assertEQUAL(c, res) - c = m * a - self.assertEQUAL(c, res) - - c = a.copy() - c *= m - 
self.assertEQUAL(c, res) - self.check_obj(c) - - self.assertEQUAL(a, b) - - def test_contains_simple(self): - a = bitarray() - self.assertFalse(False in a) - self.assertFalse(True in a) - self.assertTrue(bitarray() in a) - a.append(True) - self.assertTrue(True in a) - self.assertFalse(False in a) - a = bitarray([False]) - self.assertTrue(False in a) - self.assertFalse(True in a) - a.append(True) - self.assertTrue(0 in a) - self.assertTrue(1 in a) - if not is_py3k: - self.assertTrue(long(0) in a) - self.assertTrue(long(1) in a) - - def test_contains_errors(self): - a = bitarray() - self.assertEqual(a.__contains__(1), False) - a.append(1) - self.assertEqual(a.__contains__(1), True) - a = bitarray('0011') - self.assertEqual(a.__contains__(bitarray('01')), True) - self.assertEqual(a.__contains__(bitarray('10')), False) - self.assertRaises(TypeError, a.__contains__, 'asdf') - self.assertRaises(ValueError, a.__contains__, 2) - self.assertRaises(ValueError, a.__contains__, -1) - if not is_py3k: - self.assertRaises(ValueError, a.__contains__, long(2)) - - def test_contains_range(self): - for n in range(2, 50): - a = bitarray(n) - a.setall(0) - self.assertTrue(False in a) - self.assertFalse(True in a) - a[randint(0, n - 1)] = 1 - self.assertTrue(True in a) - self.assertTrue(False in a) - a.setall(1) - self.assertTrue(True in a) - self.assertFalse(False in a) - a[randint(0, n - 1)] = 0 - self.assertTrue(True in a) - self.assertTrue(False in a) - - def test_contains_explicit(self): - a = bitarray('011010000001') - for s, r in [('', True), ('1', True), ('11', True), ('111', False), - ('011', True), ('0001', True), ('00011', False)]: - self.assertEqual(bitarray(s) in a, r) - -tests.append(SequenceMethodsTests) - -# --------------------------------------------------------------------------- - -class NumberTests(unittest.TestCase, Util): - - def test_misc(self): - for a in self.randombitarrays(): - b = ~a - c = a & b - self.assertEqual(c.any(), False) - self.assertEqual(a, a ^ c) 
- d = a ^ b - self.assertEqual(d.all(), True) - b &= d - self.assertEqual(~b, a) - - def test_bool(self): - a = bitarray() - self.assertTrue(bool(a) is False) - a.append(0) - self.assertTrue(bool(a) is True) - a.append(1) - self.assertTrue(bool(a) is True) - - def test_size_error(self): - a = bitarray('11001') - b = bitarray('100111') - self.assertRaises(ValueError, lambda: a & b) - self.assertRaises(ValueError, lambda: a | b) - self.assertRaises(ValueError, lambda: a ^ b) - for x in (a.__and__, a.__or__, a.__xor__, - a.__iand__, a.__ior__, a.__ixor__): - self.assertRaises(ValueError, x, b) - - def test_endianness_error(self): - a = bitarray('11001', 'big') - b = bitarray('10011', 'little') - self.assertRaises(ValueError, lambda: a & b) - self.assertRaises(ValueError, lambda: a | b) - self.assertRaises(ValueError, lambda: a ^ b) - for x in (a.__and__, a.__or__, a.__xor__, - a.__iand__, a.__ior__, a.__ixor__): - self.assertRaises(ValueError, x, b) - - def test_and(self): - a = bitarray('11001') - b = bitarray('10011') - c = a & b - self.assertEqual(c, bitarray('10001')) - self.check_obj(c) - - self.assertRaises(TypeError, lambda: a & 1) - self.assertRaises(TypeError, lambda: 1 & a) - self.assertEqual(a, bitarray('11001')) - self.assertEqual(b, bitarray('10011')) - - def test_or(self): - a = bitarray('11001') - b = bitarray('10011') - c = a | b - self.assertEqual(c, bitarray('11011')) - self.check_obj(c) - - self.assertRaises(TypeError, lambda: a | 1) - self.assertRaises(TypeError, lambda: 1 | a) - self.assertEqual(a, bitarray('11001')) - self.assertEqual(b, bitarray('10011')) - - def test_xor(self): - a = bitarray('11001') - b = bitarray('10011') - c = a ^ b - self.assertEQUAL(c, bitarray('01010')) - self.check_obj(c) - - self.assertRaises(TypeError, lambda: a ^ 1) - self.assertRaises(TypeError, lambda: 1 ^ a) - self.assertEqual(a, bitarray('11001')) - self.assertEqual(b, bitarray('10011')) - - def test_iand(self): - a = bitarray('110010110') - b = 
bitarray('100110011') - a &= b - self.assertEqual(a, bitarray('100010010')) - self.assertEqual(b, bitarray('100110011')) - self.check_obj(a) - self.check_obj(b) - try: - a &= 1 - except TypeError: - error = 1 - self.assertEqual(error, 1) - - def test_ior(self): - a = bitarray('110010110') - b = bitarray('100110011') - a |= b - self.assertEQUAL(a, bitarray('110110111')) - self.assertEQUAL(b, bitarray('100110011')) - try: - a |= 1 - except TypeError: - error = 1 - self.assertEqual(error, 1) - - def test_ixor(self): - a = bitarray('110010110') - b = bitarray('100110011') - a ^= b - self.assertEQUAL(a, bitarray('010100101')) - self.assertEQUAL(b, bitarray('100110011')) - try: - a ^= 1 - except TypeError: - error = 1 - self.assertEqual(error, 1) - - def test_bitwise_self(self): - for a in self.randombitarrays(): - aa = a.copy() - self.assertEQUAL(a & a, aa) - self.assertEQUAL(a | a, aa) - self.assertEQUAL(a ^ a, zeros(len(aa), aa.endian())) - self.assertEQUAL(a, aa) - - def test_bitwise_inplace_self(self): - for a in self.randombitarrays(): - aa = a.copy() - a &= a - self.assertEQUAL(a, aa) - a |= a - self.assertEQUAL(a, aa) - a ^= a - self.assertEqual(a, zeros(len(aa), aa.endian())) - - def test_invert(self): - a = bitarray('11011') - b = ~a - self.assertEQUAL(b, bitarray('00100')) - self.assertEQUAL(a, bitarray('11011')) - self.assertFalse(a is b) - self.check_obj(b) - - for a in self.randombitarrays(): - b = bitarray(a) - b.invert() - for i in range(len(a)): - self.assertEqual(b[i], not a[i]) - self.check_obj(b) - self.assertEQUAL(~a, b) - - @staticmethod - def shift(a, n, direction): - if n >= len(a): - return zeros(len(a), a.endian()) - - if direction == 'right': - return zeros(n, a.endian()) + a[:len(a)-n] - elif direction == 'left': - return a[n:] + zeros(n, a.endian()) - else: - raise ValueError("invalid direction: %s" % direction) - - def test_lshift(self): - a = bitarray('11011') - b = a << 2 - self.assertEQUAL(b, bitarray('01100')) - 
self.assertRaises(TypeError, lambda: a << 1.2) - self.assertRaises(TypeError, a.__lshift__, 1.2) - self.assertRaises(ValueError, lambda: a << -1) - self.assertRaises(OverflowError, a.__lshift__, 2 ** 63) - - for a in self.randombitarrays(): - c = a.copy() - n = randint(0, len(a) + 3) - b = a << n - self.assertEqual(len(b), len(a)) - self.assertEQUAL(b, self.shift(a, n, 'left')) - self.assertEQUAL(a, c) - - def test_rshift(self): - a = bitarray('1101101') - b = a >> 1 - self.assertEQUAL(b, bitarray('0110110')) - self.assertRaises(TypeError, lambda: a >> 1.2) - self.assertRaises(TypeError, a.__rshift__, 1.2) - self.assertRaises(ValueError, lambda: a >> -1) - - for a in self.randombitarrays(): - c = a.copy() - n = randint(0, len(a) + 3) - b = a >> n - self.assertEqual(len(b), len(a)) - self.assertEQUAL(b, self.shift(a, n, 'right')) - self.assertEQUAL(a, c) - - def test_ilshift(self): - a = bitarray('110110101') - a <<= 7 - self.assertEQUAL(a, bitarray('010000000')) - self.assertRaises(TypeError, a.__ilshift__, 1.2) - self.assertRaises(ValueError, a.__ilshift__, -3) - - for a in self.randombitarrays(): - b = a.copy() - n = randint(0, len(a) + 3) - b <<= n - self.assertEqual(len(b), len(a)) - self.assertEQUAL(b, self.shift(a, n, 'left')) - - def test_irshift(self): - a = bitarray('110110111') - a >>= 3 - self.assertEQUAL(a, bitarray('000110110')) - self.assertRaises(TypeError, a.__irshift__, 1.2) - self.assertRaises(ValueError, a.__irshift__, -4) - - for a in self.randombitarrays(): - b = a.copy() - n = randint(0, len(a) + 3) - b >>= n - self.assertEqual(len(b), len(a)) - self.assertEQUAL(b, self.shift(a, n, 'right')) - - def check_random(self, n, endian, n_shift, direction): - a = urandom(n, endian) - self.assertEqual(len(a), n) - - b = a.copy() - if direction == 'left': - b <<= n_shift - else: - b >>= n_shift - self.assertEQUAL(b, self.shift(a, n_shift, direction)) - - def test_shift_range(self): - for endian in 'little', 'big': - for direction in 'left', 'right': - 
for n in range(0, 200): - self.check_random(n, endian, 1, direction) - self.check_random(n, endian, randint(0, n), direction) - for n_shift in range(0, 100): - self.check_random(100, endian, n_shift, direction) - - def test_zero_shift(self): - for a in self.randombitarrays(): - aa = a.copy() - self.assertEQUAL(a << 0, aa) - self.assertEQUAL(a >> 0, aa) - a <<= 0 - self.assertEQUAL(a, aa) - a >>= 0 - self.assertEQUAL(a, aa) - - def test_len_or_larger_shift(self): - # ensure shifts with len(a) (or larger) result in all zero bitarrays - for a in self.randombitarrays(): - c = a.copy() - z = zeros(len(a), a.endian()) - n = randint(len(a), len(a) + 10) - self.assertEQUAL(a << n, z) - self.assertEQUAL(a >> n, z) - self.assertEQUAL(a, c) - a <<= n - self.assertEQUAL(a, z) - a = bitarray(c) - a >>= n - self.assertEQUAL(a, z) - - def test_shift_example(self): - a = bitarray('0010011') - self.assertEqual(a << 3, bitarray('0011000')) - a >>= 4 - self.assertEqual(a, bitarray('0000001')) - -tests.append(NumberTests) - -# --------------------------------------------------------------------------- - -class ExtendTests(unittest.TestCase, Util): - - def test_wrongArgs(self): - a = bitarray() - self.assertRaises(TypeError, a.extend) - self.assertRaises(TypeError, a.extend, None) - self.assertRaises(TypeError, a.extend, True) - self.assertRaises(TypeError, a.extend, 24) - self.assertRaises(TypeError, a.extend, 1.0) - - def test_bitarray(self): - a = bitarray() - a.extend(bitarray()) - self.assertEqual(a, bitarray()) - a.extend(bitarray('110')) - self.assertEqual(a, bitarray('110')) - a.extend(bitarray('1110')) - self.assertEqual(a, bitarray('1101110')) - - a = bitarray('00001111', endian='little') - a.extend(bitarray('00100111', endian='big')) - self.assertEqual(a, bitarray('00001111 00100111')) - - def test_bitarray_random(self): - for a in self.randombitarrays(): - sa = a.to01() - for b in self.randombitarrays(): - bb = b.copy() - c = bitarray(a) - c.extend(b) - 
self.assertEqual(c.to01(), sa + bb.to01()) - self.assertEqual(c.endian(), a.endian()) - self.assertEqual(len(c), len(a) + len(b)) - self.check_obj(c) - # ensure b hasn't changed - self.assertEQUAL(b, bb) - self.check_obj(b) - - def test_list(self): - a = bitarray() - a.extend([]) - self.assertEqual(a, bitarray()) - a.extend([0, 1, True, False]) - self.assertEqual(a, bitarray('0110')) - self.assertRaises(ValueError, a.extend, [0, 1, 2]) - self.assertRaises(TypeError, a.extend, [0, 1, 'a']) - self.assertEqual(a, bitarray('0110')) - - for a in self.randomlists(): - for b in self.randomlists(): - c = bitarray(a) - c.extend(b) - self.assertEqual(c.tolist(), a + b) - self.check_obj(c) - - def test_tuple(self): - a = bitarray() - a.extend(tuple()) - self.assertEqual(a, bitarray()) - a.extend((0, 1, True, 0, False)) - self.assertEqual(a, bitarray('01100')) - self.assertRaises(ValueError, a.extend, (0, 1, 2)) - self.assertRaises(TypeError, a.extend, (0, 1, 'a')) - self.assertEqual(a, bitarray('01100')) - - for a in self.randomlists(): - for b in self.randomlists(): - c = bitarray(a) - c.extend(tuple(b)) - self.assertEqual(c.tolist(), a + b) - self.check_obj(c) - - def test_generator_1(self): - def gen(lst): - for x in lst: - yield x - a = bitarray('0011') - a.extend(gen([0, 1, False, True, 0])) - self.assertEqual(a, bitarray('0011 01010')) - self.assertRaises(ValueError, a.extend, gen([0, 1, 2])) - self.assertRaises(TypeError, a.extend, gen([1, 0, None])) - self.assertEqual(a, bitarray('0011 01010')) - - a = bytearray() - a.extend(gen([0, 1, 255])) - self.assertEqual(a, b'\x00\x01\xff') - self.assertRaises(ValueError, a.extend, gen([0, 1, 256])) - self.assertRaises(TypeError, a.extend, gen([1, 0, None])) - self.assertEqual(a, b'\x00\x01\xff') - - for a in self.randomlists(): - def foo(): - for e in a: - yield e - b = bitarray() - b.extend(foo()) - self.assertEqual(b.tolist(), a) - self.check_obj(b) - - def test_generator_2(self): - def gen(): - for i in range(10): - if i == 
4: - raise KeyError - yield i % 2 - - a = bitarray() - self.assertRaises(KeyError, a.extend, gen()) - self.assertEqual(a, bitarray('0101')) - a = [] - self.assertRaises(KeyError, a.extend, gen()) - self.assertEqual(a, [0, 1, 0, 1]) - - def test_iterator_1(self): - a = bitarray() - a.extend(iter([])) - self.assertEqual(a, bitarray()) - a.extend(iter([1, 1, 0, True, False])) - self.assertEqual(a, bitarray('11010')) - self.assertRaises(ValueError, a.extend, iter([1, 1, 0, 0, 2])) - self.assertEqual(a, bitarray('11010')) - - for a in self.randomlists(): - for b in self.randomlists(): - c = bitarray(a) - c.extend(iter(b)) - self.assertEqual(c.tolist(), a + b) - self.check_obj(c) - - def test_iterator_2(self): - a = bitarray() - a.extend(itertools.repeat(True, 23)) - self.assertEqual(a, bitarray(23 * '1')) - self.check_obj(a) - - def test_string01(self): - a = bitarray() - a.extend(str()) - a.extend('') - self.assertEqual(a, bitarray()) - a.extend('0110111') - self.assertEqual(a, bitarray('0110111')) - self.assertRaises(ValueError, a.extend, '0011201') - # ensure no bits got added after error was raised - self.assertEqual(a, bitarray('0110111')) - - a = bitarray() - self.assertRaises(ValueError, a.extend, 1000 * '01' + '.') - self.assertEqual(a, bitarray()) - - for a in self.randomlists(): - for b in self.randomlists(): - c = bitarray(a) - c.extend(''.join(['0', '1'][x] for x in b)) - self.assertEqual(c, bitarray(a + b)) - self.check_obj(c) - - def test_string01_whitespace(self): - a = bitarray() - a.extend('0 1\n0\r1\t0\v1_') - self.assertEqual(a, bitarray('010101')) - a += '_ 1\n0\r1\t0\v' - self.assertEqual(a, bitarray('010101 1010')) - self.check_obj(a) - - def test_unicode(self): - a = bitarray() - a.extend(u'') - self.assertEqual(a, bitarray()) - self.assertRaises(ValueError, a.extend, u'0011201') - # ensure no bits got added after error was raised - self.assertEqual(a, bitarray()) - self.check_obj(a) - - a = bitarray() - a.extend(u'001 011_') - self.assertEqual(a, 
bitarray('001011')) - self.assertRaises(UnicodeEncodeError, a.extend, u'1\u2605 0') - self.assertEqual(a, bitarray('001011')) - self.check_obj(a) - - def test_bytes(self): - a = bitarray() - b = b'10110' - if is_py3k: - self.assertRaises(TypeError, a.extend, b) - else: - a.extend(b) - self.assertEqual(a, bitarray('10110')) - self.check_obj(a) - - def test_self(self): - for s in '', '1', '110', '00110111': - a = bitarray(s) - a.extend(a) - self.assertEqual(a, bitarray(2 * s)) - - for a in self.randombitarrays(): - endian = a.endian() - s = a.to01() - a.extend(a) - self.assertEqual(a.to01(), 2 * s) - self.assertEqual(a.endian(), endian) - self.assertEqual(len(a), 2 * len(s)) - self.check_obj(a) - -tests.append(ExtendTests) - -# --------------------------------------------------------------------------- - -class MethodTests(unittest.TestCase, Util): - - def test_append_simple(self): - a = bitarray() - a.append(True) - a.append(False) - a.append(False) - self.assertEQUAL(a, bitarray('100')) - a.append(0) - a.append(1) - self.assertEQUAL(a, bitarray('10001')) - self.assertRaises(ValueError, a.append, 2) - self.assertRaises(TypeError, a.append, None) - self.assertRaises(TypeError, a.append, '') - self.assertEQUAL(a, bitarray('10001')) - self.check_obj(a) - - def test_append_random(self): - for a in self.randombitarrays(): - aa = a.tolist() - a.append(1) - self.assertEQUAL(a, bitarray(aa + [1], endian=a.endian())) - a.append(0) - self.assertEQUAL(a, bitarray(aa + [1, 0], endian=a.endian())) - self.check_obj(a) - - def test_insert(self): - a = bitarray('111100') - a.insert(3, False) - self.assertEqual(a, bitarray('1110100')) - self.assertRaises(ValueError, a.insert, 0, 2) - self.assertRaises(TypeError, a.insert, 0, None) - self.assertRaises(TypeError, a.insert) - self.assertRaises(TypeError, a.insert, None) - self.assertEqual(a, bitarray('1110100')) - self.check_obj(a) - - def test_insert_random(self): - for a in self.randombitarrays(): - aa = a.tolist() - for _ in 
range(20): - item = randint(0, 1) - pos = randint(-len(a) - 2, len(a) + 2) - a.insert(pos, item) - aa.insert(pos, item) - self.assertEqual(a.tolist(), aa) - self.check_obj(a) - - def test_fill_simple(self): - for endian in 'little', 'big': - a = bitarray(endian=endian) - self.assertEqual(a.fill(), 0) - self.assertEqual(len(a), 0) - - a = bitarray('101', endian) - self.assertEqual(a.fill(), 5) - self.assertEqual(a, bitarray('10100000')) - self.assertEqual(a.fill(), 0) - self.assertEqual(a, bitarray('10100000')) - self.check_obj(a) - - def test_fill_random(self): - for a in self.randombitarrays(): - b = a.copy() - res = b.fill() - self.assertTrue(0 <= res < 8) - self.assertEqual(b.endian(), a.endian()) - self.check_obj(b) - if len(a) % 8 == 0: - self.assertEqual(b, a) - else: - self.assertEqual(len(b) % 8, 0) - self.assertNotEqual(b, a) - self.assertEqual(b[:len(a)], a) - self.assertEqual(b[len(a):], zeros(len(b) - len(a))) - - def test_invert_simple(self): - a = bitarray() - a.invert() - self.assertEQUAL(a, bitarray()) - self.check_obj(a) - - a = bitarray('11011') - a.invert() - self.assertEQUAL(a, bitarray('00100')) - a.invert(2) - self.assertEQUAL(a, bitarray('00000')) - a.invert(-1) - self.assertEQUAL(a, bitarray('00001')) - - def test_invert_errors(self): - a = bitarray(5) - self.assertRaises(IndexError, a.invert, 5) - self.assertRaises(IndexError, a.invert, -6) - self.assertRaises(TypeError, a.invert, "A") - self.assertRaises(TypeError, a.invert, 0, 1) - - def test_invert_random(self): - for a in self.randombitarrays(start=1): - b = a.copy() - i = randint(0, len(a) - 1) - b.invert(i) - a[i] = not a[i] - self.assertEQUAL(a, b) - - def test_sort_simple(self): - a = bitarray('1101000') - a.sort() - self.assertEqual(a, bitarray('0000111')) - self.check_obj(a) - - a = bitarray('1101000') - a.sort(reverse=True) - self.assertEqual(a, bitarray('1110000')) - a.sort(reverse=False) - self.assertEqual(a, bitarray('0000111')) - a.sort(True) - self.assertEqual(a, 
bitarray('1110000')) - a.sort(False) - self.assertEqual(a, bitarray('0000111')) - - self.assertRaises(TypeError, a.sort, 'A') - - def test_sort_random(self): - for rev in False, True, 0, 1, 7, -1, -7, None: - for a in self.randombitarrays(): - lst = a.tolist() - if rev is None: - lst.sort() - a.sort() - else: - lst.sort(reverse=rev) - a.sort(reverse=rev) - self.assertEqual(a, bitarray(lst)) - self.check_obj(a) - - def test_reverse_explicit(self): - for x, y in [('', ''), ('1', '1'), ('10', '01'), ('001', '100'), - ('1110', '0111'), ('11100', '00111'), - ('011000', '000110'), ('1101100', '0011011'), - ('11110000', '00001111'), - ('11111000011', '11000011111'), - ('11011111 00100000 000111', - '111000 00000100 11111011')]: - a = bitarray(x) - a.reverse() - self.assertEQUAL(a, bitarray(y)) - self.check_obj(a) - - self.assertRaises(TypeError, bitarray().reverse, 42) - - def test_reverse_random(self): - for a in self.randombitarrays(): - b = a.copy() - a.reverse() - self.assertEQUAL(a, bitarray(reversed(b), endian=a.endian())) - self.assertEQUAL(a, b[::-1]) - self.check_obj(a) - - def test_tolist(self): - a = bitarray() - self.assertEqual(a.tolist(), []) - - a = bitarray('110') - lst = a.tolist() - self.assertIsInstance(lst, list) - self.assertEqual(repr(lst), '[1, 1, 0]') - - for lst in self.randomlists(): - a = bitarray(lst) - self.assertEqual(a.tolist(), lst) - - def test_remove(self): - a = bitarray('1010110') - for val, res in [(False, '110110'), (True, '10110'), - (1, '0110'), (1, '010'), (0, '10'), - (0, '1'), (1, '')]: - a.remove(val) - self.assertEQUAL(a, bitarray(res)) - self.check_obj(a) - - a = bitarray('0010011') - a.remove(1) - self.assertEQUAL(a, bitarray('000011')) - self.assertRaises(TypeError, a.remove, 'A') - self.assertRaises(ValueError, a.remove, 21) - - def test_remove_errors(self): - a = bitarray() - for i in (True, False, 1, 0): - self.assertRaises(ValueError, a.remove, i) - - a = bitarray(21) - a.setall(0) - self.assertRaises(ValueError, 
a.remove, 1) - a.setall(1) - self.assertRaises(ValueError, a.remove, 0) - - def test_pop_simple(self): - for x, n, r, y in [('1', 0, 1, ''), - ('0', -1, 0, ''), - ('0011100', 3, 1, '001100')]: - a = bitarray(x) - self.assertTrue(a.pop(n) is r) - self.assertEqual(a, bitarray(y)) - self.check_obj(a) - - a = bitarray('01') - self.assertEqual(a.pop(), True) - self.assertEqual(a.pop(), False) - # pop from empty bitarray - self.assertRaises(IndexError, a.pop) - - def test_pop_random_1(self): - for a in self.randombitarrays(): - self.assertRaises(IndexError, a.pop, len(a)) - self.assertRaises(IndexError, a.pop, -len(a) - 1) - if len(a) == 0: - continue - aa = a.tolist() - enda = a.endian() - self.assertEqual(a.pop(), aa[-1]) - self.check_obj(a) - self.assertEqual(a.endian(), enda) - - def test_pop_random_2(self): - for a in self.randombitarrays(start=1): - n = randint(-len(a), len(a)-1) - aa = a.tolist() - x = a.pop(n) - self.assertBitEqual(x, aa[n]) - y = aa.pop(n) - self.assertEqual(a, bitarray(aa)) - self.assertBitEqual(x, y) - self.check_obj(a) - - def test_clear(self): - for a in self.randombitarrays(): - endian = a.endian() - a.clear() - self.assertEqual(len(a), 0) - self.assertEqual(a.endian(), endian) - self.check_obj(a) - - def test_setall(self): - a = bitarray(5) - a.setall(True) - self.assertRaises(ValueError, a.setall, -1) - self.assertRaises(TypeError, a.setall, None) - self.assertEQUAL(a, bitarray('11111')) - a.setall(0) - self.assertEQUAL(a, bitarray('00000')) - self.check_obj(a) - - def test_setall_empty(self): - a = bitarray() - for v in 0, 1: - a.setall(v) - self.assertEqual(a, bitarray()) - self.check_obj(a) - - def test_setall_random(self): - for a in self.randombitarrays(): - val = randint(0, 1) - a.setall(val) - self.assertEqual(a, bitarray(len(a) * [val])) - self.check_obj(a) - - def test_bytereverse_explicit_all(self): - for x, y in [('', ''), - ('1', '0'), - ('1011', '0000'), - ('111011', '001101'), - ('11101101', '10110111'), - ('000000011', 
'100000000'), - ('11011111 00100000 000111', - '11111011 00000100 001110')]: - a = bitarray(x) - a.bytereverse() - self.assertEqual(a, bitarray(y)) - - def test_bytereverse_explicit_range(self): - a = bitarray('11100000 00000011 00111111 111110') - a.bytereverse(0, 1) # reverse byte 0 - self.assertEqual(a, bitarray('00000111 00000011 00111111 111110')) - a.bytereverse(1, 3) # reverse bytes 1 and 2 - self.assertEqual(a, bitarray('00000111 11000000 11111100 111110')) - a.bytereverse(2) # reverse bytes 2 till end of buffer - self.assertEqual(a, bitarray('00000111 11000000 00111111 000111')) - a.bytereverse(3) # reverse last byte - self.assertEqual(a, bitarray('00000111 11000000 00111111 001110')) - a.bytereverse(3, 1) # start > stop (nothing to reverse) - self.assertEqual(a, bitarray('00000111 11000000 00111111 001110')) - a.bytereverse(0, 4) # reverse all bytes - self.assertEqual(a, bitarray('11100000 00000011 11111100 000111')) - - self.assertRaises(IndexError, a.bytereverse, -1) - self.assertRaises(IndexError, a.bytereverse, 0, -1) - self.assertRaises(IndexError, a.bytereverse, 5) - self.assertRaises(IndexError, a.bytereverse, 0, 5) - - def test_bytereverse_byte(self): - for i in range(256): - a = bitarray() - a.frombytes(bytes(bytearray([i]))) - self.assertEqual(len(a), 8) - b = a.copy() - b.bytereverse() - self.assertEqual(b, a[::-1]) - a.reverse() - self.assertEqual(b, a) - self.check_obj(b) - - def test_bytereverse_random(self): - t = bitarray(endian=self.random_endian()) - t.frombytes(bytes(bytearray(range(256)))) - t.bytereverse() - table = t.tobytes() # translation table - self.assertEqual(table[:9], b'\x00\x80\x40\xc0\x20\xa0\x60\xe0\x10') - - for n in range(100): - a = urandom(8 * n, self.random_endian()) - i = randint(0, n) # start - j = randint(0, n) # stop - b = a.copy() - memoryview(b)[i:j] = b.tobytes()[i:j].translate(table) - a.bytereverse(i, j) - self.assertEQUAL(a, b) - self.check_obj(a) - - def test_bytereverse_endian(self): - for n in range(20): 
- a = urandom(8 * n, self.random_endian()) - b = a.copy() - a.bytereverse() - a = bitarray(a, self.other_endian(a.endian())) - self.assertEqual(a.tobytes(), b.tobytes()) - -tests.append(MethodTests) - -# --------------------------------------------------------------------------- - -class CountTests(unittest.TestCase, Util): - - def test_basic(self): - a = bitarray('10011') - self.assertEqual(a.count(), 3) - self.assertEqual(a.count(True), 3) - self.assertEqual(a.count(False), 2) - self.assertEqual(a.count(1), 3) - self.assertEqual(a.count(0), 2) - self.assertRaises(ValueError, a.count, 2) - self.assertRaises(TypeError, a.count, None) - self.assertRaises(TypeError, a.count, '') - self.assertRaises(TypeError, a.count, 'A') - self.assertRaises(TypeError, a.count, 1, 2.0) - self.assertRaises(TypeError, a.count, 1, 2, 4.0) - self.assertRaises(TypeError, a.count, 0, 'A') - self.assertRaises(TypeError, a.count, 0, 0, 'A') - - def test_byte(self): - for i in range(256): - a = bitarray() - a.frombytes(bytes(bytearray([i]))) - self.assertEqual(len(a), 8) - self.assertEqual(a.count(), bin(i)[2:].count('1')) - - def test_whole_range(self): - for a in self.randombitarrays(): - s = a.to01() - self.assertEqual(a.count(1), s.count('1')) - self.assertEqual(a.count(0), s.count('0')) - - def test_zeros(self): - N = 37 - a = zeros(N) - for i in range(N): - for j in range(i, N): - self.assertEqual(a.count(0, i, j), j - i) - - def test_explicit(self): - a = bitarray('01001100 01110011 01') - self.assertEqual(a.count(), 9) - self.assertEqual(a.count(0, 12), 3) - self.assertEqual(a.count(1, -5), 3) - self.assertEqual(a.count(1, 2, 17), 7) - self.assertEqual(a.count(1, 6, 11), 2) - self.assertEqual(a.count(0, 7, -3), 4) - self.assertEqual(a.count(1, 1, -1), 8) - self.assertEqual(a.count(1, 17, 14), 0) - - def test_random(self): - for a in self.randombitarrays(): - s = a.to01() - i = randint(-3, len(a) + 2) - j = randint(-3, len(a) + 2) - self.assertEqual(a.count(1, i, j), 
s[i:j].count('1')) - self.assertEqual(a.count(0, i, j), s[i:j].count('0')) - -tests.append(CountTests) - -# --------------------------------------------------------------------------- - -class IndexTests(unittest.TestCase, Util): - - def test_simple(self): - a = bitarray() - for i in True, False, 1, 0: - self.assertEqual(a.find(i), -1) - self.assertRaises(ValueError, a.index, i) - - a = zeros(100) - self.assertRaises(TypeError, a.find) - self.assertRaises(TypeError, a.find, 1, 'a') - self.assertRaises(TypeError, a.find, 1, 0, 'a') - self.assertRaises(TypeError, a.find, 1, 0, 100, 1) - - self.assertRaises(ValueError, a.index, True) - self.assertRaises(TypeError, a.index) - self.assertRaises(TypeError, a.index, 1, 'a') - self.assertRaises(TypeError, a.index, 1, 0, 'a') - self.assertRaises(TypeError, a.index, 1, 0, 100, 1) - - a[20] = a[27] = 1 - for i in 1, True, bitarray('1'), bitarray('10'): - self.assertEqual(a.index(i), 20) - self.assertEqual(a.index(i, 21), 27) - self.assertEqual(a.index(i, 27), 27) - self.assertEqual(a.index(i, -73), 27) - self.assertRaises(ValueError, a.index, -1) - self.assertRaises(TypeError, a.index, None) - self.assertRaises(ValueError, a.index, 1, 5, 17) - self.assertRaises(ValueError, a.index, 1, 5, -83) - self.assertRaises(ValueError, a.index, 1, 21, 27) - self.assertRaises(ValueError, a.index, 1, 28) - self.assertEqual(a.index(0), 0) - self.assertEqual(a.find(0), 0) - - s = bitarray() - self.assertEqual(a.index(s), 0) - self.assertEqual(a.find(s), 0) - - def test_200(self): - a = 200 * bitarray('1') - self.assertRaises(ValueError, a.index, False) - self.assertEqual(a.find(False), -1) - a[173] = a[187] = a[189] = 0 - for i in 0, False, bitarray('0'), bitarray('01'): - self.assertEqual(a.index(i), 173) - self.assertEqual(a.find(i), 173) - self.assertEqual(a.index(True), 0) - self.assertEqual(a.find(True), 0) - s = bitarray('010') - self.assertEqual(a.index(s), 187) - self.assertEqual(a.find(s), 187) - - def test_range(self): - n = 250 - 
a = bitarray(n) - for m in range(n): - a.setall(0) - self.assertRaises(ValueError, a.index, 1) - self.assertEqual(a.find(1), -1) - a[m] = 1 - self.assertEqual(a.index(1), m) - self.assertEqual(a.find(1), m) - - a.setall(1) - self.assertRaises(ValueError, a.index, 0) - self.assertEqual(a.find(0), -1) - a[m] = 0 - self.assertEqual(a.index(0), m) - self.assertEqual(a.find(0), m) - - def test_explicit(self): - for endian in 'big', 'little': - a = bitarray('00001000 00000000 0010000', endian) - self.assertEqual(a.index(1), 4) - self.assertEqual(a.index(1, 1), 4) - self.assertEqual(a.index(0, 4), 5) - self.assertEqual(a.index(1, 5), 18) - self.assertEqual(a.index(1, -11), 18) - self.assertEqual(a.index(1, -50), 4) - self.assertRaises(ValueError, a.index, 1, 5, 18) - self.assertRaises(ValueError, a.index, 1, 19) - - self.assertEqual(a.find(1), 4) - self.assertEqual(a.find(1, 1), 4) - self.assertEqual(a.find(0, 4), 5) - self.assertEqual(a.find(1, 5), 18) - self.assertEqual(a.find(1, -11), 18) - self.assertEqual(a.find(1, -50), 4) - self.assertEqual(a.find(1, 5, 18), -1) - self.assertEqual(a.find(1, 19), -1) - - def test_explicit_2(self): - a = bitarray('10010101 11001111 1001011', self.random_endian()) - s = bitarray('011', self.random_endian()) - self.assertEqual(a.index(s, 15), 20) - self.assertEqual(a.index(s, -3), 20) - self.assertRaises(ValueError, a.index, s, 15, 22) - self.assertRaises(ValueError, a.index, s, 15, -1) - - self.assertEqual(a.find(s, 15), 20) - self.assertEqual(a.find(s, -3), 20) - self.assertEqual(a.find(s, 15, 22), -1) - self.assertEqual(a.find(s, 15, -1), -1) - - def test_random_start_stop(self): - n = 2000 - a = zeros(n) - for _ in range(100): - a[randint(0, n - 1)] = 1 - aa = a.tolist() - for _ in range(100): - start = randint(0, n) - stop = randint(0, n) - try: # reference from list - ref = aa.index(1, start, stop) - except ValueError: - ref = -1 - - res1 = a.find(1, start, stop) - self.assertEqual(res1, ref) - - try: - res2 = a.index(1, start, 
stop) - except ValueError: - res2 = -1 - self.assertEqual(res2, ref) - - def test_random_2(self): - for n in range(1, 70): - a = bitarray(n) - i = randint(0, 1) - a.setall(i) - for _ in range(randint(1, 4)): - a.invert(randint(0, n - 1)) - aa = a.tolist() - for _ in range(10): - start = randint(-10, n + 10) - stop = randint(-10, n + 10) - try: - res0 = aa.index(not i, start, stop) - except ValueError: - res0 = -1 - - res1 = a.find(not i, start, stop) - self.assertEqual(res1, res0) - - try: - res2 = a.index(not i, start, stop) - except ValueError: - res2 = -1 - self.assertEqual(res2, res0) - - def test_random_3(self): - for a in self.randombitarrays(): - aa = a.to01() - if a: - self.assertEqual(a.find(a), 0) - self.assertEqual(a.index(a), 0) - - for sub in '0', '1', '01', '01', '11', '101', '1111', '00100': - b = bitarray(sub, self.random_endian()) - self.assertEqual(a.find(b), aa.find(sub)) - - i = randint(-len(a) - 3, len(a) + 2) - j = randint(-len(a) - 3, len(a) + 2) - ref = aa.find(sub, i, j) - self.assertEqual(a.find(b, i, j), ref) - if ref == -1: - self.assertRaises(ValueError, a.index, b, i, j) - else: - self.assertEqual(a.index(b, i, j), ref) - -tests.append(IndexTests) - -# --------------------------------------------------------------------------- - -class SearchTests(unittest.TestCase, Util): - - def test_simple(self): - a = bitarray() - for s in 0, 1, False, True, bitarray('0'), bitarray('1'): - self.assertEqual(a.search(s), []) - - a = bitarray('00100') - for s in 1, True, bitarray('1'), bitarray('10'): - self.assertEqual(a.search(s), [2]) - - a = 100 * bitarray('1') - self.assertEqual(a.search(0), []) - self.assertEqual(a.search(1), list(range(100))) - - a = bitarray('10010101110011111001011') - for limit in range(10): - self.assertEqual(a.search(bitarray('011'), limit), - [6, 11, 20][:limit]) - - self.assertRaises(ValueError, a.search, bitarray()) - self.assertRaises(TypeError, a.search, '010') - - def test_itersearch(self): - a = bitarray('10011') - 
self.assertRaises(ValueError, a.itersearch, bitarray()) - self.assertRaises(TypeError, a.itersearch, 1, 0) - self.assertRaises(TypeError, a.itersearch, '') - it = a.itersearch(1) - self.assertIsType(it, 'searchiterator') - self.assertEqual(next(it), 0) - self.assertEqual(next(it), 3) - self.assertEqual(next(it), 4) - self.assertStopIteration(it) - - def test_explicit_1(self): - a = bitarray('10011', self.random_endian()) - for s, res in [('0', [1, 2]), ('1', [0, 3, 4]), - ('01', [2]), ('11', [3]), - ('000', []), ('1001', [0]), - ('011', [2]), ('0011', [1]), - ('10011', [0]), ('100111', [])]: - b = bitarray(s, self.random_endian()) - self.assertEqual(a.search(b), res) - self.assertEqual(list(a.itersearch(b)), res) - - def test_explicit_2(self): - a = bitarray('10010101 11001111 1001011') - for s, res in [('011', [6, 11, 20]), - ('111', [7, 12, 13, 14]), # note the overlap - ('1011', [5, 19]), - ('100', [0, 9, 16])]: - b = bitarray(s) - self.assertEqual(a.search(b), res) - self.assertEqual(list(a.itersearch(b)), res) - - def test_bool_random(self): - for a in self.randombitarrays(): - b = a.copy() - b.setall(0) - for i in a.itersearch(1): - b[i] = 1 - self.assertEQUAL(b, a) - - b.setall(1) - for i in a.itersearch(0): - b[i] = 0 - self.assertEQUAL(b, a) - - s = set(a.search(0) + a.search(1)) - self.assertEqual(len(s), len(a)) - - def test_random(self): - for a in self.randombitarrays(): - aa = a.to01() - if a: - self.assertEqual(a.search(a), [0]) - self.assertEqual(list(a.itersearch(a)), [0]) - - for sub in '0', '1', '01', '01', '11', '101', '1101', '01100': - b = bitarray(sub, self.random_endian()) - plst = [i for i in range(len(a)) if a[i:i + len(b)] == b] - self.assertEqual(a.search(b), plst) - for p in a.itersearch(b): - self.assertEqual(a[p:p + len(b)], b) - -tests.append(SearchTests) - -# --------------------------------------------------------------------------- - -class BytesTests(unittest.TestCase, Util): - - @staticmethod - def randombytes(): - for n in 
range(1, 20): - yield os.urandom(n) - - def test_frombytes_simple(self): - a = bitarray(endian='big') - a.frombytes(b'A') - self.assertEqual(a, bitarray('01000001')) - - b = a - b.frombytes(b'BC') - self.assertEQUAL(b, bitarray('01000001 01000010 01000011', - endian='big')) - self.assertTrue(b is a) - - def test_frombytes_empty(self): - for a in self.randombitarrays(): - b = a.copy() - a.frombytes(b'') - self.assertEQUAL(a, b) - self.assertFalse(a is b) - self.check_obj(a) - - def test_frombytes_errors(self): - a = bitarray() - self.assertRaises(TypeError, a.frombytes) - self.assertRaises(TypeError, a.frombytes, b'', b'') - self.assertRaises(TypeError, a.frombytes, 1) - self.check_obj(a) - - def test_frombytes_random(self): - for b in self.randombitarrays(): - for s in self.randombytes(): - a = bitarray(endian=b.endian()) - a.frombytes(s) - c = b.copy() - b.frombytes(s) - self.assertEQUAL(b[-len(a):], a) - self.assertEQUAL(b[:-len(a)], c) - self.assertEQUAL(b, c + a) - self.check_obj(a) - - def test_tobytes_empty(self): - a = bitarray() - self.assertEqual(a.tobytes(), b'') - - def test_tobytes_endian(self): - for end in ('big', 'little'): - a = bitarray(endian=end) - a.frombytes(b'foo') - self.assertEqual(a.tobytes(), b'foo') - - for s in self.randombytes(): - a = bitarray(endian=end) - a.frombytes(s) - self.assertEqual(a.tobytes(), s) - self.check_obj(a) - - def test_tobytes_explicit_ones(self): - for n, s in [(1, b'\x01'), (2, b'\x03'), (3, b'\x07'), (4, b'\x0f'), - (5, b'\x1f'), (6, b'\x3f'), (7, b'\x7f'), (8, b'\xff'), - (12, b'\xff\x0f'), (15, b'\xff\x7f'), (16, b'\xff\xff'), - (17, b'\xff\xff\x01'), (24, b'\xff\xff\xff')]: - a = bitarray(n, endian='little') - a.setall(1) - self.assertEqual(a.tobytes(), s) - - def test_unpack_simple(self): - a = bitarray('01') - self.assertIsInstance(a.unpack(), bytes) - self.assertEqual(a.unpack(), b'\x00\x01') - self.assertEqual(a.unpack(b'A'), b'A\x01') - self.assertEqual(a.unpack(b'0', b'1'), b'01') - 
self.assertEqual(a.unpack(one=b'\xff'), b'\x00\xff') - self.assertEqual(a.unpack(zero=b'A'), b'A\x01') - self.assertEqual(a.unpack(one=b't', zero=b'f'), b'ft') - - def test_unpack_random(self): - for a in self.randombitarrays(): - self.assertEqual(a.unpack(b'0', b'1'), - a.to01().encode()) - # round trip - b = bitarray() - b.pack(a.unpack()) - self.assertEqual(b, a) - # round trip with invert - b = bitarray() - b.pack(a.unpack(b'\x01', b'\x00')) - b.invert() - self.assertEqual(b, a) - - def test_unpack_errors(self): - a = bitarray('01') - self.assertRaises(TypeError, a.unpack, b'') - self.assertRaises(TypeError, a.unpack, b'0', b'') - self.assertRaises(TypeError, a.unpack, b'a', zero=b'b') - self.assertRaises(TypeError, a.unpack, foo=b'b') - self.assertRaises(TypeError, a.unpack, one=b'aa', zero=b'b') - if is_py3k: - self.assertRaises(TypeError, a.unpack, '0') - self.assertRaises(TypeError, a.unpack, one='a') - self.assertRaises(TypeError, a.unpack, b'0', '1') - - def test_pack_simple(self): - for endian in 'little', 'big': - _set_default_endian(endian) - a = bitarray() - a.pack(bytes()) - self.assertEQUAL(a, bitarray()) - a.pack(b'\x00') - self.assertEQUAL(a, bitarray('0')) - a.pack(b'\xff') - self.assertEQUAL(a, bitarray('01')) - a.pack(b'\x01\x00\x7a') - self.assertEQUAL(a, bitarray('01101')) - self.check_obj(a) - - def test_pack_allbytes(self): - a = bitarray() - a.pack(bytes(bytearray(range(256)))) - self.assertEqual(a, bitarray('0' + 255 * '1')) - self.check_obj(a) - - def test_pack_errors(self): - a = bitarray() - self.assertRaises(TypeError, a.pack, 0) - if is_py3k: - self.assertRaises(TypeError, a.pack, '1') - self.assertRaises(TypeError, a.pack, [1, 3]) - self.assertRaises(TypeError, a.pack, bitarray()) - -tests.append(BytesTests) - -# --------------------------------------------------------------------------- - -class FileTests(unittest.TestCase, Util): - - def setUp(self): - self.tmpdir = tempfile.mkdtemp() - self.tmpfname = os.path.join(self.tmpdir, 
'testfile') - - def tearDown(self): - shutil.rmtree(self.tmpdir) - - def read_file(self): - with open(self.tmpfname, 'rb') as fi: - return fi.read() - - def assertFileSize(self, size): - self.assertEqual(os.path.getsize(self.tmpfname), size) - - - def test_pickle(self): - d1 = {i: a for i, a in enumerate(self.randombitarrays())} - with open(self.tmpfname, 'wb') as fo: - pickle.dump(d1, fo) - with open(self.tmpfname, 'rb') as fi: - d2 = pickle.load(fi) - for key in d1.keys(): - self.assertEQUAL(d1[key], d2[key]) - - def test_pickle_load(self): - if not is_py3k: - return - # the test data file was created using bitarray 1.5.0 / Python 3.5.5 - path = os.path.join(os.path.dirname(__file__), 'test_data.pickle') - with open(path, 'rb') as fi: - d = pickle.load(fi) - - for i, (s, end) in enumerate([ - ('110', 'little'), - ('011', 'big'), - ('1110000001001000000000000000001', 'little'), - ('0010011110000000000000000000001', 'big'), - ]): - b = d['b%d' % i] - self.assertEqual(b.to01(), s) - self.assertEqual(b.endian(), end) - self.assertIsType(b, 'bitarray') - self.check_obj(b) - - f = d['f%d' % i] - self.assertEqual(f.to01(), s) - self.assertEqual(f.endian(), end) - self.assertIsType(f, 'frozenbitarray') - self.check_obj(f) - - def test_shelve(self): - if hasattr(sys, 'gettotalrefcount'): - return - - d1 = shelve.open(self.tmpfname) - stored = [] - for a in self.randombitarrays(): - key = str(len(a)) - d1[key] = a - stored.append((key, a)) - d1.close() - - d2 = shelve.open(self.tmpfname) - for k, v in stored: - self.assertEQUAL(d2[k], v) - d2.close() - - def test_fromfile_empty(self): - with open(self.tmpfname, 'wb') as fo: - pass - self.assertFileSize(0) - - a = bitarray() - with open(self.tmpfname, 'rb') as fi: - a.fromfile(fi) - self.assertEqual(a, bitarray()) - self.check_obj(a) - - def test_fromfile_Foo(self): - with open(self.tmpfname, 'wb') as fo: - fo.write(b'Foo') - self.assertFileSize(3) - - a = bitarray(endian='big') - with open(self.tmpfname, 'rb') as fi: - 
a.fromfile(fi) - self.assertEqual(a, bitarray('01000110 01101111 01101111')) - - a = bitarray(endian='little') - with open(self.tmpfname, 'rb') as fi: - a.fromfile(fi) - self.assertEqual(a, bitarray('01100010 11110110 11110110')) - - def test_fromfile_wrong_args(self): - a = bitarray() - self.assertRaises(TypeError, a.fromfile) - self.assertRaises(Exception, a.fromfile, 42) - self.assertRaises(Exception, a.fromfile, 'bar') - - with open(self.tmpfname, 'wb') as fo: - pass - with open(self.tmpfname, 'rb') as fi: - self.assertRaises(TypeError, a.fromfile, fi, None) - - def test_fromfile_erros(self): - with open(self.tmpfname, 'wb') as fo: - fo.write(b'0123456789') - self.assertFileSize(10) - - a = bitarray() - with open(self.tmpfname, 'wb') as fi: - self.assertRaises(Exception, a.fromfile, fi) - - if is_py3k: - with open(self.tmpfname, 'r') as fi: - self.assertRaises(TypeError, a.fromfile, fi) - - def test_from_large_files(self): - for N in range(65534, 65538): - data = os.urandom(N) - with open(self.tmpfname, 'wb') as fo: - fo.write(data) - - a = bitarray() - with open(self.tmpfname, 'rb') as fi: - a.fromfile(fi) - self.assertEqual(len(a), 8 * N) - self.assertEqual(buffer_info(a, 'size'), N) - self.assertEqual(a.tobytes(), data) - - def test_fromfile_extend_existing(self): - with open(self.tmpfname, 'wb') as fo: - fo.write(b'Foo') - - foo_le = '011000101111011011110110' - a = bitarray('1', endian='little') - with open(self.tmpfname, 'rb') as fi: - a.fromfile(fi) - - self.assertEqual(a, bitarray('1' + foo_le)) - - for n in range(20): - a = bitarray(n, endian='little') - a.setall(1) - with open(self.tmpfname, 'rb') as fi: - a.fromfile(fi) - self.assertEqual(a, bitarray(n * '1' + foo_le)) - - def test_fromfile_n(self): - a = bitarray() - a.frombytes(b'ABCDEFGHIJ') - with open(self.tmpfname, 'wb') as fo: - a.tofile(fo) - self.assertFileSize(10) - - with open(self.tmpfname, 'rb') as f: - a = bitarray() - a.fromfile(f, 0); self.assertEqual(a.tobytes(), b'') - a.fromfile(f, 
1); self.assertEqual(a.tobytes(), b'A') - f.read(1) # skip B - a.fromfile(f, 1); self.assertEqual(a.tobytes(), b'AC') - a = bitarray() - a.fromfile(f, 2); self.assertEqual(a.tobytes(), b'DE') - a.fromfile(f, 1); self.assertEqual(a.tobytes(), b'DEF') - a.fromfile(f, 0); self.assertEqual(a.tobytes(), b'DEF') - a.fromfile(f); self.assertEqual(a.tobytes(), b'DEFGHIJ') - a.fromfile(f); self.assertEqual(a.tobytes(), b'DEFGHIJ') - self.check_obj(a) - - a = bitarray() - with open(self.tmpfname, 'rb') as f: - f.read(1) - self.assertRaises(EOFError, a.fromfile, f, 10) - # check that although we received an EOFError, the bytes were read - self.assertEqual(a.tobytes(), b'BCDEFGHIJ') - - a = bitarray() - with open(self.tmpfname, 'rb') as f: - # negative values - like ommiting the argument - a.fromfile(f, -1) - self.assertEqual(a.tobytes(), b'ABCDEFGHIJ') - self.assertRaises(EOFError, a.fromfile, f, 1) - - def test_fromfile_BytesIO(self): - f = BytesIO(b'somedata') - a = bitarray() - a.fromfile(f, 4) - self.assertEqual(len(a), 32) - self.assertEqual(a.tobytes(), b'some') - a.fromfile(f) - self.assertEqual(len(a), 64) - self.assertEqual(a.tobytes(), b'somedata') - self.check_obj(a) - - def test_tofile_empty(self): - a = bitarray() - with open(self.tmpfname, 'wb') as f: - a.tofile(f) - - self.assertFileSize(0) - - def test_tofile_Foo(self): - a = bitarray('0100011 001101111 01101111', endian='big') - b = a.copy() - with open(self.tmpfname, 'wb') as f: - a.tofile(f) - self.assertEQUAL(a, b) - - self.assertFileSize(3) - self.assertEqual(self.read_file(), b'Foo') - - def test_tofile_random(self): - for a in self.randombitarrays(): - with open(self.tmpfname, 'wb') as fo: - a.tofile(fo) - n = bits2bytes(len(a)) - self.assertFileSize(n) - raw = self.read_file() - self.assertEqual(len(raw), n) - self.assertEqual(raw, a.tobytes()) - - def test_tofile_errors(self): - n = 100 - a = bitarray(8 * n) - self.assertRaises(TypeError, a.tofile) - - with open(self.tmpfname, 'wb') as f: - 
a.tofile(f) - self.assertFileSize(n) - # write to closed file - self.assertRaises(ValueError, a.tofile, f) - - if is_py3k: - with open(self.tmpfname, 'w') as f: - self.assertRaises(TypeError, a.tofile, f) - - with open(self.tmpfname, 'rb') as f: - self.assertRaises(Exception, a.tofile, f) - - def test_tofile_large(self): - n = 100 * 1000 - a = bitarray(8 * n) - a.setall(0) - a[2::37] = 1 - with open(self.tmpfname, 'wb') as f: - a.tofile(f) - self.assertFileSize(n) - - raw = self.read_file() - self.assertEqual(len(raw), n) - self.assertEqual(raw, a.tobytes()) - - def test_tofile_ones(self): - for n in range(20): - a = n * bitarray('1', endian='little') - with open(self.tmpfname, 'wb') as fo: - a.tofile(fo) - - raw = self.read_file() - self.assertEqual(len(raw), bits2bytes(len(a))) - # when we fill the unused bits in a, we can compare - a.fill() - b = bitarray(endian='little') - b.frombytes(raw) - self.assertEqual(a, b) - - def test_tofile_BytesIO(self): - for n in list(range(10)) + list(range(65534, 65538)): - data = os.urandom(n) - a = bitarray(0, 'big') - a.frombytes(data) - self.assertEqual(len(a), 8 * n) - f = BytesIO() - a.tofile(f) - self.assertEqual(f.getvalue(), data) - - def test_mmap(self): - if not is_py3k: - return - import mmap - - with open(self.tmpfname, 'wb') as fo: - fo.write(1000 * b'\0') - - with open(self.tmpfname, 'r+b') as f: # see issue #141 - with mmap.mmap(f.fileno(), 0) as mapping: - a = bitarray(buffer=mapping, endian='little') - info = buffer_info(a) - self.assertFalse(info['readonly']) - self.assertTrue(info['imported']) - self.assertEqual(a, zeros(8000)) - a[::2] = True - # not sure this is necessary, without 'del a', I get: - # BufferError: cannot close exported pointers exist - del a - - self.assertEqual(self.read_file(), 1000 * b'\x55') - - def test_mmap_2(self): - if not is_py3k: - return - from mmap import mmap - - with open(self.tmpfname, 'wb') as fo: - fo.write(1000 * b'\x22') - - with open(self.tmpfname, 'r+b') as f: - a = 
bitarray(buffer=mmap(f.fileno(), 0), endian='little') - info = buffer_info(a) - self.assertFalse(info['readonly']) - self.assertTrue(info['imported']) - self.assertEqual(a, 1000 * bitarray('0100 0100')) - a[::4] = 1 - - self.assertEqual(self.read_file(), 1000 * b'\x33') - - def test_mmap_readonly(self): - if not is_py3k: - return - import mmap - - with open(self.tmpfname, 'wb') as fo: - fo.write(994 * b'\x89' + b'Veedon') - - with open(self.tmpfname, 'rb') as fi: # readonly - m = mmap.mmap(fi.fileno(), 0, access=mmap.ACCESS_READ) - a = bitarray(buffer=m, endian='big') - info = buffer_info(a) - self.assertTrue(info['readonly']) - self.assertTrue(info['imported']) - self.assertRaisesMessage(TypeError, - "cannot modify read-only memory", - a.__setitem__, 0, 1) - self.assertEqual(a[:8 * 994], 994 * bitarray('1000 1001')) - self.assertEqual(a[8 * 994:].tobytes(), b'Veedon') - -tests.append(FileTests) - -# ----------------------------- Decode Tree --------------------------------- - -alphabet_code = { - ' ': bitarray('001'), '.': bitarray('0101010'), - 'a': bitarray('0110'), 'b': bitarray('0001100'), - 'c': bitarray('000011'), 'd': bitarray('01011'), - 'e': bitarray('111'), 'f': bitarray('010100'), - 'g': bitarray('101000'), 'h': bitarray('00000'), - 'i': bitarray('1011'), 'j': bitarray('0111101111'), - 'k': bitarray('00011010'), 'l': bitarray('01110'), - 'm': bitarray('000111'), 'n': bitarray('1001'), - 'o': bitarray('1000'), 'p': bitarray('101001'), - 'q': bitarray('00001001101'), 'r': bitarray('1101'), - 's': bitarray('1100'), 't': bitarray('0100'), - 'u': bitarray('000100'), 'v': bitarray('0111100'), - 'w': bitarray('011111'), 'x': bitarray('0000100011'), - 'y': bitarray('101010'), 'z': bitarray('00011011110') -} - -class DecodeTreeTests(unittest.TestCase, Util): - - def test_create(self): - dt = decodetree(alphabet_code) - self.assertIsType(dt, 'decodetree') - self.assertIsInstance(dt, decodetree) - self.assertRaises(TypeError, decodetree, None) - 
self.assertRaises(TypeError, decodetree, 'foo') - d = dict(alphabet_code) - d['-'] = bitarray() - self.assertRaises(ValueError, decodetree, d) - - def test_ambiguous_code(self): - for d in [ - {'a': bitarray('0'), 'b': bitarray('0'), 'c': bitarray('1')}, - {'a': bitarray('01'), 'b': bitarray('01'), 'c': bitarray('1')}, - {'a': bitarray('0'), 'b': bitarray('01')}, - {'a': bitarray('0'), 'b': bitarray('11'), 'c': bitarray('111')}, - ]: - self.assertRaises(ValueError, decodetree, d) - - def test_sizeof(self): - dt = decodetree({'.': bitarray('1')}) - self.assertTrue(0 < sys.getsizeof(dt) < 100) - - dt = decodetree({'a': zeros(20)}) - self.assertTrue(sys.getsizeof(dt) > 200) - - def test_nodes(self): - for n in range(1, 20): - dt = decodetree({'a': zeros(n)}) - self.assertEqual(dt.nodes(), n + 1) - - dt = decodetree({'I': bitarray('1'), 'l': bitarray('01'), - 'a': bitarray('001'), 'n': bitarray('000')}) - self.assertEqual(dt.nodes(), 7) - dt = decodetree(alphabet_code) - self.assertEqual(dt.nodes(), 70) - - def test_todict(self): - t = decodetree(alphabet_code) - d = t.todict() - self.assertIsInstance(d, dict) - self.assertEqual(d, alphabet_code) - - def test_decode(self): - t = decodetree(alphabet_code) - a = bitarray('1011 01110 0110 1001') - self.assertEqual(a.decode(t), ['i', 'l', 'a', 'n']) - self.assertEqual(''.join(a.iterdecode(t)), 'ilan') - a = bitarray() - self.assertEqual(a.decode(t), []) - self.assertEqual(''.join(a.iterdecode(t)), '') - self.check_obj(a) - - def test_large(self): - d = {i: bitarray(bool((1 << j) & i) for j in range(10)) - for i in range(1024)} - t = decodetree(d) - self.assertEqual(t.todict(), d) - self.assertEqual(t.nodes(), 2047) - self.assertTrue(sys.getsizeof(t) > 10000) - -tests.append(DecodeTreeTests) - -# ------------------ variable length encoding and decoding ------------------ - -class PrefixCodeTests(unittest.TestCase, Util): - - def test_encode_string(self): - a = bitarray() - a.encode(alphabet_code, '') - self.assertEqual(a, 
bitarray()) - a.encode(alphabet_code, 'a') - self.assertEqual(a, bitarray('0110')) - - def test_encode_list(self): - a = bitarray() - a.encode(alphabet_code, []) - self.assertEqual(a, bitarray()) - a.encode(alphabet_code, ['e']) - self.assertEqual(a, bitarray('111')) - - def test_encode_iter(self): - a = bitarray() - d = {0: bitarray('0'), 1: bitarray('1')} - a.encode(d, iter([0, 1, 1, 0])) - self.assertEqual(a, bitarray('0110')) - - def foo(): - for c in 1, 1, 0, 0, 1, 1: - yield c - - a.clear() - a.encode(d, foo()) - a.encode(d, range(2)) - self.assertEqual(a, bitarray('11001101')) - self.assertEqual(d, {0: bitarray('0'), 1: bitarray('1')}) - - def test_encode_symbol_not_in_code(self): - d = dict(alphabet_code) - a = bitarray() - a.encode(d, 'is') - self.assertEqual(a, bitarray('1011 1100')) - self.assertRaises(ValueError, a.encode, d, 'ilAn') - msg = "symbol not defined in prefix code" - if is_py3k: - msg += ": None" - self.assertRaisesMessage(ValueError, msg, a.encode, d, [None, 2]) - - def test_encode_not_iterable(self): - d = {'a': bitarray('0'), 'b': bitarray('1')} - a = bitarray() - a.encode(d, 'abba') - self.assertRaises(TypeError, a.encode, d, 42) - self.assertRaises(TypeError, a.encode, d, 1.3) - self.assertRaises(TypeError, a.encode, d, None) - self.assertEqual(a, bitarray('0110')) - - def test_check_codedict_encode(self): - a = bitarray() - self.assertRaises(TypeError, a.encode, None, '') - self.assertRaises(ValueError, a.encode, {}, '') - self.assertRaises(TypeError, a.encode, {'a': 'b'}, 'a') - self.assertRaises(ValueError, a.encode, {'a': bitarray()}, 'a') - self.assertEqual(len(a), 0) - - def test_check_codedict_decode(self): - a = bitarray('101') - self.assertRaises(TypeError, a.decode, 0) - self.assertRaises(ValueError, a.decode, {}) - self.assertRaises(TypeError, a.decode, {'a': 42}) - self.assertRaises(ValueError, a.decode, {'a': bitarray()}) - self.assertEqual(a, bitarray('101')) - - def test_check_codedict_iterdecode(self): - a = 
bitarray('1100101') - self.assertRaises(TypeError, a.iterdecode, 0) - self.assertRaises(ValueError, a.iterdecode, {}) - self.assertRaises(TypeError, a.iterdecode, {'a': []}) - self.assertRaises(ValueError, a.iterdecode, {'a': bitarray()}) - self.assertEqual(a, bitarray('1100101')) - - def test_decode_simple(self): - d = {'I': bitarray('1'), 'l': bitarray('01'), - 'a': bitarray('001'), 'n': bitarray('000')} - dcopy = dict(d) - a = bitarray('101001000') - res = list("Ilan") - self.assertEqual(a.decode(d), res) - self.assertEqual(list(a.iterdecode(d)), res) - self.assertEqual(d, dcopy) - self.assertEqual(a, bitarray('101001000')) - - def test_iterdecode_type(self): - a = bitarray() - it = a.iterdecode(alphabet_code) - self.assertIsType(it, 'decodeiterator') - - def test_iterdecode_remove_tree(self): - d = {'I': bitarray('1'), 'l': bitarray('01'), - 'a': bitarray('001'), 'n': bitarray('000')} - t = decodetree(d) - a = bitarray('101001000') - it = a.iterdecode(t) - del t - self.assertEqual(''.join(it), "Ilan") - - def test_decode_empty(self): - d = {'a': bitarray('1')} - a = bitarray() - self.assertEqual(a.decode(d), []) - self.assertEqual(d, {'a': bitarray('1')}) - # test decode iterator - self.assertEqual(list(a.iterdecode(d)), []) - self.assertEqual(d, {'a': bitarray('1')}) - self.assertEqual(len(a), 0) - - def test_decode_incomplete(self): - d = {'a': bitarray('0'), 'b': bitarray('111')} - a = bitarray('00011') - msg = "incomplete prefix code at position 3" - self.assertRaisesMessage(ValueError, msg, a.decode, d) - it = a.iterdecode(d) - self.assertIsType(it, 'decodeiterator') - self.assertRaisesMessage(ValueError, msg, list, it) - t = decodetree(d) - self.assertRaisesMessage(ValueError, msg, a.decode, t) - self.assertRaisesMessage(ValueError, msg, list, a.iterdecode(t)) - - self.assertEqual(a, bitarray('00011')) - self.assertEqual(d, {'a': bitarray('0'), 'b': bitarray('111')}) - self.assertEqual(t.todict(), d) - - def test_decode_incomplete_2(self): - a = 
bitarray() - a.encode(alphabet_code, "now we rise") - x = len(a) - a.extend('00') - msg = "incomplete prefix code at position %d" % x - self.assertRaisesMessage(ValueError, msg, a.decode, alphabet_code) - - def test_decode_buggybitarray(self): - d = dict(alphabet_code) - # i s t - a = bitarray('1011 1100 0100 011110111001101001') - msg = "prefix code unrecognized in bitarray at position 12 .. 21" - self.assertRaisesMessage(ValueError, msg, a.decode, d) - self.assertRaisesMessage(ValueError, msg, list, a.iterdecode(d)) - t = decodetree(d) - self.assertRaisesMessage(ValueError, msg, a.decode, t) - self.assertRaisesMessage(ValueError, msg, list, a.iterdecode(d)) - - self.check_obj(a) - self.assertEqual(t.todict(), d) - - def test_iterdecode_no_term(self): - d = {'a': bitarray('0'), 'b': bitarray('111')} - a = bitarray('011') - it = a.iterdecode(d) - self.assertEqual(next(it), 'a') - self.assertRaisesMessage(ValueError, - "incomplete prefix code at position 1", - next, it) - self.assertEqual(a, bitarray('011')) - - def test_iterdecode_buggybitarray(self): - d = {'a': bitarray('0')} - a = bitarray('1') - it = a.iterdecode(d) - self.assertRaises(ValueError, next, it) - self.assertEqual(a, bitarray('1')) - self.assertEqual(d, {'a': bitarray('0')}) - - def test_decode_buggybitarray2(self): - d = {'a': bitarray('00'), 'b': bitarray('01')} - a = bitarray('1') - self.assertRaises(ValueError, a.decode, d) - self.assertRaises(ValueError, next, a.iterdecode(d)) - - t = decodetree(d) - self.assertRaises(ValueError, a.decode, t) - self.assertRaises(ValueError, next, a.iterdecode(t)) - - self.assertEqual(a, bitarray('1')) - self.assertEqual(d, {'a': bitarray('00'), 'b': bitarray('01')}) - self.assertEqual(t.todict(), d) - - def test_decode_random(self): - pat1 = re.compile(r'incomplete prefix code.+\s(\d+)') - pat2 = re.compile(r'prefix code unrecognized.+\s(\d+)\s*\.\.\s*(\d+)') - t = decodetree(alphabet_code) - for a in self.randombitarrays(): - try: - a.decode(t) - except 
ValueError as e: - msg = str(e) - m1 = pat1.match(msg) - m2 = pat2.match(msg) - self.assertFalse(m1 and m2) - if m1: - i = int(m1.group(1)) - if m2: - i, j = int(m2.group(1)), int(m2.group(2)) - self.assertFalse(a[i:j] in alphabet_code.values()) - a[:i].decode(t) - - def test_decode_ambiguous_code(self): - for d in [ - {'a': bitarray('0'), 'b': bitarray('0'), 'c': bitarray('1')}, - {'a': bitarray('01'), 'b': bitarray('01'), 'c': bitarray('1')}, - {'a': bitarray('0'), 'b': bitarray('01')}, - {'a': bitarray('0'), 'b': bitarray('11'), 'c': bitarray('111')}, - ]: - a = bitarray() - self.assertRaises(ValueError, a.decode, d) - self.assertRaises(ValueError, a.iterdecode, d) - self.check_obj(a) - - def test_miscitems(self): - d = {None : bitarray('00'), - 0 : bitarray('110'), - 1 : bitarray('111'), - '' : bitarray('010'), - 2 : bitarray('011')} - a = bitarray() - a.encode(d, [None, 0, 1, '', 2]) - self.assertEqual(a, bitarray('00110111010011')) - self.assertEqual(a.decode(d), [None, 0, 1, '', 2]) - # iterator - it = a.iterdecode(d) - self.assertEqual(next(it), None) - self.assertEqual(next(it), 0) - self.assertEqual(next(it), 1) - self.assertEqual(next(it), '') - self.assertEqual(next(it), 2) - self.assertStopIteration(it) - - def test_quick_example(self): - a = bitarray() - message = 'the quick brown fox jumps over the lazy dog.' - a.encode(alphabet_code, message) - self.assertEqual(a, bitarray( - # t h e q u i c k - '0100 00000 111 001 00001001101 000100 1011 000011 00011010 001' - # b r o w n f o x - '0001100 1101 1000 011111 1001 001 010100 1000 0000100011 001' - # j u m p s o v e r - '0111101111 000100 000111 101001 1100 001 1000 0111100 111 1101' - # t h e l a z y - '001 0100 00000 111 001 01110 0110 00011011110 101010 001' - # d o g . 
- '01011 1000 101000 0101010')) - self.assertEqual(''.join(a.decode(alphabet_code)), message) - self.assertEqual(''.join(a.iterdecode(alphabet_code)), message) - t = decodetree(alphabet_code) - self.assertEqual(''.join(a.decode(t)), message) - self.assertEqual(''.join(a.iterdecode(t)), message) - self.check_obj(a) - -tests.append(PrefixCodeTests) - -# --------------------------- Buffer Import --------------------------------- - -class BufferImportTests(unittest.TestCase, Util): - - def test_bytes(self): - b = 100 * b'\0' - a = bitarray(buffer=b) - - info = buffer_info(a) - self.assertFalse(info['allocated']) - self.assertTrue(info['readonly']) - self.assertTrue(info['imported']) - - self.assertRaises(TypeError, a.setall, 1) - self.assertRaises(TypeError, a.clear) - self.assertEqual(a, zeros(800)) - self.check_obj(a) - - def test_bytearray(self): - b = bytearray(100 * [0]) - a = bitarray(buffer=b, endian='little') - - info = buffer_info(a) - self.assertFalse(info['allocated']) - self.assertFalse(info['readonly']) - self.assertTrue(info['imported']) - - a[0] = 1 - self.assertEqual(b[0], 1) - a[7] = 1 - self.assertEqual(b[0], 129) - a[:] = 1 - self.assertEqual(b, bytearray(100 * [255])) - self.assertRaises(BufferError, a.pop) - a[8:16] = bitarray('10000010', endian='big') - self.assertEqual(b, bytearray([255, 65] + 98 * [255])) - self.assertEqual(a.tobytes(), bytes(b)) - for n in 7, 9: - self.assertRaises(BufferError, a.__setitem__, slice(8, 16), - bitarray(n)) - b[1] = b[2] = 255 - self.assertEqual(b, bytearray(100 * [255])) - self.assertEqual(a, 800 * bitarray('1')) - self.check_obj(a) - - def test_array(self): - if not is_py3k: # Python 2's array cannot be used as buffer - return - from array import array - - a = array('B', [0, 255, 64]) - b = bitarray(None, 'little', a) - self.assertEqual(b, bitarray('00000000 11111111 00000010')) - a[1] = 32 - self.assertEqual(b, bitarray('00000000 00000100 00000010')) - b[3] = 1 - self.assertEqual(a.tolist(), [8, 32, 64]) - 
self.check_obj(b) - - def test_bitarray(self): - a = urandom(10000) - b = bitarray(buffer=a) - # a and b are two distict bitarrays that share the same buffer now - self.assertFalse(a is b) - - a_info = buffer_info(a) - self.assertFalse(a_info['imported']) - self.assertEqual(a_info['exports'], 1) - b_info = buffer_info(b) - self.assertTrue(b_info['imported']) - self.assertEqual(b_info['exports'], 0) - # buffer address is the same! - self.assertEqual(a_info['address'], - b_info['address']) - - self.assertFalse(a is b) - self.assertEqual(a, b) - b[437:461] = 0 - self.assertEqual(a, b) - a[327:350] = 1 - self.assertEqual(a, b) - b[101:1187] <<= 79 - self.assertEqual(a, b) - a[100:9800:5] = 1 - self.assertEqual(a, b) - - self.assertRaisesMessage( - BufferError, - "cannot resize bitarray that is exporting buffers", - a.pop) - self.assertRaisesMessage( - BufferError, - "cannot resize imported buffer", - b.pop) - self.check_obj(a) - self.check_obj(b) - - def test_bitarray_range(self): - for n in range(100): - a = urandom(n, self.random_endian()) - b = bitarray(buffer=a, endian=a.endian()) - # an imported buffer will always have a multiple of 8 bits - self.assertEqual(len(b) % 8, 0) - self.assertEQUAL(b[:n], a) - self.check_obj(a) - self.check_obj(b) - - def test_bitarray_chain(self): - a = urandom(64) - d = {0: a} - for n in range(1, 100): - d[n] = bitarray(buffer=d[n - 1]) - - self.assertEqual(d[99], a) - a.setall(0) - self.assertEqual(d[99], zeros(64)) - a[:] = 1 - self.assertTrue(d[99].all()) - for c in d.values(): - self.check_obj(c) - - def test_frozenbitarray(self): - a = frozenbitarray('10011011 011') - self.assertTrue(buffer_info(a, 'readonly')) - self.check_obj(a) - - b = bitarray(buffer=a) - self.assertTrue(buffer_info(b, 'readonly')) # also readonly - self.assertRaises(TypeError, b.__setitem__, 1, 0) - self.check_obj(b) - - def test_invalid_buffer(self): - # these objects do not expose a buffer - for arg in (123, 1.23, Ellipsis, [1, 2, 3], (1, 2, 3), {1: 2}, - 
set([1, 2, 3]),): - self.assertRaises(TypeError, bitarray, buffer=arg) - - def test_del_import_object(self): - b = bytearray(100 * [0]) - a = bitarray(buffer=b) - del b - self.assertEqual(a, zeros(800)) - a.setall(1) - self.assertTrue(a.all()) - self.check_obj(a) - - def test_readonly_errors(self): - a = bitarray(buffer=b'A') - info = buffer_info(a) - self.assertTrue(info['readonly']) - self.assertTrue(info['imported']) - - self.assertRaises(TypeError, a.append, True) - self.assertRaises(TypeError, a.bytereverse) - self.assertRaises(TypeError, a.clear) - self.assertRaises(TypeError, a.encode, {'a': bitarray('0')}, 'aa') - self.assertRaises(TypeError, a.extend, [0, 1, 0]) - self.assertRaises(TypeError, a.fill) - self.assertRaises(TypeError, a.frombytes, b'') - self.assertRaises(TypeError, a.insert, 0, 1) - self.assertRaises(TypeError, a.invert) - self.assertRaises(TypeError, a.pack, b'\0\0\xff') - self.assertRaises(TypeError, a.pop) - self.assertRaises(TypeError, a.remove, 1) - self.assertRaises(TypeError, a.reverse) - self.assertRaises(TypeError, a.setall, 0) - self.assertRaises(TypeError, a.sort) - self.assertRaises(TypeError, a.__delitem__, 0) - self.assertRaises(TypeError, a.__delitem__, slice(None, None, 2)) - self.assertRaises(TypeError, a.__setitem__, 0, 0) - self.assertRaises(TypeError, a.__iadd__, bitarray('010')) - self.assertRaises(TypeError, a.__ior__, bitarray('100')) - self.assertRaises(TypeError, a.__ixor__, bitarray('110')) - self.assertRaises(TypeError, a.__irshift__, 1) - self.assertRaises(TypeError, a.__ilshift__, 1) - self.check_obj(a) - - def test_resize_errors(self): - a = bitarray(buffer=bytearray([123])) - info = buffer_info(a) - self.assertFalse(info['readonly']) - self.assertTrue(info['imported']) - - self.assertRaises(BufferError, a.append, True) - self.assertRaises(BufferError, a.clear) - self.assertRaises(BufferError, a.encode, {'a': bitarray('0')}, 'aa') - self.assertRaises(BufferError, a.extend, [0, 1, 0]) - 
self.assertRaises(BufferError, a.fill) - self.assertRaises(BufferError, a.frombytes, b'a') - self.assertRaises(BufferError, a.insert, 0, 1) - self.assertRaises(BufferError, a.pack, b'\0\0\xff') - self.assertRaises(BufferError, a.pop) - self.assertRaises(BufferError, a.remove, 1) - self.assertRaises(BufferError, a.__delitem__, 0) - self.check_obj(a) - -tests.append(BufferImportTests) - -# --------------------------- Buffer Export --------------------------------- - -class BufferExportTests(unittest.TestCase, Util): - - def test_read_simple(self): - a = bitarray('01000001 01000010 01000011', endian='big') - v = memoryview(a) - self.assertFalse(v.readonly) - self.assertEqual(buffer_info(a, 'exports'), 1) - self.assertEqual(len(v), 3) - self.assertEqual(v[0], 65 if is_py3k else 'A') - self.assertEqual(v.tobytes(), b'ABC') - a[13] = 1 - self.assertEqual(v.tobytes(), b'AFC') - - w = memoryview(a) # a second buffer export - self.assertEqual(buffer_info(a, 'exports'), 2) - self.check_obj(a) - - def test_many_exports(self): - a = bitarray('01000111 01011111') - d = {} # put bitarrays in dict to key object around - for n in range(1, 20): - d[n] = bitarray(buffer=a) - self.assertEqual(buffer_info(a, 'exports'), n) - self.assertEqual(len(d[n]), 16) - self.check_obj(a) - - def test_range(self): - for n in range(100): - a = bitarray(n) - v = memoryview(a) - self.assertEqual(len(v), bits2bytes(len(a))) - info = buffer_info(a) - self.assertFalse(info['readonly']) - self.assertFalse(info['imported']) - self.assertEqual(info['exports'], 1) - self.check_obj(a) - - def test_read_random(self): - a = bitarray() - a.frombytes(os.urandom(100)) - v = memoryview(a) - self.assertEqual(len(v), 100) - b = a[34 * 8 : 67 * 8] - self.assertEqual(v[34:67].tobytes(), b.tobytes()) - self.assertEqual(v.tobytes(), a.tobytes()) - self.check_obj(a) - - def test_resize(self): - a = bitarray('011', endian='big') - v = memoryview(a) - self.assertFalse(v.readonly) - self.assertRaises(BufferError, a.append, 
1) - self.assertRaises(BufferError, a.clear) - self.assertRaises(BufferError, a.encode, {'a': bitarray('0')}, 'aa') - self.assertRaises(BufferError, a.extend, '0') - self.assertRaises(BufferError, a.fill) - self.assertRaises(BufferError, a.frombytes, b'\0') - self.assertRaises(BufferError, a.insert, 0, 1) - self.assertRaises(BufferError, a.pack, b'\0') - self.assertRaises(BufferError, a.pop) - self.assertRaises(BufferError, a.remove, 1) - self.assertRaises(BufferError, a.__delitem__, slice(0, 8)) - self.assertEqual(v.tobytes(), a.tobytes()) - self.check_obj(a) - - def test_frozenbitarray(self): - a = frozenbitarray(40) - v = memoryview(a) - self.assertTrue(v.readonly) - self.assertEqual(len(v), 5) - self.assertEqual(v.tobytes(), a.tobytes()) - self.check_obj(a) - - def test_write(self): - a = bitarray(8000) - a.setall(0) - v = memoryview(a) - self.assertFalse(v.readonly) - v[500] = 255 if is_py3k else '\xff' - self.assertEqual(a[3999:4009], bitarray('0111111110')) - a[4003] = 0 - self.assertEqual(a[3999:4009], bitarray('0111011110')) - v[301:304] = b'ABC' - self.assertEqual(a[300 * 8 : 305 * 8].tobytes(), b'\x00ABC\x00') - self.check_obj(a) - - def test_write_py3(self): - if not is_py3k: - return - a = bitarray(40) - a.setall(0) - m = memoryview(a) - v = m[1:4] - v[0] = 65 - v[1] = 66 - v[2] = 67 - self.assertEqual(a.tobytes(), b'\x00ABC\x00') - self.check_obj(a) - -tests.append(BufferExportTests) - -# --------------------------------------------------------------------------- - -class TestsFrozenbitarray(unittest.TestCase, Util): - - def test_init(self): - a = frozenbitarray('110') - self.assertEqual(a, bitarray('110')) - self.assertEqual(a.to01(), '110') - self.assertIsInstance(a, bitarray) - self.assertIsType(a, 'frozenbitarray') - self.assertTrue(buffer_info(a, 'readonly')) - self.check_obj(a) - - a = frozenbitarray(bitarray()) - self.assertEQUAL(a, frozenbitarray()) - self.assertIsType(a, 'frozenbitarray') - - for endian in 'big', 'little': - a = 
frozenbitarray(0, endian) - self.assertEqual(a.endian(), endian) - self.assertIsType(a, 'frozenbitarray') - - a = frozenbitarray(bitarray(0, endian)) - self.assertEqual(a.endian(), endian) - self.assertIsType(a, 'frozenbitarray') - - def test_methods(self): - # test a few methods which do not raise the TypeError - a = frozenbitarray('1101100') - self.assertEqual(a[2], 0) - self.assertEqual(a[:4].to01(), '1101') - self.assertEqual(a.count(), 4) - self.assertEqual(a.index(0), 2) - b = a.copy() - self.assertEqual(b, a) - self.assertIsType(b, 'frozenbitarray') - self.assertEqual(len(b), 7) - self.assertFalse(b.all()) - self.assertTrue(b.any()) - - def test_init_from_bitarray(self): - for a in self.randombitarrays(): - b = frozenbitarray(a) - self.assertFalse(b is a) - self.assertEQUAL(b, a) - c = frozenbitarray(b) - self.assertFalse(c is b) - self.assertEQUAL(c, b) - self.assertEqual(hash(c), hash(b)) - - def test_init_from_misc(self): - tup = 0, 1, 0, 1, 1, False, True - for obj in list(tup), tup, iter(tup), bitarray(tup): - a = frozenbitarray(obj) - self.assertEqual(a, bitarray(tup)) - - def test_repr(self): - a = frozenbitarray() - self.assertEqual(repr(a), "frozenbitarray()") - self.assertEqual(str(a), "frozenbitarray()") - a = frozenbitarray('10111') - self.assertEqual(repr(a), "frozenbitarray('10111')") - self.assertEqual(str(a), "frozenbitarray('10111')") - - def test_immutable(self): - a = frozenbitarray('111') - self.assertRaises(TypeError, a.append, True) - self.assertRaises(TypeError, a.bytereverse) - self.assertRaises(TypeError, a.clear) - self.assertRaises(TypeError, a.encode, {'a': bitarray('0')}, 'aa') - self.assertRaises(TypeError, a.extend, [0, 1, 0]) - self.assertRaises(TypeError, a.fill) - self.assertRaises(TypeError, a.frombytes, b'') - self.assertRaises(TypeError, a.insert, 0, 1) - self.assertRaises(TypeError, a.invert) - self.assertRaises(TypeError, a.pack, b'\0\0\xff') - self.assertRaises(TypeError, a.pop) - self.assertRaises(TypeError, a.remove, 
1) - self.assertRaises(TypeError, a.reverse) - self.assertRaises(TypeError, a.setall, 0) - self.assertRaises(TypeError, a.sort) - self.assertRaises(TypeError, a.__delitem__, 0) - self.assertRaises(TypeError, a.__delitem__, slice(None, None, 2)) - self.assertRaises(TypeError, a.__setitem__, 0, 0) - self.assertRaises(TypeError, a.__iadd__, bitarray('010')) - self.assertRaises(TypeError, a.__ior__, bitarray('100')) - self.assertRaises(TypeError, a.__ixor__, bitarray('110')) - self.assertRaises(TypeError, a.__irshift__, 1) - self.assertRaises(TypeError, a.__ilshift__, 1) - self.check_obj(a) - - def test_freeze(self): - # not so much a test for frozenbitarray, but how it is initialized - a = bitarray(78) - self.assertFalse(buffer_info(a, 'readonly')) # not readonly - a._freeze() - self.assertTrue(buffer_info(a, 'readonly')) # readonly - - def test_memoryview(self): - a = frozenbitarray('01000001 01000010', 'big') - v = memoryview(a) - self.assertEqual(v.tobytes(), b'AB') - self.assertRaises(TypeError, v.__setitem__, 0, 255) - - def test_buffer_import_readonly(self): - b = bytes(bytearray([15, 95, 128])) - a = frozenbitarray(buffer=b, endian='big') - self.assertEQUAL(a, bitarray('00001111 01011111 10000000', 'big')) - info = buffer_info(a) - self.assertTrue(info['readonly']) - self.assertTrue(info['imported']) - - def test_buffer_import_writable(self): - c = bytearray([15, 95]) - self.assertRaisesMessage( - TypeError, - "cannot import writable buffer into frozenbitarray", - frozenbitarray, buffer=c) - - def test_set(self): - a = frozenbitarray('1') - b = frozenbitarray('11') - c = frozenbitarray('01') - d = frozenbitarray('011') - s = set([a, b, c, d]) - self.assertEqual(len(s), 4) - self.assertTrue(d in s) - self.assertFalse(frozenbitarray('0') in s) - - def test_dictkey(self): - a = frozenbitarray('01') - b = frozenbitarray('1001') - d = {a: 123, b: 345} - self.assertEqual(d[frozenbitarray('01')], 123) - self.assertEqual(d[frozenbitarray(b)], 345) - - def 
test_dictkey2(self): # taken slightly modified from issue #74 - a1 = frozenbitarray([True, False]) - a2 = frozenbitarray([False, False]) - dct = {a1: "one", a2: "two"} - a3 = frozenbitarray([True, False]) - self.assertEqual(a3, a1) - self.assertEqual(dct[a3], 'one') - - def test_mix(self): - a = bitarray('110') - b = frozenbitarray('0011') - self.assertEqual(a + b, bitarray('1100011')) - a.extend(b) - self.assertEqual(a, bitarray('1100011')) - - def test_hash_endianness_simple(self): - a = frozenbitarray('1', 'big') - b = frozenbitarray('1', 'little') - self.assertEqual(a, b) - self.assertEqual(hash(a), hash(b)) - d = {a: 'value'} - self.assertEqual(d[b], 'value') - self.assertEqual(len(set([a, b])), 1) - - def test_hash_endianness_random(self): - s = set() - n = 0 - for a in self.randombitarrays(): - a = frozenbitarray(a) - b = frozenbitarray(a, self.other_endian(a.endian())) - self.assertEqual(a, b) - self.assertNotEqual(a.endian(), b.endian()) - self.assertEqual(hash(a), hash(b)) - d = {a: 1, b: 2} - self.assertEqual(len(d), 1) - s.add(a) - s.add(b) - n += 1 - - self.assertEqual(len(s), n) - - def test_pickle(self): - for a in self.randombitarrays(): - f = frozenbitarray(a) - g = pickle.loads(pickle.dumps(f)) - self.assertEqual(f, g) - self.assertEqual(f.endian(), g.endian()) - self.assertTrue(str(g).startswith('frozenbitarray')) - self.check_obj(a) - -tests.append(TestsFrozenbitarray) - -# --------------------------------------------------------------------------- - -def run(verbosity=1, repeat=1): - import bitarray.test_util as btu - tests.extend(btu.tests) - - print('bitarray is installed in: %s' % os.path.dirname(__file__)) - print('bitarray version: %s' % __version__) - print('sys.version: %s' % sys.version) - print('sys.prefix: %s' % sys.prefix) - print('pointer size: %d bit' % (8 * SYSINFO[0])) - print('sizeof(size_t): %d' % SYSINFO[1]) - print('sizeof(bitarrayobject): %d' % SYSINFO[2]) - print('PY_UINT64_T defined: %s' % SYSINFO[5]) - print('DEBUG: %s' % 
DEBUG) - suite = unittest.TestSuite() - for cls in tests: - for _ in range(repeat): - suite.addTest(unittest.makeSuite(cls)) - - runner = unittest.TextTestRunner(verbosity=verbosity) - return runner.run(suite) - - -if __name__ == '__main__': - run() diff --git a/shell/ext-py/bitarray-2.3.0/bitarray/test_data.pickle b/shell/ext-py/bitarray-2.3.0/bitarray/test_data.pickle deleted file mode 100644 index fb8d8aba3bfc0c1d3da43ede312bbe26c21d8a1d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 356 zcmYk0$xg#C7zQ1iv{3f2?>jA9)#U|{`UXDb*akH;QU#hQ96++vlb@Rqmo&HkF#pUq zQwGrrr4Sud3#n;r_XP5d!4x~S1IaGhD zLVSg%SpESs9Gf{A-{S1EEqj`M!$IUaeGc=E z?=@#aa#U_Pww!2AU8lp@B*@1q&a18q%cbVZaP3@ipPQVUyKYO@o#kHBG_>YimxsbP R= start and i < stop] - ref = max(filtered) if filtered else -1 - try: - res = rindex(a, 1, start, stop) - except ValueError: - res = -1 - self.assertEqual(res, ref) - - def test_many_set(self): - for _ in range(10): - n = randint(1, 10000) - v = randint(0, 1) - a = bitarray(n) - a.setall(not v) - lst = [randint(0, n - 1) for _ in range(100)] - for i in lst: - a[i] = v - self.assertEqual(rindex(a, v), max(lst)) - - def test_one_set(self): - for _ in range(10): - N = randint(1, 10000) - a = bitarray(N) - a.setall(0) - a[randint(0, N - 1)] = 1 - self.assertEqual(rindex(a), a.index(1)) - -tests.append(TestsRIndex) - -# --------------------------------------------------------------------------- - -class TestsStrip(unittest.TestCase, Util): - - def test_simple(self): - self.assertRaises(TypeError, strip, '0110') - self.assertRaises(TypeError, strip, bitarray(), 123) - self.assertRaises(ValueError, strip, bitarray(), 'up') - for default_endian in 'big', 'little': - _set_default_endian(default_endian) - a = bitarray('00010110000') - self.assertEQUAL(strip(a), bitarray('0001011')) - self.assertEQUAL(strip(a, 'left'), bitarray('10110000')) - self.assertEQUAL(strip(a, 'both'), bitarray('1011')) - b = frozenbitarray('00010110000') - c = strip(b, 
'both') - self.assertEqual(c, bitarray('1011')) - self.assertIsType(c, 'frozenbitarray') - - def test_zeros(self): - for n in range(10): - for mode in 'left', 'right', 'both': - a = zeros(n) - c = strip(a, mode) - self.assertIsType(c, 'bitarray') - self.assertEqual(c, bitarray()) - self.assertEqual(a, zeros(n)) - - b = frozenbitarray(a) - c = strip(b, mode) - self.assertIsType(c, 'frozenbitarray') - self.assertEqual(c, bitarray()) - - def test_random(self): - for a in self.randombitarrays(): - b = a.copy() - f = frozenbitarray(a) - s = a.to01() - for mode, res in [ - ('left', bitarray(s.lstrip('0'), a.endian())), - ('right', bitarray(s.rstrip('0'), a.endian())), - ('both', bitarray(s.strip('0'), a.endian())), - ]: - c = strip(a, mode) - self.assertEQUAL(c, res) - self.assertIsType(c, 'bitarray') - self.assertEQUAL(a, b) - - c = strip(f, mode) - self.assertEQUAL(c, res) - self.assertIsType(c, 'frozenbitarray') - self.assertEQUAL(f, b) - - def test_one_set(self): - for _ in range(10): - n = randint(1, 10000) - a = bitarray(n) - a.setall(0) - a[randint(0, n - 1)] = 1 - self.assertEqual(strip(a, 'both'), bitarray('1')) - self.assertEqual(len(a), n) - -tests.append(TestsStrip) - -# --------------------------------------------------------------------------- - -class TestsCount_N(unittest.TestCase, Util): - - @staticmethod - def count_n(a, n): - "return lowest index i for which a[:i].count() == n" - i, j = n, a.count(1, 0, n) - while j < n: - j += a[i] - i += 1 - return i - - def check_result(self, a, n, i): - self.assertEqual(a.count(1, 0, i), n) - if i == 0: - self.assertEqual(n, 0) - else: - self.assertTrue(a[i - 1]) - - def test_empty(self): - a = bitarray() - self.assertEqual(count_n(a, 0), 0) - self.assertRaises(ValueError, count_n, a, 1) - self.assertRaises(TypeError, count_n, '', 0) - self.assertRaises(TypeError, count_n, a, 7.0) - self.assertRaises(TypeError, count_n, a, 0, 0) - - def test_simple(self): - a = 
bitarray('111110111110111110111110011110111110111110111000') - b = a.copy() - self.assertEqual(len(a), 48) - self.assertEqual(a.count(), 37) - self.assertEqual(count_n(a, 0), 0) - self.assertEqual(count_n(a, 20), 23) - self.assertEqual(count_n(a, 37), 45) - self.assertRaisesMessage(ValueError, "non-negative integer expected", - count_n, a, -1) # n < 0 - self.assertRaisesMessage(ValueError, "n larger than bitarray size", - count_n, a, 49) # n > len(a) - self.assertRaisesMessage(ValueError, "n exceeds total count", - count_n, a, 38) # n > a.count() - for n in range(0, 38): - i = count_n(a, n) - self.check_result(a, n, i) - self.assertEqual(a[:i].count(), n) - self.assertEqual(i, self.count_n(a, n)) - self.assertEQUAL(a, b) - - def test_frozen(self): - a = frozenbitarray('001111101111101111101111100111100') - self.assertEqual(len(a), 33) - self.assertEqual(a.count(), 24) - self.assertEqual(count_n(a, 0), 0) - self.assertEqual(count_n(a, 10), 13) - self.assertEqual(count_n(a, 24), 31) - self.assertRaises(ValueError, count_n, a, -1) # n < 0 - self.assertRaises(ValueError, count_n, a, 25) # n > a.count() - self.assertRaises(ValueError, count_n, a, 34) # n > len(a) - for n in range(0, 25): - self.check_result(a, n, count_n(a, n)) - - def test_ones(self): - n = randint(1, 100000) - a = bitarray(n) - a.setall(1) - self.assertEqual(count_n(a, n), n) - self.assertRaises(ValueError, count_n, a, n + 1) - for _ in range(20): - i = randint(0, n) - self.assertEqual(count_n(a, i), i) - - def test_one_set(self): - n = randint(1, 100000) - a = bitarray(n) - a.setall(0) - self.assertEqual(count_n(a, 0), 0) - self.assertRaises(ValueError, count_n, a, 1) - for _ in range(20): - a.setall(0) - i = randint(0, n - 1) - a[i] = 1 - self.assertEqual(count_n(a, 1), i + 1) - self.assertRaises(ValueError, count_n, a, 2) - - def test_large(self): - for N in list(range(50)) + [randint(50, 250000) for _ in range(10)]: - a = bitarray(N) - v = randint(0, 1) - a.setall(not v) - for _ in 
range(randint(0, min(N, 100))): - a[randint(0, N - 1)] = v - tc = a.count() # total count - self.check_result(a, tc, count_n(a, tc)) - self.assertRaises(ValueError, count_n, a, tc + 1) - for _ in range(20): - n = randint(0, tc) - self.check_result(a, n, count_n(a, n)) - - def test_random(self): - for a in self.randombitarrays(): - n = a.count() // 2 - i = count_n(a, n) - self.check_result(a, n, i) - -tests.append(TestsCount_N) - -# --------------------------------------------------------------------------- - -class TestsBitwiseCount(unittest.TestCase, Util): - - def test_count_byte(self): - ones = bitarray(8) - ones.setall(1) - zeros = bitarray(8) - zeros.setall(0) - for i in range(0, 256): - a = bitarray() - a.frombytes(bytes(bytearray([i]))) - cnt = a.count() - self.assertEqual(count_and(a, zeros), 0) - self.assertEqual(count_and(a, ones), cnt) - self.assertEqual(count_and(a, a), cnt) - self.assertEqual(count_or(a, zeros), cnt) - self.assertEqual(count_or(a, ones), 8) - self.assertEqual(count_or(a, a), cnt) - self.assertEqual(count_xor(a, zeros), cnt) - self.assertEqual(count_xor(a, ones), 8 - cnt) - self.assertEqual(count_xor(a, a), 0) - - def test_bit_count1(self): - a = bitarray('001111') - aa = a.copy() - b = bitarray('010011') - bb = b.copy() - self.assertEqual(count_and(a, b), 2) - self.assertEqual(count_or(a, b), 5) - self.assertEqual(count_xor(a, b), 3) - for f in count_and, count_or, count_xor: - # not two arguments - self.assertRaises(TypeError, f) - self.assertRaises(TypeError, f, a) - self.assertRaises(TypeError, f, a, b, 3) - # wrong argument types - self.assertRaises(TypeError, f, a, '') - self.assertRaises(TypeError, f, '1', b) - self.assertRaises(TypeError, f, a, 4) - self.assertEQUAL(a, aa) - self.assertEQUAL(b, bb) - - b.append(1) - for f in count_and, count_or, count_xor: - self.assertRaises(ValueError, f, a, b) - self.assertRaises(ValueError, f, - bitarray('110', 'big'), - bitarray('101', 'little')) - - def test_bit_count_frozen(self): - a = 
frozenbitarray('001111') - b = frozenbitarray('010011') - self.assertEqual(count_and(a, b), 2) - self.assertEqual(count_or(a, b), 5) - self.assertEqual(count_xor(a, b), 3) - - def test_bit_count_random(self): - for n in list(range(50)) + [randint(1000, 2000)]: - a = urandom(n) - b = urandom(n) - self.assertEqual(count_and(a, b), (a & b).count()) - self.assertEqual(count_or(a, b), (a | b).count()) - self.assertEqual(count_xor(a, b), (a ^ b).count()) - -tests.append(TestsBitwiseCount) - -# --------------------------------------------------------------------------- - -class TestsSubset(unittest.TestCase, Util): - - def test_basic(self): - a = frozenbitarray('0101') - b = bitarray('0111') - self.assertTrue(subset(a, b)) - self.assertFalse(subset(b, a)) - self.assertRaises(TypeError, subset) - self.assertRaises(TypeError, subset, a, '') - self.assertRaises(TypeError, subset, '1', b) - self.assertRaises(TypeError, subset, a, 4) - b.append(1) - self.assertRaises(ValueError, subset, a, b) - - def subset_simple(self, a, b): - return (a & b).count() == a.count() - - def test_True(self): - for a, b in [('', ''), ('0', '1'), ('0', '0'), ('1', '1'), - ('000', '111'), ('0101', '0111'), - ('000010111', '010011111')]: - a, b = bitarray(a), bitarray(b) - self.assertTrue(subset(a, b) is True) - self.assertTrue(self.subset_simple(a, b) is True) - - def test_False(self): - for a, b in [('1', '0'), ('1101', '0111'), - ('0000101111', '0100111011')]: - a, b = bitarray(a), bitarray(b) - self.assertTrue(subset(a, b) is False) - self.assertTrue(self.subset_simple(a, b) is False) - - def test_random(self): - for a in self.randombitarrays(start=1): - b = a.copy() - # we set one random bit in b to 1, so a is always a subset of b - b[randint(0, len(a) - 1)] = 1 - self.assertTrue(subset(a, b)) - # but b in not always a subset of a - self.assertEqual(subset(b, a), self.subset_simple(b, a)) - # we set all bits in a, which ensures that b is a subset of a - a.setall(1) - self.assertTrue(subset(b, 
a)) - -tests.append(TestsSubset) - -# --------------------------------------------------------------------------- - -class TestsParity(unittest.TestCase, Util): - - def test_bitarray(self): - a = bitarray() - self.assertBitEqual(parity(a), 0) - par = False - for _ in range(100): - self.assertEqual(parity(a), par) - a.append(1) - par = not par - - def test_unused_bits(self): - a = bitarray(1) - a.setall(1) - self.assertTrue(parity(a)) - - def test_frozenbitarray(self): - self.assertBitEqual(parity(frozenbitarray()), 0) - self.assertBitEqual(parity(frozenbitarray('0010011')), 1) - self.assertBitEqual(parity(frozenbitarray('10100110')), 0) - - def test_wrong_args(self): - self.assertRaises(TypeError, parity, '') - self.assertRaises(TypeError, bitarray(), 1) - - def test_byte(self): - for i in range(256): - a = bitarray() - a.frombytes(bytes(bytearray([i]))) - self.assertEqual(parity(a), a.count() % 2) - - def test_random(self): - for a in self.randombitarrays(): - self.assertEqual(parity(a), a.count() % 2) - -tests.append(TestsParity) - -# --------------------------------------------------------------------------- - -class TestsHexlify(unittest.TestCase, Util): - - def test_ba2hex(self): - self.assertEqual(ba2hex(bitarray(0, 'big')), '') - self.assertEqual(ba2hex(bitarray('1110', 'big')), 'e') - self.assertEqual(ba2hex(bitarray('1110', 'little')), '7') - self.assertEqual(ba2hex(bitarray('0000 0001', 'big')), '01') - self.assertEqual(ba2hex(bitarray('1000 0000', 'big')), '80') - self.assertEqual(ba2hex(bitarray('0000 0001', 'little')), '08') - self.assertEqual(ba2hex(bitarray('1000 0000', 'little')), '10') - self.assertEqual(ba2hex(frozenbitarray('1100 0111', 'big')), 'c7') - # length not multiple of 4 - self.assertRaises(ValueError, ba2hex, bitarray('10')) - self.assertRaises(TypeError, ba2hex, '101') - - c = ba2hex(bitarray('1101', 'big')) - self.assertIsInstance(c, str) - - for n in range(7): - a = bitarray(n * '1111', 'big') - b = a.copy() - 
self.assertEqual(ba2hex(a), n * 'f') - # ensure original object wasn't altered - self.assertEQUAL(a, b) - - def test_hex2ba(self): - _set_default_endian('big') - self.assertEqual(hex2ba(''), bitarray()) - for c in 'e', 'E', b'e', b'E', u'e', u'E': - a = hex2ba(c) - self.assertEqual(a.to01(), '1110') - self.assertEqual(a.endian(), 'big') - self.assertEQUAL(hex2ba('01'), bitarray('0000 0001', 'big')) - self.assertEQUAL(hex2ba('08', 'little'), - bitarray('0000 0001', 'little')) - self.assertEQUAL(hex2ba('aD'), bitarray('1010 1101', 'big')) - self.assertEQUAL(hex2ba(b'10aF'), - bitarray('0001 0000 1010 1111', 'big')) - self.assertEQUAL(hex2ba(b'10aF', 'little'), - bitarray('1000 0000 0101 1111', 'little')) - - def test_hex2ba_errors(self): - self.assertRaises(TypeError, hex2ba, 0) - - for endian in 'little', 'big': - _set_default_endian(endian) - self.assertRaises(ValueError, hex2ba, '01a7g89') - self.assertRaises(UnicodeEncodeError, hex2ba, u'10\u20ac') - # check for NUL bytes - for b in b'\0', b'\0f', b'f\0', b'\0ff', b'f\0f', b'ff\0': - self.assertRaises(ValueError, hex2ba, b) - - def test_explicit(self): - data = [ # little big - ('', '', ''), - ('1000', '1', '8'), - ('1000 1100', '13', '8c'), - ('1000 1100 1110', '137', '8ce'), - ('1000 1100 1110 1111' , '137f', '8cef'), - ('1000 1100 1110 1111 0100', '137f2', '8cef4'), - ] - for bs, hex_le, hex_be in data: - a_be = bitarray(bs, 'big') - a_le = bitarray(bs, 'little') - self.assertEQUAL(hex2ba(hex_be, 'big'), a_be) - self.assertEQUAL(hex2ba(hex_le, 'little'), a_le) - self.assertEqual(ba2hex(a_be), hex_be) - self.assertEqual(ba2hex(a_le), hex_le) - - def test_round_trip(self): - s = ''.join(choice(hexdigits) for _ in range(randint(20, 100))) - for default_endian in 'big', 'little': - _set_default_endian(default_endian) - a = hex2ba(s) - self.check_obj(a) - self.assertEqual(len(a) % 4, 0) - self.assertEqual(a.endian(), default_endian) - t = ba2hex(a) - self.assertEqual(t, s.lower()) - b = hex2ba(t, default_endian) - 
self.assertEQUAL(a, b) - - def test_binascii(self): - a = urandom(800, 'big') - s = binascii.hexlify(a.tobytes()).decode() - self.assertEqual(ba2hex(a), s) - b = bitarray(endian='big') - b.frombytes(binascii.unhexlify(s)) - self.assertEQUAL(hex2ba(s, 'big'), b) - -tests.append(TestsHexlify) - -# --------------------------------------------------------------------------- - -class TestsBase(unittest.TestCase, Util): - - def test_ba2base(self): - c = ba2base(16, bitarray('1101', 'big')) - self.assertIsInstance(c, str) - - def test_base2ba(self): - _set_default_endian('big') - for c in 'e', 'E', b'e', b'E', u'e', u'E': - a = base2ba(16, c) - self.assertEqual(a.to01(), '1110') - self.assertEqual(a.endian(), 'big') - - def test_explicit(self): - data = [ # n little big - ('', 2, '', ''), - ('1 0 1', 2, '101', '101'), - ('11 01 00', 4, '320', '310'), - ('111 001', 8, '74', '71'), - ('1111 0001', 16, 'f8', 'f1'), - ('11111 00001', 32, '7Q', '7B'), - ('111111 000001', 64, '/g', '/B'), - ] - for bs, n, s_le, s_be in data: - a_le = bitarray(bs, 'little') - a_be = bitarray(bs, 'big') - self.assertEQUAL(base2ba(n, s_le, 'little'), a_le) - self.assertEQUAL(base2ba(n, s_be, 'big'), a_be) - self.assertEqual(ba2base(n, a_le), s_le) - self.assertEqual(ba2base(n, a_be), s_be) - - def test_empty(self): - for n in 2, 4, 8, 16, 32, 64: - a = base2ba(n, '') - self.assertEqual(a, bitarray()) - self.assertEqual(ba2base(n, a), '') - - def test_upper(self): - self.assertEqual(base2ba(16, 'F'), bitarray('1111')) - - def test_invalid_characters(self): - for n, s in ((2, '2'), (4, '4'), (8, '8'), (16, 'g'), (32, '8'), - (32, '1'), (32, 'a'), (64, '-'), (64, '_')): - self.assertRaises(ValueError, base2ba, n, s) - - def test_invalid_args(self): - a = bitarray() - self.assertRaises(TypeError, ba2base, None, a) - self.assertRaises(TypeError, base2ba, None, '') - for i in range(-10, 260): - if i in (2, 4, 8, 16, 32, 64): - continue - self.assertRaises(ValueError, ba2base, i, a) - 
self.assertRaises(ValueError, base2ba, i, '') - - def test_binary(self): - a = base2ba(2, '1011') - self.assertEqual(a, bitarray('1011')) - self.assertEqual(ba2base(2, a), '1011') - - for a in self.randombitarrays(): - s = ba2base(2, a) - self.assertEqual(s, a.to01()) - self.assertEQUAL(base2ba(2, s, a.endian()), a) - - def test_quaternary(self): - a = base2ba(4, '0123', 'big') - self.assertEqual(a, bitarray('00 01 10 11')) - self.assertEqual(ba2base(4, a), '0123') - - def test_octal(self): - a = base2ba(8, '0147', 'big') - self.assertEqual(a, bitarray('000 001 100 111')) - self.assertEqual(ba2base(8, a), '0147') - - def test_hexadecimal(self): - a = base2ba(16, 'F61', 'big') - self.assertEqual(a, bitarray('1111 0110 0001')) - self.assertEqual(ba2base(16, a), 'f61') - - for n in range(50): - s = ''.join(choice(hexdigits) for _ in range(n)) - for endian in 'big', 'little': - a = base2ba(16, s, endian) - self.assertEQUAL(a, hex2ba(s, endian)) - self.assertEqual(ba2base(16, a), ba2hex(a)) - - def test_base32(self): - a = base2ba(32, '7SH', 'big') - self.assertEqual(a, bitarray('11111 10010 00111')) - self.assertEqual(ba2base(32, a), '7SH') - - msg = os.urandom(randint(10, 100) * 5) - s = base64.b32encode(msg).decode() - a = base2ba(32, s, 'big') - self.assertEqual(a.tobytes(), msg) - self.assertEqual(ba2base(32, a), s) - - def test_base64(self): - a = base2ba(64, '/jH', 'big') - self.assertEqual(a, bitarray('111111 100011 000111')) - self.assertEqual(ba2base(64, a), '/jH') - - msg = os.urandom(randint(10, 100) * 3) - s = base64.standard_b64encode(msg).decode() - a = base2ba(64, s, 'big') - self.assertEqual(a.tobytes(), msg) - self.assertEqual(ba2base(64, a), s) - - def test_alphabets(self): - for m, n, alpabet in [ - (1, 2, '01'), - (2, 4, '0123'), - (3, 8, '01234567'), - (4, 16, '0123456789abcdef'), - (5, 32, 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567'), - (6, 64, 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' - 'abcdefghijklmnopqrstuvwxyz0123456789+/'), - ]: - self.assertEqual(1 << m, n) - 
self.assertEqual(len(alpabet), n) - for i, c in enumerate(alpabet): - for endian in 'big', 'little': - self.assertEqual(ba2int(base2ba(n, c, endian)), i) - self.assertEqual(ba2base(n, int2ba(i, m, endian)), c) - - def test_random(self): - for a in self.randombitarrays(): - for m in range(1, 7): - n = 1 << m - if len(a) % m == 0: - s = ba2base(n, a) - b = base2ba(n, s, a.endian()) - self.assertEQUAL(a, b) - self.check_obj(b) - else: - self.assertRaises(ValueError, ba2base, n, a) - - def test_random2(self): - for m in range(1, 7): - n = 1 << m - for length in range(0, 100, m): - a = urandom(length, 'little') - self.assertEQUAL(base2ba(n, ba2base(n, a), 'little'), a) - b = bitarray(a, 'big') - self.assertEQUAL(base2ba(n, ba2base(n, b), 'big'), b) - -tests.append(TestsBase) - -# --------------------------------------------------------------------------- - -class VLFTests(unittest.TestCase, Util): - - def test_explicit(self): - for s, bits in [ - (b'\x40', ''), - (b'\x30', '0'), - (b'\x38', '1'), - (b'\x00', '0000'), - (b'\x01', '0001'), - (b'\xe0\x40', '0000 1'), - (b'\x90\x02', '0000 000001'), - (b'\xb5\xa7\x18', '0101 0100111 0011'), - ]: - a = bitarray(bits) - self.assertEqual(vl_encode(a), s) - self.assertEqual(vl_decode(s), a) - - def test_encode(self): - for endian in 'big', 'little': - s = vl_encode(bitarray('001101', endian)) - self.assertIsInstance(s, bytes) - self.assertEqual(s, b'\xd3\x20') - - def test_decode_args(self): - if sys.version_info[0] == 3: - self.assertRaises(TypeError, vl_decode, 'foo') - self.assertRaises(TypeError, vl_decode, iter([b'\x40'])) - - self.assertRaises(TypeError, vl_decode, b'\x40', 'big', 3) - self.assertRaises(ValueError, vl_decode, b'\x40', 'foo') - for item in None, 2.34, Ellipsis: - self.assertRaises(TypeError, vl_decode, iter([item])) - - lst = [b'\xd3\x20', iter(b'\xd3\x20')] - if sys.version_info[0] == 3: - lst.append(iter([0xd3, 0x20])) - for s in lst: - a = vl_decode(s, endian=self.random_endian()) - 
self.assertIsInstance(a, bitarray) - self.assertEqual(a, bitarray('0011 01')) - - def test_decode_endian(self): - for endian in 'little', 'big', None: - a = vl_decode(b'\xd3\x20', endian) - self.assertEqual(a, bitarray('0011 01')) - self.assertEqual(a.endian(), - endian if endian else get_default_endian()) - - def test_decode_trailing(self): - for s, bits in [(b'\x40ABC', ''), - (b'\xe0\x40A', '00001')]: - stream = iter(s) - self.assertEqual(vl_decode(stream), bitarray(bits)) - self.assertEqual(next(stream), - b'A' if sys.version_info[0] == 2 else 65) - - def test_decode_ambiguity(self): - for s in b'\x40', b'\x4f', b'\x45': - self.assertEqual(vl_decode(iter(s)), bitarray()) - for s in b'\x1e', b'\x1f': - self.assertEqual(vl_decode(iter(s)), bitarray('111')) - - def test_decode_stream(self): - stream = iter(b'\x40\x30\x38\x40\x2c\xe0\x40\xd3\x20') - for bits in '', '0', '1', '', '11', '0000 1', '0011 01': - self.assertEqual(vl_decode(stream), bitarray(bits)) - - arrays = [urandom(randint(0, 30)) for _ in range(1000)] - stream = iter(b''.join(vl_encode(a) for a in arrays)) - for a in arrays: - self.assertEqual(vl_decode(stream), a) - - def test_decode_errors(self): - # decode empty bits - self.assertRaises(StopIteration, vl_decode, b'') - # invalid number of padding bits - for s in b'\x50', b'\x60', b'\x70': - self.assertRaises(ValueError, vl_decode, s) - self.assertRaises(ValueError, vl_decode, b'\xf0') - # high bit set, but no terminating byte - for s in b'\x80', b'\x80\x80': - self.assertRaises(StopIteration, vl_decode, s) - - def test_decode_error_message(self): - pat = re.compile(r'[\w\s,]+:\s+(\d+)') - for n in range(120): - a = None - s = bytes(bytearray([randint(0x80, 0xef) for _ in range(n)])) - try: - a = vl_decode(s) - except StopIteration as e: - m = pat.match(str(e)) - self.assertEqual(m.group(1), str(n)) - self.assertTrue(a is None) - - def test_decode_invalid_stream(self): - if sys.version_info[0] == 2: - return - N = 100 - s = iter(N * (3 * [0x80] + 
['XX']) + ['end.']) - for _ in range(N): - a = None - try: - a = vl_decode(s) - except TypeError: - pass - self.assertTrue(a is None) - self.assertEqual(next(s), 'end.') - - def test_explicit_zeros(self): - for n in range(100): - a = zeros(4 + n * 7) - s = n * b'\x80' + b'\x00' - self.assertEqual(vl_encode(a), s) - self.assertEqual(vl_decode(s), a) - - def round_trip(self, a): - s = vl_encode(a) - b = vl_decode(s) - self.check_obj(b) - self.assertEqual(a, b) - PADBITS = 3 - self.assertEqual(len(s), (len(a) + PADBITS + 6) // 7) - - head = ord(s[0]) if sys.version_info[0] == 2 else s[0] - padding = (head & 0x70) >> 4 - self.assertEqual(len(a) + padding, 7 * len(s) - PADBITS) - - def test_range(self): - for n in range(500): - self.round_trip(bitarray(n)) - - def test_large(self): - a = urandom(randint(50000, 100000)) - self.round_trip(a) - - def test_random(self): - for a in self.randombitarrays(): - self.round_trip(a) - -tests.append(VLFTests) - -# --------------------------------------------------------------------------- - -class TestsIntegerization(unittest.TestCase, Util): - - def test_ba2int(self): - self.assertEqual(ba2int(bitarray('0')), 0) - self.assertEqual(ba2int(bitarray('1')), 1) - self.assertEqual(ba2int(bitarray('00101', 'big')), 5) - self.assertEqual(ba2int(bitarray('00101', 'little')), 20) - self.assertEqual(ba2int(frozenbitarray('11')), 3) - self.assertRaises(ValueError, ba2int, bitarray()) - self.assertRaises(ValueError, ba2int, frozenbitarray()) - self.assertRaises(TypeError, ba2int, '101') - a = bitarray('111') - b = a.copy() - self.assertEqual(ba2int(a), 7) - # ensure original object wasn't altered - self.assertEQUAL(a, b) - - def test_int2ba(self): - self.assertEqual(int2ba(0), bitarray('0')) - self.assertEqual(int2ba(1), bitarray('1')) - self.assertEqual(int2ba(5), bitarray('101')) - self.assertEQUAL(int2ba(6, endian='big'), bitarray('110', 'big')) - self.assertEQUAL(int2ba(6, endian='little'), - bitarray('011', 'little')) - 
self.assertRaises(TypeError, int2ba, 1.0) - self.assertRaises(TypeError, int2ba, 1, 3.0) - self.assertRaises(ValueError, int2ba, 1, 0) - self.assertRaises(TypeError, int2ba, 1, 10, 123) - self.assertRaises(ValueError, int2ba, 1, 10, 'asd') - # signed integer requires length - self.assertRaises(TypeError, int2ba, 100, signed=True) - - def test_signed(self): - for s, i in [ - ('0', 0), - ('1', -1), - ('00', 0), - ('10', 1), - ('01', -2), - ('11', -1), - ('000', 0), - ('100', 1), - ('010', 2), - ('110', 3), - ('001', -4), - ('101', -3), - ('011', -2), - ('111', -1), - ('00000', 0), - ('11110', 15), - ('00001', -16), - ('11111', -1), - ('000000000', 0), - ('111111110', 255), - ('000000001', -256), - ('111111111', -1), - ('0000000000000000000000', 0), - ('1001000011000000100010', 9 + 3 * 256 + 17 * 2 ** 16), - ('1111111111111111111110', 2 ** 21 - 1), - ('0000000000000000000001', -2 ** 21), - ('1001000011000000100011', -2 ** 21 - + (9 + 3 * 256 + 17 * 2 ** 16)), - ('1111111111111111111111', -1), - ]: - self.assertEqual(ba2int(bitarray(s, 'little'), signed=1), i) - self.assertEqual(ba2int(bitarray(s[::-1], 'big'), signed=1), i) - - self.assertEQUAL(int2ba(i, len(s), 'little', signed=1), - bitarray(s, 'little')) - self.assertEQUAL(int2ba(i, len(s), 'big', signed=1), - bitarray(s[::-1], 'big')) - - def test_int2ba_overflow(self): - self.assertRaises(OverflowError, int2ba, -1) - self.assertRaises(OverflowError, int2ba, -1, 4) - - self.assertRaises(OverflowError, int2ba, 128, 7) - self.assertRaises(OverflowError, int2ba, 64, 7, signed=1) - self.assertRaises(OverflowError, int2ba, -65, 7, signed=1) - - for n in range(1, 20): - self.assertRaises(OverflowError, int2ba, 2 ** n, n) - self.assertRaises(OverflowError, int2ba, 2 ** (n - 1), n, - signed=1) - self.assertRaises(OverflowError, int2ba, -2 ** (n - 1) - 1, n, - signed=1) - - def test_int2ba_length(self): - self.assertRaises(TypeError, int2ba, 0, 1.0) - self.assertRaises(ValueError, int2ba, 0, 0) - self.assertEqual(int2ba(5, 
length=6, endian='big'), - bitarray('000101')) - for n in range(1, 100): - ab = int2ba(1, n, 'big') - al = int2ba(1, n, 'little') - self.assertEqual(ab.endian(), 'big') - self.assertEqual(al.endian(), 'little') - self.assertEqual(len(ab), n), - self.assertEqual(len(al), n) - self.assertEqual(ab, bitarray((n - 1) * '0') + bitarray('1')) - self.assertEqual(al, bitarray('1') + bitarray((n - 1) * '0')) - - ab = int2ba(0, n, 'big') - al = int2ba(0, n, 'little') - self.assertEqual(len(ab), n) - self.assertEqual(len(al), n) - self.assertEqual(ab, bitarray(n * '0', 'big')) - self.assertEqual(al, bitarray(n * '0', 'little')) - - self.assertEqual(int2ba(2 ** n - 1), bitarray(n * '1')) - self.assertEqual(int2ba(2 ** n - 1, endian='little'), - bitarray(n * '1')) - for endian in 'big', 'little': - self.assertEqual(int2ba(-1, n, endian, signed=True), - bitarray(n * '1')) - - def test_explicit(self): - _set_default_endian('big') - for i, sa in [( 0, '0'), (1, '1'), - ( 2, '10'), (3, '11'), - (25, '11001'), (265, '100001001'), - (3691038, '1110000101001000011110')]: - ab = bitarray(sa, 'big') - al = bitarray(sa[::-1], 'little') - self.assertEQUAL(int2ba(i), ab) - self.assertEQUAL(int2ba(i, endian='big'), ab) - self.assertEQUAL(int2ba(i, endian='little'), al) - self.assertEqual(ba2int(ab), ba2int(al), i) - - def check_round_trip(self, i): - for endian in 'big', 'little': - a = int2ba(i, endian=endian) - self.check_obj(a) - self.assertEqual(a.endian(), endian) - self.assertTrue(len(a) > 0) - # ensure we have no leading zeros - if a.endian == 'big': - self.assertTrue(len(a) == 1 or a.index(1) == 0) - self.assertEqual(ba2int(a), i) - if i > 0: - self.assertEqual(i.bit_length(), len(a)) - # add a few trailing / leading zeros to bitarray - if endian == 'big': - a = zeros(randint(0, 3), endian) + a - else: - a = a + zeros(randint(0, 3), endian) - self.assertEqual(a.endian(), endian) - self.assertEqual(ba2int(a), i) - - def test_many(self): - for i in range(20): - self.check_round_trip(i) 
- self.check_round_trip(randint(0, 10 ** randint(3, 300))) - - @staticmethod - def twos_complement(i, num_bits): - # https://en.wikipedia.org/wiki/Two%27s_complement - mask = 2 ** (num_bits - 1) - return -(i & mask) + (i & ~mask) - - def test_random_signed(self): - for a in self.randombitarrays(start=1): - i = ba2int(a, signed=True) - b = int2ba(i, len(a), a.endian(), signed=True) - self.assertEQUAL(a, b) - - j = ba2int(a, signed=False) # unsigned - if i >= 0: - self.assertEqual(i, j) - - self.assertEqual(i, self.twos_complement(j, len(a))) - -tests.append(TestsIntegerization) - -# --------------------------------------------------------------------------- - -class MixedTests(unittest.TestCase, Util): - - def test_bin(self): - for i in range(100): - s = bin(i) - self.assertEqual(s[:2], '0b') - a = bitarray(s[2:], 'big') - self.assertEqual(ba2int(a), i) - t = '0b%s' % a.to01() - self.assertEqual(t, s) - self.assertEqual(eval(t), i) - - def test_oct(self): - if sys.version_info[0] == 2: - return - for i in range(1000): - s = oct(i) - self.assertEqual(s[:2], '0o') - a = base2ba(8, s[2:], 'big') - self.assertEqual(ba2int(a), i) - t = '0o%s' % ba2base(8, a) - self.assertEqual(t, s) - self.assertEqual(eval(t), i) - - def test_hex(self): - for i in range(1000): - s = hex(i) - self.assertEqual(s[:2], '0x') - a = hex2ba(s[2:], 'big') - self.assertEqual(ba2int(a), i) - t = '0x%s' % ba2hex(a) - self.assertEqual(t, s) - self.assertEqual(eval(t), i) - - def test_bitwise(self): - for a in self.randombitarrays(start=1): - b = urandom(len(a), a.endian()) - aa = a.copy() - bb = b.copy() - i = ba2int(a) - j = ba2int(b) - self.assertEqual(ba2int(a & b), i & j) - self.assertEqual(ba2int(a | b), i | j) - self.assertEqual(ba2int(a ^ b), i ^ j) - - n = randint(0, len(a)) - if a.endian() == 'big': - self.assertEqual(ba2int(a >> n), i >> n) - c = zeros(len(a), 'big') + a - self.assertEqual(ba2int(c << n), i << n) - - self.assertEQUAL(a, aa) - self.assertEQUAL(b, bb) - - def 
test_bitwise_inplace(self): - for a in self.randombitarrays(start=1): - b = urandom(len(a), a.endian()) - bb = b.copy() - i = ba2int(a) - j = ba2int(b) - c = a.copy() - c &= b - self.assertEqual(ba2int(c), i & j) - c = a.copy() - c |= b - self.assertEqual(ba2int(c), i | j) - c = a.copy() - c ^= b - self.assertEqual(ba2int(c), i ^ j) - self.assertEQUAL(b, bb) - - n = randint(0, len(a)) - if a.endian() == 'big': - c = a.copy() - c >>= n - self.assertEqual(ba2int(c), i >> n) - c = zeros(len(a), 'big') + a - c <<= n - self.assertEqual(ba2int(c), i << n) - -tests.append(MixedTests) - -# --------------------------------------------------------------------------- - -class TestsSerialization(unittest.TestCase, Util): - - def test_explicit(self): - for a, b in [ - (bitarray(0, 'little'), b'\x00'), - (bitarray(0, 'big'), b'\x10'), - (bitarray('1', 'little'), b'\x07\x01'), - (bitarray('1', 'big'), b'\x17\x80'), - (bitarray('11110000', 'little'), b'\x00\x0f'), - (bitarray('11110000', 'big'), b'\x10\xf0'), - ]: - self.assertEqual(serialize(a), b) - self.assertEQUAL(deserialize(b), a) - - def test_zeros_and_ones(self): - for endian in 'little', 'big': - for n in range(100): - a = zeros(n, endian) - s = serialize(a) - self.assertIsInstance(s, bytes) - self.assertEqual(s[1:], b'\0' * bits2bytes(n)) - self.assertEQUAL(a, deserialize(s)) - a.setall(1) - self.assertEQUAL(a, deserialize(serialize(a))) - - def test_wrong_args(self): - self.assertRaises(TypeError, serialize, '0') - self.assertRaises(TypeError, serialize, bitarray(), 1) - self.assertRaises(TypeError, deserialize, u'01') - self.assertRaises(TypeError, deserialize, b'\x00', 1) - - def test_invalid_bytes(self): - self.assertRaises(ValueError, deserialize, b'') - - for i in range(256): - b = bytes(bytearray([i])) - if i == 0 or i == 16: - self.assertEqual(deserialize(b), bitarray()) - else: - self.assertRaises(ValueError, deserialize, b) - - b += b'\0' - if i < 32 and i % 16 < 8: - self.assertEqual(deserialize(b), zeros(8 - 
i % 8)) - else: - self.assertRaises(ValueError, deserialize, b) - - def test_bits_ignored(self): - # the unused padding bits (with the last bytes) are ignored - for b, a in [ - (b'\x07\x01', bitarray('1', 'little')), - (b'\x07\x03', bitarray('1', 'little')), - (b'\x07\xff', bitarray('1', 'little')), - (b'\x17\x80', bitarray('1', 'big')), - (b'\x17\xc0', bitarray('1', 'big')), - (b'\x17\xff', bitarray('1', 'big')), - ]: - self.assertEQUAL(deserialize(b), a) - - def test_random(self): - for a in self.randombitarrays(): - b = deserialize(serialize(a)) - self.assertEQUAL(a, b) - self.check_obj(b) - -tests.append(TestsSerialization) - -# --------------------------------------------------------------------------- - -class TestsHuffman(unittest.TestCase): - - def test_simple(self): - freq = {0: 10, 'as': 2, None: 1.6} - code = huffman_code(freq) - self.assertEqual(len(code), 3) - self.assertEqual(len(code[0]), 1) - self.assertEqual(len(code['as']), 2) - self.assertEqual(len(code[None]), 2) - - def test_tiny(self): - code = huffman_code({0: 0}) - self.assertEqual(len(code), 1) - self.assertEqual(code, {0: bitarray()}) - - code = huffman_code({0: 0, 1: 0}) - self.assertEqual(len(code), 2) - for i in range(2): - self.assertEqual(len(code[i]), 1) - - def test_endianness(self): - freq = {'A': 10, 'B': 2, 'C': 5} - for endian in 'big', 'little': - code = huffman_code(freq, endian) - self.assertEqual(len(code), 3) - for v in code.values(): - self.assertEqual(v.endian(), endian) - - def test_wrong_arg(self): - self.assertRaises(TypeError, huffman_code, [('a', 1)]) - self.assertRaises(TypeError, huffman_code, 123) - self.assertRaises(TypeError, huffman_code, None) - # cannot compare 'a' with 1 - self.assertRaises(TypeError, huffman_code, {'A': 'a', 'B': 1}) - self.assertRaises(ValueError, huffman_code, {}) - - def check_tree(self, code): - n = len(code) - tree = decodetree(code) - self.assertEqual(tree.todict(), code) - # ensure tree has 2n-1 nodes (n symbol nodes and n-1 internal 
nodes) - self.assertEqual(tree.nodes(), 2 * n - 1) - - def test_balanced(self): - n = 6 - freq = {} - for i in range(2 ** n): - freq[i] = 1 - code = huffman_code(freq) - self.assertEqual(len(code), 2 ** n) - self.assertTrue(all(len(v) == n for v in code.values())) - self.check_tree(code) - - def test_unbalanced(self): - N = 27 - freq = {} - for i in range(N): - freq[i] = 2 ** i - code = huffman_code(freq) - self.assertEqual(len(code), N) - for i in range(N): - self.assertEqual(len(code[i]), N - (1 if i <= 1 else i)) - self.check_tree(code) - - def test_counter(self): - message = 'the quick brown fox jumps over the lazy dog.' - code = huffman_code(Counter(message)) - a = bitarray() - a.encode(code, message) - self.assertEqual(''.join(a.decode(code)), message) - self.check_tree(code) - - def test_random_list(self): - plain = [randint(0, 100) for _ in range(500)] - code = huffman_code(Counter(plain)) - a = bitarray() - a.encode(code, plain) - self.assertEqual(a.decode(code), plain) - self.check_tree(code) - - def test_random_freq(self): - N = randint(2, 1000) - # create Huffman code for N symbols - code = huffman_code({i: random() for i in range(N)}) - self.check_tree(code) - -tests.append(TestsHuffman) - -# --------------------------------------------------------------------------- - -def run(verbosity=1): - import bitarray - - print('bitarray.util is installed in: %s' % os.path.dirname(__file__)) - print('bitarray version: %s' % bitarray.__version__) - print('Python version: %s' % sys.version) - - suite = unittest.TestSuite() - for cls in tests: - suite.addTest(unittest.makeSuite(cls)) - - runner = unittest.TextTestRunner(verbosity=verbosity) - return runner.run(suite) - - -if __name__ == '__main__': - run() diff --git a/shell/ext-py/bitarray-2.3.0/bitarray/util.py b/shell/ext-py/bitarray-2.3.0/bitarray/util.py deleted file mode 100644 index d77be4cae..000000000 --- a/shell/ext-py/bitarray-2.3.0/bitarray/util.py +++ /dev/null @@ -1,408 +0,0 @@ -# Copyright (c) 2019 
- 2021, Ilan Schnell; All Rights Reserved -# bitarray is published under the PSF license. -# -# Author: Ilan Schnell -""" -Useful utilities for working with bitarrays. -""" -from __future__ import absolute_import - -import os -import sys - -from bitarray import bitarray, bits2bytes, get_default_endian - -from bitarray._util import ( - count_n, rindex, parity, count_and, count_or, count_xor, subset, - serialize, ba2hex, _hex2ba, ba2base, _base2ba, vl_encode, _vl_decode, - _set_bato, -) - -__all__ = [ - 'zeros', 'urandom', 'pprint', 'make_endian', 'rindex', 'strip', 'count_n', - 'parity', 'count_and', 'count_or', 'count_xor', 'subset', - 'ba2hex', 'hex2ba', 'ba2base', 'base2ba', 'ba2int', 'int2ba', - 'serialize', 'deserialize', 'vl_encode', 'vl_decode', 'huffman_code', -] - - -# tell the _util extension what the bitarray type object is, such that it -# can check for instances thereof -_set_bato(bitarray) - -_is_py2 = bool(sys.version_info[0] == 2) - - -def zeros(__length, endian=None): - """zeros(length, /, endian=None) -> bitarray - -Create a bitarray of length, with all values 0, and optional -endianness, which may be 'big', 'little'. -""" - if not isinstance(__length, (int, long) if _is_py2 else int): - raise TypeError("int expected, got '%s'" % type(__length).__name__) - - a = bitarray(__length, get_default_endian() if endian is None else endian) - a.setall(0) - return a - - -def urandom(__length, endian=None): - """urandom(length, /, endian=None) -> bitarray - -Return a bitarray of `length` random bits (uses `os.urandom`). -""" - a = bitarray(0, get_default_endian() if endian is None else endian) - a.frombytes(os.urandom(bits2bytes(__length))) - del a[__length:] - return a - - -def pprint(__a, stream=None, group=8, indent=4, width=80): - """pprint(bitarray, /, stream=None, group=8, indent=4, width=80) - -Prints the formatted representation of object on `stream`, followed by a -newline. If `stream` is `None`, `sys.stdout` is used. 
By default, elements -are grouped in bytes (8 elements), and 8 bytes (64 elements) per line. -Non-bitarray objects are printed by the standard library -function `pprint.pprint()`. -""" - if stream is None: - stream = sys.stdout - - if not isinstance(__a, bitarray): - import pprint as _pprint - _pprint.pprint(__a, stream=stream, indent=indent, width=width) - return - - group = int(group) - if group < 1: - raise ValueError('group must be >= 1') - indent = int(indent) - if indent < 0: - raise ValueError('indent must be >= 0') - width = int(width) - if width <= indent: - raise ValueError('width must be > %d (indent)' % indent) - - gpl = (width - indent) // (group + 1) # groups per line - epl = group * gpl # elements per line - if epl == 0: - epl = width - indent - 2 - type_name = type(__a).__name__ - # here 4 is len("'()'") - multiline = len(type_name) + 4 + len(__a) + len(__a) // group >= width - if multiline: - quotes = "'''" - elif __a: - quotes = "'" - else: - quotes = "" - - stream.write("%s(%s" % (type_name, quotes)) - for i, b in enumerate(__a): - if multiline and i % epl == 0: - stream.write('\n%s' % (indent * ' ')) - if i % group == 0 and i % epl != 0: - stream.write(' ') - stream.write(str(b)) - - if multiline: - stream.write('\n') - - stream.write("%s)\n" % quotes) - stream.flush() - - -def make_endian(a, endian): - """make_endian(bitarray, endian, /) -> bitarray - -When the endianness of the given bitarray is different from `endian`, -return a new bitarray, with endianness `endian` and the same elements -as the original bitarray. -Otherwise (endianness is already `endian`) the original bitarray is returned -unchanged. -""" - if not isinstance(a, bitarray): - raise TypeError("bitarray expected, got '%s'" % type(a).__name__) - - if a.endian() == endian: - return a - - return bitarray(a, endian) - - -def strip(__a, mode='right'): - """strip(bitarray, /, mode='right') -> bitarray - -Return a new bitarray with zeros stripped from left, right or both ends. 
-Allowed values for mode are the strings: `left`, `right`, `both` -""" - if not isinstance(__a, bitarray): - raise TypeError("bitarray expected, got '%s'" % type(__a).__name__) - if not isinstance(mode, str): - raise TypeError("str expected for mode, got '%s'" % type(__a).__name__) - if mode not in ('left', 'right', 'both'): - raise ValueError("mode must be 'left', 'right' or 'both', got: %r" % - mode) - first = 0 - if mode in ('left', 'both'): - try: - first = __a.index(1) - except ValueError: - return __a[:0] - - last = len(__a) - 1 - if mode in ('right', 'both'): - try: - last = rindex(__a) - except ValueError: - return __a[:0] - - return __a[first:last + 1] - - -def hex2ba(__s, endian=None): - """hex2ba(hexstr, /, endian=None) -> bitarray - -Bitarray of hexadecimal representation. hexstr may contain any number -(including odd numbers) of hex digits (upper or lower case). -""" - if not isinstance(__s, (str, unicode if _is_py2 else bytes)): - raise TypeError("str expected, got: '%s'" % type(__s).__name__) - - if isinstance(__s, unicode if _is_py2 else str): - __s = __s.encode('ascii') - assert isinstance(__s, bytes) - - a = bitarray(4 * len(__s), - get_default_endian() if endian is None else endian) - _hex2ba(a, __s) - return a - - -def base2ba(__n, __s, endian=None): - """base2ba(n, asciistr, /, endian=None) -> bitarray - -Bitarray of the base `n` ASCII representation. -Allowed values for `n` are 2, 4, 8, 16, 32 and 64. -For `n=16` (hexadecimal), `hex2ba()` will be much faster, as `base2ba()` -does not take advantage of byte level operations. -For `n=32` the RFC 4648 Base32 alphabet is used, and for `n=64` the -standard base 64 alphabet is used. 
-""" - if not isinstance(__n, int): - raise TypeError("integer expected") - try: - m = {2: 1, 4: 2, 8: 3, 16: 4, 32: 5, 64: 6}[__n] - except KeyError: - raise ValueError("base must be 2, 4, 8, 16, 32 or 64") - - if not isinstance(__s, (str, unicode if _is_py2 else bytes)): - raise TypeError("str expected, got: '%s'" % type(s).__name__) - - if isinstance(__s, unicode if _is_py2 else str): - __s = __s.encode('ascii') - assert isinstance(__s, bytes) - - a = bitarray(m * len(__s), - get_default_endian() if endian is None else endian) - _base2ba(__n, a, __s) - return a - - -def ba2int(__a, signed=False): - """ba2int(bitarray, /, signed=False) -> int - -Convert the given bitarray into an integer. -The bit-endianness of the bitarray is respected. -`signed` indicates whether two's complement is used to represent the integer. -""" - if not isinstance(__a, bitarray): - raise TypeError("bitarray expected, got '%s'" % type(__a).__name__) - length = len(__a) - if length == 0: - raise ValueError("non-empty bitarray expected") - - big_endian = bool(__a.endian() == 'big') - # for big endian pad leading zeros - for little endian we don't need to - # pad trailing zeros, as .tobytes() will treat them as zero - if big_endian and length % 8: - __a = zeros(8 - length % 8, 'big') + __a - b = __a.tobytes() - - if _is_py2: - c = bytearray(b) - res = 0 - j = len(c) - 1 if big_endian else 0 - for x in c: - res |= x << 8 * j - j += -1 if big_endian else 1 - else: # py3 - res = int.from_bytes(b, byteorder=__a.endian()) - - if signed and res >= 1 << (length - 1): - res -= 1 << length - return res - - -def int2ba(__i, length=None, endian=None, signed=False): - """int2ba(int, /, length=None, endian=None, signed=False) -> bitarray - -Convert the given integer to a bitarray (with given endianness, -and no leading (big-endian) / trailing (little-endian) zeros), unless -the `length` of the bitarray is provided. 
An `OverflowError` is raised -if the integer is not representable with the given number of bits. -`signed` determines whether two's complement is used to represent the integer, -and requires `length` to be provided. -""" - if not isinstance(__i, (int, long) if _is_py2 else int): - raise TypeError("int expected, got '%s'" % type(__i).__name__) - if length is not None: - if not isinstance(length, int): - raise TypeError("int expected for length") - if length <= 0: - raise ValueError("length must be > 0") - if signed and length is None: - raise TypeError("signed requires length") - - if __i == 0: - # there are special cases for 0 which we'd rather not deal with below - return zeros(length or 1, endian) - - if signed: - m = 1 << (length - 1) - if not (-m <= __i < m): - raise OverflowError("signed integer not in range(%d, %d), " - "got %d" % (-m, m, __i)) - if __i < 0: - __i += 1 << length - else: # unsigned - if __i < 0: - raise OverflowError("unsigned integer not positive, got %d" % __i) - if length and __i >= (1 << length): - raise OverflowError("unsigned integer not in range(0, %d), " - "got %d" % (1 << length, __i)) - - a = bitarray(0, get_default_endian() if endian is None else endian) - big_endian = bool(a.endian() == 'big') - if _is_py2: - c = bytearray() - while __i: - __i, r = divmod(__i, 256) - c.append(r) - if big_endian: - c.reverse() - b = bytes(c) - else: # py3 - b = __i.to_bytes(bits2bytes(__i.bit_length()), byteorder=a.endian()) - - a.frombytes(b) - if length is None: - return strip(a, 'left' if big_endian else 'right') - - la = len(a) - if la > length: - a = a[-length:] if big_endian else a[:length] - if la < length: - pad = zeros(length - la, endian) - a = pad + a if big_endian else a + pad - assert len(a) == length - return a - - -def deserialize(__b): - """deserialize(bytes, /) -> bitarray - -Return a bitarray given the bytes representation returned by `serialize()`. 
-""" - if not isinstance(__b, bytes): - raise TypeError("bytes expected, got: '%s'" % type(__b).__name__) - if len(__b) == 0: - raise ValueError("non-empty bytes expected") - - head = ord(__b[0]) if _is_py2 else __b[0] - assert isinstance(head, int) - if head >= 32 or head % 16 >= 8: - raise ValueError('invalid header byte 0x%02x' % head) - return bitarray(__b) - - -def vl_decode(__stream, endian=None): - """vl_decode(stream, /, endian=None) -> bitarray - -Decode binary stream (an integer iterator, or bytes object), and return -the decoded bitarray. This function consumes only one bitarray and leaves -the remaining stream untouched. `StopIteration` is raised when no -terminating byte is found. -Use `vl_encode()` for encoding. -""" - if isinstance(__stream, bytes): - __stream = iter(__stream) - - a = bitarray(256, get_default_endian() if endian is None else endian) - _vl_decode(__stream, a) - return bitarray(a) # drop previously over-allocated bitarray - - -def huffman_code(__freq_map, endian=None): - """huffman_code(dict, /, endian=None) -> dict - -Given a frequency map, a dictionary mapping symbols to their frequency, -calculate the Huffman code, i.e. a dict mapping those symbols to -bitarrays (with given endianness). Note that the symbols are not limited -to being strings. Symbols may may be any hashable object (such as `None`). 
-""" - import heapq - - if not isinstance(__freq_map, dict): - raise TypeError("dict expected, got '%s'" % type(__freq_map).__name__) - if len(__freq_map) == 0: - raise ValueError("non-empty dict expected") - if endian is None: - endian = get_default_endian() - - class Node(object): - # a Node object will have either .symbol or .child set below, - # .freq will always be set - def __lt__(self, other): - # heapq needs to be able to compare the nodes - return self.freq < other.freq - - def huff_tree(freq_map): - # given a dictionary mapping symbols to thier frequency, - # construct a Huffman tree and return its root node - - minheap = [] - # create all the leaf nodes and push them onto the queue - for sym, f in freq_map.items(): - nd = Node() - nd.symbol = sym - nd.freq = f - heapq.heappush(minheap, nd) - - # repeat the process until only one node remains - while len(minheap) > 1: - # take the nodes with smallest frequencies from the queue - child_0 = heapq.heappop(minheap) - child_1 = heapq.heappop(minheap) - # construct the new internal node and push it onto the queue - parent = Node() - parent.child = [child_0, child_1] - parent.freq = child_0.freq + child_1.freq - heapq.heappush(minheap, parent) - - # the single remaining node is the root of the Huffman tree - return minheap[0] - - result = {} - - def traverse(nd, prefix=bitarray(0, endian)): - try: # leaf - result[nd.symbol] = prefix - except AttributeError: # parent, so traverse each of the children - traverse(nd.child[0], prefix + bitarray('0')) - traverse(nd.child[1], prefix + bitarray('1')) - - traverse(huff_tree(__freq_map)) - return result diff --git a/shell/ext-py/bitarray-2.3.0/bitarray/util.pyi b/shell/ext-py/bitarray-2.3.0/bitarray/util.pyi deleted file mode 100644 index ef4ad099d..000000000 --- a/shell/ext-py/bitarray-2.3.0/bitarray/util.pyi +++ /dev/null @@ -1,49 +0,0 @@ -from collections.abc import Iterator -from typing import Any, AnyStr, BinaryIO, Optional, Union - -from bitarray import bitarray - 
- -def zeros(length: int, endian: Optional[str] = ...) -> bitarray: ... -def urandom(length: int, endian: Optional[str] = ...) -> bitarray: ... -def pprint(a: Any, stream: BinaryIO = ..., - group: int = ..., - indent: int = ..., - width: int = ...) -> None: ... - -def make_endian(a: bitarray, endian: str) -> bitarray: ... -def rindex(a: bitarray, - value: int = ..., - start: int = ..., - stop: int = ...) -> int: ... - -def strip(a: bitarray, mode: str = ...) -> bitarray: ... - -def count_n(a: bitarray, n: int) -> int: ... -def parity(a: bitarray) -> int: ... -def count_and(a: bitarray, b: bitarray) -> int: ... -def count_or(a: bitarray, b: bitarray) -> int: ... -def count_xor(a: bitarray, b: bitarray) -> int: ... -def subset(a: bitarray, b: bitarray) -> bool: ... - -def ba2hex(a: bitarray) -> str: ... -def hex2ba(s: AnyStr, endian: Optional[str] = ...) -> bitarray: ... -def ba2base(n: int, a: bitarray) -> str: ... -def base2ba(n: int, - s: AnyStr, - endian: Optional[str] = ...) -> bitarray: ... - -def ba2int(a: bitarray, signed: int = ...) -> int: ... -def int2ba(i: int, - length: int = ..., - endian: str = ..., - signed: int = ...) -> bitarray: ... - -def serialize(a: bitarray) -> bytes: ... -def deserialize(b: bytes) -> bitarray: ... -def vl_encode(a: bitarray) -> bytes: ... -def vl_decode(stream: Union[bytes, Iterator[int]], - endian: Optional[str] = ...) -> bitarray: ... - -def huffman_code(freq_map: dict[Any, Union[int, float]], - endian: Optional[str] = ...) -> dict[Any, bitarray]: ... 
diff --git a/shell/ext-py/bitarray-2.3.0/contributing.md b/shell/ext-py/bitarray-2.3.0/contributing.md deleted file mode 100644 index 62d325e68..000000000 --- a/shell/ext-py/bitarray-2.3.0/contributing.md +++ /dev/null @@ -1,31 +0,0 @@ -Contributing to bitarray -======================== - -The bitarray type is very stable and feature complete at this point, -which means that pull requests to `bitarray/_bitarray.c` will most likely -be rejected, unless they improve readability and performance. - -There may be room for improvements/additions in the `bitarray.util` module, -added in the 1.2.0 release. However, due to the slow release cycle of this -package, it may be more practical to create your own library which depends -on bitarray. This is completely possible, even on the C-level. Please -study the implementation of `bitarray/_util.c` for details. In particular for -C extensions to work with the bitarray type, it is important that -the `bitarrayobject` struct is defined in the same way: - - typedef struct { - PyObject_VAR_HEAD - char *ob_item; /* buffer */ - Py_ssize_t allocated; /* allocated buffer size (in bytes) */ - Py_ssize_t nbits; /* length of bitarray, i.e. elements */ - int endian; /* bit endianness of bitarray */ - int ob_exports; /* how many buffer exports */ - PyObject *weakreflist; /* list of weak references */ - } bitarrayobject; - - /* member endian may have these values */ - #define ENDIAN_LITTLE 0 - #define ENDIAN_BIG 1 - -These essential (as well as other useful) declarations can be found -in `bitarray/bitarray.h`. 
diff --git a/shell/ext-py/bitarray-2.3.0/setup.py b/shell/ext-py/bitarray-2.3.0/setup.py deleted file mode 100644 index 0f213a9dc..000000000 --- a/shell/ext-py/bitarray-2.3.0/setup.py +++ /dev/null @@ -1,50 +0,0 @@ -import re -from distutils.core import setup, Extension - - -kwds = {} -try: - kwds['long_description'] = open('README.rst').read() -except IOError: - pass - -# Read version from bitarray/bitarray.h -pat = re.compile(r'#define\s+BITARRAY_VERSION\s+"(\S+)"', re.M) -data = open('bitarray/bitarray.h').read() -kwds['version'] = pat.search(data).group(1) - - -setup( - name = "bitarray", - author = "Ilan Schnell", - author_email = "ilanschnell@gmail.com", - url = "https://github.com/ilanschnell/bitarray", - license = "PSF", - classifiers = [ - "License :: OSI Approved :: Python Software Foundation License", - "Development Status :: 6 - Mature", - "Intended Audience :: Developers", - "Operating System :: OS Independent", - "Programming Language :: C", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Topic :: Utilities", - ], - description = "efficient arrays of booleans -- C extension", - packages = ["bitarray"], - package_data = {"bitarray": ["*.h", "*.pickle", - "py.typed", # see PEP 561 - "*.pyi"]}, - ext_modules = [Extension(name = "bitarray._bitarray", - sources = ["bitarray/_bitarray.c"]), - Extension(name = "bitarray._util", - sources = ["bitarray/_util.c"])], - **kwds -) diff --git a/shell/ext-py/bitarray-2.3.0/update_doc.py b/shell/ext-py/bitarray-2.3.0/update_doc.py deleted file mode 100644 index bc4c78753..000000000 --- a/shell/ext-py/bitarray-2.3.0/update_doc.py +++ /dev/null @@ -1,195 +0,0 @@ -import sys -assert 
sys.version_info[0] == 3, "This program requires Python 3" - -import re -import doctest -from io import StringIO - -import bitarray -import bitarray.util - - -BASE_URL = "https://github.com/ilanschnell/bitarray" - -NEW_IN = { - 'frozenbitarray': '1.1', - 'get_default_endian': '1.3', - 'util.make_endian': '1.3', - 'bitarray': '2.3: optional `buffer` argument', - 'bitarray.bytereverse': '2.2.5: optional `start` and `stop` arguments', - 'bitarray.count': '1.1.0: optional `start` and `stop` arguments', - 'bitarray.clear': '1.4', - 'bitarray.find': '2.1', - 'bitarray.invert': '1.5.3: optional `index` argument', - 'decodetree': '1.6', - 'util.urandom': '1.7', - 'util.pprint': '1.8', - 'util.serialize': '1.8', - 'util.deserialize': '1.8', - 'util.ba2base': '1.9', - 'util.base2ba': '1.9', - 'util.parity': '1.9', - 'util.rindex': '2.3.0: optional `start` and `stop` arguments', - 'util.vl_encode': '2.2', - 'util.vl_decode': '2.2', -} - -DOCS = { - 'rep': ('Bitarray representations', 'represent.rst'), - 'vlf': ('Variable length bitarray format', 'variable_length.rst'), -} - -DOC_LINKS = { - 'util.ba2base': 'rep', - 'util.base2ba': 'rep', - 'util.serialize': 'rep', - 'util.deserialize': 'rep', - 'util.vl_encode': 'vlf', - 'util.vl_decode': 'vlf', -} - -_NAMES = set() - -sig_pat = re.compile(r'(\w+\([^()]*\))( -> (.+))?') -def write_doc(fo, name): - _NAMES.add(name) - doc = eval('bitarray.%s.__doc__' % name) - assert doc, name - lines = doc.splitlines() - m = sig_pat.match(lines[0]) - if m is None: - raise Exception("signature line invalid: %r" % lines[0]) - s = '``%s``' % m.group(1) - if m.group(3): - s += ' -> %s' % m.group(3) - fo.write('%s\n' % s) - assert lines[1] == '' - for line in lines[2:]: - out = line.rstrip() - fo.write(" %s\n" % out.replace('`', '``') if out else "\n") - - link = DOC_LINKS.get(name) - if link: - title, filename = DOCS[link] - url = BASE_URL + '/blob/master/doc/' + filename - fo.write("\n See also: `%s <%s>`__\n" % (title, url)) - - new_in = 
NEW_IN.get(name) - if new_in: - fo.write("\n New in version %s.\n" % new_in.replace('`', '``')) - - fo.write('\n\n') - - -def write_reference(fo): - fo.write("""\ -Reference -========= - -bitarray version: %s -- `change log <%s>`__ - -In the following, ``item`` and ``value`` are usually a single bit - -an integer 0 or 1. - - -The bitarray object: --------------------- - -""" % (bitarray.__version__, BASE_URL + "/blob/master/doc/changelog.rst")) - write_doc(fo, 'bitarray') - - fo.write("**A bitarray object supports the following methods:**\n\n") - for method in sorted(dir(bitarray.bitarray)): - if method.startswith('_'): - continue - write_doc(fo, 'bitarray.%s' % method) - - fo.write("Other objects:\n" - "--------------\n\n") - write_doc(fo, 'frozenbitarray') - write_doc(fo, 'decodetree') - - fo.write("Functions defined in the `bitarray` module:\n" - "-------------------------------------------\n\n") - for func in sorted(['test', 'bits2bytes', 'get_default_endian']): - write_doc(fo, func) - - fo.write("Functions defined in `bitarray.util` module:\n" - "--------------------------------------------\n\n" - "This sub-module was add in version 1.2.\n\n") - for func in bitarray.util.__all__: - write_doc(fo, 'util.%s' % func) - - for name in list(NEW_IN) + list(DOC_LINKS): - assert name in _NAMES, name - -def update_readme(path): - ver_pat = re.compile(r'(bitarray.+?)(\d+\.\d+\.\d+)') - - with open(path, 'r') as fi: - data = fi.read() - - with StringIO() as fo: - for line in data.splitlines(): - if line == 'Reference': - break - line = ver_pat.sub(lambda m: m.group(1) + bitarray.__version__, - line) - fo.write("%s\n" % line.rstrip()) - - write_reference(fo) - new_data = fo.getvalue() - - if new_data == data: - print("already up-to-date") - else: - with open(path, 'w') as f: - f.write(new_data) - - -def write_changelog(fo): - ver_pat = re.compile(r'(\d{4}-\d{2}-\d{2})\s+(\d+\.\d+\.\d+)') - issue_pat = re.compile(r'#(\d+)') - link_pat = re.compile(r'\[(.+)\]\((.+)\)') - - 
def issue_replace(match): - url = "%s/issues/%s" % (BASE_URL, match.group(1)) - return "`%s <%s>`__" % (match.group(0), url) - - fo.write("Change log\n" - "==========\n\n") - - for line in open('./CHANGE_LOG'): - line = line.rstrip() - match = ver_pat.match(line) - if match: - line = match.expand(r'**\2** (\1):') - elif line.startswith('-----'): - line = '' - elif line.startswith(' '): - line = line[2:] - line = line.replace('`', '``') - line = issue_pat.sub(issue_replace, line) - line = link_pat.sub( - lambda m: "`%s <%s>`__" % (m.group(1), m.group(2)), line) - fo.write(line + '\n') - - -def main(): - if len(sys.argv) > 1: - sys.exit("no arguments expected") - - update_readme('./README.rst') - with open('./doc/reference.rst', 'w') as fo: - write_reference(fo) - with open('./doc/changelog.rst', 'w') as fo: - write_changelog(fo) - - doctest.testfile('./README.rst') - doctest.testfile('./doc/buffer.rst') - doctest.testfile('./doc/represent.rst') - doctest.testfile('./doc/variable_length.rst') - - -if __name__ == '__main__': - main() diff --git a/shell/ext-py/kerberos-1.3.1/MANIFEST.in b/shell/ext-py/kerberos-1.3.1/MANIFEST.in deleted file mode 100644 index 39d92ed76..000000000 --- a/shell/ext-py/kerberos-1.3.1/MANIFEST.in +++ /dev/null @@ -1,3 +0,0 @@ -include LICENSE README.md -recursive-include src *.c *.h -recursive-include pysrc *.py diff --git a/shell/ext-py/kerberos-1.3.1/PKG-INFO b/shell/ext-py/kerberos-1.3.1/PKG-INFO deleted file mode 100644 index 2d7d27866..000000000 --- a/shell/ext-py/kerberos-1.3.1/PKG-INFO +++ /dev/null @@ -1,138 +0,0 @@ -Metadata-Version: 2.1 -Name: kerberos -Version: 1.3.1 -Summary: Kerberos high-level interface -Home-page: https://github.com/apple/ccs-pykerberos -Author: Apple Inc. -Author-email: calendarserver-dev@lists.macosforge.org -License: Apache License, Version 2.0 -Description: # PyKerberos Package - - This Python package is a high-level wrapper for Kerberos (GSSAPI) - operations. 
The goal is to avoid having to build a module that wraps - the entire Kerberos.framework, and instead offer a limited set of - functions that do what is needed for client/server Kerberos - authentication based on . - - Much of the C-code here is adapted from Apache's mod_auth_kerb-5.0rc7. - - - ## Build - - In this directory, run: - - ``` - python setup.py build - ``` - - ## Testing - - To run the tests in the tests folder, you must have a valid Kerberos setup on - the test machine. You can use the script .travis.sh as quick and easy way to - setup a Kerberos KDC and Apache web endpoint that can be used for the tests. - Otherwise you can also run the following to run a self contained Docker - container - - ``` - docker run \ - -v $(pwd):/app \ - -w /app \ - -e PYENV=2.7.13 \ - -e KERBEROS_USERNAME=administrator \ - -e KERBEROS_PASSWORD=Password01 \ - -e KERBEROS_REALM=example.com \ - -e KERBEROS_PORT=80 \ - ubuntu:16.04 \ - /bin/bash .travis.sh - ``` - - The docker command needs to be run in the same directory as this library and - you can test it with different Python versions by changing the value of the - PYENV environment value set in the command. - - Please have a look at testing_notes.md for more information. - - - ## IMPORTANT - - The checkPassword method provided by this library is meant only for testing purposes as it does - not offer any protection against possible KDC spoofing. That method should not be used in any - production code. - - - ## Channel Bindings - - You can use this library to authenticate with Channel Binding support. Channel - Bindings are tags that identify the particular data channel being used with the - authentication. You can use Channel bindings to offer more proof of a valid - identity. Some services like Microsoft's Extended Protection can enforce - Channel Binding support on authorisation and you can use this library to meet - those requirements. - - More details on Channel Bindings as set through the GSSAPI can be found here - . 
Using - TLS as a example this is how you would add Channel Binding support to your - authentication mechanism. The following code snippet is based on RFC5929 - using the 'tls-server-endpoint-point' - type. - - ``` - import hashlib - - def get_channel_bindings_application_data(socket): - # This is a highly simplified example, there are other use cases - # where you might need to use different hash types or get a socket - # object somehow. - server_certificate = socket.getpeercert(True) - certificate_hash = hashlib.sha256(server_certificate).hexdigest().upper() - certificate_digest = base64.b16decode(certificate_hash) - application_data = b'tls-server-end-point:%s' % certificate_digest - - return application_data - - def main(): - # Code to setup a socket with the server - # A lot of code to setup the handshake and start the auth process - socket = getsocketsomehow() - - # Connect to the host and start the auth process - - # Build the channel bindings object - application_data = get_channel_bindings_application_data(socket) - channel_bindings = kerberos.channelBindings(application_data=application_data) - - # More work to get responses from the server - - result, context = kerberos.authGSSClientInit(kerb_spn, gssflags=gssflags, principal=principal) - - # Pass through the channel_bindings object as created in the kerberos.channelBindings method - result = kerberos.authGSSClientStep(context, neg_resp_value, channel_bindings=channel_bindings) - - # Repeat as necessary - ``` - - ## Python APIs - - See kerberos.py. - - - ## Copyright and License - - Copyright (c) 2006-2021 Apple Inc. All rights reserved. - - This software is licensed under the Apache License, Version 2.0. The - Apache License is a well-established open source license, enabling - collaborative open source software development. - - See the "LICENSE" file for the full text of the license terms. 
- -Platform: all -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 3 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Systems Administration :: Authentication/Directory -Description-Content-Type: text/markdown diff --git a/shell/ext-py/kerberos-1.3.1/README.md b/shell/ext-py/kerberos-1.3.1/README.md deleted file mode 100644 index ce8f3c5cf..000000000 --- a/shell/ext-py/kerberos-1.3.1/README.md +++ /dev/null @@ -1,119 +0,0 @@ -# PyKerberos Package - -This Python package is a high-level wrapper for Kerberos (GSSAPI) -operations. The goal is to avoid having to build a module that wraps -the entire Kerberos.framework, and instead offer a limited set of -functions that do what is needed for client/server Kerberos -authentication based on . - -Much of the C-code here is adapted from Apache's mod_auth_kerb-5.0rc7. - - -## Build - -In this directory, run: - -``` -python setup.py build -``` - -## Testing - -To run the tests in the tests folder, you must have a valid Kerberos setup on -the test machine. You can use the script .travis.sh as quick and easy way to -setup a Kerberos KDC and Apache web endpoint that can be used for the tests. -Otherwise you can also run the following to run a self contained Docker -container - -``` -docker run \ --v $(pwd):/app \ --w /app \ --e PYENV=2.7.13 \ --e KERBEROS_USERNAME=administrator \ --e KERBEROS_PASSWORD=Password01 \ --e KERBEROS_REALM=example.com \ --e KERBEROS_PORT=80 \ -ubuntu:16.04 \ -/bin/bash .travis.sh -``` - -The docker command needs to be run in the same directory as this library and -you can test it with different Python versions by changing the value of the -PYENV environment value set in the command. 
- -Please have a look at testing_notes.md for more information. - - -## IMPORTANT - -The checkPassword method provided by this library is meant only for testing purposes as it does -not offer any protection against possible KDC spoofing. That method should not be used in any -production code. - - -## Channel Bindings - -You can use this library to authenticate with Channel Binding support. Channel -Bindings are tags that identify the particular data channel being used with the -authentication. You can use Channel bindings to offer more proof of a valid -identity. Some services like Microsoft's Extended Protection can enforce -Channel Binding support on authorisation and you can use this library to meet -those requirements. - -More details on Channel Bindings as set through the GSSAPI can be found here -. Using -TLS as a example this is how you would add Channel Binding support to your -authentication mechanism. The following code snippet is based on RFC5929 - using the 'tls-server-endpoint-point' -type. - -``` -import hashlib - -def get_channel_bindings_application_data(socket): - # This is a highly simplified example, there are other use cases - # where you might need to use different hash types or get a socket - # object somehow. 
- server_certificate = socket.getpeercert(True) - certificate_hash = hashlib.sha256(server_certificate).hexdigest().upper() - certificate_digest = base64.b16decode(certificate_hash) - application_data = b'tls-server-end-point:%s' % certificate_digest - - return application_data - -def main(): - # Code to setup a socket with the server - # A lot of code to setup the handshake and start the auth process - socket = getsocketsomehow() - - # Connect to the host and start the auth process - - # Build the channel bindings object - application_data = get_channel_bindings_application_data(socket) - channel_bindings = kerberos.channelBindings(application_data=application_data) - - # More work to get responses from the server - - result, context = kerberos.authGSSClientInit(kerb_spn, gssflags=gssflags, principal=principal) - - # Pass through the channel_bindings object as created in the kerberos.channelBindings method - result = kerberos.authGSSClientStep(context, neg_resp_value, channel_bindings=channel_bindings) - - # Repeat as necessary -``` - -## Python APIs - -See kerberos.py. - - -## Copyright and License - -Copyright (c) 2006-2021 Apple Inc. All rights reserved. - -This software is licensed under the Apache License, Version 2.0. The -Apache License is a well-established open source license, enabling -collaborative open source software development. - -See the "LICENSE" file for the full text of the license terms. diff --git a/shell/ext-py/kerberos-1.3.1/pysrc/kerberos.py b/shell/ext-py/kerberos-1.3.1/pysrc/kerberos.py deleted file mode 100644 index d4f53a6bc..000000000 --- a/shell/ext-py/kerberos-1.3.1/pysrc/kerberos.py +++ /dev/null @@ -1,461 +0,0 @@ -## -# Copyright (c) 2006-2018 Apple Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -## - -""" -PyKerberos Function Description. -""" - - - -class KrbError(Exception): - pass - - - -class BasicAuthError(KrbError): - pass - - - -class GSSError(KrbError): - pass - - - -def checkPassword(user, pswd, service, default_realm): - """ - This function provides a simple way to verify that a user name and password - match those normally used for Kerberos authentication. - It does this by checking that the supplied user name and password can be - used to get a ticket for the supplied service. - If the user name does not contain a realm, then the default realm supplied - is used. - - For this to work properly the Kerberos must be configured properly on this - machine. - That will likely mean ensuring that the edu.mit.Kerberos preference file - has the correct realms and KDCs listed. - - IMPORTANT: This method is vulnerable to KDC spoofing attacks and it should - only used for testing. Do not use this in any production system - your - security could be compromised if you do. - - @param user: A string containing the Kerberos user name. - A realm may be included by appending an C{"@"} followed by the realm - string to the actual user id. - If no realm is supplied, then the realm set in the default_realm - argument will be used. - - @param pswd: A string containing the password for the user. - - @param service: A string containing the Kerberos service to check access - for. - This will be of the form C{"sss/xx.yy.zz"}, where C{"sss"} is the - service identifier (e.g., C{"http"}, C{"krbtgt"}), and C{"xx.yy.zz"} is - the hostname of the server. 
- - @param default_realm: A string containing the default realm to use if one - is not supplied in the user argument. - Note that Kerberos realms are normally all uppercase (e.g., - C{"EXAMPLE.COM"}). - - @return: True if authentication succeeds, false otherwise. - """ - - - -def changePassword(user, oldpswd, newpswd): - """ - This function allows to change the user password on the KDC. - - @param user: A string containing the Kerberos user name. - A realm may be included by appending a C{"@"} followed by the realm - string to the actual user id. - If no realm is supplied, then the realm set in the default_realm - argument will be used. - - @param oldpswd: A string containing the old (current) password for the - user. - - @param newpswd: A string containing the new password for the user. - - @return: True if password changing succeeds, false otherwise. - """ - - - -def getServerPrincipalDetails(service, hostname): - """ - This function returns the service principal for the server given a service - type and hostname. - Details are looked up via the C{/etc/keytab} file. - - @param service: A string containing the Kerberos service type for the - server. - - @param hostname: A string containing the hostname of the server. - - @return: A string containing the service principal. - """ - - - -""" -GSSAPI Function Result Codes: - - -1 : Error - 0 : GSSAPI step continuation (only returned by 'Step' function) - 1 : GSSAPI step complete, or function return OK - -""" - -# Some useful result codes -AUTH_GSS_CONTINUE = 0 -AUTH_GSS_COMPLETE = 1 - -# Some useful gss flags -GSS_C_DELEG_FLAG = 1 -GSS_C_MUTUAL_FLAG = 2 -GSS_C_REPLAY_FLAG = 4 -GSS_C_SEQUENCE_FLAG = 8 -GSS_C_CONF_FLAG = 16 -GSS_C_INTEG_FLAG = 32 -GSS_C_ANON_FLAG = 64 -GSS_C_PROT_READY_FLAG = 128 -GSS_C_TRANS_FLAG = 256 - - - -def authGSSClientInit(service, **kwargs): - """ - Initializes a context for GSSAPI client-side authentication with the given - service principal. 
- L{authGSSClientClean} must be called after this function returns an OK - result to dispose of the context once all GSSAPI operations are complete. - - @param service: A string containing the service principal in the form - C{"type@fqdn"}. - - @param principal: Optional string containing the client principal in the - form C{"user@realm"}. - - @param gssflags: Optional integer used to set GSS flags. - (e.g. C{GSS_C_DELEG_FLAG|GSS_C_MUTUAL_FLAG|GSS_C_SEQUENCE_FLAG} will - allow for forwarding credentials to the remote host) - - @param delegated: Optional server context containing delegated credentials - - @param mech_oid: Optional GGS mech OID - - @return: A tuple of (result, context) where result is the result code (see - above) and context is an opaque value that will need to be passed to - subsequent functions. - """ - - - -def authGSSClientClean(context): - """ - Destroys the context for GSSAPI client-side authentication. This function - is provided for compatibility with earlier versions of PyKerberos but does - nothing. The context object destroys itself when it is reclaimed. - - @param context: The context object returned from L{authGSSClientInit}. - - @return: A result code (see above). - """ - - - -def authGSSClientInquireCred(context): - """ - Get the current user name, if any, without a client-side GSSAPI step. - If the principal has already been authenticated via completed client-side - GSSAPI steps then the user name of the authenticated principal is kept. The - user name will be available via authGSSClientUserName. - - @param context: The context object returned from L{authGSSClientInit}. - - @return: A result code (see above). 
- """ - - - -""" -Address Types for Channel Bindings -https://docs.oracle.com/cd/E19455-01/806-3814/6jcugr7dp/index.html#reference-9 - -""" - -GSS_C_AF_UNSPEC = 0 -GSS_C_AF_LOCAL = 1 -GSS_C_AF_INET = 2 -GSS_C_AF_IMPLINK = 3 -GSS_C_AF_PUP = 4 -GSS_C_AF_CHAOS = 5 -GSS_C_AF_NS = 6 -GSS_C_AF_NBS = 7 -GSS_C_AF_ECMA = 8 -GSS_C_AF_DATAKIT = 9 -GSS_C_AF_CCITT = 10 -GSS_C_AF_SNA = 11 -GSS_C_AF_DECnet = 12 -GSS_C_AF_DLI = 13 -GSS_C_AF_LAT = 14 -GSS_C_AF_HYLINK = 15 -GSS_C_AF_APPLETALK = 16 -GSS_C_AF_BSC = 17 -GSS_C_AF_DSS = 18 -GSS_C_AF_OSI = 19 -GSS_C_AF_X25 = 21 -GSS_C_AF_NULLADDR = 255 - - - -def channelBindings(**kwargs): - """ - Builds a gss_channel_bindings_struct which can be used to pass onto - L{authGSSClientStep} to bind onto the auth. Details on Channel Bindings - can be foud at https://tools.ietf.org/html/rfc5929. More details on the - struct can be found at - https://docs.oracle.com/cd/E19455-01/806-3814/overview-52/index.html - - @param initiator_addrtype: Optional integer used to set the - initiator_addrtype, defaults to GSS_C_AF_UNSPEC if not set - - @param initiator_address: Optional byte string containing the - initiator_address - - @param acceptor_addrtype: Optional integer used to set the - acceptor_addrtype, defaults to GSS_C_AF_UNSPEC if not set - - @param acceptor_address: Optional byte string containing the - acceptor_address - - @param application_data: Optional byte string containing the - application_data. An example would be 'tls-server-end-point:{cert-hash}' - where {cert-hash} is the hash of the server's certificate - - @return: A tuple of (result, gss_channel_bindings_struct) where result is - the result code and gss_channel_bindings_struct is the channel bindings - structure that can be passed onto L{authGSSClientStep} - """ - - - -def authGSSClientStep(context, challenge, **kwargs): - """ - Processes a single GSSAPI client-side step using the supplied server data. - - @param context: The context object returned from L{authGSSClientInit}. 
- - @param challenge: A string containing the base64-encoded server data (which - may be empty for the first step). - - @param channel_bindings: Optional channel bindings to bind onto the auth - request. This struct can be built using :{channelBindings} - and if not specified it will pass along GSS_C_NO_CHANNEL_BINDINGS as - a default. - - @return: A result code (see above). - """ - - - -def authGSSClientResponse(context): - """ - Get the client response from the last successful GSSAPI client-side step. - - @param context: The context object returned from L{authGSSClientInit}. - - @return: A string containing the base64-encoded client data to be sent to - the server. - """ - - - -def authGSSClientResponseConf(context): - """ - Determine whether confidentiality was enabled in the previously unwrapped - buffer. - - @param context: The context object returned from L{authGSSClientInit}. - - @return: C{1} if confidentiality was enabled in the previously unwrapped - buffer, C{0} otherwise. - """ - - - -def authGSSClientUserName(context): - """ - Get the user name of the principal authenticated via the now complete - GSSAPI client-side operations, or the current user name obtained via - authGSSClientInquireCred. This method must only be called after - authGSSClientStep or authGSSClientInquireCred return a complete response - code. - - @param context: The context object returned from L{authGSSClientInit}. - - @return: A string containing the user name. - """ - - - -def authGSSClientUnwrap(context, challenge): - """ - Perform the client side GSSAPI unwrap step. - - @param challenge: A string containing the base64-encoded server data. - - @return: A result code (see above) - """ - - - -def authGSSClientWrap(context, data, user=None, protect=0): - """ - Perform the client side GSSAPI wrap step. - - @param data: The result of the L{authGSSClientResponse} after the - L{authGSSClientUnwrap}. - - @param user: The user to authorize. 
- - @param protect: If C{0}, then just provide integrity protection. - If C{1}, then provide confidentiality as well. - - @return: A result code (see above) - """ - - - -def authGSSServerInit(service): - """ - Initializes a context for GSSAPI server-side authentication with the given - service principal. - authGSSServerClean must be called after this function returns an OK result - to dispose of the context once all GSSAPI operations are complete. - - @param service: A string containing the service principal in the form - C{"type@fqdn"}. To initialize the context for the purpose of accepting - delegated credentials, pass the literal string C{"DELEGATE"}. - - @return: A tuple of (result, context) where result is the result code (see - above) and context is an opaque value that will need to be passed to - subsequent functions. - """ - - - -def authGSSServerClean(context): - """ - Destroys the context for GSSAPI server-side authentication. This function - is provided for compatibility with earlier versions of PyKerberos but does - nothing. The context object destroys itself when it is reclaimed. - - @param context: The context object returned from L{authGSSClientInit}. - - @return: A result code (see above). - """ - - - -def authGSSServerStep(context, challenge): - """ - Processes a single GSSAPI server-side step using the supplied client data. - - @param context: The context object returned from L{authGSSClientInit}. - - @param challenge: A string containing the base64-encoded client data. - - @return: A result code (see above). - """ - - - -def authGSSServerResponse(context): - """ - Get the server response from the last successful GSSAPI server-side step. - - @param context: The context object returned from L{authGSSClientInit}. - - @return: A string containing the base64-encoded server data to be sent to - the client. - """ - - - -def authGSSServerHasDelegated(context): - """ - Checks whether a server context has delegated credentials. 
- - @param context: The context object returned from L{authGSSClientInit}. - - @return: A bool saying whether delegated credentials are available. - """ - - - -def authGSSServerUserName(context): - """ - Get the user name of the principal trying to authenticate to the server. - This method must only be called after L{authGSSServerStep} returns a - complete or continue response code. - - @param context: The context object returned from L{authGSSClientInit}. - - @return: A string containing the user name. - """ - - - -def authGSSServerTargetName(context): - """ - Get the target name if the server did not supply its own credentials. - This method must only be called after L{authGSSServerStep} returns a - complete or continue response code. - - @param context: The context object returned from L{authGSSClientInit}. - - @return: A string containing the target name. - """ - - - -def authGSSServerStoreDelegate(context): - """ - Save the ticket sent to the server in the file C{/tmp/krb5_pyserv_XXXXXX}. - This method must only be called after L{authGSSServerStep} returns a - complete or continue response code. - - @param context: The context object returned from L{authGSSClientInit}. - - @return: A result code (see above). - """ - - - -def authGSSServerCacheName(context): - """ - Get the name of the credential cache created with - L{authGSSServerStoreDelegate}. - This method must only be called after L{authGSSServerStoreDelegate}. - - @param context: The context object returned from L{authGSSClientInit}. - - @return: A string containing the cache name. 
- """ diff --git a/shell/ext-py/kerberos-1.3.1/setup.cfg b/shell/ext-py/kerberos-1.3.1/setup.cfg deleted file mode 100644 index 8bfd5a12f..000000000 --- a/shell/ext-py/kerberos-1.3.1/setup.cfg +++ /dev/null @@ -1,4 +0,0 @@ -[egg_info] -tag_build = -tag_date = 0 - diff --git a/shell/ext-py/kerberos-1.3.1/setup.py b/shell/ext-py/kerberos-1.3.1/setup.py deleted file mode 100644 index 8bc0c4f6a..000000000 --- a/shell/ext-py/kerberos-1.3.1/setup.py +++ /dev/null @@ -1,138 +0,0 @@ -## -# Copyright (c) 2006-2018 Apple Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-## - -from os.path import dirname, join as joinpath -from setuptools import setup, Extension -from io import open - -try: - from subprocess import getoutput -except ImportError: - from commands import getoutput - - -# -# Options -# - -project_name = "kerberos" - -version_string = "1.3.1" - -description = "Kerberos high-level interface" - -with open("README.md", "r", encoding="utf-8") as fh: - long_description = fh.read() - -long_description_content_type = "text/markdown" - -url = "https://github.com/apple/ccs-pykerberos" - -classifiers = [ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Operating System :: OS Independent", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 3", - "Topic :: Software Development :: Libraries :: Python Modules", - "Topic :: System :: Systems Administration :: Authentication/Directory", -] - -author = "Apple Inc." - -author_email = "calendarserver-dev@lists.macosforge.org" - -license = "Apache License, Version 2.0" - -platforms = ["all"] - - -# -# Entry points -# - -entry_points = { - "console_scripts": [], -} - - -# -# Dependencies -# - -setup_requirements = [] - -install_requirements = [] - -extras_requirements = {} - -extra_link_args = getoutput("krb5-config --libs gssapi").split() - -extra_compile_args = getoutput("krb5-config --cflags gssapi").split() - - -# -# Set up Extension modules that need to be built -# - -extensions = [ - Extension( - "kerberos", - extra_link_args=extra_link_args, - extra_compile_args=extra_compile_args, - sources=[ - "src/base64.c", - "src/kerberos.c", - "src/kerberosbasic.c", - "src/kerberosgss.c", - "src/kerberospw.c", - ], - ), -] - - -# -# Run setup -# - - -def doSetup(): - setup( - name=project_name, - version=version_string, - description=description, - long_description=long_description, - long_description_content_type=long_description_content_type, - url=url, - 
classifiers=classifiers, - author=author, - author_email=author_email, - license=license, - platforms=platforms, - ext_modules=extensions, - setup_requires=setup_requirements, - install_requires=install_requirements, - extras_require=extras_requirements, - ) - - -# -# Main -# - -if __name__ == "__main__": - doSetup() diff --git a/shell/ext-py/kerberos-1.3.1/src/base64.c b/shell/ext-py/kerberos-1.3.1/src/base64.c deleted file mode 100644 index 4496f92e2..000000000 --- a/shell/ext-py/kerberos-1.3.1/src/base64.c +++ /dev/null @@ -1,133 +0,0 @@ -/** - * Copyright (c) 2006-2018 Apple Inc. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - **/ - -#include "base64.h" - -#include -#include - -// base64 tables -static char basis_64[] = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; -static signed char index_64[128] = -{ - -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, - -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, - -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,62, -1,-1,-1,63, - 52,53,54,55, 56,57,58,59, 60,61,-1,-1, -1,-1,-1,-1, - -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10, 11,12,13,14, - 15,16,17,18, 19,20,21,22, 23,24,25,-1, -1,-1,-1,-1, - -1,26,27,28, 29,30,31,32, 33,34,35,36, 37,38,39,40, - 41,42,43,44, 45,46,47,48, 49,50,51,-1, -1,-1,-1,-1 -}; -#define CHAR64(c) (((c) < 0 || (c) > 127) ? 
-1 : index_64[(c)]) - -// base64_encode : base64 encode -// -// value : data to encode -// vlen : length of data -// (result) : new char[] - c-str of result -char *base64_encode(const unsigned char *value, size_t vlen) -{ - char *result = (char *)malloc((vlen * 4) / 3 + 5); - if (result == NULL) - { - return NULL; - } - char *out = result; - while (vlen >= 3) - { - *out++ = basis_64[value[0] >> 2]; - *out++ = basis_64[((value[0] << 4) & 0x30) | (value[1] >> 4)]; - *out++ = basis_64[((value[1] << 2) & 0x3C) | (value[2] >> 6)]; - *out++ = basis_64[value[2] & 0x3F]; - value += 3; - vlen -= 3; - } - if (vlen > 0) - { - *out++ = basis_64[value[0] >> 2]; - unsigned char oval = (value[0] << 4) & 0x30; - if (vlen > 1) oval |= value[1] >> 4; - *out++ = basis_64[oval]; - *out++ = (vlen < 2) ? '=' : basis_64[(value[1] << 2) & 0x3C]; - *out++ = '='; - } - *out = '\0'; - - return result; -} - -// base64_decode : base64 decode -// -// value : c-str to decode -// rlen : length of decoded result -// (result) : new unsigned char[] - decoded result -unsigned char *base64_decode(const char *value, size_t *rlen) -{ - *rlen = 0; - int c1, c2, c3, c4; - - size_t vlen = strlen(value); - unsigned char *result =(unsigned char *)malloc((vlen * 3) / 4 + 1); - if (result == NULL) - { - return NULL; - } - unsigned char *out = result; - - while (1) { - if (value[0]==0) { - return result; - } - c1 = value[0]; - if (CHAR64(c1) == -1) { - goto base64_decode_error;; - } - c2 = value[1]; - if (CHAR64(c2) == -1) { - goto base64_decode_error;; - } - c3 = value[2]; - if ((c3 != '=') && (CHAR64(c3) == -1)) { - goto base64_decode_error;; - } - c4 = value[3]; - if ((c4 != '=') && (CHAR64(c4) == -1)) { - goto base64_decode_error;; - } - - value += 4; - *out++ = (CHAR64(c1) << 2) | (CHAR64(c2) >> 4); - *rlen += 1; - - if (c3 != '=') { - *out++ = ((CHAR64(c2) << 4) & 0xf0) | (CHAR64(c3) >> 2); - *rlen += 1; - - if (c4 != '=') { - *out++ = ((CHAR64(c3) << 6) & 0xc0) | CHAR64(c4); - *rlen += 1; - } - } - } - 
-base64_decode_error: - *result = 0; - *rlen = 0; - - return result; -} diff --git a/shell/ext-py/kerberos-1.3.1/src/base64.h b/shell/ext-py/kerberos-1.3.1/src/base64.h deleted file mode 100644 index 4ddff3e33..000000000 --- a/shell/ext-py/kerberos-1.3.1/src/base64.h +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright (c) 2006-2018 Apple Inc. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - **/ - -#include - -char *base64_encode(const unsigned char *value, size_t vlen); -unsigned char *base64_decode(const char *value, size_t *rlen); diff --git a/shell/ext-py/kerberos-1.3.1/src/kerberos.c b/shell/ext-py/kerberos-1.3.1/src/kerberos.c deleted file mode 100644 index 1e889dfd5..000000000 --- a/shell/ext-py/kerberos-1.3.1/src/kerberos.c +++ /dev/null @@ -1,935 +0,0 @@ -/** - * Copyright (c) 2006-2018 Apple Inc. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- **/ - -#include - -#include "kerberosbasic.h" -#include "kerberospw.h" -#include "kerberosgss.h" - - -/* - * Support the Python 3 API while maintaining backward compatibility for the - * Python 2 API. - * Thanks to Lennart Regebro for http://python3porting.com/cextensions.html - */ -// Handle basic API changes -#if PY_MAJOR_VERSION >= 3 - // Basic renames (function parameters are the same) - // No more int objects - #define PyInt_FromLong PyLong_FromLong -#endif - -#if PY_VERSION_HEX >= 0x03020000 - // CObjects to Capsules - #define PyCObject_Check PyCapsule_CheckExact - #define PyCObject_SetVoidPtr PyCapsule_SetPointer - - // More complex macros (function parameters are not the same) - // Note for PyCObject_FromVoidPtr, destr is now the third parameter - #define PyCObject_FromVoidPtr(cobj, destr) PyCapsule_New(cobj, NULL, destr) - #define PyCObject_AsVoidPtr(pobj) PyCapsule_GetPointer(pobj, NULL) -#endif -// Handle differences in module definition syntax and interface -#if PY_MAJOR_VERSION >= 3 - #define MOD_ERROR_VAL NULL - #define MOD_SUCCESS_VAL(val) val - #define MOD_INIT(name) PyMODINIT_FUNC PyInit_##name(void) - #define MOD_DEF(ob, name, doc, methods) \ - static struct PyModuleDef moduledef = { \ - PyModuleDef_HEAD_INIT, name, doc, -1, methods, }; \ - ob = PyModule_Create(&moduledef); -#else - #define MOD_ERROR_VAL - #define MOD_SUCCESS_VAL(val) - #define MOD_INIT(name) void init##name(void) - #define MOD_DEF(ob, name, doc, methods) \ - ob = Py_InitModule3(name, methods, doc); -#endif - -typedef union { char b[16]; uint64_t ull[2]; } align16; -typedef union { char b[8]; uint64_t ull; } align8; - -static align16 krb5_mech_oid_bytes = { { 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x12, 0x01, 0x02, 0x02 } }; -gss_OID_desc krb5_mech_oid = { 9, NULL }; - -static align8 spnego_mech_oid_bytes = { { 0x2b, 0x06, 0x01, 0x05, 0x05, 0x02 } }; -gss_OID_desc spnego_mech_oid = { 6, NULL }; - -PyObject *KrbException_class; -PyObject *BasicAuthException_class; -PyObject 
*PwdChangeException_class; -PyObject *GssException_class; - -static PyObject *checkPassword(PyObject *self, PyObject *args) -{ - const char *user = NULL; - const char *pswd = NULL; - const char *service = NULL; - const char *default_realm = NULL; - int result = 0; - - if (! PyArg_ParseTuple(args, "ssss", &user, &pswd, &service, &default_realm)) { - return NULL; - } - - result = authenticate_user_krb5pwd(user, pswd, service, default_realm); - - if (result) { - return Py_INCREF(Py_True), Py_True; - } else { - return NULL; - } -} - -static PyObject *changePassword(PyObject *self, PyObject *args) -{ - const char *newpswd = NULL; - const char *oldpswd = NULL; - const char *user = NULL; - int result = 0; - - if (! PyArg_ParseTuple(args, "sss", &user, &oldpswd, &newpswd)) { - return NULL; - } - - result = change_user_krb5pwd(user, oldpswd, newpswd); - - if (result) { - return Py_INCREF(Py_True), Py_True; - } else { - return NULL; - } -} - -static PyObject *getServerPrincipalDetails(PyObject *self, PyObject *args) -{ - const char *service = NULL; - const char *hostname = NULL; - char* result = NULL; - - if (! 
PyArg_ParseTuple(args, "ss", &service, &hostname)) { - return NULL; - } - - result = server_principal_details(service, hostname); - - if (result != NULL) { - PyObject* pyresult = Py_BuildValue("s", result); - free(result); - return pyresult; - } else { - return NULL; - } -} - -static void -#if PY_VERSION_HEX >= 0x03020000 -destroy_gss_client(PyObject *obj) { - gss_client_state *state = PyCapsule_GetPointer(obj, NULL); -#else -destroy_gss_client(void *obj) { - gss_client_state *state = (gss_client_state *)obj; -#endif - if (state) { - authenticate_gss_client_clean(state); - free(state); - } -} - -static PyObject* authGSSClientInit(PyObject* self, PyObject* args, PyObject* keywds) -{ - const char *service = NULL; - const char *principal = NULL; - gss_client_state *state = NULL; - PyObject *pystate = NULL; - gss_server_state *delegatestate = NULL; - PyObject *pydelegatestate = NULL; - gss_OID mech_oid = GSS_C_NO_OID; - PyObject *pymech_oid = NULL; - static char *kwlist[] = { - "service", "principal", "gssflags", "delegated", "mech_oid", NULL - }; - long int gss_flags = GSS_C_MUTUAL_FLAG | GSS_C_SEQUENCE_FLAG; - int result = 0; - - if (! 
PyArg_ParseTupleAndKeywords( - args, keywds, "s|zlOO", kwlist, - &service, &principal, &gss_flags, &pydelegatestate, &pymech_oid - )) { - return NULL; - } - - state = (gss_client_state *) malloc(sizeof(gss_client_state)); - if (state == NULL) - { - PyErr_NoMemory(); - return NULL; - } - pystate = PyCObject_FromVoidPtr(state, &destroy_gss_client); - if (pystate == NULL) { - free(state); - return NULL; - } - - if (pydelegatestate != NULL && PyCObject_Check(pydelegatestate)) { - delegatestate = (gss_server_state*)PyCObject_AsVoidPtr(pydelegatestate); - } - - if (pymech_oid != NULL && PyCObject_Check(pymech_oid)) { - mech_oid = (gss_OID)PyCObject_AsVoidPtr(pymech_oid); - } - - result = authenticate_gss_client_init( - service, principal, gss_flags, delegatestate, mech_oid, state - ); - - if (result == AUTH_GSS_ERROR) { - Py_DECREF(pystate); - return NULL; - } - - return Py_BuildValue("(iN)", result, pystate); -} - -static PyObject *authGSSClientClean(PyObject *self, PyObject *args) -{ - return Py_BuildValue("i", AUTH_GSS_COMPLETE); -} - -#if PY_VERSION_HEX >= 0x03020000 -void destruct_channel_bindings(PyObject* o) { - struct gss_channel_bindings_struct *channel_bindings = PyCapsule_GetPointer(o, NULL); -#else -void destruct_channel_bindings(void* o) { - struct gss_channel_bindings_struct *channel_bindings = (struct gss_channel_bindings_struct *)o; -#endif - - if (channel_bindings != NULL) { - if (channel_bindings->initiator_address.value != NULL) { - PyMem_Free(channel_bindings->initiator_address.value); - } - - if (channel_bindings->acceptor_address.value != NULL) { - PyMem_Free(channel_bindings->acceptor_address.value); - } - - if (channel_bindings->application_data.value != NULL) { - PyMem_Free(channel_bindings->application_data.value); - } - - free(channel_bindings); - } -} - -static PyObject *channelBindings(PyObject *self, PyObject *args, PyObject* keywds) -{ - int initiator_addrtype = GSS_C_AF_UNSPEC; - int acceptor_addrtype = GSS_C_AF_UNSPEC; - - const char 
*encoding = NULL; - char *initiator_address = NULL; - char *acceptor_address = NULL; - char *application_data = NULL; - int initiator_length = 0; - int acceptor_length = 0; - int application_length = 0; - - PyObject *pychan_bindings = NULL; - struct gss_channel_bindings_struct *input_chan_bindings; - static char *kwlist[] = {"initiator_addrtype", "initiator_address", "acceptor_addrtype", - "acceptor_address", "application_data", NULL}; - - if (!PyArg_ParseTupleAndKeywords(args, keywds, "|iet#iet#et#", kwlist, - &initiator_addrtype, &encoding, &initiator_address, &initiator_length, - &acceptor_addrtype, &encoding, &acceptor_address, &acceptor_length, - &encoding, &application_data, &application_length)) { - return NULL; - } - - input_chan_bindings = (struct gss_channel_bindings_struct *) malloc(sizeof(struct gss_channel_bindings_struct)); - pychan_bindings = PyCObject_FromVoidPtr(input_chan_bindings, &destruct_channel_bindings); - - input_chan_bindings->initiator_addrtype = initiator_addrtype; - input_chan_bindings->initiator_address.length = initiator_length; - input_chan_bindings->initiator_address.value = initiator_address; - - input_chan_bindings->acceptor_addrtype = acceptor_addrtype; - input_chan_bindings->acceptor_address.length = acceptor_length; - input_chan_bindings->acceptor_address.value = acceptor_address; - - input_chan_bindings->application_data.length = application_length; - input_chan_bindings->application_data.value = application_data; - - return Py_BuildValue("N", pychan_bindings); -} - -static PyObject *authGSSClientStep(PyObject *self, PyObject *args, PyObject* keywds) -{ - gss_client_state *state = NULL; - PyObject *pystate = NULL; - char *challenge = NULL; - PyObject *pychan_bindings = NULL; - struct gss_channel_bindings_struct *channel_bindings; - static char *kwlist[] = {"state", "challenge", "channel_bindings", NULL}; - int result = 0; - - if (! 
PyArg_ParseTupleAndKeywords(args, keywds, "Os|O", kwlist, &pystate, &challenge, &pychan_bindings)) { - return NULL; - } - - if (! PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_client_state *)PyCObject_AsVoidPtr(pystate); - - if (state == NULL) { - return NULL; - } - - if (pychan_bindings == NULL) { - channel_bindings = GSS_C_NO_CHANNEL_BINDINGS; - } else { - if (!PyCObject_Check(pychan_bindings)) { - PyErr_SetString(PyExc_TypeError, "Expected a gss_channel_bindings_struct object"); - return NULL; - } - channel_bindings = (struct gss_channel_bindings_struct *)PyCObject_AsVoidPtr(pychan_bindings); - } - - result = authenticate_gss_client_step(state, challenge, channel_bindings); - - if (result == AUTH_GSS_ERROR) { - return NULL; - } - - return Py_BuildValue("i", result); -} - -static PyObject *authGSSClientResponseConf(PyObject *self, PyObject *args) -{ - gss_client_state *state = NULL; - PyObject *pystate = NULL; - - if (! PyArg_ParseTuple(args, "O", &pystate)) { - return NULL; - } - - if (! PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_client_state *)PyCObject_AsVoidPtr(pystate); - - if (state == NULL) { - return NULL; - } - - return Py_BuildValue("i", state->responseConf); -} - -static PyObject *authGSSServerHasDelegated(PyObject *self, PyObject *args) -{ - gss_server_state *state = NULL; - PyObject *pystate = NULL; - - if (! PyArg_ParseTuple(args, "O", &pystate)) { - return NULL; - } - - if (! 
PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_server_state *)PyCObject_AsVoidPtr(pystate); - - if (state == NULL) { - return NULL; - } - - return PyBool_FromLong(authenticate_gss_server_has_delegated(state)); -} - -static PyObject *authGSSClientResponse(PyObject *self, PyObject *args) -{ - gss_client_state *state = NULL; - PyObject *pystate = NULL; - - if (! PyArg_ParseTuple(args, "O", &pystate)) { - return NULL; - } - - if (! PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_client_state *)PyCObject_AsVoidPtr(pystate); - - if (state == NULL) { - return NULL; - } - - return Py_BuildValue("s", state->response); -} - -static PyObject *authGSSClientUserName(PyObject *self, PyObject *args) -{ - gss_client_state *state = NULL; - PyObject *pystate = NULL; - - if (! PyArg_ParseTuple(args, "O", &pystate)) { - return NULL; - } - - if (! PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_client_state *)PyCObject_AsVoidPtr(pystate); - - if (state == NULL) { - return NULL; - } - - return Py_BuildValue("s", state->username); -} - -static PyObject *authGSSClientUnwrap(PyObject *self, PyObject *args) -{ - gss_client_state *state = NULL; - PyObject *pystate = NULL; - char *challenge = NULL; - int result = 0; - - if (! PyArg_ParseTuple(args, "Os", &pystate, &challenge)) { - return NULL; - } - - if (! 
PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_client_state *)PyCObject_AsVoidPtr(pystate); - - if (state == NULL) { - return NULL; - } - - result = authenticate_gss_client_unwrap(state, challenge); - - if (result == AUTH_GSS_ERROR) { - return NULL; - } - - return Py_BuildValue("i", result); -} - -static PyObject *authGSSClientWrap(PyObject *self, PyObject *args) -{ - gss_client_state *state = NULL; - PyObject *pystate = NULL; - char *challenge = NULL; - char *user = NULL; - int protect = 0; - int result = 0; - - if (! PyArg_ParseTuple( - args, "Os|zi", &pystate, &challenge, &user, &protect - )) { - return NULL; - } - - if (! PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_client_state *)PyCObject_AsVoidPtr(pystate); - - if (state == NULL) { - return NULL; - } - - result = authenticate_gss_client_wrap(state, challenge, user, protect); - - if (result == AUTH_GSS_ERROR) { - return NULL; - } - - return Py_BuildValue("i", result); -} - -static PyObject *authGSSClientInquireCred(PyObject *self, PyObject *args) -{ - gss_client_state *state = NULL; - PyObject *pystate = NULL; - int result = 0; - if (!PyArg_ParseTuple(args, "O", &pystate)) { - return NULL; - } - - if (!PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_client_state *)PyCObject_AsVoidPtr(pystate); - if (state == NULL) { - return NULL; - } - - result = authenticate_gss_client_inquire_cred(state); - if (result == AUTH_GSS_ERROR) { - return NULL; - } - - return Py_BuildValue("i", result); -} - -static void -#if PY_VERSION_HEX >= 0x03020000 -destroy_gss_server(PyObject *obj) { - gss_server_state *state = PyCapsule_GetPointer(obj, NULL); -#else -destroy_gss_server(void *obj) { - gss_server_state *state = (gss_server_state *)obj; -#endif - if (state) { - 
authenticate_gss_server_clean(state); - free(state); - } -} - -static PyObject *authGSSServerInit(PyObject *self, PyObject *args) -{ - const char *service = NULL; - gss_server_state *state = NULL; - PyObject *pystate = NULL; - int result = 0; - - if (! PyArg_ParseTuple(args, "s", &service)) { - return NULL; - } - - state = (gss_server_state *) malloc(sizeof(gss_server_state)); - if (state == NULL) - { - PyErr_NoMemory(); - return NULL; - } - pystate = PyCObject_FromVoidPtr(state, &destroy_gss_server); - if (pystate == NULL) { - free(state); - return NULL; - } - - result = authenticate_gss_server_init(service, state); - - if (result == AUTH_GSS_ERROR) { - Py_DECREF(pystate); - return NULL; - } - - return Py_BuildValue("(iN)", result, pystate); -} - -static PyObject *authGSSServerClean(PyObject *self, PyObject *args) -{ - return Py_BuildValue("i", AUTH_GSS_COMPLETE); -} - -static PyObject *authGSSServerStep(PyObject *self, PyObject *args) -{ - gss_server_state *state = NULL; - PyObject *pystate = NULL; - char *challenge = NULL; - int result = 0; - - if (! PyArg_ParseTuple(args, "Os", &pystate, &challenge)) { - return NULL; - } - - if (! PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_server_state *)PyCObject_AsVoidPtr(pystate); - - if (state == NULL) { - return NULL; - } - - result = authenticate_gss_server_step(state, challenge); - - if (result == AUTH_GSS_ERROR) { - return NULL; - } - - return Py_BuildValue("i", result); -} - -static PyObject *authGSSServerStoreDelegate(PyObject *self, PyObject *args) -{ - gss_server_state *state = NULL; - PyObject *pystate = NULL; - int result = 0; - - if (! PyArg_ParseTuple(args, "O", &pystate)) { - return NULL; - } - - if (! 
PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_server_state *)PyCObject_AsVoidPtr(pystate); - - if (state == NULL) { - return NULL; - } - - result = authenticate_gss_server_store_delegate(state); - - if (result == AUTH_GSS_ERROR) { - return NULL; - } - - return Py_BuildValue("i", result); -} - -static PyObject *authGSSServerResponse(PyObject *self, PyObject *args) -{ - gss_server_state *state = NULL; - PyObject *pystate = NULL; - - if (! PyArg_ParseTuple(args, "O", &pystate)) { - return NULL; - } - - if (! PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_server_state *)PyCObject_AsVoidPtr(pystate); - - if (state == NULL) { - return NULL; - } - - return Py_BuildValue("s", state->response); -} - -static PyObject *authGSSServerUserName(PyObject *self, PyObject *args) -{ - gss_server_state *state = NULL; - PyObject *pystate = NULL; - - if (! PyArg_ParseTuple(args, "O", &pystate)) { - return NULL; - } - - if (! PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_server_state *)PyCObject_AsVoidPtr(pystate); - - if (state == NULL) { - return NULL; - } - - return Py_BuildValue("s", state->username); -} - -static PyObject *authGSSServerCacheName(PyObject *self, PyObject *args) -{ - gss_server_state *state = NULL; - PyObject *pystate = NULL; - - if (! PyArg_ParseTuple(args, "O", &pystate)) { - return NULL; - } - - if (! PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_server_state *)PyCObject_AsVoidPtr(pystate); - - if (state == NULL) { - return NULL; - } - - return Py_BuildValue("s", state->ccname); -} - -static PyObject *authGSSServerTargetName(PyObject *self, PyObject *args) -{ - gss_server_state *state = NULL; - PyObject *pystate = NULL; - - if (! 
PyArg_ParseTuple(args, "O", &pystate)) { - return NULL; - } - - if (! PyCObject_Check(pystate)) { - PyErr_SetString(PyExc_TypeError, "Expected a context object"); - return NULL; - } - - state = (gss_server_state *)PyCObject_AsVoidPtr(pystate); - - if (state == NULL) { - return NULL; - } - - return Py_BuildValue("s", state->targetname); -} - -static PyMethodDef KerberosMethods[] = { - { - "checkPassword", - checkPassword, METH_VARARGS, - "Check the supplied user/password against Kerberos KDC." - }, - { - "changePassword", - changePassword, METH_VARARGS, - "Change the user password." - }, - { - "getServerPrincipalDetails", - getServerPrincipalDetails, METH_VARARGS, - "Return the service principal for a given service and hostname." - }, - { - "authGSSClientInit", - (PyCFunction)authGSSClientInit, METH_VARARGS | METH_KEYWORDS, - "Initialize client-side GSSAPI operations." - }, - { - "channelBindings", - (PyCFunction)channelBindings, METH_VARARGS | METH_KEYWORDS, - "Build the Channel Bindings Structure for authGSSClientStep." - }, - { - "authGSSClientClean", - authGSSClientClean, METH_VARARGS, - "Terminate client-side GSSAPI operations." - }, - { - "authGSSClientStep", - (PyCFunction)authGSSClientStep, METH_VARARGS | METH_KEYWORDS, - "Do a client-side GSSAPI step." - }, - { - "authGSSClientResponse", - authGSSClientResponse, METH_VARARGS, - "Get the response from the last client-side GSSAPI step." - }, - { - "authGSSClientInquireCred", authGSSClientInquireCred, METH_VARARGS, - "Get the current user name, if any, without a client-side GSSAPI step" - }, - { - "authGSSClientResponseConf", - authGSSClientResponseConf, METH_VARARGS, - "return 1 if confidentiality was set in the last unwrapped buffer, 0 otherwise." - }, - { - "authGSSClientUserName", - authGSSClientUserName, METH_VARARGS, - "Get the user name from the last client-side GSSAPI step." - }, - { - "authGSSServerInit", - authGSSServerInit, METH_VARARGS, - "Initialize server-side GSSAPI operations." 
- }, - { - "authGSSClientWrap", - authGSSClientWrap, METH_VARARGS, - "Do a GSSAPI wrap." - }, - { - "authGSSClientUnwrap", - authGSSClientUnwrap, METH_VARARGS, - "Do a GSSAPI unwrap." - }, - { - "authGSSClientInquireCred", authGSSClientInquireCred, METH_VARARGS, - "Get the current user name, if any." - }, - { - "authGSSServerClean", - authGSSServerClean, METH_VARARGS, - "Terminate server-side GSSAPI operations." - }, - { - "authGSSServerStep", - authGSSServerStep, METH_VARARGS, - "Do a server-side GSSAPI step." - }, - { - "authGSSServerHasDelegated", - authGSSServerHasDelegated, METH_VARARGS, - "Check whether the client delegated credentials to us." - }, - { - "authGSSServerStoreDelegate", - authGSSServerStoreDelegate, METH_VARARGS, - "Store the delegated Credentials." - }, - { - "authGSSServerResponse", - authGSSServerResponse, METH_VARARGS, - "Get the response from the last server-side GSSAPI step." - }, - { - "authGSSServerUserName", - authGSSServerUserName, METH_VARARGS, - "Get the user name from the last server-side GSSAPI step." - }, - { - "authGSSServerCacheName", - authGSSServerCacheName, METH_VARARGS, - "Get the location of the cache where delegated credentials are stored." - }, - { - "authGSSServerTargetName", - authGSSServerTargetName, METH_VARARGS, - "Get the target name from the last server-side GSSAPI step." - }, - {NULL, NULL, 0, NULL} /* Sentinel */ -}; - -MOD_INIT(kerberos) -{ - PyObject *m,*d; - - MOD_DEF(m, "kerberos", NULL, KerberosMethods); - - if (m == NULL) { - return MOD_ERROR_VAL; - } - - d = PyModule_GetDict(m); - - /* create the base exception class */ - if (! (KrbException_class = PyErr_NewException( - "kerberos.KrbError", NULL, NULL - ))) { - goto error; - } - - PyDict_SetItemString(d, "KrbError", KrbException_class); - Py_INCREF(KrbException_class); - - /* ...and the derived exceptions */ - if (! 
(BasicAuthException_class = PyErr_NewException( - "kerberos.BasicAuthError", KrbException_class, NULL - ))) { - goto error; - } - - Py_INCREF(BasicAuthException_class); - PyDict_SetItemString(d, "BasicAuthError", BasicAuthException_class); - - if (! (PwdChangeException_class = PyErr_NewException( - "kerberos.PwdChangeError", KrbException_class, NULL - ))) { - goto error; - } - - Py_INCREF(PwdChangeException_class); - PyDict_SetItemString(d, "PwdChangeError", PwdChangeException_class); - - if (! (GssException_class = PyErr_NewException( - "kerberos.GSSError", KrbException_class, NULL - ))) { - goto error; - } - - Py_INCREF(GssException_class); - PyDict_SetItemString( - d, "GSSError", GssException_class - ); - - PyDict_SetItemString( - d, "AUTH_GSS_COMPLETE", PyInt_FromLong(AUTH_GSS_COMPLETE) - ); - PyDict_SetItemString( - d, "AUTH_GSS_CONTINUE", PyInt_FromLong(AUTH_GSS_CONTINUE) - ); - - PyDict_SetItemString( - d, "GSS_C_DELEG_FLAG", PyInt_FromLong(GSS_C_DELEG_FLAG) - ); - PyDict_SetItemString( - d, "GSS_C_MUTUAL_FLAG", PyInt_FromLong(GSS_C_MUTUAL_FLAG) - ); - PyDict_SetItemString( - d, "GSS_C_REPLAY_FLAG", PyInt_FromLong(GSS_C_REPLAY_FLAG) - ); - PyDict_SetItemString( - d, "GSS_C_SEQUENCE_FLAG", PyInt_FromLong(GSS_C_SEQUENCE_FLAG) - ); - PyDict_SetItemString( - d, "GSS_C_CONF_FLAG", PyInt_FromLong(GSS_C_CONF_FLAG) - ); - PyDict_SetItemString( - d, "GSS_C_INTEG_FLAG", PyInt_FromLong(GSS_C_INTEG_FLAG) - ); - PyDict_SetItemString( - d, "GSS_C_ANON_FLAG", PyInt_FromLong(GSS_C_ANON_FLAG) - ); - PyDict_SetItemString( - d, "GSS_C_PROT_READY_FLAG", PyInt_FromLong(GSS_C_PROT_READY_FLAG) - ); - PyDict_SetItemString( - d, "GSS_C_TRANS_FLAG", PyInt_FromLong(GSS_C_TRANS_FLAG) - ); - krb5_mech_oid.elements = &krb5_mech_oid_bytes.b; - - PyDict_SetItemString( - d, "GSS_MECH_OID_KRB5", PyCObject_FromVoidPtr(&krb5_mech_oid, NULL) - ); - - spnego_mech_oid.elements = &spnego_mech_oid_bytes.b; - PyDict_SetItemString( - d, "GSS_MECH_OID_SPNEGO", PyCObject_FromVoidPtr(&spnego_mech_oid, 
NULL) - ); - -error: - if (PyErr_Occurred()) { - PyErr_SetString(PyExc_ImportError, "kerberos: init failed"); - return MOD_ERROR_VAL; - } - - return MOD_SUCCESS_VAL(m); -} diff --git a/shell/ext-py/kerberos-1.3.1/src/kerberosbasic.c b/shell/ext-py/kerberos-1.3.1/src/kerberosbasic.c deleted file mode 100644 index 08703e156..000000000 --- a/shell/ext-py/kerberos-1.3.1/src/kerberosbasic.c +++ /dev/null @@ -1,171 +0,0 @@ -/** - * Copyright (c) 2006-2018 Apple Inc. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- **/ - -#include -#include "kerberosbasic.h" - -#include -#include -#include - -#undef PRINTFS - -extern PyObject *BasicAuthException_class; -static void set_basicauth_error(krb5_context context, krb5_error_code code); - -static krb5_error_code verify_krb5_user( - krb5_context context, krb5_principal principal, const char *password, - krb5_principal server -); - -int authenticate_user_krb5pwd( - const char *user, const char *pswd, const char *service, - const char *default_realm -) { - krb5_context kcontext = NULL; - krb5_error_code code; - krb5_principal client = NULL; - krb5_principal server = NULL; - int ret = 0; - char *name = NULL; - char *p = NULL; - - code = krb5_init_context(&kcontext); - if (code) - { - PyErr_SetObject( - BasicAuthException_class, - Py_BuildValue( - "((s:i))", "Cannot initialize Kerberos5 context", code - ) - ); - return 0; - } - - ret = krb5_parse_name (kcontext, service, &server); - - if (ret) { - set_basicauth_error(kcontext, ret); - ret = 0; - goto end; - } - - code = krb5_unparse_name(kcontext, server, &name); - if (code) { - set_basicauth_error(kcontext, code); - ret = 0; - goto end; - } -#ifdef PRINTFS - printf("Using %s as server principal for password verification\n", name); -#endif - free(name); - name = NULL; - - name = (char *)malloc(256); - if (name == NULL) - { - PyErr_NoMemory(); - ret = 0; - goto end; - } - p = strchr(user, '@'); - if (p == NULL) { - snprintf(name, 256, "%s@%s", user, default_realm); - } else { - snprintf(name, 256, "%s", user); - } - - code = krb5_parse_name(kcontext, name, &client); - if (code) { - set_basicauth_error(kcontext, code); - ret = 0; - goto end; - } - - code = verify_krb5_user(kcontext, client, pswd, server); - - if (code) { - ret = 0; - goto end; - } - - ret = 1; - -end: -#ifdef PRINTFS - printf( - "kerb_authenticate_user_krb5pwd ret=%d user=%s authtype=%s\n", - ret, user, "Basic" - ); -#endif - if (name) { - free(name); - } - if (client) { - krb5_free_principal(kcontext, client); - } - if 
(server) { - krb5_free_principal(kcontext, server); - } - krb5_free_context(kcontext); - - return ret; -} - -/* Inspired by krb5_verify_user from Heimdal */ -static krb5_error_code verify_krb5_user( - krb5_context context, krb5_principal principal, const char *password, - krb5_principal server -) { - krb5_creds creds; - krb5_get_init_creds_opt gic_options; - krb5_error_code ret; - char *name = NULL; - - memset(&creds, 0, sizeof(creds)); - - ret = krb5_unparse_name(context, principal, &name); - if (ret == 0) { -#ifdef PRINTFS - printf("Trying to get TGT for user %s\n", name); -#endif - free(name); - } - - krb5_get_init_creds_opt_init(&gic_options); - ret = krb5_get_init_creds_password( - context, &creds, principal, (char *)password, - NULL, NULL, 0, NULL, &gic_options - ); - if (ret) { - set_basicauth_error(context, ret); - goto end; - } - -end: - krb5_free_cred_contents(context, &creds); - - return ret; -} - -static void set_basicauth_error(krb5_context context, krb5_error_code code) -{ - PyErr_SetObject( - BasicAuthException_class, - Py_BuildValue("(s:i)", krb5_get_err_text(context, code), code) - ); -} diff --git a/shell/ext-py/kerberos-1.3.1/src/kerberosbasic.h b/shell/ext-py/kerberos-1.3.1/src/kerberosbasic.h deleted file mode 100644 index c558234e4..000000000 --- a/shell/ext-py/kerberos-1.3.1/src/kerberosbasic.h +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Copyright (c) 2006-2018 Apple Inc. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - **/ - -#include -#include -#include - -#define krb5_get_err_text(context,code) error_message(code) - -int authenticate_user_krb5pwd( - const char *user, const char *pswd, const char *service, - const char *default_realm -); diff --git a/shell/ext-py/kerberos-1.3.1/src/kerberosgss.c b/shell/ext-py/kerberos-1.3.1/src/kerberosgss.c deleted file mode 100644 index c82a5e439..000000000 --- a/shell/ext-py/kerberos-1.3.1/src/kerberosgss.c +++ /dev/null @@ -1,1007 +0,0 @@ -/** - * Copyright (c) 2006-2018 Apple Inc. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- **/ - -#include -#include "kerberosgss.h" - -#include "base64.h" - -#include -#include -#include -#include - -static void set_gss_error(OM_uint32 err_maj, OM_uint32 err_min); - -int create_krb5_ccache( - gss_server_state *state, krb5_context kcontext, krb5_principal princ, - krb5_ccache *ccache -); - -extern PyObject *GssException_class; -extern PyObject *KrbException_class; - -char* server_principal_details(const char* service, const char* hostname) -{ - char match[1024]; - size_t match_len = 0; - char* result = NULL; - - int code; - krb5_context kcontext; - krb5_keytab kt = NULL; - krb5_kt_cursor cursor = NULL; - krb5_keytab_entry entry; - char* pname = NULL; - - // Generate the principal prefix we want to match - snprintf(match, 1024, "%s/%s@", service, hostname); - match_len = strlen(match); - - code = krb5_init_context(&kcontext); - if (code) { - PyErr_SetObject( - KrbException_class, - Py_BuildValue( - "((s:i))", "Cannot initialize Kerberos5 context", code - ) - ); - return NULL; - } - - if ((code = krb5_kt_default(kcontext, &kt))) { - PyErr_SetObject( - KrbException_class, - Py_BuildValue("((s:i))", "Cannot get default keytab", code) - ); - goto end; - } - - if ((code = krb5_kt_start_seq_get(kcontext, kt, &cursor))) { - PyErr_SetObject( - KrbException_class, - Py_BuildValue( - "((s:i))", "Cannot get sequence cursor from keytab", code - ) - ); - goto end; - } - - while ((code = krb5_kt_next_entry(kcontext, kt, &entry, &cursor)) == 0) { - if ((code = krb5_unparse_name(kcontext, entry.principal, &pname))) { - PyErr_SetObject( - KrbException_class, - Py_BuildValue( - "((s:i))", "Cannot parse principal name from keytab", code - ) - ); - goto end; - } - - if (strncmp(pname, match, match_len) == 0) { - result = malloc(strlen(pname) + 1); - if (result == NULL) { - PyErr_NoMemory(); - goto end; - } - strcpy(result, pname); - krb5_free_unparsed_name(kcontext, pname); - krb5_free_keytab_entry_contents(kcontext, &entry); - break; - } - - 
krb5_free_unparsed_name(kcontext, pname); - krb5_free_keytab_entry_contents(kcontext, &entry); - } - - if (result == NULL) { - PyErr_SetObject( - KrbException_class, - Py_BuildValue("((s:i))", "Principal not found in keytab", -1) - ); - } - -end: - if (cursor) { - krb5_kt_end_seq_get(kcontext, kt, &cursor); - } - if (kt) { - krb5_kt_close(kcontext, kt); - } - krb5_free_context(kcontext); - - return result; -} - -int authenticate_gss_client_init( - const char* service, const char* principal, long int gss_flags, - gss_server_state* delegatestate, gss_OID mech_oid, gss_client_state* state -) -{ - OM_uint32 maj_stat; - OM_uint32 min_stat; - gss_buffer_desc name_token = GSS_C_EMPTY_BUFFER; - gss_buffer_desc principal_token = GSS_C_EMPTY_BUFFER; - int ret = AUTH_GSS_COMPLETE; - - state->server_name = GSS_C_NO_NAME; - state->mech_oid = mech_oid; - state->context = GSS_C_NO_CONTEXT; - state->gss_flags = gss_flags; - state->client_creds = GSS_C_NO_CREDENTIAL; - state->username = NULL; - state->response = NULL; - - // Import server name first - name_token.length = strlen(service); - name_token.value = (char *)service; - - maj_stat = gss_import_name( - &min_stat, &name_token, gss_krb5_nt_service_name, &state->server_name - ); - - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - // Use the delegate credentials if they exist - if (delegatestate && delegatestate->client_creds != GSS_C_NO_CREDENTIAL) { - state->client_creds = delegatestate->client_creds; - } - // If available use the principal to extract its associated credentials - else if (principal && *principal) { - gss_name_t name; - principal_token.length = strlen(principal); - principal_token.value = (char *)principal; - - maj_stat = gss_import_name( - &min_stat, &principal_token, GSS_C_NT_USER_NAME, &name - ); - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - - maj_stat = gss_acquire_cred( - &min_stat, name, 
GSS_C_INDEFINITE, GSS_C_NO_OID_SET, - GSS_C_INITIATE, &state->client_creds, NULL, NULL - ); - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - - maj_stat = gss_release_name(&min_stat, &name); - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - } - -end: - return ret; -} - -int authenticate_gss_client_clean(gss_client_state *state) -{ - OM_uint32 maj_stat; - OM_uint32 min_stat; - int ret = AUTH_GSS_COMPLETE; - - if (state->context != GSS_C_NO_CONTEXT) { - maj_stat = gss_delete_sec_context( - &min_stat, &state->context, GSS_C_NO_BUFFER - ); - } - if (state->server_name != GSS_C_NO_NAME) { - maj_stat = gss_release_name(&min_stat, &state->server_name); - } - if ( - state->client_creds != GSS_C_NO_CREDENTIAL && - ! (state->gss_flags & GSS_C_DELEG_FLAG) - ) { - maj_stat = gss_release_cred(&min_stat, &state->client_creds); - } - if (state->username != NULL) { - free(state->username); - state->username = NULL; - } - if (state->response != NULL) { - free(state->response); - state->response = NULL; - } - - return ret; -} - -int authenticate_gss_client_step( - gss_client_state* state, const char* challenge, struct gss_channel_bindings_struct* channel_bindings -) { - OM_uint32 maj_stat; - OM_uint32 min_stat; - gss_buffer_desc input_token = GSS_C_EMPTY_BUFFER; - gss_buffer_desc output_token = GSS_C_EMPTY_BUFFER; - int ret = AUTH_GSS_CONTINUE; - - // Always clear out the old response - if (state->response != NULL) { - free(state->response); - state->response = NULL; - } - - // If there is a challenge (data from the server) we need to give it to GSS - if (challenge && *challenge) { - size_t len; - input_token.value = base64_decode(challenge, &len); - if (input_token.value == NULL) - { - PyErr_NoMemory(); - ret = AUTH_GSS_ERROR; - goto end; - } - input_token.length = len; - } - - // Do GSSAPI step - Py_BEGIN_ALLOW_THREADS - maj_stat = gss_init_sec_context( - &min_stat, 
- state->client_creds, - &state->context, - state->server_name, - state->mech_oid, - (OM_uint32)state->gss_flags, - 0, - channel_bindings, - &input_token, - NULL, - &output_token, - NULL, - NULL - ); - Py_END_ALLOW_THREADS - - if ((maj_stat != GSS_S_COMPLETE) && (maj_stat != GSS_S_CONTINUE_NEEDED)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - - ret = (maj_stat == GSS_S_COMPLETE) ? AUTH_GSS_COMPLETE : AUTH_GSS_CONTINUE; - // Grab the client response to send back to the server - if (output_token.length) { - state->response = base64_encode((const unsigned char *)output_token.value, output_token.length); - if (state->response == NULL) { - PyErr_NoMemory(); - ret = AUTH_GSS_ERROR; - goto end; - } - maj_stat = gss_release_buffer(&min_stat, &output_token); - } - - // Try to get the user name if we have completed all GSS operations - if (ret == AUTH_GSS_COMPLETE) { - gss_name_t gssuser = GSS_C_NO_NAME; - maj_stat = gss_inquire_context(&min_stat, state->context, &gssuser, NULL, NULL, NULL, NULL, NULL, NULL); - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - - gss_buffer_desc name_token; - name_token.length = 0; - maj_stat = gss_display_name(&min_stat, gssuser, &name_token, NULL); - if (GSS_ERROR(maj_stat)) { - if (name_token.value) { - gss_release_buffer(&min_stat, &name_token); - } - gss_release_name(&min_stat, &gssuser); - - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } else { - if (state->username != NULL) { - free(state->username); - state->username = NULL; - } - state->username = (char *)malloc(name_token.length + 1); - if (state->username == NULL) { - PyErr_NoMemory(); - ret = AUTH_GSS_ERROR; - goto end; - } - strncpy(state->username, (char*) name_token.value, name_token.length); - state->username[name_token.length] = 0; - gss_release_buffer(&min_stat, &name_token); - gss_release_name(&min_stat, &gssuser); - } - } - -end: - if (output_token.value) 
{ - gss_release_buffer(&min_stat, &output_token); - } - if (input_token.value) { - free(input_token.value); - } - return ret; -} - -int authenticate_gss_client_unwrap( - gss_client_state *state, const char *challenge -) { - OM_uint32 maj_stat; - OM_uint32 min_stat; - gss_buffer_desc input_token = GSS_C_EMPTY_BUFFER; - gss_buffer_desc output_token = GSS_C_EMPTY_BUFFER; - int ret = AUTH_GSS_CONTINUE; - int conf = 0; - - // Always clear out the old response - if (state->response != NULL) { - free(state->response); - state->response = NULL; - state->responseConf = 0; - } - - // If there is a challenge (data from the server) we need to give it to GSS - if (challenge && *challenge) { - size_t len; - input_token.value = base64_decode(challenge, &len); - if (input_token.value == NULL) { - PyErr_NoMemory(); - ret = AUTH_GSS_ERROR; - goto end; - } - input_token.length = len; - } - - // Do GSSAPI step - maj_stat = gss_unwrap( - &min_stat, - state->context, - &input_token, - &output_token, - &conf, - NULL - ); - - if (maj_stat != GSS_S_COMPLETE) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } else { - ret = AUTH_GSS_COMPLETE; - } - - // Grab the client response - if (output_token.length) { - state->response = base64_encode( - (const unsigned char *)output_token.value, output_token.length - ); - if (state->response == NULL) - { - PyErr_NoMemory(); - ret = AUTH_GSS_ERROR; - goto end; - } - state->responseConf = conf; - maj_stat = gss_release_buffer(&min_stat, &output_token); - } - -end: - if (output_token.value) { - gss_release_buffer(&min_stat, &output_token); - } - if (input_token.value) { - free(input_token.value); - } - return ret; -} - -int authenticate_gss_client_wrap( - gss_client_state* state, const char* challenge, const char* user, - int protect -) { - OM_uint32 maj_stat; - OM_uint32 min_stat; - gss_buffer_desc input_token = GSS_C_EMPTY_BUFFER; - gss_buffer_desc output_token = GSS_C_EMPTY_BUFFER; - int ret = AUTH_GSS_CONTINUE; - char 
buf[4096], server_conf_flags; - unsigned long buf_size; - - // Always clear out the old response - if (state->response != NULL) { - free(state->response); - state->response = NULL; - } - - if (challenge && *challenge) { - size_t len; - input_token.value = base64_decode(challenge, &len); - if (input_token.value == NULL) - { - PyErr_NoMemory(); - ret = AUTH_GSS_ERROR; - goto end; - } - input_token.length = len; - } - - if (user) { - // get bufsize - server_conf_flags = ((char*) input_token.value)[0]; - ((char*) input_token.value)[0] = 0; - buf_size = ntohl(*((long *) input_token.value)); - free(input_token.value); -#ifdef PRINTFS - printf( - "User: %s, %c%c%c\n", user, - server_conf_flags & GSS_AUTH_P_NONE ? 'N' : '-', - server_conf_flags & GSS_AUTH_P_INTEGRITY ? 'I' : '-', - server_conf_flags & GSS_AUTH_P_PRIVACY ? 'P' : '-' - ); - printf("Maximum GSS token size is %ld\n", buf_size); -#endif - - // agree to terms (hack!) - buf_size = htonl(buf_size); // not relevant without integrity/privacy - memcpy(buf, &buf_size, 4); - buf[0] = GSS_AUTH_P_NONE; - // server decides if principal can log in as user - strncpy(buf + 4, user, sizeof(buf) - 4); - input_token.value = buf; - input_token.length = 4 + strlen(user); - } - - // Do GSSAPI wrap - maj_stat = gss_wrap( - &min_stat, - state->context, - protect, - GSS_C_QOP_DEFAULT, - &input_token, - NULL, - &output_token - ); - - if (maj_stat != GSS_S_COMPLETE) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } else { - ret = AUTH_GSS_COMPLETE; - } - // Grab the client response to send back to the server - if (output_token.length) { - state->response = base64_encode((const unsigned char *)output_token.value, output_token.length); - if (state->response == NULL) { - PyErr_NoMemory(); - ret = AUTH_GSS_ERROR; - goto end; - } - maj_stat = gss_release_buffer(&min_stat, &output_token); - } - -end: - if (output_token.value) { - gss_release_buffer(&min_stat, &output_token); - } - return ret; -} - -int 
authenticate_gss_client_inquire_cred(gss_client_state* state) -{ - OM_uint32 maj_stat; - OM_uint32 min_stat; - gss_cred_id_t client_creds = GSS_C_NO_CREDENTIAL; - gss_buffer_desc name_token = GSS_C_EMPTY_BUFFER; - gss_name_t name = GSS_C_NO_NAME; - int ret = AUTH_GSS_COMPLETE; - - // Check whether credentials have already been obtained. - if (state->username != NULL) { - goto end; - } - - // Get credentials - maj_stat = gss_acquire_cred( - &min_stat, GSS_C_NO_NAME, GSS_C_INDEFINITE, - GSS_C_NO_OID_SET, GSS_C_INITIATE, &client_creds, NULL, NULL - ); - - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - - // Get the name - maj_stat = gss_inquire_cred( - &min_stat, client_creds, &name, NULL, NULL, NULL - ); - - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - - maj_stat = gss_display_name(&min_stat, name, &name_token, NULL); - - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - - state->username = (char *)malloc(name_token.length + 1); - if (state->username == NULL) { - PyErr_NoMemory(); - ret = AUTH_GSS_ERROR; - goto end; - } - strncpy(state->username, (char*) name_token.value, name_token.length); - state->username[name_token.length] = 0; - -end: - if (client_creds != GSS_C_NO_CREDENTIAL) { - gss_release_cred(&min_stat, &client_creds); - } - if (name_token.length) { - gss_release_buffer(&min_stat, &name_token); - } - if (name != GSS_C_NO_NAME) { - gss_release_name(&min_stat, &name); - } - return ret; -} - -int authenticate_gss_server_init(const char *service, gss_server_state *state) -{ - OM_uint32 maj_stat; - OM_uint32 min_stat; - gss_buffer_desc name_token = GSS_C_EMPTY_BUFFER; - int ret = AUTH_GSS_COMPLETE; - - state->context = GSS_C_NO_CONTEXT; - state->server_name = GSS_C_NO_NAME; - state->client_name = GSS_C_NO_NAME; - state->server_creds = GSS_C_NO_CREDENTIAL; - state->client_creds = 
GSS_C_NO_CREDENTIAL; - state->username = NULL; - state->targetname = NULL; - state->response = NULL; - state->ccname = NULL; - int cred_usage = GSS_C_ACCEPT; - - // Server name may be empty which means we aren't going to create our own creds - size_t service_len = strlen(service); - if (service_len != 0) { - // Import server name first - if (strcmp(service, "DELEGATE") == 0) { - cred_usage = GSS_C_BOTH; - } - else { - name_token.length = strlen(service); - name_token.value = (char *)service; - - maj_stat = gss_import_name( - &min_stat, &name_token, GSS_C_NT_HOSTBASED_SERVICE, - &state->server_name - ); - - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - } - - // Get credentials - maj_stat = gss_acquire_cred( - &min_stat, state->server_name, GSS_C_INDEFINITE, GSS_C_NO_OID_SET, - cred_usage, &state->server_creds, NULL, NULL - ); - - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - } - -end: - return ret; -} - -int authenticate_gss_server_clean(gss_server_state *state) -{ - OM_uint32 maj_stat; - OM_uint32 min_stat; - int ret = AUTH_GSS_COMPLETE; - - if (state->context != GSS_C_NO_CONTEXT) { - maj_stat = gss_delete_sec_context( - &min_stat, &state->context, GSS_C_NO_BUFFER - ); - } - if (state->server_name != GSS_C_NO_NAME) { - maj_stat = gss_release_name(&min_stat, &state->server_name); - } - if (state->client_name != GSS_C_NO_NAME) { - maj_stat = gss_release_name(&min_stat, &state->client_name); - } - if (state->server_creds != GSS_C_NO_CREDENTIAL) { - maj_stat = gss_release_cred(&min_stat, &state->server_creds); - } - if (state->client_creds != GSS_C_NO_CREDENTIAL) { - maj_stat = gss_release_cred(&min_stat, &state->client_creds); - } - if (state->username != NULL) { - free(state->username); - state->username = NULL; - } - if (state->targetname != NULL) { - free(state->targetname); - state->targetname = NULL; - } - if (state->response != NULL) { - 
free(state->response); - state->response = NULL; - } - if (state->ccname != NULL) { - free(state->ccname); - state->ccname = NULL; - } - - return ret; -} - -int authenticate_gss_server_step( - gss_server_state *state, const char *challenge -) { - OM_uint32 maj_stat; - OM_uint32 min_stat; - gss_buffer_desc input_token = GSS_C_EMPTY_BUFFER; - gss_buffer_desc output_token = GSS_C_EMPTY_BUFFER; - int ret = AUTH_GSS_CONTINUE; - - // Always clear out the old response - if (state->response != NULL) { - free(state->response); - state->response = NULL; - } - - // If there is a challenge (data from the server) we need to give it to GSS - if (challenge && *challenge) { - size_t len; - input_token.value = base64_decode(challenge, &len); - if (input_token.value == NULL) - { - PyErr_NoMemory(); - ret = AUTH_GSS_ERROR; - goto end; - } - input_token.length = len; - } else { - PyErr_SetString( - KrbException_class, "No challenge parameter in request from client" - ); - ret = AUTH_GSS_ERROR; - goto end; - } - - Py_BEGIN_ALLOW_THREADS - maj_stat = gss_accept_sec_context( - &min_stat, - &state->context, - state->server_creds, - &input_token, - GSS_C_NO_CHANNEL_BINDINGS, - &state->client_name, - NULL, - &output_token, - NULL, - NULL, - &state->client_creds - ); - Py_END_ALLOW_THREADS - - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - - // Grab the server response to send back to the client - if (output_token.length) { - state->response = base64_encode( - (const unsigned char *)output_token.value, output_token.length - ); - if (state->response == NULL) - { - PyErr_NoMemory(); - ret = AUTH_GSS_ERROR; - goto end; - } - maj_stat = gss_release_buffer(&min_stat, &output_token); - } - - // Get the user name - maj_stat = gss_display_name( - &min_stat, state->client_name, &output_token, NULL - ); - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - state->username = (char 
*)malloc(output_token.length + 1); - if (state->username == NULL) - { - PyErr_NoMemory(); - ret = AUTH_GSS_ERROR; - goto end; - } - strncpy(state->username, (char*) output_token.value, output_token.length); - state->username[output_token.length] = 0; - - // Get the target name if no server creds were supplied - if (state->server_creds == GSS_C_NO_CREDENTIAL) { - gss_name_t target_name = GSS_C_NO_NAME; - maj_stat = gss_inquire_context( - &min_stat, state->context, NULL, &target_name, NULL, NULL, NULL, - NULL, NULL - ); - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - maj_stat = gss_display_name( - &min_stat, target_name, &output_token, NULL - ); - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - state->targetname = (char *)malloc(output_token.length + 1); - if (state->targetname == NULL) - { - PyErr_NoMemory(); - ret = AUTH_GSS_ERROR; - goto end; - } - strncpy( - state->targetname, (char*) output_token.value, output_token.length - ); - state->targetname[output_token.length] = 0; - } - - ret = AUTH_GSS_COMPLETE; - -end: - if (output_token.length) { - gss_release_buffer(&min_stat, &output_token); - } - if (input_token.value) { - free(input_token.value); - } - return ret; -} - -int authenticate_gss_server_has_delegated(gss_server_state *state) -{ - return (state->client_creds != GSS_C_NO_CREDENTIAL); -} - -static void set_gss_error(OM_uint32 err_maj, OM_uint32 err_min) -{ - OM_uint32 maj_stat, min_stat; - OM_uint32 msg_ctx = 0; - gss_buffer_desc status_string; - char buf_maj[512]; - char buf_min[512]; - - do { - maj_stat = gss_display_status( - &min_stat, - err_maj, - GSS_C_GSS_CODE, - GSS_C_NO_OID, - &msg_ctx, - &status_string - ); - if (GSS_ERROR(maj_stat)) { - break; - } - strncpy(buf_maj, (char*) status_string.value, sizeof(buf_maj)); - gss_release_buffer(&min_stat, &status_string); - - maj_stat = gss_display_status( - &min_stat, - err_min, - 
GSS_C_MECH_CODE, - GSS_C_NULL_OID, - &msg_ctx, - &status_string - ); - if (! GSS_ERROR(maj_stat)) { - strncpy(buf_min, (char*) status_string.value, sizeof(buf_min)); - gss_release_buffer(&min_stat, &status_string); - } - } while (!GSS_ERROR(maj_stat) && msg_ctx != 0); - - PyErr_SetObject( - GssException_class, - Py_BuildValue("((s:i)(s:i))", buf_maj, err_maj, buf_min, err_min) - ); -} - -int authenticate_gss_server_store_delegate(gss_server_state *state) -{ - gss_cred_id_t delegated_cred = state->client_creds; - char *princ_name = state->username; - OM_uint32 maj_stat, min_stat; - krb5_principal princ = NULL; - krb5_ccache ccache = NULL; - krb5_error_code problem; - krb5_context context; - int ret = 500; - - if (delegated_cred == GSS_C_NO_CREDENTIAL){ - PyErr_SetObject( - KrbException_class, - Py_BuildValue("(s)", "Ticket is not delegatable") - ); - return AUTH_GSS_ERROR; - } - - problem = krb5_init_context(&context); - if (problem) { - PyErr_SetObject( - KrbException_class, - Py_BuildValue("(s)", "Cannot initialize krb5 context") - ); - return AUTH_GSS_ERROR; - } - - problem = krb5_parse_name(context, princ_name, &princ); - if (problem) { - PyErr_SetObject( - KrbException_class, - Py_BuildValue( - "(s:s)", "Cannot parse delegated username", - krb5_get_err_text(context, problem) - ) - ); - ret = AUTH_GSS_ERROR; - goto end; - } - - problem = create_krb5_ccache(state, context, princ, &ccache); - if (problem) { - PyErr_SetObject( - KrbException_class, - Py_BuildValue( - "(s:s)", "Error in creating krb5 cache", - krb5_get_err_text(context, problem) - ) - ); - ret = AUTH_GSS_ERROR; - goto end; - } - - maj_stat = gss_krb5_copy_ccache(&min_stat, delegated_cred, ccache); - if (GSS_ERROR(maj_stat)) { - set_gss_error(maj_stat, min_stat); - ret = AUTH_GSS_ERROR; - goto end; - } - - krb5_cc_close(context, ccache); - ccache = NULL; - ret = 0; - -end: - if (princ) { - krb5_free_principal(context, princ); - } - if (ccache) { - krb5_cc_destroy(context, ccache); - } - 
krb5_free_context(context); - - return ret; -} - -int create_krb5_ccache( - gss_server_state *state, krb5_context kcontext, krb5_principal princ, - krb5_ccache *ccache -) { - int fd; - char ccname[32]; - krb5_error_code problem; - int ret; - krb5_ccache tmp_ccache = NULL; - - snprintf(ccname, sizeof(ccname), "/tmp/krb5cc_pyserv_XXXXXX"); - fd = mkstemp(ccname); - if (fd < 0) { - PyErr_SetObject( - KrbException_class, - Py_BuildValue("(s:s)", "Error in mkstemp", strerror(errno)) - ); - ret = 1; - goto end; - } - close(fd); - - problem = krb5_cc_resolve(kcontext, ccname, &tmp_ccache); - if (problem) { - PyErr_SetObject( - KrbException_class, - Py_BuildValue( - "(s:s)", "Error resolving the credential cache", - krb5_get_err_text(kcontext, problem) - ) - ); - ret = 1; - unlink(ccname); - goto end; - } - - problem = krb5_cc_initialize(kcontext, tmp_ccache, princ); - if (problem) { - PyErr_SetObject( - KrbException_class, - Py_BuildValue( - "(s:s)", "Error initialising the credential cache", - krb5_get_err_text(kcontext, problem) - ) - ); - ret = 1; - goto end; - } - - *ccache = tmp_ccache; - tmp_ccache = NULL; - - ret = 0; - -end: - if (tmp_ccache) { - krb5_cc_destroy(kcontext, tmp_ccache); - } - - state->ccname = (char *)malloc(32*sizeof(char)); - if (state->ccname == NULL) { - PyErr_NoMemory(); - return 1; - } - strcpy(state->ccname, ccname); - - return ret; -} diff --git a/shell/ext-py/kerberos-1.3.1/src/kerberosgss.h b/shell/ext-py/kerberos-1.3.1/src/kerberosgss.h deleted file mode 100644 index 362040d98..000000000 --- a/shell/ext-py/kerberos-1.3.1/src/kerberosgss.h +++ /dev/null @@ -1,91 +0,0 @@ -/** - * Copyright (c) 2006-2018 Apple Inc. All rights reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - **/ - -#include -#include -#include - -#define krb5_get_err_text(context,code) error_message(code) - -#define AUTH_GSS_ERROR -1 -#define AUTH_GSS_COMPLETE 1 -#define AUTH_GSS_CONTINUE 0 - -#define GSS_AUTH_P_NONE 1 -#define GSS_AUTH_P_INTEGRITY 2 -#define GSS_AUTH_P_PRIVACY 4 - -typedef struct { - gss_ctx_id_t context; - gss_name_t server_name; - gss_OID mech_oid; - long int gss_flags; - gss_cred_id_t client_creds; - char* username; - char* response; - int responseConf; -} gss_client_state; - -typedef struct { - gss_ctx_id_t context; - gss_name_t server_name; - gss_name_t client_name; - gss_cred_id_t server_creds; - gss_cred_id_t client_creds; - char* username; - char* targetname; - char* response; - char* ccname; -} gss_server_state; - -char* server_principal_details(const char* service, const char* hostname); - -int authenticate_gss_client_init( - const char* service, const char* principal, long int gss_flags, - gss_server_state* delegatestate, gss_OID mech_oid, gss_client_state* state -); -int authenticate_gss_client_clean( - gss_client_state *state -); -int authenticate_gss_client_step( - gss_client_state *state, const char *challenge, struct gss_channel_bindings_struct *channel_bindings -); -int authenticate_gss_client_unwrap( - gss_client_state* state, const char* challenge -); -int authenticate_gss_client_wrap( - gss_client_state* state, const char* challenge, const char* user, - int protect -); -int authenticate_gss_client_inquire_cred( - gss_client_state* state -); - -int authenticate_gss_server_init( - const char* service, 
gss_server_state* state -); -int authenticate_gss_server_clean( - gss_server_state *state -); -int authenticate_gss_server_step( - gss_server_state *state, const char *challenge -); -int authenticate_gss_server_store_delegate( - gss_server_state *state -); -int authenticate_gss_server_has_delegated( - gss_server_state *state -); diff --git a/shell/ext-py/kerberos-1.3.1/src/kerberospw.c b/shell/ext-py/kerberos-1.3.1/src/kerberospw.c deleted file mode 100644 index e6abc0d8a..000000000 --- a/shell/ext-py/kerberos-1.3.1/src/kerberospw.c +++ /dev/null @@ -1,172 +0,0 @@ -/** - * Copyright (c) 2008 Guido Guenther - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- **/ - -#include -#include "kerberospw.h" - -#include -#include -#include - -#undef PRINTFS - -extern PyObject *PwdChangeException_class; - -static void set_pwchange_error(krb5_context context, krb5_error_code code) -{ - PyErr_SetObject( - PwdChangeException_class, - Py_BuildValue("(s:i)", krb5_get_err_text(context, code), code) - ); -} - -/* Inspired by krb5_verify_user from Heimdal */ -static krb5_error_code verify_krb5_user( - krb5_context context, - krb5_principal principal, - const char *password, - const char *service, - krb5_creds* creds -) { - krb5_get_init_creds_opt gic_options; - krb5_error_code code; - int ret = 0; - -#ifdef PRINTFS - { - char *name = NULL; - code = krb5_unparse_name(context, principal, &name); - if (!code) { - printf("Trying to get TGT for user %s\n", name); - } - free(name); - } -#endif - - krb5_get_init_creds_opt_init(&gic_options); - krb5_get_init_creds_opt_set_forwardable(&gic_options, 0); - krb5_get_init_creds_opt_set_proxiable(&gic_options, 0); - krb5_get_init_creds_opt_set_renew_life(&gic_options, 0); - - memset(creds, 0, sizeof(krb5_creds)); - - code = krb5_get_init_creds_password( - context, creds, principal, - (char *)password, NULL, NULL, 0, - (char *)service, &gic_options - ); - if (code) { - set_pwchange_error(context, code); - goto end; - } - ret = 1; /* success */ - -end: - return ret; -} - -int change_user_krb5pwd( - const char *user, const char* oldpswd, const char *newpswd -) { - krb5_context kcontext = NULL; - krb5_error_code code; - krb5_principal client = NULL; - krb5_creds creds; - int ret = 0; - int bytes = 0; - char *name = NULL; - - const char* service = "kadmin/changepw"; - int result_code; - krb5_data result_code_string, result_string; - - code = krb5_init_context(&kcontext); - if (code) { - PyErr_SetObject( - PwdChangeException_class, - Py_BuildValue( - "((s:i))", "Cannot initialize Kerberos5 context", code - ) - ); - return 0; - } - - name = (char *)malloc(256); - if (name == NULL) - { - PyErr_NoMemory(); - 
goto end; - } - snprintf(name, 256, "%s", user); - - code = krb5_parse_name(kcontext, name, &client); - if (code) { - set_pwchange_error(kcontext, code); - goto end; - } - - code = verify_krb5_user(kcontext, client, oldpswd, service, &creds); - if (! code) { /* exception set by verify_krb5_user */ - goto end; - } - - code = krb5_change_password(kcontext, &creds, (char*)newpswd, - &result_code, &result_code_string, &result_string); - if (code) { - set_pwchange_error(kcontext, code); - goto end; - } - if (result_code) { - char *message = NULL; - bytes = asprintf( - &message, "%.*s: %.*s", - (int) result_code_string.length, - (char *) result_code_string.data, - (int) result_string.length, - (char *) result_string.data - ); - if (bytes == -1) - { - PyErr_NoMemory(); - } - else - { - PyErr_SetObject( - PwdChangeException_class, - Py_BuildValue("((s:i))", message, result_code) - ); - free(message); - } - goto end; - } - - ret = 1; /* success */ - -end: -#ifdef PRINTFS - printf("%s: ret=%d user=%s\n", __FUNCTION__, ret, name); -#endif - - if (name) { - free(name); - } - if (client) { - krb5_free_principal(kcontext, client); - } - krb5_free_context(kcontext); - - return ret; -} diff --git a/shell/ext-py/kerberos-1.3.1/src/kerberospw.h b/shell/ext-py/kerberos-1.3.1/src/kerberospw.h deleted file mode 100644 index 250f2d534..000000000 --- a/shell/ext-py/kerberos-1.3.1/src/kerberospw.h +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Copyright (c) 2008 Guido Guenther - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - **/ - -#include -#include -#include - -#define krb5_get_err_text(context,code) error_message(code) - -int change_user_krb5pwd( - const char *user, const char* oldpswd, const char *newpswd -); diff --git a/shell/ext-py/prettytable-0.7.2/CHANGELOG b/shell/ext-py/prettytable-0.7.2/CHANGELOG deleted file mode 100644 index a790f1f25..000000000 --- a/shell/ext-py/prettytable-0.7.2/CHANGELOG +++ /dev/null @@ -1,142 +0,0 @@ -########## PrettyTable 0.7 - Feb 17, 2013 ########### - -* Improved Python 2 and 3 compatibility (2.4-3.2). -* Improved support for non-Latin characters. Table widths should - now be calculated correctly for tables with e.g. Japanese text. -* Table contents can now be read in from a .csv file -* Table contents can now be read in from a DB-API compatible cursor -* Table contents can now be read in from a string containing a - HTML table (thanks to Christoph Robbert for submitting this patch!) -* new valign attribute controls vertical alignment of text when - some cells in a row have multiple lines of text and others don't. - (thanks to Google Code user maartendb for submitting this patch!) -* hrules attribute can now be set to HEADER, which draws a rule only - under the header row -* new vrules attribute controls drawing of vertical rules and can - be set to FRAME, ALL or NONE -* new header_style attribute controls formatting of text in table - headers and can be set to "cap", "title", "upper", "lower" or None -* Fixed a simple bug regarding validation of max_width (thanks to - Anthony Toole for pointing out this bug and providing a patch). -* Fixed a simple bug regarding initialisation of int_format value - for new tables (thanks to Ingo Schmiegel for pointing out this - bug!) -* Fixed a bug regarding some constructor keywords, such as "border", - being ignored (thanks to Google Code user antonio.s.messina for - reporting this bug). 
- -########## PrettyTable 0.6 - May 5, 2012 ########## - -* Code is now simultaneously compatible with Python 2 and 3 -* Replaced all setter methods with managed attributes -* All styling options can now be set persistently as managed attributes -* Added "add_style" method to make setting style options easily -* Added "del_row", "clear_rows" and "clear" methods to facilitate - removal of data from table. -* Added "copy" method to facilitate cloning of a table. -* Removed caching functionality, which added complexity and fragility - for relatively little gain -* Removed methods that just printed strings produced by get_string and - get_html_string - just use inbuilt print! -* Improved unicode support (thanks to Google Code user ru.w31rd0 for - patch!) -* Added support for decimal and floating point number formatting - support (thanks to Google Code user willfurnass for the suggestion!) -* Added support for using a custom key sorting methods (thanks to - Google Code user amannijhawan for the suggestion!) -* Added support for line breaks in data (suggested and implemented by - Klein Stephane) -* Added support for max column widths (thanks to Tibor Arpas for the - suggestion!) -* Fixed table slicing -* Fixed bug where closing tags in HTML tables were not printed - (thanks to Google Code user kehander for reporting this bug!) -* Fixed HTML table sorting bug (thanks to Google Code user dougbeal - for reporting this bug!) -* Fixed bug whereby changing field_names did not recompute widths - (thanks to Google Code user denilsonsa for reporting this bug!) - -########## PrettyTable 0.5 - May 26, 2009 ########## - -* Fixed a bug whereby printing with headers=False and border=False - would introduce an extraneous newline. Thanks to Alexander Lamaison - for reporting this bug. -* When printing with headers=False, column widths will now be reduced - as appropriate in columns where the field name is wider than the - data. Thanks to Alexander Lamaison for suggesting this behaviour. 
-* Support for Unicode has improved. Thanks to Chris Clark for - submitting this improvement. -* The value of the "border" argument now correctly controls the - presence of a border when printing HTML tables with print_html or - get_html_string, instead of being incorrectly ignored. Thanks to - Chris Clark for fixing this. -* The print_html and get_html_string methods now accept an - "attributes" argument which is a dictionary of name/value pairs to be - placed inside the tag (so you can, e.g. set class, name or id - values in order to style your table with CSS). Thanks to Chris Clark - for submitting this feature. -* The print_html and get_html_string methods now, by default, do their - best to match the various formatting options in their HTML output. - They use inline CSS to adjust the alignment of data in columns, the - padding widths of columns and in some cases the border settings. You - can give either method a "format=False" attribute to turn this - behaviour off if you want to do your own styling. With "format=False" - the methods print a "bare bones" table, similar to the default - behaviour in 0.4. - -########## PrettyTable 0.4 - May 13, 2009 ########## - -* Added "add_column" method to enable building tables up column-by-column. -* Added "print_HTML" and "get_HTML_string" methods to enable HTML table - production. -* Added "set_border_chars" method to enable control over characters used to - draw the table border. -* Added "set_left_padding" and "set_right_padding" methods to allow - independent padding control for both sides of a column. -* Added "sortby" option to enable column sorting. -* Added "header" option to enable switching off field name printing at top of - table. -* Modified "hrules" option to enable greater control over presence of - horizontal lines. -* Added "border" option to enable switching off all line printing. 
- -Thanks to Tim Cera, Chris Clark, Alexander Lamaison for suggesting and helping -to test many of the new features in this release. - -########## PrettyTable 0.3 - May 01, 2009 ########## - -* Added "padding_width" option to control the number of spaces between the - vertical line rules at the edges of a column and its content. This can be - set as a keyword argument to the constructor or after instantiation using - the "set_padding_width" method. The value is set to 1 by defaut. If your - table is too wide for a small screen with this value, setting it to 0 might - help you squeeze it in. - -Thanks to Chris Clark for contributing a patch against 0.2.1 to add this -feature! - -########## PrettyTable 0.2.1 - April 29, 2009 ########## - -* Caching no longer breaks when using the "printt(fields=[...])" syntax. The - list of fields was not hashable and hence could not be used as a dictionary - key. I fixed this using the output of the "cPickle" module's "dumps" - function as the dictionary key instead. -* Horizontal lines are now the appropriate length when the above syntax is - used. - -Thanks to Julien Koesten for reporting these bugs and testing the fixes almost -immediately after the release of 0.2! - -########## PrettyTable 0.2 - April 29, 2009 ########## - -* Added "get_string" method. -* Added "__str__" method (which just calls "get_string") to enable nice - "print x" syntax. -* Can now pass field names as a constructor argument. -* Return values of "get_string" are cached in a dictionary that is only - cleared after a call to "add_row" or something else which invalidates the - cache. - -########## PrettyTable 0.1 - February 26, 2009 ######### - -* Original release diff --git a/shell/ext-py/prettytable-0.7.2/COPYING b/shell/ext-py/prettytable-0.7.2/COPYING deleted file mode 100644 index 7de41fb3c..000000000 --- a/shell/ext-py/prettytable-0.7.2/COPYING +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) 2009-2013 Luke Maurits -# All rights reserved. 
-# With contributions from: -# * Chris Clark -# * Christoph Robbert -# * Klein Stephane -# * "maartendb" -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# * The name of the author may not be used to endorse or promote products -# derived from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
diff --git a/shell/ext-py/prettytable-0.7.2/MANIFEST.in b/shell/ext-py/prettytable-0.7.2/MANIFEST.in deleted file mode 100644 index 22805d761..000000000 --- a/shell/ext-py/prettytable-0.7.2/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -include COPYING -include CHANGELOG -include README -include prettytable_test.py diff --git a/shell/ext-py/prettytable-0.7.2/PKG-INFO b/shell/ext-py/prettytable-0.7.2/PKG-INFO deleted file mode 100644 index 92ec53447..000000000 --- a/shell/ext-py/prettytable-0.7.2/PKG-INFO +++ /dev/null @@ -1,18 +0,0 @@ -Metadata-Version: 1.0 -Name: prettytable -Version: 0.7.2 -Summary: A simple Python library for easily displaying tabular data in a visually appealing ASCII table format -Home-page: http://code.google.com/p/prettytable -Author: Luke Maurits -Author-email: luke@maurits.id.au -License: BSD (3 clause) -Description: UNKNOWN -Platform: UNKNOWN -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2.4 -Classifier: Programming Language :: Python :: 2.5 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: License :: OSI Approved :: BSD License -Classifier: Topic :: Text Processing diff --git a/shell/ext-py/prettytable-0.7.2/README b/shell/ext-py/prettytable-0.7.2/README deleted file mode 100644 index 5f85d05ab..000000000 --- a/shell/ext-py/prettytable-0.7.2/README +++ /dev/null @@ -1,498 +0,0 @@ -TUTORIAL ON HOW TO USE THE PRETTYTABLE 0.6+ API - -*** This tutorial is distributed with PrettyTable and is meant to serve -as a "quick start" guide for the lazy or impatient. It is not an -exhaustive description of the whole API, and it is not guaranteed to be -100% up to date. 
For more complete and update documentation, check the -PrettyTable wiki at http://code.google.com/p/prettytable/w/list *** - -= Getting your data into (and out of) the table = - -Let's suppose you have a shiny new PrettyTable: - -from prettytable import PrettyTable -x = PrettyTable() - -and you want to put some data into it. You have a few options. - -== Row by row == - -You can add data one row at a time. To do this you can set the field names -first using the `field_names` attribute, and then add the rows one at a time -using the `add_row` method: - -x.field_names = ["City name", "Area", "Population", "Annual Rainfall"] -x.add_row(["Adelaide",1295, 1158259, 600.5]) -x.add_row(["Brisbane",5905, 1857594, 1146.4]) -x.add_row(["Darwin", 112, 120900, 1714.7]) -x.add_row(["Hobart", 1357, 205556, 619.5]) -x.add_row(["Sydney", 2058, 4336374, 1214.8]) -x.add_row(["Melbourne", 1566, 3806092, 646.9]) -x.add_row(["Perth", 5386, 1554769, 869.4]) - -== Column by column == - -You can add data one column at a time as well. To do this you use the -`add_column` method, which takes two arguments - a string which is the name for -the field the column you are adding corresponds to, and a list or tuple which -contains the column data" - -x.add_column("City name", -["Adelaide","Brisbane","Darwin","Hobart","Sydney","Melbourne","Perth"]) -x.add_column("Area", [1295, 5905, 112, 1357, 2058, 1566, 5386]) -x.add_column("Population", [1158259, 1857594, 120900, 205556, 4336374, 3806092, -1554769]) -x.add_column("Annual Rainfall",[600.5, 1146.4, 1714.7, 619.5, 1214.8, 646.9, -869.4]) - -== Mixing and matching == - -If you really want to, you can even mix and match `add_row` and `add_column` -and build some of your table in one way and some of it in the other. There's a -unit test which makes sure that doing things this way will always work out -nicely as if you'd done it using just one of the two approaches. 
Tables built -this way are kind of confusing for other people to read, though, so don't do -this unless you have a good reason. - -== Importing data from a CSV file == - -If you have your table data in a comma separated values file (.csv), you can -read this data into a PrettyTable like this: - -from prettytable import from_csv -fp = open("myfile.csv", "r") -mytable = from_csv(fp) -fp.close() - -== Importing data from a HTML string == - -If you have a string containing a HTML
, you can read this data into a -PrettyTable like this: - -from prettytable import from_html -mytable = from_html(html_string) - -== Importing data from a database cursor == - -If you have your table data in a database which you can access using a library -which confirms to the Python DB-API (e.g. an SQLite database accessible using -the sqlite module), then you can build a PrettyTable using a cursor object, -like this: - -import sqlite3 -from prettytable import from_db_cursor - -connection = sqlite3.connect("mydb.db") -cursor = connection.cursor() -cursor.execute("SELECT field1, field2, field3 FROM my_table") -mytable = from_db_cursor(cursor) - -== Getting data out == - -There are three ways to get data out of a PrettyTable, in increasing order of -completeness: - - * The `del_row` method takes an integer index of a single row to delete. - * The `clear_rows` method takes no arguments and deletes all the rows in the -table - but keeps the field names as they were so you that you can repopulate -it with the same kind of data. - * The `clear` method takes no arguments and deletes all rows and all field -names. It's not quite the same as creating a fresh table instance, though - -style related settings, discussed later, are maintained. - -= Displaying your table in ASCII form = - -PrettyTable's main goal is to let you print tables in an attractive ASCII form, -like this: - -+-----------+------+------------+-----------------+ -| City name | Area | Population | Annual Rainfall | -+-----------+------+------------+-----------------+ -| Adelaide | 1295 | 1158259 | 600.5 | -| Brisbane | 5905 | 1857594 | 1146.4 | -| Darwin | 112 | 120900 | 1714.7 | -| Hobart | 1357 | 205556 | 619.5 | -| Melbourne | 1566 | 3806092 | 646.9 | -| Perth | 5386 | 1554769 | 869.4 | -| Sydney | 2058 | 4336374 | 1214.8 | -+-----------+------+------------+-----------------+ - -You can print tables like this to `stdout` or get string representations of -them. 
- -== Printing == - -To print a table in ASCII form, you can just do this: - -print x - -in Python 2.x or: - -print(x) - -in Python 3.x. - -The old x.printt() method from versions 0.5 and earlier has been removed. - -To pass options changing the look of the table, use the get_string() method -documented below: - -print x.get_string() - -== Stringing == - -If you don't want to actually print your table in ASCII form but just get a -string containing what _would_ be printed if you use "print x", you can use -the `get_string` method: - -mystring = x.get_string() - -This string is guaranteed to look exactly the same as what would be printed by -doing "print x". You can now do all the usual things you can do with a -string, like write your table to a file or insert it into a GUI. - -== Controlling which data gets displayed == - -If you like, you can restrict the output of `print x` or `x.get_string` to -only the fields or rows you like. - -The `fields` argument to these methods takes a list of field names to be -printed: - -print x.get_string(fields=["City name", "Population"]) - -gives: - -+-----------+------------+ -| City name | Population | -+-----------+------------+ -| Adelaide | 1158259 | -| Brisbane | 1857594 | -| Darwin | 120900 | -| Hobart | 205556 | -| Melbourne | 3806092 | -| Perth | 1554769 | -| Sydney | 4336374 | -+-----------+------------+ - -The `start` and `end` arguments take the index of the first and last row to -print respectively. 
Note that the indexing works like Python list slicing - to -print the 2nd, 3rd and 4th rows of the table, set `start` to 1 (the first row -is row 0, so the second is row 1) and set `end` to 4 (the index of the 4th row, -plus 1): - -print x.get_string(start=1,end=4) - -prints: - -+-----------+------+------------+-----------------+ -| City name | Area | Population | Annual Rainfall | -+-----------+------+------------+-----------------+ -| Brisbane | 5905 | 1857594 | 1146.4 | -| Darwin | 112 | 120900 | 1714.7 | -| Hobart | 1357 | 205556 | 619.5 | -+-----------+------+------------+-----------------+ - -== Changing the alignment of columns == - -By default, all columns in a table are centre aligned. - -=== All columns at once === - -You can change the alignment of all the columns in a table at once by assigning -a one character string to the `align` attribute. The allowed strings are "l", -"r" and "c" for left, right and centre alignment, respectively: - -x.align = "r" -print x - -gives: - -+-----------+------+------------+-----------------+ -| City name | Area | Population | Annual Rainfall | -+-----------+------+------------+-----------------+ -| Adelaide | 1295 | 1158259 | 600.5 | -| Brisbane | 5905 | 1857594 | 1146.4 | -| Darwin | 112 | 120900 | 1714.7 | -| Hobart | 1357 | 205556 | 619.5 | -| Melbourne | 1566 | 3806092 | 646.9 | -| Perth | 5386 | 1554769 | 869.4 | -| Sydney | 2058 | 4336374 | 1214.8 | -+-----------+------+------------+-----------------+ - -=== One column at a time === - -You can also change the alignment of individual columns based on the -corresponding field name by treating the `align` attribute as if it were a -dictionary. 
- -x.align["City name"] = "l" -x.align["Area"] = "c" -x.align["Population"] = "r" -x.align["Annual Rainfall"] = "c" -print x - -gives: - -+-----------+------+------------+-----------------+ -| City name | Area | Population | Annual Rainfall | -+-----------+------+------------+-----------------+ -| Adelaide | 1295 | 1158259 | 600.5 | -| Brisbane | 5905 | 1857594 | 1146.4 | -| Darwin | 112 | 120900 | 1714.7 | -| Hobart | 1357 | 205556 | 619.5 | -| Melbourne | 1566 | 3806092 | 646.9 | -| Perth | 5386 | 1554769 | 869.4 | -| Sydney | 2058 | 4336374 | 1214.8 | -+-----------+------+------------+-----------------+ - -== Sorting your table by a field == - -You can make sure that your ASCII tables are produced with the data sorted by -one particular field by giving `get_string` a `sortby` keyword argument, which - must be a string containing the name of one field. - -For example, to print the example table we built earlier of Australian capital -city data, so that the most populated city comes last, we can do this: - -print x.get_string(sortby="Population") - -to get - -+-----------+------+------------+-----------------+ -| City name | Area | Population | Annual Rainfall | -+-----------+------+------------+-----------------+ -| Darwin | 112 | 120900 | 1714.7 | -| Hobart | 1357 | 205556 | 619.5 | -| Adelaide | 1295 | 1158259 | 600.5 | -| Perth | 5386 | 1554769 | 869.4 | -| Brisbane | 5905 | 1857594 | 1146.4 | -| Melbourne | 1566 | 3806092 | 646.9 | -| Sydney | 2058 | 4336374 | 1214.8 | -+-----------+------+------------+-----------------+ - -If we want the most populated city to come _first_, we can also give a -`reversesort=True` argument. - -If you _always_ want your tables to be sorted in a certain way, you can make -the setting long term like this: - -x.sortby = "Population" -print x -print x -print x - -All three tables printed by this code will be sorted by population (you could -do `x.reversesort = True` as well, if you wanted). 
The behaviour will persist -until you turn it off: - -x.sortby = None - -If you want to specify a custom sorting function, you can use the `sort_key` -keyword argument. Pass this a function which accepts two lists of values -and returns a negative or positive value depending on whether the first list -should appeare before or after the second one. If your table has n columns, -each list will have n+1 elements. Each list corresponds to one row of the -table. The first element will be whatever data is in the relevant row, in -the column specified by the `sort_by` argument. The remaining n elements -are the data in each of the table's columns, in order, including a repeated -instance of the data in the `sort_by` column. - -= Changing the appearance of your table - the easy way = - -By default, PrettyTable produces ASCII tables that look like the ones used in -SQL database shells. But if can print them in a variety of other formats as -well. If the format you want to use is common, PrettyTable makes this very -easy for you to do using the `set_style` method. If you want to produce an -uncommon table, you'll have to do things slightly harder (see later). - -== Setting a table style == - -You can set the style for your table using the `set_style` method before any -calls to `print` or `get_string`. Here's how to print a table in a format -which works nicely with Microsoft Word's "Convert to table" feature: - -from prettytable import MSWORD_FRIENDLY -x.set_style(MSWORD_FRIENDLY) -print x - -In addition to `MSWORD_FRIENDLY` there are currently two other in-built styles -you can use for your tables: - - * `DEFAULT` - The default look, used to undo any style changes you may have -made - * `PLAIN_COLUMN` - A borderless style that works well with command line -programs for columnar data - -Other styles are likely to appear in future releases. 
- -= Changing the appearance of your table - the hard way = - -If you want to display your table in a style other than one of the in-built -styles listed above, you'll have to set things up the hard way. - -Don't worry, it's not really that hard! - -== Style options == - -PrettyTable has a number of style options which control various aspects of how -tables are displayed. You have the freedom to set each of these options -individually to whatever you prefer. The `set_style` method just does this -automatically for you. - -The options are these: - - * `border` - A boolean option (must be `True` or `False`). Controls whether - or not a border is drawn around the table. - * `header` - A boolean option (must be `True` or `False`). Controls whether - or not the first row of the table is a header showing the names of all the - fields. - * `hrules` - Controls printing of horizontal rules after rows. Allowed - values: FRAME, HEADER, ALL, NONE - note that these are variables defined - inside the `prettytable` module so make sure you import them or use - `prettytable.FRAME` etc. - * `vrules` - Controls printing of vertical rules between columns. Allowed - values: FRAME, ALL, NONE. - * `int_format` - A string which controls the way integer data is printed. - This works like: print "%d" % data - * `float_format` - A string which controls the way floating point data is - printed. This works like: print "%f" % data - * `padding_width` - Number of spaces on either side of column data (only used - if left and right paddings are None). - * `left_padding_width` - Number of spaces on left hand side of column data. - * `right_padding_width` - Number of spaces on right hand side of column data. - * `vertical_char` - Single character string used to draw vertical lines. - Default is `|`. - * `horizontal_char` - Single character string used to draw horizontal lines. - Default is `-`. - * `junction_char` - Single character string used to draw line junctions. - Default is `+`. 
- -You can set the style options to your own settings in two ways: - -== Setting style options for the long term == - -If you want to print your table with a different style several times, you can -set your option for the "long term" just by changing the appropriate -attributes. If you never want your tables to have borders you can do this: - -x.border = False -print x -print x -print x - -Neither of the 3 tables printed by this will have borders, even if you do -things like add extra rows inbetween them. The lack of borders will last until -you do: - -x.border = True - -to turn them on again. This sort of long term setting is exactly how -`set_style` works. `set_style` just sets a bunch of attributes to pre-set -values for you. - -Note that if you know what style options you want at the moment you are -creating your table, you can specify them using keyword arguments to the -constructor. For example, the following two code blocks are equivalent: - -x = PrettyTable() -x.border = False -x.header = False -x.padding_width = 5 - -x = PrettyTable(border=False, header=False, padding_width=5) - -== Changing style options just once == - -If you don't want to make long term style changes by changing an attribute like -in the previous section, you can make changes that last for just one -``get_string`` by giving those methods keyword arguments. To print two -"normal" tables with one borderless table between them, you could do this: - -print x -print x.get_string(border=False) -print x - -= Displaying your table in HTML form = - -PrettyTable will also print your tables in HTML form, as `
`s. Just like -in ASCII form, you can actually print your table - just use `print_html()` - or -get a string representation - just use `get_html_string()`. HTML printing -supports the `fields`, `start`, `end`, `sortby` and `reversesort` arguments in -exactly the same way as ASCII printing. - -== Styling HTML tables == - -By default, PrettyTable outputs HTML for "vanilla" tables. The HTML code is -quite simple. It looks like this: - -
- - - - - - - - - - - - - - - - - ... - ... - ... -
City nameAreaPopulationAnnual Rainfall
Adelaide12951158259600.5
Brisbane590518575941146.4
- -If you like, you can ask PrettyTable to do its best to mimick the style options -that your table has set using inline CSS. This is done by giving a -`format=True` keyword argument to either the `print_html` or `get_html_string` -methods. Note that if you _always_ want to print formatted HTML you can do: - -x.format = True - -and the setting will persist until you turn it off. - -Just like with ASCII tables, if you want to change the table's style for just -one `print_html` or one `get_html_string` you can pass those methods keyword -arguments - exactly like `print` and `get_string`. - -== Setting HTML attributes == - -You can provide a dictionary of HTML attribute name/value pairs to the -`print_html` and `get_html_string` methods using the `attributes` keyword -argument. This lets you specify common HTML attributes like `name`, `id` and -`class` that can be used for linking to your tables or customising their -appearance using CSS. For example: - -x.print_html(attributes={"name":"my_table", "class":"red_table"}) - -will print: - - - - - - - - - ... - ... - ... -
City nameAreaPopulationAnnual Rainfall
- -= Miscellaneous things = - -== Copying a table == - -You can call the `copy` method on a PrettyTable object without arguments to -return an identical independent copy of the table. - -If you want a copy of a PrettyTable object with just a subset of the rows, -you can use list slicing notation: - -new_table = old_table[0:5] diff --git a/shell/ext-py/prettytable-0.7.2/prettytable.py b/shell/ext-py/prettytable-0.7.2/prettytable.py deleted file mode 100644 index 8abb952b9..000000000 --- a/shell/ext-py/prettytable-0.7.2/prettytable.py +++ /dev/null @@ -1,1475 +0,0 @@ -#!/usr/bin/env python -# -# Copyright (c) 2009-2013, Luke Maurits -# All rights reserved. -# With contributions from: -# * Chris Clark -# * Klein Stephane -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# * The name of the author may not be used to endorse or promote products -# derived from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. - -__version__ = "0.7.2" - -import copy -import csv -import random -import re -import sys -import textwrap -import itertools -import unicodedata - -py3k = sys.version_info[0] >= 3 -if py3k: - unicode = str - basestring = str - itermap = map - iterzip = zip - uni_chr = chr - from html.parser import HTMLParser -else: - itermap = itertools.imap - iterzip = itertools.izip - uni_chr = unichr - from HTMLParser import HTMLParser - -if py3k and sys.version_info[1] >= 2: - from html import escape -else: - from cgi import escape - -# hrule styles -FRAME = 0 -ALL = 1 -NONE = 2 -HEADER = 3 - -# Table styles -DEFAULT = 10 -MSWORD_FRIENDLY = 11 -PLAIN_COLUMNS = 12 -RANDOM = 20 - -_re = re.compile("\033\[[0-9;]*m") - -def _get_size(text): - lines = text.split("\n") - height = len(lines) - width = max([_str_block_width(line) for line in lines]) - return (width, height) - -class PrettyTable(object): - - def __init__(self, field_names=None, **kwargs): - - """Return a new PrettyTable instance - - Arguments: - - encoding - Unicode encoding scheme used to decode any encoded input - field_names - list or tuple of field names - fields - list or tuple of field names to include in displays - start - index of first data row to include in output - end - index of last data row to include in output PLUS ONE (list slice style) - header - print a header showing field names (True or False) - header_style - stylisation to apply to field names in header ("cap", "title", 
"upper", "lower" or None) - border - print a border around the table (True or False) - hrules - controls printing of horizontal rules after rows. Allowed values: FRAME, HEADER, ALL, NONE - vrules - controls printing of vertical rules between columns. Allowed values: FRAME, ALL, NONE - int_format - controls formatting of integer data - float_format - controls formatting of floating point data - padding_width - number of spaces on either side of column data (only used if left and right paddings are None) - left_padding_width - number of spaces on left hand side of column data - right_padding_width - number of spaces on right hand side of column data - vertical_char - single character string used to draw vertical lines - horizontal_char - single character string used to draw horizontal lines - junction_char - single character string used to draw line junctions - sortby - name of field to sort rows by - sort_key - sorting key function, applied to data points before sorting - valign - default valign for each row (None, "t", "m" or "b") - reversesort - True or False to sort in descending or ascending order""" - - self.encoding = kwargs.get("encoding", "UTF-8") - - # Data - self._field_names = [] - self._align = {} - self._valign = {} - self._max_width = {} - self._rows = [] - if field_names: - self.field_names = field_names - else: - self._widths = [] - - # Options - self._options = "start end fields header border sortby reversesort sort_key attributes format hrules vrules".split() - self._options.extend("int_format float_format padding_width left_padding_width right_padding_width".split()) - self._options.extend("vertical_char horizontal_char junction_char header_style valign xhtml print_empty".split()) - for option in self._options: - if option in kwargs: - self._validate_option(option, kwargs[option]) - else: - kwargs[option] = None - - self._start = kwargs["start"] or 0 - self._end = kwargs["end"] or None - self._fields = kwargs["fields"] or None - - if 
kwargs["header"] in (True, False): - self._header = kwargs["header"] - else: - self._header = True - self._header_style = kwargs["header_style"] or None - if kwargs["border"] in (True, False): - self._border = kwargs["border"] - else: - self._border = True - self._hrules = kwargs["hrules"] or FRAME - self._vrules = kwargs["vrules"] or ALL - - self._sortby = kwargs["sortby"] or None - if kwargs["reversesort"] in (True, False): - self._reversesort = kwargs["reversesort"] - else: - self._reversesort = False - self._sort_key = kwargs["sort_key"] or (lambda x: x) - - self._int_format = kwargs["int_format"] or {} - self._float_format = kwargs["float_format"] or {} - self._padding_width = kwargs["padding_width"] or 1 - self._left_padding_width = kwargs["left_padding_width"] or None - self._right_padding_width = kwargs["right_padding_width"] or None - - self._vertical_char = kwargs["vertical_char"] or self._unicode("|") - self._horizontal_char = kwargs["horizontal_char"] or self._unicode("-") - self._junction_char = kwargs["junction_char"] or self._unicode("+") - - if kwargs["print_empty"] in (True, False): - self._print_empty = kwargs["print_empty"] - else: - self._print_empty = True - self._format = kwargs["format"] or False - self._xhtml = kwargs["xhtml"] or False - self._attributes = kwargs["attributes"] or {} - - def _unicode(self, value): - if not isinstance(value, basestring): - value = str(value) - if not isinstance(value, unicode): - value = unicode(value, self.encoding, "strict") - return value - - def _justify(self, text, width, align): - excess = width - _str_block_width(text) - if align == "l": - return text + excess * " " - elif align == "r": - return excess * " " + text - else: - if excess % 2: - # Uneven padding - # Put more space on right if text is of odd length... 
- if _str_block_width(text) % 2: - return (excess//2)*" " + text + (excess//2 + 1)*" " - # and more space on left if text is of even length - else: - return (excess//2 + 1)*" " + text + (excess//2)*" " - # Why distribute extra space this way? To match the behaviour of - # the inbuilt str.center() method. - else: - # Equal padding on either side - return (excess//2)*" " + text + (excess//2)*" " - - def __getattr__(self, name): - - if name == "rowcount": - return len(self._rows) - elif name == "colcount": - if self._field_names: - return len(self._field_names) - elif self._rows: - return len(self._rows[0]) - else: - return 0 - else: - raise AttributeError(name) - - def __getitem__(self, index): - - new = PrettyTable() - new.field_names = self.field_names - for attr in self._options: - setattr(new, "_"+attr, getattr(self, "_"+attr)) - setattr(new, "_align", getattr(self, "_align")) - if isinstance(index, slice): - for row in self._rows[index]: - new.add_row(row) - elif isinstance(index, int): - new.add_row(self._rows[index]) - else: - raise Exception("Index %s is invalid, must be an integer or slice" % str(index)) - return new - - if py3k: - def __str__(self): - return self.__unicode__() - else: - def __str__(self): - return self.__unicode__().encode(self.encoding) - - def __unicode__(self): - return self.get_string() - - ############################## - # ATTRIBUTE VALIDATORS # - ############################## - - # The method _validate_option is all that should be used elsewhere in the code base to validate options. - # It will call the appropriate validation method for that option. The individual validation methods should - # never need to be called directly (although nothing bad will happen if they *are*). - # Validation happens in TWO places. - # Firstly, in the property setters defined in the ATTRIBUTE MANAGMENT section. 
- # Secondly, in the _get_options method, where keyword arguments are mixed with persistent settings - - def _validate_option(self, option, val): - if option in ("field_names"): - self._validate_field_names(val) - elif option in ("start", "end", "max_width", "padding_width", "left_padding_width", "right_padding_width", "format"): - self._validate_nonnegative_int(option, val) - elif option in ("sortby"): - self._validate_field_name(option, val) - elif option in ("sort_key"): - self._validate_function(option, val) - elif option in ("hrules"): - self._validate_hrules(option, val) - elif option in ("vrules"): - self._validate_vrules(option, val) - elif option in ("fields"): - self._validate_all_field_names(option, val) - elif option in ("header", "border", "reversesort", "xhtml", "print_empty"): - self._validate_true_or_false(option, val) - elif option in ("header_style"): - self._validate_header_style(val) - elif option in ("int_format"): - self._validate_int_format(option, val) - elif option in ("float_format"): - self._validate_float_format(option, val) - elif option in ("vertical_char", "horizontal_char", "junction_char"): - self._validate_single_char(option, val) - elif option in ("attributes"): - self._validate_attributes(option, val) - else: - raise Exception("Unrecognised option: %s!" 
% option) - - def _validate_field_names(self, val): - # Check for appropriate length - if self._field_names: - try: - assert len(val) == len(self._field_names) - except AssertionError: - raise Exception("Field name list has incorrect number of values, (actual) %d!=%d (expected)" % (len(val), len(self._field_names))) - if self._rows: - try: - assert len(val) == len(self._rows[0]) - except AssertionError: - raise Exception("Field name list has incorrect number of values, (actual) %d!=%d (expected)" % (len(val), len(self._rows[0]))) - # Check for uniqueness - try: - assert len(val) == len(set(val)) - except AssertionError: - raise Exception("Field names must be unique!") - - def _validate_header_style(self, val): - try: - assert val in ("cap", "title", "upper", "lower", None) - except AssertionError: - raise Exception("Invalid header style, use cap, title, upper, lower or None!") - - def _validate_align(self, val): - try: - assert val in ["l","c","r"] - except AssertionError: - raise Exception("Alignment %s is invalid, use l, c or r!" % val) - - def _validate_valign(self, val): - try: - assert val in ["t","m","b",None] - except AssertionError: - raise Exception("Alignment %s is invalid, use t, m, b or None!" % val) - - def _validate_nonnegative_int(self, name, val): - try: - assert int(val) >= 0 - except AssertionError: - raise Exception("Invalid value for %s: %s!" % (name, self._unicode(val))) - - def _validate_true_or_false(self, name, val): - try: - assert val in (True, False) - except AssertionError: - raise Exception("Invalid value for %s! Must be True or False." % name) - - def _validate_int_format(self, name, val): - if val == "": - return - try: - assert type(val) in (str, unicode) - assert val.isdigit() - except AssertionError: - raise Exception("Invalid value for %s! Must be an integer format string." % name) - - def _validate_float_format(self, name, val): - if val == "": - return - try: - assert type(val) in (str, unicode) - assert "." 
in val - bits = val.split(".") - assert len(bits) <= 2 - assert bits[0] == "" or bits[0].isdigit() - assert bits[1] == "" or bits[1].isdigit() - except AssertionError: - raise Exception("Invalid value for %s! Must be a float format string." % name) - - def _validate_function(self, name, val): - try: - assert hasattr(val, "__call__") - except AssertionError: - raise Exception("Invalid value for %s! Must be a function." % name) - - def _validate_hrules(self, name, val): - try: - assert val in (ALL, FRAME, HEADER, NONE) - except AssertionError: - raise Exception("Invalid value for %s! Must be ALL, FRAME, HEADER or NONE." % name) - - def _validate_vrules(self, name, val): - try: - assert val in (ALL, FRAME, NONE) - except AssertionError: - raise Exception("Invalid value for %s! Must be ALL, FRAME, or NONE." % name) - - def _validate_field_name(self, name, val): - try: - assert (val in self._field_names) or (val is None) - except AssertionError: - raise Exception("Invalid field name: %s!" % val) - - def _validate_all_field_names(self, name, val): - try: - for x in val: - self._validate_field_name(name, x) - except AssertionError: - raise Exception("fields must be a sequence of field names!") - - def _validate_single_char(self, name, val): - try: - assert _str_block_width(val) == 1 - except AssertionError: - raise Exception("Invalid value for %s! Must be a string of length 1." 
% name) - - def _validate_attributes(self, name, val): - try: - assert isinstance(val, dict) - except AssertionError: - raise Exception("attributes must be a dictionary of name/value pairs!") - - ############################## - # ATTRIBUTE MANAGEMENT # - ############################## - - def _get_field_names(self): - return self._field_names - """The names of the fields - - Arguments: - - fields - list or tuple of field names""" - def _set_field_names(self, val): - val = [self._unicode(x) for x in val] - self._validate_option("field_names", val) - if self._field_names: - old_names = self._field_names[:] - self._field_names = val - if self._align and old_names: - for old_name, new_name in zip(old_names, val): - self._align[new_name] = self._align[old_name] - for old_name in old_names: - if old_name not in self._align: - self._align.pop(old_name) - else: - for field in self._field_names: - self._align[field] = "c" - if self._valign and old_names: - for old_name, new_name in zip(old_names, val): - self._valign[new_name] = self._valign[old_name] - for old_name in old_names: - if old_name not in self._valign: - self._valign.pop(old_name) - else: - for field in self._field_names: - self._valign[field] = "t" - field_names = property(_get_field_names, _set_field_names) - - def _get_align(self): - return self._align - def _set_align(self, val): - self._validate_align(val) - for field in self._field_names: - self._align[field] = val - align = property(_get_align, _set_align) - - def _get_valign(self): - return self._valign - def _set_valign(self, val): - self._validate_valign(val) - for field in self._field_names: - self._valign[field] = val - valign = property(_get_valign, _set_valign) - - def _get_max_width(self): - return self._max_width - def _set_max_width(self, val): - self._validate_option("max_width", val) - for field in self._field_names: - self._max_width[field] = val - max_width = property(_get_max_width, _set_max_width) - - def _get_fields(self): - """List or 
tuple of field names to include in displays - - Arguments: - - fields - list or tuple of field names to include in displays""" - return self._fields - def _set_fields(self, val): - self._validate_option("fields", val) - self._fields = val - fields = property(_get_fields, _set_fields) - - def _get_start(self): - """Start index of the range of rows to print - - Arguments: - - start - index of first data row to include in output""" - return self._start - - def _set_start(self, val): - self._validate_option("start", val) - self._start = val - start = property(_get_start, _set_start) - - def _get_end(self): - """End index of the range of rows to print - - Arguments: - - end - index of last data row to include in output PLUS ONE (list slice style)""" - return self._end - def _set_end(self, val): - self._validate_option("end", val) - self._end = val - end = property(_get_end, _set_end) - - def _get_sortby(self): - """Name of field by which to sort rows - - Arguments: - - sortby - field name to sort by""" - return self._sortby - def _set_sortby(self, val): - self._validate_option("sortby", val) - self._sortby = val - sortby = property(_get_sortby, _set_sortby) - - def _get_reversesort(self): - """Controls direction of sorting (ascending vs descending) - - Arguments: - - reveresort - set to True to sort by descending order, or False to sort by ascending order""" - return self._reversesort - def _set_reversesort(self, val): - self._validate_option("reversesort", val) - self._reversesort = val - reversesort = property(_get_reversesort, _set_reversesort) - - def _get_sort_key(self): - """Sorting key function, applied to data points before sorting - - Arguments: - - sort_key - a function which takes one argument and returns something to be sorted""" - return self._sort_key - def _set_sort_key(self, val): - self._validate_option("sort_key", val) - self._sort_key = val - sort_key = property(_get_sort_key, _set_sort_key) - - def _get_header(self): - """Controls printing of table 
header with field names - - Arguments: - - header - print a header showing field names (True or False)""" - return self._header - def _set_header(self, val): - self._validate_option("header", val) - self._header = val - header = property(_get_header, _set_header) - - def _get_header_style(self): - """Controls stylisation applied to field names in header - - Arguments: - - header_style - stylisation to apply to field names in header ("cap", "title", "upper", "lower" or None)""" - return self._header_style - def _set_header_style(self, val): - self._validate_header_style(val) - self._header_style = val - header_style = property(_get_header_style, _set_header_style) - - def _get_border(self): - """Controls printing of border around table - - Arguments: - - border - print a border around the table (True or False)""" - return self._border - def _set_border(self, val): - self._validate_option("border", val) - self._border = val - border = property(_get_border, _set_border) - - def _get_hrules(self): - """Controls printing of horizontal rules after rows - - Arguments: - - hrules - horizontal rules style. Allowed values: FRAME, ALL, HEADER, NONE""" - return self._hrules - def _set_hrules(self, val): - self._validate_option("hrules", val) - self._hrules = val - hrules = property(_get_hrules, _set_hrules) - - def _get_vrules(self): - """Controls printing of vertical rules between columns - - Arguments: - - vrules - vertical rules style. 
Allowed values: FRAME, ALL, NONE""" - return self._vrules - def _set_vrules(self, val): - self._validate_option("vrules", val) - self._vrules = val - vrules = property(_get_vrules, _set_vrules) - - def _get_int_format(self): - """Controls formatting of integer data - Arguments: - - int_format - integer format string""" - return self._int_format - def _set_int_format(self, val): -# self._validate_option("int_format", val) - for field in self._field_names: - self._int_format[field] = val - int_format = property(_get_int_format, _set_int_format) - - def _get_float_format(self): - """Controls formatting of floating point data - Arguments: - - float_format - floating point format string""" - return self._float_format - def _set_float_format(self, val): -# self._validate_option("float_format", val) - for field in self._field_names: - self._float_format[field] = val - float_format = property(_get_float_format, _set_float_format) - - def _get_padding_width(self): - """The number of empty spaces between a column's edge and its content - - Arguments: - - padding_width - number of spaces, must be a positive integer""" - return self._padding_width - def _set_padding_width(self, val): - self._validate_option("padding_width", val) - self._padding_width = val - padding_width = property(_get_padding_width, _set_padding_width) - - def _get_left_padding_width(self): - """The number of empty spaces between a column's left edge and its content - - Arguments: - - left_padding - number of spaces, must be a positive integer""" - return self._left_padding_width - def _set_left_padding_width(self, val): - self._validate_option("left_padding_width", val) - self._left_padding_width = val - left_padding_width = property(_get_left_padding_width, _set_left_padding_width) - - def _get_right_padding_width(self): - """The number of empty spaces between a column's right edge and its content - - Arguments: - - right_padding - number of spaces, must be a positive integer""" - return 
self._right_padding_width - def _set_right_padding_width(self, val): - self._validate_option("right_padding_width", val) - self._right_padding_width = val - right_padding_width = property(_get_right_padding_width, _set_right_padding_width) - - def _get_vertical_char(self): - """The charcter used when printing table borders to draw vertical lines - - Arguments: - - vertical_char - single character string used to draw vertical lines""" - return self._vertical_char - def _set_vertical_char(self, val): - val = self._unicode(val) - self._validate_option("vertical_char", val) - self._vertical_char = val - vertical_char = property(_get_vertical_char, _set_vertical_char) - - def _get_horizontal_char(self): - """The charcter used when printing table borders to draw horizontal lines - - Arguments: - - horizontal_char - single character string used to draw horizontal lines""" - return self._horizontal_char - def _set_horizontal_char(self, val): - val = self._unicode(val) - self._validate_option("horizontal_char", val) - self._horizontal_char = val - horizontal_char = property(_get_horizontal_char, _set_horizontal_char) - - def _get_junction_char(self): - """The charcter used when printing table borders to draw line junctions - - Arguments: - - junction_char - single character string used to draw line junctions""" - return self._junction_char - def _set_junction_char(self, val): - val = self._unicode(val) - self._validate_option("vertical_char", val) - self._junction_char = val - junction_char = property(_get_junction_char, _set_junction_char) - - def _get_format(self): - """Controls whether or not HTML tables are formatted to match styling options - - Arguments: - - format - True or False""" - return self._format - def _set_format(self, val): - self._validate_option("format", val) - self._format = val - format = property(_get_format, _set_format) - - def _get_print_empty(self): - """Controls whether or not empty tables produce a header and frame or just an empty string - - 
Arguments: - - print_empty - True or False""" - return self._print_empty - def _set_print_empty(self, val): - self._validate_option("print_empty", val) - self._print_empty = val - print_empty = property(_get_print_empty, _set_print_empty) - - def _get_attributes(self): - """A dictionary of HTML attribute name/value pairs to be included in the tag when printing HTML - - Arguments: - - attributes - dictionary of attributes""" - return self._attributes - def _set_attributes(self, val): - self._validate_option("attributes", val) - self._attributes = val - attributes = property(_get_attributes, _set_attributes) - - ############################## - # OPTION MIXER # - ############################## - - def _get_options(self, kwargs): - - options = {} - for option in self._options: - if option in kwargs: - self._validate_option(option, kwargs[option]) - options[option] = kwargs[option] - else: - options[option] = getattr(self, "_"+option) - return options - - ############################## - # PRESET STYLE LOGIC # - ############################## - - def set_style(self, style): - - if style == DEFAULT: - self._set_default_style() - elif style == MSWORD_FRIENDLY: - self._set_msword_style() - elif style == PLAIN_COLUMNS: - self._set_columns_style() - elif style == RANDOM: - self._set_random_style() - else: - raise Exception("Invalid pre-set style!") - - def _set_default_style(self): - - self.header = True - self.border = True - self._hrules = FRAME - self._vrules = ALL - self.padding_width = 1 - self.left_padding_width = 1 - self.right_padding_width = 1 - self.vertical_char = "|" - self.horizontal_char = "-" - self.junction_char = "+" - - def _set_msword_style(self): - - self.header = True - self.border = True - self._hrules = NONE - self.padding_width = 1 - self.left_padding_width = 1 - self.right_padding_width = 1 - self.vertical_char = "|" - - def _set_columns_style(self): - - self.header = True - self.border = False - self.padding_width = 1 - self.left_padding_width = 0 
- self.right_padding_width = 8 - - def _set_random_style(self): - - # Just for fun! - self.header = random.choice((True, False)) - self.border = random.choice((True, False)) - self._hrules = random.choice((ALL, FRAME, HEADER, NONE)) - self._vrules = random.choice((ALL, FRAME, NONE)) - self.left_padding_width = random.randint(0,5) - self.right_padding_width = random.randint(0,5) - self.vertical_char = random.choice("~!@#$%^&*()_+|-=\{}[];':\",./;<>?") - self.horizontal_char = random.choice("~!@#$%^&*()_+|-=\{}[];':\",./;<>?") - self.junction_char = random.choice("~!@#$%^&*()_+|-=\{}[];':\",./;<>?") - - ############################## - # DATA INPUT METHODS # - ############################## - - def add_row(self, row): - - """Add a row to the table - - Arguments: - - row - row of data, should be a list with as many elements as the table - has fields""" - - if self._field_names and len(row) != len(self._field_names): - raise Exception("Row has incorrect number of values, (actual) %d!=%d (expected)" %(len(row),len(self._field_names))) - if not self._field_names: - self.field_names = [("Field %d" % (n+1)) for n in range(0,len(row))] - self._rows.append(list(row)) - - def del_row(self, row_index): - - """Delete a row to the table - - Arguments: - - row_index - The index of the row you want to delete. Indexing starts at 0.""" - - if row_index > len(self._rows)-1: - raise Exception("Cant delete row at index %d, table only has %d rows!" % (row_index, len(self._rows))) - del self._rows[row_index] - - def add_column(self, fieldname, column, align="c", valign="t"): - - """Add a column to the table. 
- - Arguments: - - fieldname - name of the field to contain the new column of data - column - column of data, should be a list with as many elements as the - table has rows - align - desired alignment for this column - "l" for left, "c" for centre and "r" for right - valign - desired vertical alignment for new columns - "t" for top, "m" for middle and "b" for bottom""" - - if len(self._rows) in (0, len(column)): - self._validate_align(align) - self._validate_valign(valign) - self._field_names.append(fieldname) - self._align[fieldname] = align - self._valign[fieldname] = valign - for i in range(0, len(column)): - if len(self._rows) < i+1: - self._rows.append([]) - self._rows[i].append(column[i]) - else: - raise Exception("Column length %d does not match number of rows %d!" % (len(column), len(self._rows))) - - def clear_rows(self): - - """Delete all rows from the table but keep the current field names""" - - self._rows = [] - - def clear(self): - - """Delete all rows and field names from the table, maintaining nothing but styling options""" - - self._rows = [] - self._field_names = [] - self._widths = [] - - ############################## - # MISC PUBLIC METHODS # - ############################## - - def copy(self): - return copy.deepcopy(self) - - ############################## - # MISC PRIVATE METHODS # - ############################## - - def _format_value(self, field, value): - if isinstance(value, int) and field in self._int_format: - value = self._unicode(("%%%sd" % self._int_format[field]) % value) - elif isinstance(value, float) and field in self._float_format: - value = self._unicode(("%%%sf" % self._float_format[field]) % value) - return self._unicode(value) - - def _compute_widths(self, rows, options): - if options["header"]: - widths = [_get_size(field)[0] for field in self._field_names] - else: - widths = len(self.field_names) * [0] - for row in rows: - for index, value in enumerate(row): - fieldname = self.field_names[index] - if fieldname in 
self.max_width: - widths[index] = max(widths[index], min(_get_size(value)[0], self.max_width[fieldname])) - else: - widths[index] = max(widths[index], _get_size(value)[0]) - self._widths = widths - - def _get_padding_widths(self, options): - - if options["left_padding_width"] is not None: - lpad = options["left_padding_width"] - else: - lpad = options["padding_width"] - if options["right_padding_width"] is not None: - rpad = options["right_padding_width"] - else: - rpad = options["padding_width"] - return lpad, rpad - - def _get_rows(self, options): - """Return only those data rows that should be printed, based on slicing and sorting. - - Arguments: - - options - dictionary of option settings.""" - - # Make a copy of only those rows in the slice range - rows = copy.deepcopy(self._rows[options["start"]:options["end"]]) - # Sort if necessary - if options["sortby"]: - sortindex = self._field_names.index(options["sortby"]) - # Decorate - rows = [[row[sortindex]]+row for row in rows] - # Sort - rows.sort(reverse=options["reversesort"], key=options["sort_key"]) - # Undecorate - rows = [row[1:] for row in rows] - return rows - - def _format_row(self, row, options): - return [self._format_value(field, value) for (field, value) in zip(self._field_names, row)] - - def _format_rows(self, rows, options): - return [self._format_row(row, options) for row in rows] - - ############################## - # PLAIN TEXT STRING METHODS # - ############################## - - def get_string(self, **kwargs): - - """Return string representation of table in current state. - - Arguments: - - start - index of first data row to include in output - end - index of last data row to include in output PLUS ONE (list slice style) - fields - names of fields (columns) to include - header - print a header showing field names (True or False) - border - print a border around the table (True or False) - hrules - controls printing of horizontal rules after rows. 
Allowed values: ALL, FRAME, HEADER, NONE - vrules - controls printing of vertical rules between columns. Allowed values: FRAME, ALL, NONE - int_format - controls formatting of integer data - float_format - controls formatting of floating point data - padding_width - number of spaces on either side of column data (only used if left and right paddings are None) - left_padding_width - number of spaces on left hand side of column data - right_padding_width - number of spaces on right hand side of column data - vertical_char - single character string used to draw vertical lines - horizontal_char - single character string used to draw horizontal lines - junction_char - single character string used to draw line junctions - sortby - name of field to sort rows by - sort_key - sorting key function, applied to data points before sorting - reversesort - True or False to sort in descending or ascending order - print empty - if True, stringify just the header for an empty table, if False return an empty string """ - - options = self._get_options(kwargs) - - lines = [] - - # Don't think too hard about an empty table - # Is this the desired behaviour? Maybe we should still print the header? - if self.rowcount == 0 and (not options["print_empty"] or not options["border"]): - return "" - - # Get the rows we need to print, taking into account slicing, sorting, etc. 
- rows = self._get_rows(options) - - # Turn all data in all rows into Unicode, formatted as desired - formatted_rows = self._format_rows(rows, options) - - # Compute column widths - self._compute_widths(formatted_rows, options) - - # Add header or top of border - self._hrule = self._stringify_hrule(options) - if options["header"]: - lines.append(self._stringify_header(options)) - elif options["border"] and options["hrules"] in (ALL, FRAME): - lines.append(self._hrule) - - # Add rows - for row in formatted_rows: - lines.append(self._stringify_row(row, options)) - - # Add bottom of border - if options["border"] and options["hrules"] == FRAME: - lines.append(self._hrule) - - return self._unicode("\n").join(lines) - - def _stringify_hrule(self, options): - - if not options["border"]: - return "" - lpad, rpad = self._get_padding_widths(options) - if options['vrules'] in (ALL, FRAME): - bits = [options["junction_char"]] - else: - bits = [options["horizontal_char"]] - # For tables with no data or fieldnames - if not self._field_names: - bits.append(options["junction_char"]) - return "".join(bits) - for field, width in zip(self._field_names, self._widths): - if options["fields"] and field not in options["fields"]: - continue - bits.append((width+lpad+rpad)*options["horizontal_char"]) - if options['vrules'] == ALL: - bits.append(options["junction_char"]) - else: - bits.append(options["horizontal_char"]) - if options["vrules"] == FRAME: - bits.pop() - bits.append(options["junction_char"]) - return "".join(bits) - - def _stringify_header(self, options): - - bits = [] - lpad, rpad = self._get_padding_widths(options) - if options["border"]: - if options["hrules"] in (ALL, FRAME): - bits.append(self._hrule) - bits.append("\n") - if options["vrules"] in (ALL, FRAME): - bits.append(options["vertical_char"]) - else: - bits.append(" ") - # For tables with no data or field names - if not self._field_names: - if options["vrules"] in (ALL, FRAME): - 
bits.append(options["vertical_char"]) - else: - bits.append(" ") - for field, width, in zip(self._field_names, self._widths): - if options["fields"] and field not in options["fields"]: - continue - if self._header_style == "cap": - fieldname = field.capitalize() - elif self._header_style == "title": - fieldname = field.title() - elif self._header_style == "upper": - fieldname = field.upper() - elif self._header_style == "lower": - fieldname = field.lower() - else: - fieldname = field - bits.append(" " * lpad + self._justify(fieldname, width, self._align[field]) + " " * rpad) - if options["border"]: - if options["vrules"] == ALL: - bits.append(options["vertical_char"]) - else: - bits.append(" ") - # If vrules is FRAME, then we just appended a space at the end - # of the last field, when we really want a vertical character - if options["border"] and options["vrules"] == FRAME: - bits.pop() - bits.append(options["vertical_char"]) - if options["border"] and options["hrules"] != NONE: - bits.append("\n") - bits.append(self._hrule) - return "".join(bits) - - def _stringify_row(self, row, options): - - for index, field, value, width, in zip(range(0,len(row)), self._field_names, row, self._widths): - # Enforce max widths - lines = value.split("\n") - new_lines = [] - for line in lines: - if _str_block_width(line) > width: - line = textwrap.fill(line, width) - new_lines.append(line) - lines = new_lines - value = "\n".join(lines) - row[index] = value - - row_height = 0 - for c in row: - h = _get_size(c)[1] - if h > row_height: - row_height = h - - bits = [] - lpad, rpad = self._get_padding_widths(options) - for y in range(0, row_height): - bits.append([]) - if options["border"]: - if options["vrules"] in (ALL, FRAME): - bits[y].append(self.vertical_char) - else: - bits[y].append(" ") - - for field, value, width, in zip(self._field_names, row, self._widths): - - valign = self._valign[field] - lines = value.split("\n") - dHeight = row_height - len(lines) - if dHeight: - if 
valign == "m": - lines = [""] * int(dHeight / 2) + lines + [""] * (dHeight - int(dHeight / 2)) - elif valign == "b": - lines = [""] * dHeight + lines - else: - lines = lines + [""] * dHeight - - y = 0 - for l in lines: - if options["fields"] and field not in options["fields"]: - continue - - bits[y].append(" " * lpad + self._justify(l, width, self._align[field]) + " " * rpad) - if options["border"]: - if options["vrules"] == ALL: - bits[y].append(self.vertical_char) - else: - bits[y].append(" ") - y += 1 - - # If vrules is FRAME, then we just appended a space at the end - # of the last field, when we really want a vertical character - for y in range(0, row_height): - if options["border"] and options["vrules"] == FRAME: - bits[y].pop() - bits[y].append(options["vertical_char"]) - - if options["border"] and options["hrules"]== ALL: - bits[row_height-1].append("\n") - bits[row_height-1].append(self._hrule) - - for y in range(0, row_height): - bits[y] = "".join(bits[y]) - - return "\n".join(bits) - - ############################## - # HTML STRING METHODS # - ############################## - - def get_html_string(self, **kwargs): - - """Return string representation of HTML formatted version of table in current state. - - Arguments: - - start - index of first data row to include in output - end - index of last data row to include in output PLUS ONE (list slice style) - fields - names of fields (columns) to include - header - print a header showing field names (True or False) - border - print a border around the table (True or False) - hrules - controls printing of horizontal rules after rows. Allowed values: ALL, FRAME, HEADER, NONE - vrules - controls printing of vertical rules between columns. 
Allowed values: FRAME, ALL, NONE - int_format - controls formatting of integer data - float_format - controls formatting of floating point data - padding_width - number of spaces on either side of column data (only used if left and right paddings are None) - left_padding_width - number of spaces on left hand side of column data - right_padding_width - number of spaces on right hand side of column data - sortby - name of field to sort rows by - sort_key - sorting key function, applied to data points before sorting - attributes - dictionary of name/value pairs to include as HTML attributes in the
tag - xhtml - print
tags if True,
tags if false""" - - options = self._get_options(kwargs) - - if options["format"]: - string = self._get_formatted_html_string(options) - else: - string = self._get_simple_html_string(options) - - return string - - def _get_simple_html_string(self, options): - - lines = [] - if options["xhtml"]: - linebreak = "
" - else: - linebreak = "
" - - open_tag = [] - open_tag.append("") - lines.append("".join(open_tag)) - - # Headers - if options["header"]: - lines.append(" ") - for field in self._field_names: - if options["fields"] and field not in options["fields"]: - continue - lines.append(" " % escape(field).replace("\n", linebreak)) - lines.append(" ") - - # Data - rows = self._get_rows(options) - formatted_rows = self._format_rows(rows, options) - for row in formatted_rows: - lines.append(" ") - for field, datum in zip(self._field_names, row): - if options["fields"] and field not in options["fields"]: - continue - lines.append(" " % escape(datum).replace("\n", linebreak)) - lines.append(" ") - - lines.append("
%s
%s
") - - return self._unicode("\n").join(lines) - - def _get_formatted_html_string(self, options): - - lines = [] - lpad, rpad = self._get_padding_widths(options) - if options["xhtml"]: - linebreak = "
" - else: - linebreak = "
" - - open_tag = [] - open_tag.append("") - lines.append("".join(open_tag)) - - # Headers - if options["header"]: - lines.append(" ") - for field in self._field_names: - if options["fields"] and field not in options["fields"]: - continue - lines.append(" %s" % (lpad, rpad, escape(field).replace("\n", linebreak))) - lines.append(" ") - - # Data - rows = self._get_rows(options) - formatted_rows = self._format_rows(rows, options) - aligns = [] - valigns = [] - for field in self._field_names: - aligns.append({ "l" : "left", "r" : "right", "c" : "center" }[self._align[field]]) - valigns.append({"t" : "top", "m" : "middle", "b" : "bottom"}[self._valign[field]]) - for row in formatted_rows: - lines.append(" ") - for field, datum, align, valign in zip(self._field_names, row, aligns, valigns): - if options["fields"] and field not in options["fields"]: - continue - lines.append(" %s" % (lpad, rpad, align, valign, escape(datum).replace("\n", linebreak))) - lines.append(" ") - lines.append("") - - return self._unicode("\n").join(lines) - -############################## -# UNICODE WIDTH FUNCTIONS # -############################## - -def _char_block_width(char): - # Basic Latin, which is probably the most common case - #if char in xrange(0x0021, 0x007e): - #if char >= 0x0021 and char <= 0x007e: - if 0x0021 <= char <= 0x007e: - return 1 - # Chinese, Japanese, Korean (common) - if 0x4e00 <= char <= 0x9fff: - return 2 - # Hangul - if 0xac00 <= char <= 0xd7af: - return 2 - # Combining? 
- if unicodedata.combining(uni_chr(char)): - return 0 - # Hiragana and Katakana - if 0x3040 <= char <= 0x309f or 0x30a0 <= char <= 0x30ff: - return 2 - # Full-width Latin characters - if 0xff01 <= char <= 0xff60: - return 2 - # CJK punctuation - if 0x3000 <= char <= 0x303e: - return 2 - # Backspace and delete - if char in (0x0008, 0x007f): - return -1 - # Other control characters - elif char in (0x0000, 0x001f): - return 0 - # Take a guess - return 1 - -def _str_block_width(val): - - return sum(itermap(_char_block_width, itermap(ord, _re.sub("", val)))) - -############################## -# TABLE FACTORIES # -############################## - -def from_csv(fp, field_names = None, **kwargs): - - dialect = csv.Sniffer().sniff(fp.read(1024)) - fp.seek(0) - reader = csv.reader(fp, dialect) - - table = PrettyTable(**kwargs) - if field_names: - table.field_names = field_names - else: - if py3k: - table.field_names = [x.strip() for x in next(reader)] - else: - table.field_names = [x.strip() for x in reader.next()] - - for row in reader: - table.add_row([x.strip() for x in row]) - - return table - -def from_db_cursor(cursor, **kwargs): - - if cursor.description: - table = PrettyTable(**kwargs) - table.field_names = [col[0] for col in cursor.description] - for row in cursor.fetchall(): - table.add_row(row) - return table - -class TableHandler(HTMLParser): - - def __init__(self, **kwargs): - HTMLParser.__init__(self) - self.kwargs = kwargs - self.tables = [] - self.last_row = [] - self.rows = [] - self.max_row_width = 0 - self.active = None - self.last_content = "" - self.is_last_row_header = False - - def handle_starttag(self,tag, attrs): - self.active = tag - if tag == "th": - self.is_last_row_header = True - - def handle_endtag(self,tag): - if tag in ["th", "td"]: - stripped_content = self.last_content.strip() - self.last_row.append(stripped_content) - if tag == "tr": - self.rows.append( - (self.last_row, self.is_last_row_header)) - self.max_row_width = 
max(self.max_row_width, len(self.last_row)) - self.last_row = [] - self.is_last_row_header = False - if tag == "table": - table = self.generate_table(self.rows) - self.tables.append(table) - self.rows = [] - self.last_content = " " - self.active = None - - - def handle_data(self, data): - self.last_content += data - - def generate_table(self, rows): - """ - Generates from a list of rows a PrettyTable object. - """ - table = PrettyTable(**self.kwargs) - for row in self.rows: - if len(row[0]) < self.max_row_width: - appends = self.max_row_width - len(row[0]) - for i in range(1,appends): - row[0].append("-") - - if row[1] == True: - self.make_fields_unique(row[0]) - table.field_names = row[0] - else: - table.add_row(row[0]) - return table - - def make_fields_unique(self, fields): - """ - iterates over the row and make each field unique - """ - for i in range(0, len(fields)): - for j in range(i+1, len(fields)): - if fields[i] == fields[j]: - fields[j] += "'" - -def from_html(html_code, **kwargs): - """ - Generates a list of PrettyTables from a string of HTML code. Each in - the HTML becomes one PrettyTable object. - """ - - parser = TableHandler(**kwargs) - parser.feed(html_code) - return parser.tables - -def from_html_one(html_code, **kwargs): - """ - Generates a PrettyTables from a string of HTML code which contains only a - single
- """ - - tables = from_html(html_code, **kwargs) - try: - assert len(tables) == 1 - except AssertionError: - raise Exception("More than one
in provided HTML code! Use from_html instead.") - return tables[0] - -############################## -# MAIN (TEST FUNCTION) # -############################## - -def main(): - - x = PrettyTable(["City name", "Area", "Population", "Annual Rainfall"]) - x.sortby = "Population" - x.reversesort = True - x.int_format["Area"] = "04d" - x.float_format = "6.1f" - x.align["City name"] = "l" # Left align city names - x.add_row(["Adelaide", 1295, 1158259, 600.5]) - x.add_row(["Brisbane", 5905, 1857594, 1146.4]) - x.add_row(["Darwin", 112, 120900, 1714.7]) - x.add_row(["Hobart", 1357, 205556, 619.5]) - x.add_row(["Sydney", 2058, 4336374, 1214.8]) - x.add_row(["Melbourne", 1566, 3806092, 646.9]) - x.add_row(["Perth", 5386, 1554769, 869.4]) - print(x) - -if __name__ == "__main__": - main() diff --git a/shell/ext-py/prettytable-0.7.2/setup.cfg b/shell/ext-py/prettytable-0.7.2/setup.cfg deleted file mode 100644 index 861a9f554..000000000 --- a/shell/ext-py/prettytable-0.7.2/setup.cfg +++ /dev/null @@ -1,5 +0,0 @@ -[egg_info] -tag_build = -tag_date = 0 -tag_svn_revision = 0 - diff --git a/shell/ext-py/prettytable-0.7.2/setup.py b/shell/ext-py/prettytable-0.7.2/setup.py deleted file mode 100644 index 20a2322c4..000000000 --- a/shell/ext-py/prettytable-0.7.2/setup.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python -from setuptools import setup -from prettytable import __version__ as version - -setup( - name='prettytable', - version=version, - classifiers=[ - 'Programming Language :: Python', - 'Programming Language :: Python :: 2.4', - 'Programming Language :: Python :: 2.5', - 'Programming Language :: Python :: 2.6', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'License :: OSI Approved :: BSD License', - 'Topic :: Text Processing' - ], - license="BSD (3 clause)", - description='A simple Python library for easily displaying tabular data in a visually appealing ASCII table format', - author='Luke Maurits', - 
author_email='luke@maurits.id.au', - url='http://code.google.com/p/prettytable', - py_modules=['prettytable'], - test_suite = "prettytable_test" -) diff --git a/shell/ext-py/sasl-0.3.1/LICENSE.txt b/shell/ext-py/sasl-0.3.1/LICENSE.txt deleted file mode 100644 index d64569567..000000000 --- a/shell/ext-py/sasl-0.3.1/LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/shell/ext-py/sasl-0.3.1/MANIFEST.in b/shell/ext-py/sasl-0.3.1/MANIFEST.in deleted file mode 100644 index 6f995c7f5..000000000 --- a/shell/ext-py/sasl-0.3.1/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -include LICENSE.txt -include setup.py - -graft sasl diff --git a/shell/ext-py/sasl-0.3.1/PKG-INFO b/shell/ext-py/sasl-0.3.1/PKG-INFO deleted file mode 100644 index 8a22fd467..000000000 --- a/shell/ext-py/sasl-0.3.1/PKG-INFO +++ /dev/null @@ -1,12 +0,0 @@ -Metadata-Version: 1.2 -Name: sasl -Version: 0.3.1 -Summary: Cyrus-SASL bindings for Python -Home-page: http://github.com/cloudera/python-sasl -Maintainer: Todd Lipcon -Maintainer-email: todd@cloudera.com -License: Apache License, Version 2.0 -Description: UNKNOWN -Platform: UNKNOWN -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 3 diff --git a/shell/ext-py/sasl-0.3.1/sasl/__init__.py b/shell/ext-py/sasl-0.3.1/sasl/__init__.py deleted file mode 100644 index 10b9b71a0..000000000 --- a/shell/ext-py/sasl-0.3.1/sasl/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2015 Cloudera Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from sasl.saslwrapper import * diff --git a/shell/ext-py/sasl-0.3.1/sasl/saslwrapper.cpp b/shell/ext-py/sasl-0.3.1/sasl/saslwrapper.cpp deleted file mode 100644 index 4253457c8..000000000 --- a/shell/ext-py/sasl-0.3.1/sasl/saslwrapper.cpp +++ /dev/null @@ -1,6480 +0,0 @@ -/* Generated by Cython 0.29.21 */ - -#define PY_SSIZE_T_CLEAN -#include "Python.h" -#ifndef Py_PYTHON_H - #error Python headers needed to compile C extensions, please install development version of Python. -#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) - #error Cython requires Python 2.6+ or Python 3.3+. 
-#else -#define CYTHON_ABI "0_29_21" -#define CYTHON_HEX_VERSION 0x001D15F0 -#define CYTHON_FUTURE_DIVISION 1 -#include -#ifndef offsetof - #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif -#if !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif -#define __PYX_COMMA , -#ifndef HAVE_LONG_LONG - #if PY_VERSION_HEX >= 0x02070000 - #define HAVE_LONG_LONG - #endif -#endif -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif -#ifndef Py_HUGE_VAL - #define Py_HUGE_VAL HUGE_VAL -#endif -#ifdef PYPY_VERSION - #define CYTHON_COMPILING_IN_PYPY 1 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #undef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 0 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #if PY_VERSION_HEX < 0x03050000 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #undef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #undef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 1 - #undef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 0 - #undef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 0 - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define 
CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#elif defined(PYSTON_VERSION) - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 1 - #define CYTHON_COMPILING_IN_CPYTHON 0 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - #undef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 0 - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #undef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 0 - #undef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 0 - #undef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT 0 - #undef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE 0 - #undef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS 0 - #undef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK 0 -#else - #define CYTHON_COMPILING_IN_PYPY 0 - #define CYTHON_COMPILING_IN_PYSTON 0 - #define CYTHON_COMPILING_IN_CPYTHON 1 - #ifndef CYTHON_USE_TYPE_SLOTS - #define CYTHON_USE_TYPE_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYTYPE_LOOKUP - #define CYTHON_USE_PYTYPE_LOOKUP 0 - #elif !defined(CYTHON_USE_PYTYPE_LOOKUP) - #define CYTHON_USE_PYTYPE_LOOKUP 1 - #endif - #if PY_MAJOR_VERSION < 3 - #undef CYTHON_USE_ASYNC_SLOTS - #define CYTHON_USE_ASYNC_SLOTS 0 - 
#elif !defined(CYTHON_USE_ASYNC_SLOTS) - #define CYTHON_USE_ASYNC_SLOTS 1 - #endif - #if PY_VERSION_HEX < 0x02070000 - #undef CYTHON_USE_PYLONG_INTERNALS - #define CYTHON_USE_PYLONG_INTERNALS 0 - #elif !defined(CYTHON_USE_PYLONG_INTERNALS) - #define CYTHON_USE_PYLONG_INTERNALS 1 - #endif - #ifndef CYTHON_USE_PYLIST_INTERNALS - #define CYTHON_USE_PYLIST_INTERNALS 1 - #endif - #ifndef CYTHON_USE_UNICODE_INTERNALS - #define CYTHON_USE_UNICODE_INTERNALS 1 - #endif - #if PY_VERSION_HEX < 0x030300F0 - #undef CYTHON_USE_UNICODE_WRITER - #define CYTHON_USE_UNICODE_WRITER 0 - #elif !defined(CYTHON_USE_UNICODE_WRITER) - #define CYTHON_USE_UNICODE_WRITER 1 - #endif - #ifndef CYTHON_AVOID_BORROWED_REFS - #define CYTHON_AVOID_BORROWED_REFS 0 - #endif - #ifndef CYTHON_ASSUME_SAFE_MACROS - #define CYTHON_ASSUME_SAFE_MACROS 1 - #endif - #ifndef CYTHON_UNPACK_METHODS - #define CYTHON_UNPACK_METHODS 1 - #endif - #ifndef CYTHON_FAST_THREAD_STATE - #define CYTHON_FAST_THREAD_STATE 1 - #endif - #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 - #endif - #ifndef CYTHON_PEP489_MULTI_PHASE_INIT - #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) - #endif - #ifndef CYTHON_USE_TP_FINALIZE - #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1) - #endif - #ifndef CYTHON_USE_DICT_VERSIONS - #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1) - #endif - #ifndef CYTHON_USE_EXC_INFO_STACK - #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3) - #endif -#endif -#if !defined(CYTHON_FAST_PYCCALL) -#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) -#endif -#if CYTHON_USE_PYLONG_INTERNALS - #include "longintrepr.h" - #undef SHIFT - #undef BASE - #undef MASK - #ifdef SIZEOF_VOID_P - enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) }; - #endif -#endif -#ifndef __has_attribute - #define __has_attribute(x) 0 -#endif -#ifndef __has_cpp_attribute - #define __has_cpp_attribute(x) 0 -#endif 
-#ifndef CYTHON_RESTRICT - #if defined(__GNUC__) - #define CYTHON_RESTRICT __restrict__ - #elif defined(_MSC_VER) && _MSC_VER >= 1400 - #define CYTHON_RESTRICT __restrict - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_RESTRICT restrict - #else - #define CYTHON_RESTRICT - #endif -#endif -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif -#ifndef CYTHON_MAYBE_UNUSED_VAR -# if defined(__cplusplus) - template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } -# else -# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) -# endif -#endif -#ifndef CYTHON_NCP_UNUSED -# if CYTHON_COMPILING_IN_CPYTHON -# define CYTHON_NCP_UNUSED -# else -# define CYTHON_NCP_UNUSED CYTHON_UNUSED -# endif -#endif -#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) -#ifdef _MSC_VER - #ifndef _MSC_STDINT_H_ - #if _MSC_VER < 1300 - typedef unsigned char uint8_t; - typedef unsigned int uint32_t; - #else - typedef unsigned __int8 uint8_t; - typedef unsigned __int32 uint32_t; - #endif - #endif -#else - #include -#endif -#ifndef CYTHON_FALLTHROUGH - #if defined(__cplusplus) && __cplusplus >= 201103L - #if __has_cpp_attribute(fallthrough) - #define CYTHON_FALLTHROUGH [[fallthrough]] - #elif __has_cpp_attribute(clang::fallthrough) - #define CYTHON_FALLTHROUGH [[clang::fallthrough]] - #elif __has_cpp_attribute(gnu::fallthrough) - #define CYTHON_FALLTHROUGH [[gnu::fallthrough]] - #endif - #endif - #ifndef CYTHON_FALLTHROUGH - #if __has_attribute(fallthrough) - #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) - #else - #define CYTHON_FALLTHROUGH - #endif - #endif - #if 
defined(__clang__ ) && defined(__apple_build_version__) - #if __apple_build_version__ < 7000000 - #undef CYTHON_FALLTHROUGH - #define CYTHON_FALLTHROUGH - #endif - #endif -#endif - -#ifndef __cplusplus - #error "Cython files generated with the C++ option must be compiled with a C++ compiler." -#endif -#ifndef CYTHON_INLINE - #if defined(__clang__) - #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) - #else - #define CYTHON_INLINE inline - #endif -#endif -template -void __Pyx_call_destructor(T& x) { - x.~T(); -} -template -class __Pyx_FakeReference { - public: - __Pyx_FakeReference() : ptr(NULL) { } - __Pyx_FakeReference(const T& ref) : ptr(const_cast(&ref)) { } - T *operator->() { return ptr; } - T *operator&() { return ptr; } - operator T&() { return *ptr; } - template bool operator ==(U other) { return *ptr == other; } - template bool operator !=(U other) { return *ptr != other; } - private: - T *ptr; -}; - -#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) - #define Py_OptimizeFlag 0 -#endif -#define __PYX_BUILD_PY_SSIZE_T "n" -#define CYTHON_FORMAT_SSIZE_T "z" -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) - #define __Pyx_DefaultClassType PyClass_Type -#else - #define __Pyx_BUILTIN_MODULE_NAME "builtins" -#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2 - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#else - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) -#endif - #define __Pyx_DefaultClassType PyType_Type -#endif -#ifndef Py_TPFLAGS_CHECKTYPES - #define 
Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif -#ifndef Py_TPFLAGS_HAVE_FINALIZE - #define Py_TPFLAGS_HAVE_FINALIZE 0 -#endif -#ifndef METH_STACKLESS - #define METH_STACKLESS 0 -#endif -#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL) - #ifndef METH_FASTCALL - #define METH_FASTCALL 0x80 - #endif - typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs); - typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args, - Py_ssize_t nargs, PyObject *kwnames); -#else - #define __Pyx_PyCFunctionFast _PyCFunctionFast - #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords -#endif -#if CYTHON_FAST_PYCCALL -#define __Pyx_PyFastCFunction_Check(func)\ - ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))))) -#else -#define __Pyx_PyFastCFunction_Check(func) 0 -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) - #define PyObject_Malloc(s) PyMem_Malloc(s) - #define PyObject_Free(p) PyMem_Free(p) - #define PyObject_Realloc(p) PyMem_Realloc(p) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1 - #define PyMem_RawMalloc(n) PyMem_Malloc(n) - #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n) - #define PyMem_RawFree(p) PyMem_Free(p) -#endif -#if CYTHON_COMPILING_IN_PYSTON - #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) -#else - #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) - #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) -#endif -#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#elif 
PY_VERSION_HEX >= 0x03060000 - #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet() -#elif PY_VERSION_HEX >= 0x03000000 - #define __Pyx_PyThreadState_Current PyThreadState_GET() -#else - #define __Pyx_PyThreadState_Current _PyThreadState_Current -#endif -#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT) -#include "pythread.h" -#define Py_tss_NEEDS_INIT 0 -typedef int Py_tss_t; -static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) { - *key = PyThread_create_key(); - return 0; -} -static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) { - Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t)); - *key = Py_tss_NEEDS_INIT; - return key; -} -static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) { - PyObject_Free(key); -} -static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) { - return *key != Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) { - PyThread_delete_key(*key); - *key = Py_tss_NEEDS_INIT; -} -static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) { - return PyThread_set_key_value(*key, value); -} -static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { - return PyThread_get_key_value(*key); -} -#endif -#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized) -#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? 
PyDict_New() : _PyDict_NewPresized(n)) -#else -#define __Pyx_PyDict_NewPresized(n) PyDict_New() -#endif -#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS -#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash) -#else -#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name) -#endif -#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) - #define CYTHON_PEP393_ENABLED 1 - #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ - 0 : _PyUnicode_Ready((PyObject *)(op))) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) - #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) - #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) - #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) - #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? 
PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) - #else - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u)) - #endif -#else - #define CYTHON_PEP393_ENABLED 0 - #define PyUnicode_1BYTE_KIND 1 - #define PyUnicode_2BYTE_KIND 2 - #define PyUnicode_4BYTE_KIND 4 - #define __Pyx_PyUnicode_READY(op) (0) - #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) - #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) - #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) - #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) - #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) - #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) - #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) - #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) -#endif -#if CYTHON_COMPILING_IN_PYPY - #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) -#else - #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) - #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ - PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) - #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) - #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) -#endif -#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) - #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) -#endif -#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) -#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) -#else - #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) -#endif -#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) - #define PyObject_ASCII(o) PyObject_Repr(o) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact -#ifndef PyObject_Unicode - #define PyObject_Unicode PyObject_Str -#endif -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) - #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) -#else - #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) - #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) -#endif -#if PY_VERSION_HEX >= 0x030900A4 - #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size) -#else - #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) - #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size) -#endif -#if CYTHON_ASSUME_SAFE_MACROS - #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq) -#else - #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq) -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode 
PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask - #define PyNumber_Int PyNumber_Long -#endif -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif -#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY - #ifndef PyUnicode_InternFromString - #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) - #endif -#endif -#if PY_VERSION_HEX < 0x030200A4 - typedef long Py_hash_t; - #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t -#else - #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t -#endif -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) -#else - #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) -#endif -#if CYTHON_USE_ASYNC_SLOTS - #if PY_VERSION_HEX >= 0x030500B1 - #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods - #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) - #else - #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) - #endif -#else - #define __Pyx_PyType_AsAsync(obj) NULL -#endif -#ifndef __Pyx_PyAsyncMethodsStruct - typedef struct { - unaryfunc am_await; - unaryfunc am_aiter; - unaryfunc am_anext; - } __Pyx_PyAsyncMethodsStruct; -#endif - -#if defined(WIN32) || defined(MS_WINDOWS) - #define _USE_MATH_DEFINES -#endif -#include -#ifdef NAN -#define __PYX_NAN() ((float) NAN) -#else -static CYTHON_INLINE float __PYX_NAN() { - float value; - memset(&value, 0xFF, sizeof(value)); - return value; -} -#endif -#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) -#define __Pyx_truncl trunc -#else -#define __Pyx_truncl truncl -#endif - -#define __PYX_MARK_ERR_POS(f_index, lineno) \ - { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; } -#define __PYX_ERR(f_index, lineno, Ln_error) \ - { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; } - -#ifndef __PYX_EXTERN_C - #ifdef __cplusplus - #define __PYX_EXTERN_C extern "C" - #else - #define __PYX_EXTERN_C extern - #endif -#endif - -#define __PYX_HAVE__sasl__saslwrapper -#define __PYX_HAVE_API__sasl__saslwrapper -/* Early includes */ -#include -#include -#include "ios" -#include "new" -#include "stdexcept" -#include "typeinfo" -#include -#include "saslwrapper.h" -#ifdef _OPENMP -#include -#endif /* _OPENMP */ - -#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS) -#define CYTHON_WITHOUT_ASSERTIONS -#endif - -typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* 
encoding; - const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; - -#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0 -#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8) -#define __PYX_DEFAULT_STRING_ENCODING "" -#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString -#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#define __Pyx_uchar_cast(c) ((unsigned char)c) -#define __Pyx_long_cast(x) ((long)x) -#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ - (sizeof(type) < sizeof(Py_ssize_t)) ||\ - (sizeof(type) > sizeof(Py_ssize_t) &&\ - likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX) &&\ - (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ - v == (type)PY_SSIZE_T_MIN))) ||\ - (sizeof(type) == sizeof(Py_ssize_t) &&\ - (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ - v == (type)PY_SSIZE_T_MAX))) ) -static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) { - return (size_t) i < (size_t) limit; -} -#if defined (__cplusplus) && __cplusplus >= 201103L - #include - #define __Pyx_sst_abs(value) std::abs(value) -#elif SIZEOF_INT >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) abs(value) -#elif SIZEOF_LONG >= SIZEOF_SIZE_T - #define __Pyx_sst_abs(value) labs(value) -#elif defined (_MSC_VER) - #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value)) -#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define __Pyx_sst_abs(value) llabs(value) -#elif defined (__GNUC__) - #define __Pyx_sst_abs(value) __builtin_llabs(value) -#else - #define __Pyx_sst_abs(value) ((value<0) ? 
-value : value) -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); -#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) -#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) -#define __Pyx_PyBytes_FromString PyBytes_FromString -#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); -#if PY_MAJOR_VERSION < 3 - #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize -#else - #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString - #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize -#endif -#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s)) -#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) -#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) -#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) -#define 
__Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) -#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) -#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) -static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) { - const Py_UNICODE *u_end = u; - while (*u_end++) ; - return (size_t)(u_end - u - 1); -} -#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) -#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode -#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode -#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) -#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b); -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); -#define __Pyx_PySequence_Tuple(obj)\ - (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); -#if CYTHON_ASSUME_SAFE_MACROS -#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) -#else -#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) -#endif -#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) -#if PY_MAJOR_VERSION >= 3 -#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) -#else -#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) -#endif -#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Float(x)) -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII -static int __Pyx_sys_getdefaultencoding_not_ascii; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - PyObject* ascii_chars_u = NULL; - PyObject* ascii_chars_b = NULL; - const char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - if (strcmp(default_encoding_c, "ascii") == 0) { - __Pyx_sys_getdefaultencoding_not_ascii = 0; - } else { - char ascii_chars[128]; - int c; - for (c = 0; c < 128; c++) { - ascii_chars[c] = c; - } - __Pyx_sys_getdefaultencoding_not_ascii = 1; - ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); - if (!ascii_chars_u) goto bad; - ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); - if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { - PyErr_Format( - PyExc_ValueError, - "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", - default_encoding_c); - goto bad; - } - Py_DECREF(ascii_chars_u); - Py_DECREF(ascii_chars_b); - } - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - Py_XDECREF(ascii_chars_u); - Py_XDECREF(ascii_chars_b); - return -1; -} -#endif -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) -#else -#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) -#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -static char* 
__PYX_DEFAULT_STRING_ENCODING; -static int __Pyx_init_sys_getdefaultencoding_params(void) { - PyObject* sys; - PyObject* default_encoding = NULL; - char* default_encoding_c; - sys = PyImport_ImportModule("sys"); - if (!sys) goto bad; - default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); - Py_DECREF(sys); - if (!default_encoding) goto bad; - default_encoding_c = PyBytes_AsString(default_encoding); - if (!default_encoding_c) goto bad; - __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1); - if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; - strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); - Py_DECREF(default_encoding); - return 0; -bad: - Py_XDECREF(default_encoding); - return -1; -} -#endif -#endif - - -/* Test for GCC > 2.95 */ -#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) - #define likely(x) __builtin_expect(!!(x), 1) - #define unlikely(x) __builtin_expect(!!(x), 0) -#else /* !__GNUC__ or GCC < 2.95 */ - #define likely(x) (x) - #define unlikely(x) (x) -#endif /* __GNUC__ */ -static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } - -static PyObject *__pyx_m = NULL; -static PyObject *__pyx_d; -static PyObject *__pyx_b; -static PyObject *__pyx_cython_runtime = NULL; -static PyObject *__pyx_empty_tuple; -static PyObject *__pyx_empty_bytes; -static PyObject *__pyx_empty_unicode; -static int __pyx_lineno; -static int __pyx_clineno = 0; -static const char * __pyx_cfilenm= __FILE__; -static const char *__pyx_filename; - - -static const char *__pyx_f[] = { - "stringsource", - "sasl/saslwrapper.pyx", -}; - -/*--- Type declarations ---*/ -struct __pyx_obj_4sasl_11saslwrapper_Client; - -/* "sasl/saslwrapper.pyx":50 - * - * - * cdef class Client: # <<<<<<<<<<<<<< - * cdef ClientImpl _this - * - */ -struct __pyx_obj_4sasl_11saslwrapper_Client { - PyObject_HEAD - struct __pyx_vtabstruct_4sasl_11saslwrapper_Client *__pyx_vtab; - 
saslwrapper::ClientImpl _this; -}; - - - -struct __pyx_vtabstruct_4sasl_11saslwrapper_Client { - PyObject *(*setAttr)(struct __pyx_obj_4sasl_11saslwrapper_Client *, PyObject *, PyObject *, int __pyx_skip_dispatch); - PyObject *(*init)(struct __pyx_obj_4sasl_11saslwrapper_Client *, int __pyx_skip_dispatch); - PyObject *(*start)(struct __pyx_obj_4sasl_11saslwrapper_Client *, PyObject *, int __pyx_skip_dispatch); - PyObject *(*step)(struct __pyx_obj_4sasl_11saslwrapper_Client *, PyObject *, int __pyx_skip_dispatch); - PyObject *(*encode)(struct __pyx_obj_4sasl_11saslwrapper_Client *, PyObject *, int __pyx_skip_dispatch); - PyObject *(*decode)(struct __pyx_obj_4sasl_11saslwrapper_Client *, PyObject *, int __pyx_skip_dispatch); - PyObject *(*getUserId)(struct __pyx_obj_4sasl_11saslwrapper_Client *, int __pyx_skip_dispatch); - PyObject *(*getSSF)(struct __pyx_obj_4sasl_11saslwrapper_Client *, int __pyx_skip_dispatch); - PyObject *(*getError)(struct __pyx_obj_4sasl_11saslwrapper_Client *, int __pyx_skip_dispatch); -}; -static struct __pyx_vtabstruct_4sasl_11saslwrapper_Client *__pyx_vtabptr_4sasl_11saslwrapper_Client; - -/* --- Runtime support code (head) --- */ -/* Refnanny.proto */ -#ifndef CYTHON_REFNANNY - #define CYTHON_REFNANNY 0 -#endif -#if CYTHON_REFNANNY - typedef struct { - void (*INCREF)(void*, PyObject*, int); - void (*DECREF)(void*, PyObject*, int); - void (*GOTREF)(void*, PyObject*, int); - void (*GIVEREF)(void*, PyObject*, int); - void* (*SetupContext)(const char*, int, const char*); - void (*FinishContext)(void**); - } __Pyx_RefNannyAPIStruct; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); - #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; -#ifdef WITH_THREAD - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - if (acquire_gil) {\ - PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ - __pyx_refnanny = 
__Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - PyGILState_Release(__pyx_gilstate_save);\ - } else {\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ - } -#else - #define __Pyx_RefNannySetupContext(name, acquire_gil)\ - __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) -#endif - #define __Pyx_RefNannyFinishContext()\ - __Pyx_RefNanny->FinishContext(&__pyx_refnanny) - #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) - #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) - #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) - #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) -#else - #define __Pyx_RefNannyDeclarations - #define __Pyx_RefNannySetupContext(name, acquire_gil) - #define __Pyx_RefNannyFinishContext() - #define __Pyx_INCREF(r) Py_INCREF(r) - #define __Pyx_DECREF(r) Py_DECREF(r) - #define __Pyx_GOTREF(r) - #define __Pyx_GIVEREF(r) - #define __Pyx_XINCREF(r) Py_XINCREF(r) - #define __Pyx_XDECREF(r) Py_XDECREF(r) - #define __Pyx_XGOTREF(r) - #define __Pyx_XGIVEREF(r) -#endif -#define __Pyx_XDECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_XDECREF(tmp);\ - } while (0) -#define __Pyx_DECREF_SET(r, v) do {\ - PyObject *tmp = (PyObject *) r;\ - r = v; __Pyx_DECREF(tmp);\ - } while (0) -#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) -#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) - -/* 
PyObjectGetAttrStr.proto */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) -#endif - -/* GetBuiltinName.proto */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name); - -/* PyDictVersioning.proto */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1) -#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\ - (version_var) = __PYX_GET_DICT_VERSION(dict);\ - (cache_var) = (value); -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\ - (VAR) = __pyx_dict_cached_value;\ - } else {\ - (VAR) = __pyx_dict_cached_value = (LOOKUP);\ - __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\ - }\ -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj); -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj); -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version); -#else -#define __PYX_GET_DICT_VERSION(dict) (0) -#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var) -#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP); -#endif - -/* GetModuleGlobalName.proto */ -#if CYTHON_USE_DICT_VERSIONS -#define __Pyx_GetModuleGlobalName(var, name) {\ - static PY_UINT64_T __pyx_dict_version = 0;\ - static PyObject *__pyx_dict_cached_value = NULL;\ - (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\ - (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\ - __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -#define __Pyx_GetModuleGlobalNameUncached(var, name) {\ - PY_UINT64_T __pyx_dict_version;\ - PyObject *__pyx_dict_cached_value;\ - (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\ -} -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value); -#else -#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name) -#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name) -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name); -#endif - -/* PyCFunctionFastCall.proto */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); -#else -#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) -#endif - -/* PyFunctionFastCall.proto */ -#if CYTHON_FAST_PYCALL -#define __Pyx_PyFunction_FastCall(func, args, nargs)\ - __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs); -#else -#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) -#endif -#define __Pyx_BUILD_ASSERT_EXPR(cond)\ - (sizeof(char [1 - 2*!(cond)]) - 1) -#ifndef Py_MEMBER_SIZE -#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) -#endif - static size_t __pyx_pyframe_localsplus_offset = 0; - #include "frameobject.h" - #define __Pxy_PyFrame_Initialize_Offsets()\ - ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\ - (void)(__pyx_pyframe_localsplus_offset = 
((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) - #define __Pyx_PyFrame_GetLocalsplus(frame)\ - (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) -#endif - -/* PyObjectCall.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); -#else -#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) -#endif - -/* PyObjectCall2Args.proto */ -static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2); - -/* PyObjectCallMethO.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); -#endif - -/* PyObjectCallOneArg.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); - -/* PyThreadStateGet.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; -#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current; -#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type -#else -#define __Pyx_PyThreadState_declare -#define __Pyx_PyThreadState_assign -#define __Pyx_PyErr_Occurred() PyErr_Occurred() -#endif - -/* PyErrFetchRestore.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL) -#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); 
-static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL)) -#else -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#endif -#else -#define __Pyx_PyErr_Clear() PyErr_Clear() -#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc) -#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb) -#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) -#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) -#endif - -/* WriteUnraisableException.proto */ -static void __Pyx_WriteUnraisable(const char *name, int clineno, - int lineno, const char *filename, - int full_traceback, int nogil); - -/* RaiseArgTupleInvalid.proto */ -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); - -/* RaiseDoubleKeywords.proto */ -static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); - -/* ParseKeywords.proto */ -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ - PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ - const char* function_name); - -/* PyObjectCallNoArg.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); -#else -#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) -#endif - -/* RaiseException.proto */ -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); - -/* PyObject_GenericGetAttrNoDict.proto */ -#if 
CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr -#endif - -/* PyObject_GenericGetAttr.proto */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name); -#else -#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr -#endif - -/* SetVTable.proto */ -static int __Pyx_SetVtable(PyObject *dict, void *vtable); - -/* PyErrExceptionMatches.proto */ -#if CYTHON_FAST_THREAD_STATE -#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) -static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); -#else -#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) -#endif - -/* PyObjectGetAttrStrNoError.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name); - -/* SetupReduce.proto */ -static int __Pyx_setup_reduce(PyObject* type_obj); - -/* Import.proto */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); - -/* ImportFrom.proto */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); - -/* CLineInTraceback.proto */ -#ifdef CYTHON_CLINE_IN_TRACEBACK -#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0) -#else -static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line); -#endif - -/* CodeObjectCache.proto */ -typedef struct { - PyCodeObject* code_object; - int code_line; -} __Pyx_CodeObjectCacheEntry; -struct __Pyx_CodeObjectCache { - int count; - int max_count; - __Pyx_CodeObjectCacheEntry* entries; -}; -static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); -static PyCodeObject *__pyx_find_code_object(int code_line); -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); - -/* AddTraceback.proto */ -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename); - -/* None.proto */ -#include - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); - -/* CIntFromPy.proto */ -static CYTHON_INLINE uint32_t __Pyx_PyInt_As_uint32_t(PyObject *); - -/* CIntToPy.proto */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); - -/* CIntFromPy.proto */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); - -/* CIntFromPy.proto */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); - -/* FastTypeChecks.proto */ -#if CYTHON_COMPILING_IN_CPYTHON -#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type) -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type); -static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2); -#else -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) -#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type) -#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || 
PyErr_GivenExceptionMatches(err, type2)) -#endif -#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) - -/* CheckBinaryVersion.proto */ -static int __Pyx_check_binary_version(void); - -/* InitStrings.proto */ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); - -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_setAttr(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value, int __pyx_skip_dispatch); /* proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_init(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, int __pyx_skip_dispatch); /* proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_start(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_mech_list, int __pyx_skip_dispatch); /* proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_step(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_challenge, int __pyx_skip_dispatch); /* proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_encode(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_clear_text, int __pyx_skip_dispatch); /* proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_decode(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_cipher_text, int __pyx_skip_dispatch); /* proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_getUserId(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, int __pyx_skip_dispatch); /* proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_getSSF(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, int __pyx_skip_dispatch); /* proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_getError(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, int __pyx_skip_dispatch); /* proto*/ - -/* Module declarations from 'libcpp' */ - -/* Module declarations from 'libc.stdint' */ - -/* Module declarations from 
'libc.string' */ - -/* Module declarations from 'libcpp.string' */ - -/* Module declarations from 'sasl.saslwrapper' */ -static PyTypeObject *__pyx_ptype_4sasl_11saslwrapper_Client = 0; -static std::string __pyx_f_4sasl_11saslwrapper_to_bytes(PyObject *, int __pyx_skip_dispatch); /*proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_to_string(PyObject *, int __pyx_skip_dispatch); /*proto*/ -static std::string __pyx_convert_string_from_py_std__in_string(PyObject *); /*proto*/ -static CYTHON_INLINE PyObject *__pyx_convert_PyObject_string_to_py_std__in_string(std::string const &); /*proto*/ -static CYTHON_INLINE PyObject *__pyx_convert_PyUnicode_string_to_py_std__in_string(std::string const &); /*proto*/ -static CYTHON_INLINE PyObject *__pyx_convert_PyStr_string_to_py_std__in_string(std::string const &); /*proto*/ -static CYTHON_INLINE PyObject *__pyx_convert_PyBytes_string_to_py_std__in_string(std::string const &); /*proto*/ -static CYTHON_INLINE PyObject *__pyx_convert_PyByteArray_string_to_py_std__in_string(std::string const &); /*proto*/ -#define __Pyx_MODULE_NAME "sasl.saslwrapper" -extern int __pyx_module_is_main_sasl__saslwrapper; -int __pyx_module_is_main_sasl__saslwrapper = 0; - -/* Implementation of 'sasl.saslwrapper' */ -static PyObject *__pyx_builtin_TypeError; -static const char __pyx_k_PY3[] = "PY3"; -static const char __pyx_k_key[] = "key"; -static const char __pyx_k_six[] = "six"; -static const char __pyx_k_init[] = "init"; -static const char __pyx_k_main[] = "__main__"; -static const char __pyx_k_name[] = "__name__"; -static const char __pyx_k_step[] = "step"; -static const char __pyx_k_test[] = "__test__"; -static const char __pyx_k_start[] = "start"; -static const char __pyx_k_utf_8[] = "utf-8"; -static const char __pyx_k_value[] = "value"; -static const char __pyx_k_Client[] = "Client"; -static const char __pyx_k_decode[] = "decode"; -static const char __pyx_k_encode[] = "encode"; -static const char __pyx_k_getSSF[] = "getSSF"; -static const char 
__pyx_k_import[] = "__import__"; -static const char __pyx_k_reduce[] = "__reduce__"; -static const char __pyx_k_setAttr[] = "setAttr"; -static const char __pyx_k_getError[] = "getError"; -static const char __pyx_k_getstate[] = "__getstate__"; -static const char __pyx_k_setstate[] = "__setstate__"; -static const char __pyx_k_TypeError[] = "TypeError"; -static const char __pyx_k_getUserId[] = "getUserId"; -static const char __pyx_k_reduce_ex[] = "__reduce_ex__"; -static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; -static const char __pyx_k_string_types[] = "string_types"; -static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; -static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; -static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; -static const char __pyx_k_self__this_cannot_be_converted_t[] = "self._this cannot be converted to a Python object for pickling"; -static PyObject *__pyx_n_s_Client; -static PyObject *__pyx_n_s_PY3; -static PyObject *__pyx_n_s_TypeError; -static PyObject *__pyx_n_s_cline_in_traceback; -static PyObject *__pyx_n_s_decode; -static PyObject *__pyx_n_s_encode; -static PyObject *__pyx_n_s_getError; -static PyObject *__pyx_n_s_getSSF; -static PyObject *__pyx_n_s_getUserId; -static PyObject *__pyx_n_s_getstate; -static PyObject *__pyx_n_s_import; -static PyObject *__pyx_n_s_init; -static PyObject *__pyx_n_s_key; -static PyObject *__pyx_n_s_main; -static PyObject *__pyx_n_s_name; -static PyObject *__pyx_n_s_pyx_vtable; -static PyObject *__pyx_n_s_reduce; -static PyObject *__pyx_n_s_reduce_cython; -static PyObject *__pyx_n_s_reduce_ex; -static PyObject *__pyx_kp_s_self__this_cannot_be_converted_t; -static PyObject *__pyx_n_s_setAttr; -static PyObject *__pyx_n_s_setstate; -static PyObject *__pyx_n_s_setstate_cython; -static PyObject *__pyx_n_s_six; -static PyObject *__pyx_n_s_start; -static PyObject *__pyx_n_s_step; -static PyObject *__pyx_n_s_string_types; -static PyObject *__pyx_n_s_test; -static 
PyObject *__pyx_kp_u_utf_8; -static PyObject *__pyx_n_s_value; -static PyObject *__pyx_pf_4sasl_11saslwrapper_to_bytes(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_bytes_or_str); /* proto */ -static PyObject *__pyx_pf_4sasl_11saslwrapper_2to_string(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_bytes_or_str); /* proto */ -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_setAttr(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value); /* proto */ -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_2init(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_4start(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_mech_list); /* proto */ -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_6step(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_challenge); /* proto */ -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_8encode(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_clear_text); /* proto */ -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_10decode(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_cipher_text); /* proto */ -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_12getUserId(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_14getSSF(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_16getError(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_18__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self); /* proto */ -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_20__setstate_cython__(CYTHON_UNUSED struct 
__pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */ -static PyObject *__pyx_tp_new_4sasl_11saslwrapper_Client(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ -static PyObject *__pyx_tuple_; -static PyObject *__pyx_tuple__2; -/* Late includes */ - -/* "sasl/saslwrapper.pyx":38 - * - * - * cpdef string_t to_bytes(bytes_or_str): # <<<<<<<<<<<<<< - * if PY3 and isinstance(bytes_or_str, string_types): - * return bytes_or_str.encode('utf-8') - */ - -static PyObject *__pyx_pw_4sasl_11saslwrapper_1to_bytes(PyObject *__pyx_self, PyObject *__pyx_v_bytes_or_str); /*proto*/ -static std::string __pyx_f_4sasl_11saslwrapper_to_bytes(PyObject *__pyx_v_bytes_or_str, CYTHON_UNUSED int __pyx_skip_dispatch) { - std::string __pyx_r; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - int __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - std::string __pyx_t_7; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("to_bytes", 0); - - /* "sasl/saslwrapper.pyx":39 - * - * cpdef string_t to_bytes(bytes_or_str): - * if PY3 and isinstance(bytes_or_str, string_types): # <<<<<<<<<<<<<< - * return bytes_or_str.encode('utf-8') - * return bytes_or_str - */ - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_PY3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 39, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(1, 39, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__pyx_t_3) { - } else { - __pyx_t_1 = __pyx_t_3; - goto __pyx_L4_bool_binop_done; - } - __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_string_types); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 39, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyObject_IsInstance(__pyx_v_bytes_or_str, __pyx_t_2); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(1, 39, 
__pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_4 = (__pyx_t_3 != 0); - __pyx_t_1 = __pyx_t_4; - __pyx_L4_bool_binop_done:; - if (__pyx_t_1) { - - /* "sasl/saslwrapper.pyx":40 - * cpdef string_t to_bytes(bytes_or_str): - * if PY3 and isinstance(bytes_or_str, string_types): - * return bytes_or_str.encode('utf-8') # <<<<<<<<<<<<<< - * return bytes_or_str - * - */ - __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_bytes_or_str, __pyx_n_s_encode); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 40, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { - __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); - if (likely(__pyx_t_6)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); - __Pyx_INCREF(__pyx_t_6); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_5, function); - } - } - __pyx_t_2 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_6, __pyx_kp_u_utf_8) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_kp_u_utf_8); - __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 40, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_7 = __pyx_convert_string_from_py_std__in_string(__pyx_t_2); if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 40, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_r = __pyx_t_7; - goto __pyx_L0; - - /* "sasl/saslwrapper.pyx":39 - * - * cpdef string_t to_bytes(bytes_or_str): - * if PY3 and isinstance(bytes_or_str, string_types): # <<<<<<<<<<<<<< - * return bytes_or_str.encode('utf-8') - * return bytes_or_str - */ - } - - /* "sasl/saslwrapper.pyx":41 - * if PY3 and isinstance(bytes_or_str, string_types): - * return bytes_or_str.encode('utf-8') - * return bytes_or_str # <<<<<<<<<<<<<< - * - * - */ - __pyx_t_7 = __pyx_convert_string_from_py_std__in_string(__pyx_v_bytes_or_str); if (unlikely(PyErr_Occurred())) __PYX_ERR(1, 41, __pyx_L1_error) - __pyx_r = __pyx_t_7; - goto 
__pyx_L0; - - /* "sasl/saslwrapper.pyx":38 - * - * - * cpdef string_t to_bytes(bytes_or_str): # <<<<<<<<<<<<<< - * if PY3 and isinstance(bytes_or_str, string_types): - * return bytes_or_str.encode('utf-8') - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_WriteUnraisable("sasl.saslwrapper.to_bytes", __pyx_clineno, __pyx_lineno, __pyx_filename, 1, 0); - __Pyx_pretend_to_initialize(&__pyx_r); - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_4sasl_11saslwrapper_1to_bytes(PyObject *__pyx_self, PyObject *__pyx_v_bytes_or_str); /*proto*/ -static PyObject *__pyx_pw_4sasl_11saslwrapper_1to_bytes(PyObject *__pyx_self, PyObject *__pyx_v_bytes_or_str) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("to_bytes (wrapper)", 0); - __pyx_r = __pyx_pf_4sasl_11saslwrapper_to_bytes(__pyx_self, ((PyObject *)__pyx_v_bytes_or_str)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4sasl_11saslwrapper_to_bytes(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_bytes_or_str) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("to_bytes", 0); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_convert_PyBytes_string_to_py_std__in_string(__pyx_f_4sasl_11saslwrapper_to_bytes(__pyx_v_bytes_or_str, 0)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 38, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sasl.saslwrapper.to_bytes", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - 
__Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sasl/saslwrapper.pyx":44 - * - * - * cpdef to_string(bytes_or_str): # <<<<<<<<<<<<<< - * if isinstance(bytes_or_str, bytes): - * return bytes_or_str.decode('utf-8') - */ - -static PyObject *__pyx_pw_4sasl_11saslwrapper_3to_string(PyObject *__pyx_self, PyObject *__pyx_v_bytes_or_str); /*proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_to_string(PyObject *__pyx_v_bytes_or_str, CYTHON_UNUSED int __pyx_skip_dispatch) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("to_string", 0); - - /* "sasl/saslwrapper.pyx":45 - * - * cpdef to_string(bytes_or_str): - * if isinstance(bytes_or_str, bytes): # <<<<<<<<<<<<<< - * return bytes_or_str.decode('utf-8') - * return bytes_or_str - */ - __pyx_t_1 = PyBytes_Check(__pyx_v_bytes_or_str); - __pyx_t_2 = (__pyx_t_1 != 0); - if (__pyx_t_2) { - - /* "sasl/saslwrapper.pyx":46 - * cpdef to_string(bytes_or_str): - * if isinstance(bytes_or_str, bytes): - * return bytes_or_str.decode('utf-8') # <<<<<<<<<<<<<< - * return bytes_or_str - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_bytes_or_str, __pyx_n_s_decode); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 46, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = NULL; - if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { - __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); - if (likely(__pyx_t_5)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); - __Pyx_INCREF(__pyx_t_5); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_4, function); - } - } - __pyx_t_3 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_kp_u_utf_8) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_kp_u_utf_8); - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 46, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sasl/saslwrapper.pyx":45 - * - * cpdef to_string(bytes_or_str): - * if isinstance(bytes_or_str, bytes): # <<<<<<<<<<<<<< - * return bytes_or_str.decode('utf-8') - * return bytes_or_str - */ - } - - /* "sasl/saslwrapper.pyx":47 - * if isinstance(bytes_or_str, bytes): - * return bytes_or_str.decode('utf-8') - * return bytes_or_str # <<<<<<<<<<<<<< - * - * - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_bytes_or_str); - __pyx_r = __pyx_v_bytes_or_str; - goto __pyx_L0; - - /* "sasl/saslwrapper.pyx":44 - * - * - * cpdef to_string(bytes_or_str): # <<<<<<<<<<<<<< - * if isinstance(bytes_or_str, bytes): - * return bytes_or_str.decode('utf-8') - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("sasl.saslwrapper.to_string", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_4sasl_11saslwrapper_3to_string(PyObject *__pyx_self, PyObject *__pyx_v_bytes_or_str); /*proto*/ -static PyObject *__pyx_pw_4sasl_11saslwrapper_3to_string(PyObject *__pyx_self, PyObject *__pyx_v_bytes_or_str) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("to_string (wrapper)", 0); - __pyx_r = __pyx_pf_4sasl_11saslwrapper_2to_string(__pyx_self, ((PyObject *)__pyx_v_bytes_or_str)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4sasl_11saslwrapper_2to_string(CYTHON_UNUSED PyObject 
*__pyx_self, PyObject *__pyx_v_bytes_or_str) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("to_string", 0); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_4sasl_11saslwrapper_to_string(__pyx_v_bytes_or_str, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 44, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sasl.saslwrapper.to_string", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sasl/saslwrapper.pyx":53 - * cdef ClientImpl _this - * - * cpdef setAttr(self, key, value): # <<<<<<<<<<<<<< - * if isinstance(value, int): - * return self._this.setAttr(to_bytes(key), value) - */ - -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_1setAttr(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_setAttr(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value, int __pyx_skip_dispatch) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - int __pyx_t_7; - int __pyx_t_8; - uint32_t __pyx_t_9; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("setAttr", 0); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || (Py_TYPE(((PyObject *)__pyx_v_self))->tp_flags & 
(Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_type_dict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_setAttr); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 53, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != (PyCFunction)(void*)__pyx_pw_4sasl_11saslwrapper_6Client_1setAttr)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - __pyx_t_5 = 0; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - __pyx_t_5 = 1; - } - } - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(__pyx_t_3)) { - PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_v_key, __pyx_v_value}; - __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 53, __pyx_L1_error) - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_GOTREF(__pyx_t_2); - } else - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { - PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_v_key, __pyx_v_value}; - __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 53, __pyx_L1_error) - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_GOTREF(__pyx_t_2); - } else - #endif - { - 
__pyx_t_6 = PyTuple_New(2+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 53, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_6); - if (__pyx_t_4) { - __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL; - } - __Pyx_INCREF(__pyx_v_key); - __Pyx_GIVEREF(__pyx_v_key); - PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_v_key); - __Pyx_INCREF(__pyx_v_value); - __Pyx_GIVEREF(__pyx_v_value); - PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_value); - __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 53, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_type_dict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sasl/saslwrapper.pyx":54 - * - * cpdef setAttr(self, key, value): - * if isinstance(value, int): # <<<<<<<<<<<<<< - * return self._this.setAttr(to_bytes(key), value) - * elif isinstance(value, string_types): - */ - __pyx_t_7 = PyInt_Check(__pyx_v_value); - __pyx_t_8 = (__pyx_t_7 != 0); - if (__pyx_t_8) { - - /* "sasl/saslwrapper.pyx":55 - * cpdef setAttr(self, key, value): - * if isinstance(value, int): - * return self._this.setAttr(to_bytes(key), value) # <<<<<<<<<<<<<< - * elif isinstance(value, string_types): - * return self._this.setAttr(to_bytes(key), to_bytes(value)) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_9 = 
__Pyx_PyInt_As_uint32_t(__pyx_v_value); if (unlikely((__pyx_t_9 == ((uint32_t)-1)) && PyErr_Occurred())) __PYX_ERR(1, 55, __pyx_L1_error) - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->_this.setAttr(__pyx_f_4sasl_11saslwrapper_to_bytes(__pyx_v_key, 0), ((uint32_t)__pyx_t_9))); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 55, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sasl/saslwrapper.pyx":54 - * - * cpdef setAttr(self, key, value): - * if isinstance(value, int): # <<<<<<<<<<<<<< - * return self._this.setAttr(to_bytes(key), value) - * elif isinstance(value, string_types): - */ - } - - /* "sasl/saslwrapper.pyx":56 - * if isinstance(value, int): - * return self._this.setAttr(to_bytes(key), value) - * elif isinstance(value, string_types): # <<<<<<<<<<<<<< - * return self._this.setAttr(to_bytes(key), to_bytes(value)) - * - */ - __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_string_types); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 56, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_8 = PyObject_IsInstance(__pyx_v_value, __pyx_t_1); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(1, 56, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_7 = (__pyx_t_8 != 0); - if (__pyx_t_7) { - - /* "sasl/saslwrapper.pyx":57 - * return self._this.setAttr(to_bytes(key), value) - * elif isinstance(value, string_types): - * return self._this.setAttr(to_bytes(key), to_bytes(value)) # <<<<<<<<<<<<<< - * - * cpdef init(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->_this.setAttr(__pyx_f_4sasl_11saslwrapper_to_bytes(__pyx_v_key, 0), ((std::string)__pyx_f_4sasl_11saslwrapper_to_bytes(__pyx_v_value, 0)))); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 57, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sasl/saslwrapper.pyx":56 - * if isinstance(value, int): - * return self._this.setAttr(to_bytes(key), value) - * elif 
isinstance(value, string_types): # <<<<<<<<<<<<<< - * return self._this.setAttr(to_bytes(key), to_bytes(value)) - * - */ - } - - /* "sasl/saslwrapper.pyx":53 - * cdef ClientImpl _this - * - * cpdef setAttr(self, key, value): # <<<<<<<<<<<<<< - * if isinstance(value, int): - * return self._this.setAttr(to_bytes(key), value) - */ - - /* function exit code */ - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("sasl.saslwrapper.Client.setAttr", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_1setAttr(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_1setAttr(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_key = 0; - PyObject *__pyx_v_value = 0; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("setAttr (wrapper)", 0); - { - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_key,&__pyx_n_s_value,0}; - PyObject* values[2] = {0,0}; - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args; - const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); - switch (pos_args) { - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - CYTHON_FALLTHROUGH; - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - CYTHON_FALLTHROUGH; - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - kw_args = PyDict_Size(__pyx_kwds); - switch (pos_args) { - case 0: - if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_key)) != 0)) kw_args--; - else goto 
__pyx_L5_argtuple_error; - CYTHON_FALLTHROUGH; - case 1: - if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_value)) != 0)) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("setAttr", 1, 2, 2, 1); __PYX_ERR(1, 53, __pyx_L3_error) - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "setAttr") < 0)) __PYX_ERR(1, 53, __pyx_L3_error) - } - } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { - goto __pyx_L5_argtuple_error; - } else { - values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - } - __pyx_v_key = values[0]; - __pyx_v_value = values[1]; - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("setAttr", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 53, __pyx_L3_error) - __pyx_L3_error:; - __Pyx_AddTraceback("sasl.saslwrapper.Client.setAttr", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_RefNannyFinishContext(); - return NULL; - __pyx_L4_argument_unpacking_done:; - __pyx_r = __pyx_pf_4sasl_11saslwrapper_6Client_setAttr(((struct __pyx_obj_4sasl_11saslwrapper_Client *)__pyx_v_self), __pyx_v_key, __pyx_v_value); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_setAttr(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_key, PyObject *__pyx_v_value) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("setAttr", 0); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_4sasl_11saslwrapper_6Client_setAttr(__pyx_v_self, __pyx_v_key, __pyx_v_value, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 53, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code 
*/ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sasl.saslwrapper.Client.setAttr", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sasl/saslwrapper.pyx":59 - * return self._this.setAttr(to_bytes(key), to_bytes(value)) - * - * cpdef init(self): # <<<<<<<<<<<<<< - * return self._this.init() - * - */ - -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_3init(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_init(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, int __pyx_skip_dispatch) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("init", 0); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || (Py_TYPE(((PyObject *)__pyx_v_self))->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_type_dict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_init); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 59, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != 
(PyCFunction)(void*)__pyx_pw_4sasl_11saslwrapper_6Client_3init)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4) : __Pyx_PyObject_CallNoArg(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 59, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_type_dict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sasl/saslwrapper.pyx":60 - * - * cpdef init(self): - * return self._this.init() # <<<<<<<<<<<<<< - * - * cpdef start(self, mech_list): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->_this.init()); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 60, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sasl/saslwrapper.pyx":59 - * return self._this.setAttr(to_bytes(key), to_bytes(value)) - * - * cpdef init(self): # <<<<<<<<<<<<<< - * return self._this.init() - * - */ - - /* function exit code */ - 
__pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sasl.saslwrapper.Client.init", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_3init(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_3init(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("init (wrapper)", 0); - __pyx_r = __pyx_pf_4sasl_11saslwrapper_6Client_2init(((struct __pyx_obj_4sasl_11saslwrapper_Client *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_2init(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("init", 0); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_4sasl_11saslwrapper_6Client_init(__pyx_v_self, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 59, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sasl.saslwrapper.Client.init", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sasl/saslwrapper.pyx":62 - * return self._this.init() - * - * cpdef start(self, mech_list): # <<<<<<<<<<<<<< - * cdef string_t chosen - * cdef string_t initial_response - */ - -static PyObject 
*__pyx_pw_4sasl_11saslwrapper_6Client_5start(PyObject *__pyx_v_self, PyObject *__pyx_v_mech_list); /*proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_start(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_mech_list, int __pyx_skip_dispatch) { - std::string __pyx_v_chosen; - std::string __pyx_v_initial_response; - bool __pyx_v_success; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("start", 0); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || (Py_TYPE(((PyObject *)__pyx_v_self))->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_type_dict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_start); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 62, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != (PyCFunction)(void*)__pyx_pw_4sasl_11saslwrapper_6Client_5start)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = 
PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_mech_list) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_mech_list); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 62, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_type_dict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sasl/saslwrapper.pyx":65 - * cdef string_t chosen - * cdef string_t initial_response - * success = self._this.start(to_bytes(mech_list), chosen, initial_response) # <<<<<<<<<<<<<< - * return (success, chosen, initial_response) - * - */ - __pyx_v_success = __pyx_v_self->_this.start(__pyx_f_4sasl_11saslwrapper_to_bytes(__pyx_v_mech_list, 0), __pyx_v_chosen, __pyx_v_initial_response); - - /* "sasl/saslwrapper.pyx":66 - * cdef string_t initial_response - * success = self._this.start(to_bytes(mech_list), chosen, initial_response) - * return (success, chosen, initial_response) # <<<<<<<<<<<<<< - * - * cpdef step(self, challenge): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_success); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = 
__pyx_convert_PyBytes_string_to_py_std__in_string(__pyx_v_chosen); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __pyx_convert_PyBytes_string_to_py_std__in_string(__pyx_v_initial_response); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 66, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_4); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_3); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_t_3 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - - /* "sasl/saslwrapper.pyx":62 - * return self._this.init() - * - * cpdef start(self, mech_list): # <<<<<<<<<<<<<< - * cdef string_t chosen - * cdef string_t initial_response - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sasl.saslwrapper.Client.start", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_5start(PyObject *__pyx_v_self, PyObject *__pyx_v_mech_list); /*proto*/ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_5start(PyObject *__pyx_v_self, PyObject *__pyx_v_mech_list) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("start (wrapper)", 0); - __pyx_r = __pyx_pf_4sasl_11saslwrapper_6Client_4start(((struct __pyx_obj_4sasl_11saslwrapper_Client *)__pyx_v_self), ((PyObject *)__pyx_v_mech_list)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject 
*__pyx_pf_4sasl_11saslwrapper_6Client_4start(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_mech_list) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("start", 0); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_4sasl_11saslwrapper_6Client_start(__pyx_v_self, __pyx_v_mech_list, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 62, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sasl.saslwrapper.Client.start", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sasl/saslwrapper.pyx":68 - * return (success, chosen, initial_response) - * - * cpdef step(self, challenge): # <<<<<<<<<<<<<< - * cdef string_t response - * success = self._this.step(to_bytes(challenge), response) - */ - -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_7step(PyObject *__pyx_v_self, PyObject *__pyx_v_challenge); /*proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_step(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_challenge, int __pyx_skip_dispatch) { - std::string __pyx_v_response; - bool __pyx_v_success; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("step", 0); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || (Py_TYPE(((PyObject 
*)__pyx_v_self))->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_type_dict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_step); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 68, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != (PyCFunction)(void*)__pyx_pw_4sasl_11saslwrapper_6Client_7step)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_2 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_challenge) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_challenge); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 68, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_type_dict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sasl/saslwrapper.pyx":70 - * cpdef step(self, challenge): - * cdef string_t response - * success = self._this.step(to_bytes(challenge), response) # <<<<<<<<<<<<<< - * return (success, response) - * - */ - __pyx_v_success = __pyx_v_self->_this.step(__pyx_f_4sasl_11saslwrapper_to_bytes(__pyx_v_challenge, 0), __pyx_v_response); - - /* "sasl/saslwrapper.pyx":71 - * cdef string_t response - * success = self._this.step(to_bytes(challenge), response) - * return (success, response) # <<<<<<<<<<<<<< - * - * cpdef encode(self, clear_text): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_success); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 71, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __pyx_convert_PyBytes_string_to_py_std__in_string(__pyx_v_response); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 71, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 71, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_1); - 
PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_2); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sasl/saslwrapper.pyx":68 - * return (success, chosen, initial_response) - * - * cpdef step(self, challenge): # <<<<<<<<<<<<<< - * cdef string_t response - * success = self._this.step(to_bytes(challenge), response) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sasl.saslwrapper.Client.step", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_7step(PyObject *__pyx_v_self, PyObject *__pyx_v_challenge); /*proto*/ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_7step(PyObject *__pyx_v_self, PyObject *__pyx_v_challenge) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("step (wrapper)", 0); - __pyx_r = __pyx_pf_4sasl_11saslwrapper_6Client_6step(((struct __pyx_obj_4sasl_11saslwrapper_Client *)__pyx_v_self), ((PyObject *)__pyx_v_challenge)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_6step(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_challenge) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("step", 0); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_4sasl_11saslwrapper_6Client_step(__pyx_v_self, __pyx_v_challenge, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 68, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = 
__pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sasl.saslwrapper.Client.step", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sasl/saslwrapper.pyx":73 - * return (success, response) - * - * cpdef encode(self, clear_text): # <<<<<<<<<<<<<< - * cdef string_t cipher_text - * success = self._this.encode(to_bytes(clear_text), cipher_text) - */ - -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_9encode(PyObject *__pyx_v_self, PyObject *__pyx_v_clear_text); /*proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_encode(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_clear_text, int __pyx_skip_dispatch) { - std::string __pyx_v_cipher_text; - bool __pyx_v_success; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("encode", 0); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || (Py_TYPE(((PyObject *)__pyx_v_self))->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_type_dict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = 
__Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_encode); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 73, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != (PyCFunction)(void*)__pyx_pw_4sasl_11saslwrapper_6Client_9encode)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_clear_text) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_clear_text); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 73, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_type_dict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sasl/saslwrapper.pyx":75 - * cpdef encode(self, clear_text): - * cdef string_t cipher_text - * success = self._this.encode(to_bytes(clear_text), cipher_text) # <<<<<<<<<<<<<< - * return (success, cipher_text) - * - */ - __pyx_v_success = 
__pyx_v_self->_this.encode(__pyx_f_4sasl_11saslwrapper_to_bytes(__pyx_v_clear_text, 0), __pyx_v_cipher_text); - - /* "sasl/saslwrapper.pyx":76 - * cdef string_t cipher_text - * success = self._this.encode(to_bytes(clear_text), cipher_text) - * return (success, cipher_text) # <<<<<<<<<<<<<< - * - * cpdef decode(self, cipher_text): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_success); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 76, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __pyx_convert_PyBytes_string_to_py_std__in_string(__pyx_v_cipher_text); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 76, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 76, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_2); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sasl/saslwrapper.pyx":73 - * return (success, response) - * - * cpdef encode(self, clear_text): # <<<<<<<<<<<<<< - * cdef string_t cipher_text - * success = self._this.encode(to_bytes(clear_text), cipher_text) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sasl.saslwrapper.Client.encode", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_9encode(PyObject *__pyx_v_self, PyObject *__pyx_v_clear_text); /*proto*/ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_9encode(PyObject *__pyx_v_self, PyObject *__pyx_v_clear_text) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("encode (wrapper)", 0); 
- __pyx_r = __pyx_pf_4sasl_11saslwrapper_6Client_8encode(((struct __pyx_obj_4sasl_11saslwrapper_Client *)__pyx_v_self), ((PyObject *)__pyx_v_clear_text)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_8encode(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_clear_text) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("encode", 0); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_4sasl_11saslwrapper_6Client_encode(__pyx_v_self, __pyx_v_clear_text, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 73, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sasl.saslwrapper.Client.encode", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sasl/saslwrapper.pyx":78 - * return (success, cipher_text) - * - * cpdef decode(self, cipher_text): # <<<<<<<<<<<<<< - * cdef string_t clear_text - * success = self._this.decode(to_bytes(cipher_text), clear_text) - */ - -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_11decode(PyObject *__pyx_v_self, PyObject *__pyx_v_cipher_text); /*proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_decode(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_cipher_text, int __pyx_skip_dispatch) { - std::string __pyx_v_clear_text; - bool __pyx_v_success; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int 
__pyx_clineno = 0; - __Pyx_RefNannySetupContext("decode", 0); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || (Py_TYPE(((PyObject *)__pyx_v_self))->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_type_dict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_decode); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 78, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != (PyCFunction)(void*)__pyx_pw_4sasl_11saslwrapper_6Client_11decode)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_2 = (__pyx_t_4) ? 
__Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_4, __pyx_v_cipher_text) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_cipher_text); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 78, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_type_dict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sasl/saslwrapper.pyx":80 - * cpdef decode(self, cipher_text): - * cdef string_t clear_text - * success = self._this.decode(to_bytes(cipher_text), clear_text) # <<<<<<<<<<<<<< - * return (success, clear_text) - * - */ - __pyx_v_success = __pyx_v_self->_this.decode(__pyx_f_4sasl_11saslwrapper_to_bytes(__pyx_v_cipher_text, 0), __pyx_v_clear_text); - - /* "sasl/saslwrapper.pyx":81 - * cdef string_t clear_text - * success = self._this.decode(to_bytes(cipher_text), clear_text) - * return (success, clear_text) # <<<<<<<<<<<<<< - * - * cpdef getUserId(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_success); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 81, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __pyx_convert_PyBytes_string_to_py_std__in_string(__pyx_v_clear_text); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 81, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 81, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - 
__Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_2); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sasl/saslwrapper.pyx":78 - * return (success, cipher_text) - * - * cpdef decode(self, cipher_text): # <<<<<<<<<<<<<< - * cdef string_t clear_text - * success = self._this.decode(to_bytes(cipher_text), clear_text) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sasl.saslwrapper.Client.decode", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_11decode(PyObject *__pyx_v_self, PyObject *__pyx_v_cipher_text); /*proto*/ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_11decode(PyObject *__pyx_v_self, PyObject *__pyx_v_cipher_text) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("decode (wrapper)", 0); - __pyx_r = __pyx_pf_4sasl_11saslwrapper_6Client_10decode(((struct __pyx_obj_4sasl_11saslwrapper_Client *)__pyx_v_self), ((PyObject *)__pyx_v_cipher_text)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_10decode(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, PyObject *__pyx_v_cipher_text) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("decode", 0); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_4sasl_11saslwrapper_6Client_decode(__pyx_v_self, __pyx_v_cipher_text, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 78, 
__pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sasl.saslwrapper.Client.decode", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sasl/saslwrapper.pyx":83 - * return (success, clear_text) - * - * cpdef getUserId(self): # <<<<<<<<<<<<<< - * cdef string_t user_id - * success = self._this.getUserId(user_id) - */ - -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_13getUserId(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_getUserId(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, int __pyx_skip_dispatch) { - std::string __pyx_v_user_id; - bool __pyx_v_success; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("getUserId", 0); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || (Py_TYPE(((PyObject *)__pyx_v_self))->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_type_dict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = 
__Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_getUserId); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 83, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != (PyCFunction)(void*)__pyx_pw_4sasl_11saslwrapper_6Client_13getUserId)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4) : __Pyx_PyObject_CallNoArg(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 83, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_type_dict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sasl/saslwrapper.pyx":85 - * cpdef getUserId(self): - * cdef string_t user_id - * success = self._this.getUserId(user_id) # <<<<<<<<<<<<<< - * return (success, user_id) - * - */ - __pyx_v_success = __pyx_v_self->_this.getUserId(__pyx_v_user_id); - - /* "sasl/saslwrapper.pyx":86 - * cdef string_t user_id - * success = 
self._this.getUserId(user_id) - * return (success, user_id) # <<<<<<<<<<<<<< - * - * cpdef getSSF(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_success); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __pyx_convert_PyBytes_string_to_py_std__in_string(__pyx_v_user_id); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 86, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_2); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sasl/saslwrapper.pyx":83 - * return (success, clear_text) - * - * cpdef getUserId(self): # <<<<<<<<<<<<<< - * cdef string_t user_id - * success = self._this.getUserId(user_id) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sasl.saslwrapper.Client.getUserId", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_13getUserId(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_13getUserId(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("getUserId (wrapper)", 0); - __pyx_r = __pyx_pf_4sasl_11saslwrapper_6Client_12getUserId(((struct __pyx_obj_4sasl_11saslwrapper_Client *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject 
*__pyx_pf_4sasl_11saslwrapper_6Client_12getUserId(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("getUserId", 0); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_4sasl_11saslwrapper_6Client_getUserId(__pyx_v_self, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 83, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sasl.saslwrapper.Client.getUserId", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sasl/saslwrapper.pyx":88 - * return (success, user_id) - * - * cpdef getSSF(self): # <<<<<<<<<<<<<< - * cdef int ssf - * success = self._this.getSSF(&ssf) - */ - -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_15getSSF(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_getSSF(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, int __pyx_skip_dispatch) { - int __pyx_v_ssf; - bool __pyx_v_success; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("getSSF", 0); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || (Py_TYPE(((PyObject *)__pyx_v_self))->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && 
CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_type_dict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_getSSF); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 88, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != (PyCFunction)(void*)__pyx_pw_4sasl_11saslwrapper_6Client_15getSSF)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_2 = (__pyx_t_4) ? 
__Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4) : __Pyx_PyObject_CallNoArg(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 88, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_type_dict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sasl/saslwrapper.pyx":90 - * cpdef getSSF(self): - * cdef int ssf - * success = self._this.getSSF(&ssf) # <<<<<<<<<<<<<< - * return (success, ssf) - * - */ - __pyx_v_success = __pyx_v_self->_this.getSSF((&__pyx_v_ssf)); - - /* "sasl/saslwrapper.pyx":91 - * cdef int ssf - * success = self._this.getSSF(&ssf) - * return (success, ssf) # <<<<<<<<<<<<<< - * - * cpdef getError(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_success); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 91, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_ssf); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 91, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 91, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_3); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_2); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - /* "sasl/saslwrapper.pyx":88 - * 
return (success, user_id) - * - * cpdef getSSF(self): # <<<<<<<<<<<<<< - * cdef int ssf - * success = self._this.getSSF(&ssf) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sasl.saslwrapper.Client.getSSF", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_15getSSF(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_15getSSF(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("getSSF (wrapper)", 0); - __pyx_r = __pyx_pf_4sasl_11saslwrapper_6Client_14getSSF(((struct __pyx_obj_4sasl_11saslwrapper_Client *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_14getSSF(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("getSSF", 0); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_4sasl_11saslwrapper_6Client_getSSF(__pyx_v_self, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 88, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sasl.saslwrapper.Client.getSSF", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "sasl/saslwrapper.pyx":93 - 
* return (success, ssf) - * - * cpdef getError(self): # <<<<<<<<<<<<<< - * cdef string_t error - * self._this.getError(error) - */ - -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_17getError(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_f_4sasl_11saslwrapper_6Client_getError(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, int __pyx_skip_dispatch) { - std::string __pyx_v_error; - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("getError", 0); - /* Check if called by wrapper */ - if (unlikely(__pyx_skip_dispatch)) ; - /* Check if overridden in Python */ - else if (unlikely((Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0) || (Py_TYPE(((PyObject *)__pyx_v_self))->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)))) { - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - static PY_UINT64_T __pyx_tp_dict_version = __PYX_DICT_VERSION_INIT, __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - if (unlikely(!__Pyx_object_dict_version_matches(((PyObject *)__pyx_v_self), __pyx_tp_dict_version, __pyx_obj_dict_version))) { - PY_UINT64_T __pyx_type_dict_guard = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - #endif - __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_getError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 93, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != (PyCFunction)(void*)__pyx_pw_4sasl_11saslwrapper_6Client_17getError)) { - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_t_1); - __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; - if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { - __pyx_t_4 = 
PyMethod_GET_SELF(__pyx_t_3); - if (likely(__pyx_t_4)) { - PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); - __Pyx_INCREF(__pyx_t_4); - __Pyx_INCREF(function); - __Pyx_DECREF_SET(__pyx_t_3, function); - } - } - __pyx_t_2 = (__pyx_t_4) ? __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4) : __Pyx_PyObject_CallNoArg(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 93, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - goto __pyx_L0; - } - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - __pyx_tp_dict_version = __Pyx_get_tp_dict_version(((PyObject *)__pyx_v_self)); - __pyx_obj_dict_version = __Pyx_get_object_dict_version(((PyObject *)__pyx_v_self)); - if (unlikely(__pyx_type_dict_guard != __pyx_tp_dict_version)) { - __pyx_tp_dict_version = __pyx_obj_dict_version = __PYX_DICT_VERSION_INIT; - } - #endif - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - #if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_PYTYPE_LOOKUP && CYTHON_USE_TYPE_SLOTS - } - #endif - } - - /* "sasl/saslwrapper.pyx":95 - * cpdef getError(self): - * cdef string_t error - * self._this.getError(error) # <<<<<<<<<<<<<< - * return error - */ - __pyx_v_self->_this.getError(__pyx_v_error); - - /* "sasl/saslwrapper.pyx":96 - * cdef string_t error - * self._this.getError(error) - * return error # <<<<<<<<<<<<<< - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_convert_PyBytes_string_to_py_std__in_string(__pyx_v_error); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 96, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "sasl/saslwrapper.pyx":93 - * return (success, ssf) - * - * cpdef getError(self): # <<<<<<<<<<<<<< - * cdef string_t error - * self._this.getError(error) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - 
__Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("sasl.saslwrapper.Client.getError", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* Python wrapper */ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_17getError(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_17getError(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("getError (wrapper)", 0); - __pyx_r = __pyx_pf_4sasl_11saslwrapper_6Client_16getError(((struct __pyx_obj_4sasl_11saslwrapper_Client *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_16getError(struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("getError", 0); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __pyx_f_4sasl_11saslwrapper_6Client_getError(__pyx_v_self, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 93, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sasl.saslwrapper.Client.getError", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError("self._this cannot be converted to a Python object for pickling") - * def __setstate_cython__(self, __pyx_state): - */ - -/* 
Python wrapper */ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_19__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_19__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_4sasl_11saslwrapper_6Client_18__reduce_cython__(((struct __pyx_obj_4sasl_11saslwrapper_Client *)__pyx_v_self)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_18__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__reduce_cython__", 0); - - /* "(tree fragment)":2 - * def __reduce_cython__(self): - * raise TypeError("self._this cannot be converted to a Python object for pickling") # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("self._this cannot be converted to a Python object for pickling") - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __PYX_ERR(0, 2, __pyx_L1_error) - - /* "(tree fragment)":1 - * def __reduce_cython__(self): # <<<<<<<<<<<<<< - * raise TypeError("self._this cannot be converted to a Python object for pickling") - * def __setstate_cython__(self, __pyx_state): - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sasl.saslwrapper.Client.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 
NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError("self._this cannot be converted to a Python object for pickling") - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError("self._this cannot be converted to a Python object for pickling") - */ - -/* Python wrapper */ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_21__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ -static PyObject *__pyx_pw_4sasl_11saslwrapper_6Client_21__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = 0; - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); - __pyx_r = __pyx_pf_4sasl_11saslwrapper_6Client_20__setstate_cython__(((struct __pyx_obj_4sasl_11saslwrapper_Client *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); - - /* function exit code */ - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_pf_4sasl_11saslwrapper_6Client_20__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_4sasl_11saslwrapper_Client *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__setstate_cython__", 0); - - /* "(tree fragment)":4 - * raise TypeError("self._this cannot be converted to a Python object for pickling") - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("self._this cannot be converted to a Python object for pickling") # <<<<<<<<<<<<<< - */ - __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 
0; - __PYX_ERR(0, 4, __pyx_L1_error) - - /* "(tree fragment)":3 - * def __reduce_cython__(self): - * raise TypeError("self._this cannot be converted to a Python object for pickling") - * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< - * raise TypeError("self._this cannot be converted to a Python object for pickling") - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("sasl.saslwrapper.Client.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = NULL; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "string.from_py":13 - * - * @cname("__pyx_convert_string_from_py_std__in_string") - * cdef string __pyx_convert_string_from_py_std__in_string(object o) except *: # <<<<<<<<<<<<<< - * cdef Py_ssize_t length = 0 - * cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length) - */ - -static std::string __pyx_convert_string_from_py_std__in_string(PyObject *__pyx_v_o) { - Py_ssize_t __pyx_v_length; - char const *__pyx_v_data; - std::string __pyx_r; - __Pyx_RefNannyDeclarations - char const *__pyx_t_1; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_convert_string_from_py_std__in_string", 0); - - /* "string.from_py":14 - * @cname("__pyx_convert_string_from_py_std__in_string") - * cdef string __pyx_convert_string_from_py_std__in_string(object o) except *: - * cdef Py_ssize_t length = 0 # <<<<<<<<<<<<<< - * cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length) - * return string(data, length) - */ - __pyx_v_length = 0; - - /* "string.from_py":15 - * cdef string __pyx_convert_string_from_py_std__in_string(object o) except *: - * cdef Py_ssize_t length = 0 - * cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length) # <<<<<<<<<<<<<< - * return string(data, length) - * - */ - __pyx_t_1 = __Pyx_PyObject_AsStringAndSize(__pyx_v_o, (&__pyx_v_length)); if 
(unlikely(__pyx_t_1 == ((char const *)NULL))) __PYX_ERR(0, 15, __pyx_L1_error) - __pyx_v_data = __pyx_t_1; - - /* "string.from_py":16 - * cdef Py_ssize_t length = 0 - * cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length) - * return string(data, length) # <<<<<<<<<<<<<< - * - * - */ - __pyx_r = std::string(__pyx_v_data, __pyx_v_length); - goto __pyx_L0; - - /* "string.from_py":13 - * - * @cname("__pyx_convert_string_from_py_std__in_string") - * cdef string __pyx_convert_string_from_py_std__in_string(object o) except *: # <<<<<<<<<<<<<< - * cdef Py_ssize_t length = 0 - * cdef const char* data = __Pyx_PyObject_AsStringAndSize(o, &length) - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_AddTraceback("string.from_py.__pyx_convert_string_from_py_std__in_string", __pyx_clineno, __pyx_lineno, __pyx_filename); - __Pyx_pretend_to_initialize(&__pyx_r); - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "string.to_py":31 - * - * @cname("__pyx_convert_PyObject_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyObject_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< - * return __Pyx_PyObject_FromStringAndSize(s.data(), s.size()) - * cdef extern from *: - */ - -static CYTHON_INLINE PyObject *__pyx_convert_PyObject_string_to_py_std__in_string(std::string const &__pyx_v_s) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_convert_PyObject_string_to_py_std__in_string", 0); - - /* "string.to_py":32 - * @cname("__pyx_convert_PyObject_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyObject_string_to_py_std__in_string(const string& s): - * return __Pyx_PyObject_FromStringAndSize(s.data(), s.size()) # <<<<<<<<<<<<<< - * cdef extern from *: - * cdef object __Pyx_PyUnicode_FromStringAndSize(const char*, size_t) - */ - 
__Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyObject_FromStringAndSize(__pyx_v_s.data(), __pyx_v_s.size()); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 32, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "string.to_py":31 - * - * @cname("__pyx_convert_PyObject_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyObject_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< - * return __Pyx_PyObject_FromStringAndSize(s.data(), s.size()) - * cdef extern from *: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("string.to_py.__pyx_convert_PyObject_string_to_py_std__in_string", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "string.to_py":37 - * - * @cname("__pyx_convert_PyUnicode_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyUnicode_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< - * return __Pyx_PyUnicode_FromStringAndSize(s.data(), s.size()) - * cdef extern from *: - */ - -static CYTHON_INLINE PyObject *__pyx_convert_PyUnicode_string_to_py_std__in_string(std::string const &__pyx_v_s) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_convert_PyUnicode_string_to_py_std__in_string", 0); - - /* "string.to_py":38 - * @cname("__pyx_convert_PyUnicode_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyUnicode_string_to_py_std__in_string(const string& s): - * return __Pyx_PyUnicode_FromStringAndSize(s.data(), s.size()) # <<<<<<<<<<<<<< - * cdef extern from *: - * cdef object __Pyx_PyStr_FromStringAndSize(const char*, size_t) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyUnicode_FromStringAndSize(__pyx_v_s.data(), 
__pyx_v_s.size()); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 38, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "string.to_py":37 - * - * @cname("__pyx_convert_PyUnicode_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyUnicode_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< - * return __Pyx_PyUnicode_FromStringAndSize(s.data(), s.size()) - * cdef extern from *: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("string.to_py.__pyx_convert_PyUnicode_string_to_py_std__in_string", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "string.to_py":43 - * - * @cname("__pyx_convert_PyStr_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyStr_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< - * return __Pyx_PyStr_FromStringAndSize(s.data(), s.size()) - * cdef extern from *: - */ - -static CYTHON_INLINE PyObject *__pyx_convert_PyStr_string_to_py_std__in_string(std::string const &__pyx_v_s) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_convert_PyStr_string_to_py_std__in_string", 0); - - /* "string.to_py":44 - * @cname("__pyx_convert_PyStr_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyStr_string_to_py_std__in_string(const string& s): - * return __Pyx_PyStr_FromStringAndSize(s.data(), s.size()) # <<<<<<<<<<<<<< - * cdef extern from *: - * cdef object __Pyx_PyBytes_FromStringAndSize(const char*, size_t) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyStr_FromStringAndSize(__pyx_v_s.data(), __pyx_v_s.size()); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 44, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = 
__pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "string.to_py":43 - * - * @cname("__pyx_convert_PyStr_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyStr_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< - * return __Pyx_PyStr_FromStringAndSize(s.data(), s.size()) - * cdef extern from *: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("string.to_py.__pyx_convert_PyStr_string_to_py_std__in_string", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "string.to_py":49 - * - * @cname("__pyx_convert_PyBytes_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyBytes_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< - * return __Pyx_PyBytes_FromStringAndSize(s.data(), s.size()) - * cdef extern from *: - */ - -static CYTHON_INLINE PyObject *__pyx_convert_PyBytes_string_to_py_std__in_string(std::string const &__pyx_v_s) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_convert_PyBytes_string_to_py_std__in_string", 0); - - /* "string.to_py":50 - * @cname("__pyx_convert_PyBytes_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyBytes_string_to_py_std__in_string(const string& s): - * return __Pyx_PyBytes_FromStringAndSize(s.data(), s.size()) # <<<<<<<<<<<<<< - * cdef extern from *: - * cdef object __Pyx_PyByteArray_FromStringAndSize(const char*, size_t) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_s.data(), __pyx_v_s.size()); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 50, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "string.to_py":49 - * - * 
@cname("__pyx_convert_PyBytes_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyBytes_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< - * return __Pyx_PyBytes_FromStringAndSize(s.data(), s.size()) - * cdef extern from *: - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("string.to_py.__pyx_convert_PyBytes_string_to_py_std__in_string", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "string.to_py":55 - * - * @cname("__pyx_convert_PyByteArray_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyByteArray_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< - * return __Pyx_PyByteArray_FromStringAndSize(s.data(), s.size()) - * - */ - -static CYTHON_INLINE PyObject *__pyx_convert_PyByteArray_string_to_py_std__in_string(std::string const &__pyx_v_s) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannyDeclarations - PyObject *__pyx_t_1 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__pyx_convert_PyByteArray_string_to_py_std__in_string", 0); - - /* "string.to_py":56 - * @cname("__pyx_convert_PyByteArray_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyByteArray_string_to_py_std__in_string(const string& s): - * return __Pyx_PyByteArray_FromStringAndSize(s.data(), s.size()) # <<<<<<<<<<<<<< - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_PyByteArray_FromStringAndSize(__pyx_v_s.data(), __pyx_v_s.size()); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 56, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - /* "string.to_py":55 - * - * @cname("__pyx_convert_PyByteArray_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyByteArray_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< - * 
return __Pyx_PyByteArray_FromStringAndSize(s.data(), s.size()) - * - */ - - /* function exit code */ - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("string.to_py.__pyx_convert_PyByteArray_string_to_py_std__in_string", __pyx_clineno, __pyx_lineno, __pyx_filename); - __pyx_r = 0; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} -static struct __pyx_vtabstruct_4sasl_11saslwrapper_Client __pyx_vtable_4sasl_11saslwrapper_Client; - -static PyObject *__pyx_tp_new_4sasl_11saslwrapper_Client(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { - struct __pyx_obj_4sasl_11saslwrapper_Client *p; - PyObject *o; - if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { - o = (*t->tp_alloc)(t, 0); - } else { - o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); - } - if (unlikely(!o)) return 0; - p = ((struct __pyx_obj_4sasl_11saslwrapper_Client *)o); - p->__pyx_vtab = __pyx_vtabptr_4sasl_11saslwrapper_Client; - new((void*)&(p->_this)) saslwrapper::ClientImpl(); - return o; -} - -static void __pyx_tp_dealloc_4sasl_11saslwrapper_Client(PyObject *o) { - struct __pyx_obj_4sasl_11saslwrapper_Client *p = (struct __pyx_obj_4sasl_11saslwrapper_Client *)o; - #if CYTHON_USE_TP_FINALIZE - if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && (!PyType_IS_GC(Py_TYPE(o)) || !_PyGC_FINALIZED(o))) { - if (PyObject_CallFinalizerFromDealloc(o)) return; - } - #endif - __Pyx_call_destructor(p->_this); - (*Py_TYPE(o)->tp_free)(o); -} - -static PyMethodDef __pyx_methods_4sasl_11saslwrapper_Client[] = { - {"setAttr", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_4sasl_11saslwrapper_6Client_1setAttr, METH_VARARGS|METH_KEYWORDS, 0}, - {"init", (PyCFunction)__pyx_pw_4sasl_11saslwrapper_6Client_3init, METH_NOARGS, 0}, - {"start", (PyCFunction)__pyx_pw_4sasl_11saslwrapper_6Client_5start, METH_O, 0}, - {"step", 
(PyCFunction)__pyx_pw_4sasl_11saslwrapper_6Client_7step, METH_O, 0}, - {"encode", (PyCFunction)__pyx_pw_4sasl_11saslwrapper_6Client_9encode, METH_O, 0}, - {"decode", (PyCFunction)__pyx_pw_4sasl_11saslwrapper_6Client_11decode, METH_O, 0}, - {"getUserId", (PyCFunction)__pyx_pw_4sasl_11saslwrapper_6Client_13getUserId, METH_NOARGS, 0}, - {"getSSF", (PyCFunction)__pyx_pw_4sasl_11saslwrapper_6Client_15getSSF, METH_NOARGS, 0}, - {"getError", (PyCFunction)__pyx_pw_4sasl_11saslwrapper_6Client_17getError, METH_NOARGS, 0}, - {"__reduce_cython__", (PyCFunction)__pyx_pw_4sasl_11saslwrapper_6Client_19__reduce_cython__, METH_NOARGS, 0}, - {"__setstate_cython__", (PyCFunction)__pyx_pw_4sasl_11saslwrapper_6Client_21__setstate_cython__, METH_O, 0}, - {0, 0, 0, 0} -}; - -static PyTypeObject __pyx_type_4sasl_11saslwrapper_Client = { - PyVarObject_HEAD_INIT(0, 0) - "sasl.saslwrapper.Client", /*tp_name*/ - sizeof(struct __pyx_obj_4sasl_11saslwrapper_Client), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_4sasl_11saslwrapper_Client, /*tp_dealloc*/ - #if PY_VERSION_HEX < 0x030800b4 - 0, /*tp_print*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 - 0, /*tp_vectorcall_offset*/ - #endif - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #endif - #if PY_MAJOR_VERSION >= 3 - 0, /*tp_as_async*/ - #endif - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE, /*tp_flags*/ - 0, /*tp_doc*/ - 0, /*tp_traverse*/ - 0, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_4sasl_11saslwrapper_Client, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, 
/*tp_dictoffset*/ - 0, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_4sasl_11saslwrapper_Client, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - 0, /*tp_version_tag*/ - #if PY_VERSION_HEX >= 0x030400a1 - 0, /*tp_finalize*/ - #endif - #if PY_VERSION_HEX >= 0x030800b1 - 0, /*tp_vectorcall*/ - #endif - #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 - 0, /*tp_print*/ - #endif -}; - -static PyMethodDef __pyx_methods[] = { - {"to_bytes", (PyCFunction)__pyx_pw_4sasl_11saslwrapper_1to_bytes, METH_O, 0}, - {"to_string", (PyCFunction)__pyx_pw_4sasl_11saslwrapper_3to_string, METH_O, 0}, - {0, 0, 0, 0} -}; - -#if PY_MAJOR_VERSION >= 3 -#if CYTHON_PEP489_MULTI_PHASE_INIT -static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/ -static int __pyx_pymod_exec_saslwrapper(PyObject* module); /*proto*/ -static PyModuleDef_Slot __pyx_moduledef_slots[] = { - {Py_mod_create, (void*)__pyx_pymod_create}, - {Py_mod_exec, (void*)__pyx_pymod_exec_saslwrapper}, - {0, NULL} -}; -#endif - -static struct PyModuleDef __pyx_moduledef = { - PyModuleDef_HEAD_INIT, - "saslwrapper", - 0, /* m_doc */ - #if CYTHON_PEP489_MULTI_PHASE_INIT - 0, /* m_size */ - #else - -1, /* m_size */ - #endif - __pyx_methods /* m_methods */, - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_moduledef_slots, /* m_slots */ - #else - NULL, /* m_reload */ - #endif - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ -}; -#endif -#ifndef CYTHON_SMALL_CODE -#if defined(__clang__) - #define CYTHON_SMALL_CODE -#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) - #define CYTHON_SMALL_CODE __attribute__((cold)) -#else - #define CYTHON_SMALL_CODE -#endif -#endif - -static __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_n_s_Client, __pyx_k_Client, sizeof(__pyx_k_Client), 0, 0, 1, 1}, - {&__pyx_n_s_PY3, __pyx_k_PY3, sizeof(__pyx_k_PY3), 
0, 0, 1, 1}, - {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1}, - {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, - {&__pyx_n_s_decode, __pyx_k_decode, sizeof(__pyx_k_decode), 0, 0, 1, 1}, - {&__pyx_n_s_encode, __pyx_k_encode, sizeof(__pyx_k_encode), 0, 0, 1, 1}, - {&__pyx_n_s_getError, __pyx_k_getError, sizeof(__pyx_k_getError), 0, 0, 1, 1}, - {&__pyx_n_s_getSSF, __pyx_k_getSSF, sizeof(__pyx_k_getSSF), 0, 0, 1, 1}, - {&__pyx_n_s_getUserId, __pyx_k_getUserId, sizeof(__pyx_k_getUserId), 0, 0, 1, 1}, - {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1}, - {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, - {&__pyx_n_s_init, __pyx_k_init, sizeof(__pyx_k_init), 0, 0, 1, 1}, - {&__pyx_n_s_key, __pyx_k_key, sizeof(__pyx_k_key), 0, 0, 1, 1}, - {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, - {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1}, - {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, - {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, - {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1}, - {&__pyx_kp_s_self__this_cannot_be_converted_t, __pyx_k_self__this_cannot_be_converted_t, sizeof(__pyx_k_self__this_cannot_be_converted_t), 0, 0, 1, 0}, - {&__pyx_n_s_setAttr, __pyx_k_setAttr, sizeof(__pyx_k_setAttr), 0, 0, 1, 1}, - {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1}, - {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, - {&__pyx_n_s_six, __pyx_k_six, sizeof(__pyx_k_six), 0, 0, 1, 1}, - {&__pyx_n_s_start, __pyx_k_start, sizeof(__pyx_k_start), 0, 0, 1, 1}, - {&__pyx_n_s_step, __pyx_k_step, sizeof(__pyx_k_step), 0, 0, 1, 1}, - 
{&__pyx_n_s_string_types, __pyx_k_string_types, sizeof(__pyx_k_string_types), 0, 0, 1, 1}, - {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, - {&__pyx_kp_u_utf_8, __pyx_k_utf_8, sizeof(__pyx_k_utf_8), 0, 1, 0, 0}, - {&__pyx_n_s_value, __pyx_k_value, sizeof(__pyx_k_value), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} -}; -static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 2, __pyx_L1_error) - return 0; - __pyx_L1_error:; - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); - - /* "(tree fragment)":2 - * def __reduce_cython__(self): - * raise TypeError("self._this cannot be converted to a Python object for pickling") # <<<<<<<<<<<<<< - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("self._this cannot be converted to a Python object for pickling") - */ - __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_s_self__this_cannot_be_converted_t); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 2, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple_); - __Pyx_GIVEREF(__pyx_tuple_); - - /* "(tree fragment)":4 - * raise TypeError("self._this cannot be converted to a Python object for pickling") - * def __setstate_cython__(self, __pyx_state): - * raise TypeError("self._this cannot be converted to a Python object for pickling") # <<<<<<<<<<<<<< - */ - __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_s_self__this_cannot_be_converted_t); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 4, __pyx_L1_error) - __Pyx_GOTREF(__pyx_tuple__2); - __Pyx_GIVEREF(__pyx_tuple__2); - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) { - if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(1, 1, __pyx_L1_error); - return 0; - __pyx_L1_error:; - 
return -1; -} - -static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/ -static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/ - -static int __Pyx_modinit_global_init_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0); - /*--- Global init code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0); - /*--- Variable export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_export_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0); - /*--- Function export code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_type_init_code(void) { - __Pyx_RefNannyDeclarations - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0); - /*--- Type init code ---*/ - __pyx_vtabptr_4sasl_11saslwrapper_Client = &__pyx_vtable_4sasl_11saslwrapper_Client; - __pyx_vtable_4sasl_11saslwrapper_Client.setAttr = (PyObject *(*)(struct __pyx_obj_4sasl_11saslwrapper_Client *, PyObject *, PyObject *, int __pyx_skip_dispatch))__pyx_f_4sasl_11saslwrapper_6Client_setAttr; - __pyx_vtable_4sasl_11saslwrapper_Client.init = (PyObject *(*)(struct __pyx_obj_4sasl_11saslwrapper_Client *, int 
__pyx_skip_dispatch))__pyx_f_4sasl_11saslwrapper_6Client_init; - __pyx_vtable_4sasl_11saslwrapper_Client.start = (PyObject *(*)(struct __pyx_obj_4sasl_11saslwrapper_Client *, PyObject *, int __pyx_skip_dispatch))__pyx_f_4sasl_11saslwrapper_6Client_start; - __pyx_vtable_4sasl_11saslwrapper_Client.step = (PyObject *(*)(struct __pyx_obj_4sasl_11saslwrapper_Client *, PyObject *, int __pyx_skip_dispatch))__pyx_f_4sasl_11saslwrapper_6Client_step; - __pyx_vtable_4sasl_11saslwrapper_Client.encode = (PyObject *(*)(struct __pyx_obj_4sasl_11saslwrapper_Client *, PyObject *, int __pyx_skip_dispatch))__pyx_f_4sasl_11saslwrapper_6Client_encode; - __pyx_vtable_4sasl_11saslwrapper_Client.decode = (PyObject *(*)(struct __pyx_obj_4sasl_11saslwrapper_Client *, PyObject *, int __pyx_skip_dispatch))__pyx_f_4sasl_11saslwrapper_6Client_decode; - __pyx_vtable_4sasl_11saslwrapper_Client.getUserId = (PyObject *(*)(struct __pyx_obj_4sasl_11saslwrapper_Client *, int __pyx_skip_dispatch))__pyx_f_4sasl_11saslwrapper_6Client_getUserId; - __pyx_vtable_4sasl_11saslwrapper_Client.getSSF = (PyObject *(*)(struct __pyx_obj_4sasl_11saslwrapper_Client *, int __pyx_skip_dispatch))__pyx_f_4sasl_11saslwrapper_6Client_getSSF; - __pyx_vtable_4sasl_11saslwrapper_Client.getError = (PyObject *(*)(struct __pyx_obj_4sasl_11saslwrapper_Client *, int __pyx_skip_dispatch))__pyx_f_4sasl_11saslwrapper_6Client_getError; - if (PyType_Ready(&__pyx_type_4sasl_11saslwrapper_Client) < 0) __PYX_ERR(1, 50, __pyx_L1_error) - #if PY_VERSION_HEX < 0x030800B1 - __pyx_type_4sasl_11saslwrapper_Client.tp_print = 0; - #endif - if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_4sasl_11saslwrapper_Client.tp_dictoffset && __pyx_type_4sasl_11saslwrapper_Client.tp_getattro == PyObject_GenericGetAttr)) { - __pyx_type_4sasl_11saslwrapper_Client.tp_getattro = __Pyx_PyObject_GenericGetAttr; - } - if (__Pyx_SetVtable(__pyx_type_4sasl_11saslwrapper_Client.tp_dict, __pyx_vtabptr_4sasl_11saslwrapper_Client) < 0) 
__PYX_ERR(1, 50, __pyx_L1_error) - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_Client, (PyObject *)&__pyx_type_4sasl_11saslwrapper_Client) < 0) __PYX_ERR(1, 50, __pyx_L1_error) - if (__Pyx_setup_reduce((PyObject*)&__pyx_type_4sasl_11saslwrapper_Client) < 0) __PYX_ERR(1, 50, __pyx_L1_error) - __pyx_ptype_4sasl_11saslwrapper_Client = &__pyx_type_4sasl_11saslwrapper_Client; - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_modinit_type_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0); - /*--- Type import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_variable_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0); - /*--- Variable import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - -static int __Pyx_modinit_function_import_code(void) { - __Pyx_RefNannyDeclarations - __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0); - /*--- Function import code ---*/ - __Pyx_RefNannyFinishContext(); - return 0; -} - - -#ifndef CYTHON_NO_PYINIT_EXPORT -#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC -#elif PY_MAJOR_VERSION < 3 -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" void -#else -#define __Pyx_PyMODINIT_FUNC void -#endif -#else -#ifdef __cplusplus -#define __Pyx_PyMODINIT_FUNC extern "C" PyObject * -#else -#define __Pyx_PyMODINIT_FUNC PyObject * -#endif -#endif - - -#if PY_MAJOR_VERSION < 3 -__Pyx_PyMODINIT_FUNC initsaslwrapper(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC initsaslwrapper(void) -#else -__Pyx_PyMODINIT_FUNC PyInit_saslwrapper(void) CYTHON_SMALL_CODE; /*proto*/ -__Pyx_PyMODINIT_FUNC PyInit_saslwrapper(void) -#if CYTHON_PEP489_MULTI_PHASE_INIT -{ - return PyModuleDef_Init(&__pyx_moduledef); -} -static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) { - #if 
PY_VERSION_HEX >= 0x030700A1 - static PY_INT64_T main_interpreter_id = -1; - PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp); - if (main_interpreter_id == -1) { - main_interpreter_id = current_id; - return (unlikely(current_id == -1)) ? -1 : 0; - } else if (unlikely(main_interpreter_id != current_id)) - #else - static PyInterpreterState *main_interpreter = NULL; - PyInterpreterState *current_interpreter = PyThreadState_Get()->interp; - if (!main_interpreter) { - main_interpreter = current_interpreter; - } else if (unlikely(main_interpreter != current_interpreter)) - #endif - { - PyErr_SetString( - PyExc_ImportError, - "Interpreter change detected - this module can only be loaded into one interpreter per process."); - return -1; - } - return 0; -} -static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) { - PyObject *value = PyObject_GetAttrString(spec, from_name); - int result = 0; - if (likely(value)) { - if (allow_none || value != Py_None) { - result = PyDict_SetItemString(moddict, to_name, value); - } - Py_DECREF(value); - } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Clear(); - } else { - result = -1; - } - return result; -} -static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) { - PyObject *module = NULL, *moddict, *modname; - if (__Pyx_check_single_interpreter()) - return NULL; - if (__pyx_m) - return __Pyx_NewRef(__pyx_m); - modname = PyObject_GetAttrString(spec, "name"); - if (unlikely(!modname)) goto bad; - module = PyModule_NewObject(modname); - Py_DECREF(modname); - if (unlikely(!module)) goto bad; - moddict = PyModule_GetDict(module); - if (unlikely(!moddict)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad; 
- if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad; - if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad; - return module; -bad: - Py_XDECREF(module); - return NULL; -} - - -static CYTHON_SMALL_CODE int __pyx_pymod_exec_saslwrapper(PyObject *__pyx_pyinit_module) -#endif -#endif -{ - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - int __pyx_lineno = 0; - const char *__pyx_filename = NULL; - int __pyx_clineno = 0; - __Pyx_RefNannyDeclarations - #if CYTHON_PEP489_MULTI_PHASE_INIT - if (__pyx_m) { - if (__pyx_m == __pyx_pyinit_module) return 0; - PyErr_SetString(PyExc_RuntimeError, "Module 'saslwrapper' has already been imported. Re-initialisation is not supported."); - return -1; - } - #elif PY_MAJOR_VERSION >= 3 - if (__pyx_m) return __Pyx_NewRef(__pyx_m); - #endif - #if CYTHON_REFNANNY -__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); -if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); -} -#endif - __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_saslwrapper(void)", 0); - if (__Pyx_check_binary_version() < 0) __PYX_ERR(1, 1, __pyx_L1_error) - #ifdef __Pxy_PyFrame_Initialize_Offsets - __Pxy_PyFrame_Initialize_Offsets(); - #endif - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(1, 1, __pyx_L1_error) - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(1, 1, __pyx_L1_error) - __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(1, 1, __pyx_L1_error) - #ifdef __Pyx_CyFunction_USED - if (__pyx_CyFunction_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_FusedFunction_USED - if (__pyx_FusedFunction_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) - #endif - 
#ifdef __Pyx_Coroutine_USED - if (__pyx_Coroutine_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_Generator_USED - if (__pyx_Generator_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_AsyncGen_USED - if (__pyx_AsyncGen_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) - #endif - #ifdef __Pyx_StopAsyncIteration_USED - if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error) - #endif - /*--- Library function declarations ---*/ - /*--- Threads initialization code ---*/ - #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS - #ifdef WITH_THREAD /* Python build with threading support? */ - PyEval_InitThreads(); - #endif - #endif - /*--- Module creation code ---*/ - #if CYTHON_PEP489_MULTI_PHASE_INIT - __pyx_m = __pyx_pyinit_module; - Py_INCREF(__pyx_m); - #else - #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4("saslwrapper", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); - #else - __pyx_m = PyModule_Create(&__pyx_moduledef); - #endif - if (unlikely(!__pyx_m)) __PYX_ERR(1, 1, __pyx_L1_error) - #endif - __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(1, 1, __pyx_L1_error) - Py_INCREF(__pyx_d); - __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(1, 1, __pyx_L1_error) - Py_INCREF(__pyx_b); - __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(1, 1, __pyx_L1_error) - Py_INCREF(__pyx_cython_runtime); - if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(1, 1, __pyx_L1_error); - /*--- Initialize various global constants etc. 
---*/ - if (__Pyx_InitGlobals() < 0) __PYX_ERR(1, 1, __pyx_L1_error) - #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) - if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(1, 1, __pyx_L1_error) - #endif - if (__pyx_module_is_main_sasl__saslwrapper) { - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - } - #if PY_MAJOR_VERSION >= 3 - { - PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(1, 1, __pyx_L1_error) - if (!PyDict_GetItemString(modules, "sasl.saslwrapper")) { - if (unlikely(PyDict_SetItemString(modules, "sasl.saslwrapper", __pyx_m) < 0)) __PYX_ERR(1, 1, __pyx_L1_error) - } - } - #endif - /*--- Builtin init code ---*/ - if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(1, 1, __pyx_L1_error) - /*--- Constants init code ---*/ - if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(1, 1, __pyx_L1_error) - /*--- Global type/function init code ---*/ - (void)__Pyx_modinit_global_init_code(); - (void)__Pyx_modinit_variable_export_code(); - (void)__Pyx_modinit_function_export_code(); - if (unlikely(__Pyx_modinit_type_init_code() < 0)) __PYX_ERR(1, 1, __pyx_L1_error) - (void)__Pyx_modinit_type_import_code(); - (void)__Pyx_modinit_variable_import_code(); - (void)__Pyx_modinit_function_import_code(); - /*--- Execution code ---*/ - #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) - if (__Pyx_patch_abc() < 0) __PYX_ERR(1, 1, __pyx_L1_error) - #endif - - /* "sasl/saslwrapper.pyx":17 - * #cython: language_level=3 - * - * from six import string_types, PY3 # <<<<<<<<<<<<<< - * - * from libcpp cimport bool - */ - __pyx_t_1 = PyList_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - __Pyx_INCREF(__pyx_n_s_string_types); - __Pyx_GIVEREF(__pyx_n_s_string_types); - PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_string_types); - __Pyx_INCREF(__pyx_n_s_PY3); - 
__Pyx_GIVEREF(__pyx_n_s_PY3); - PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_PY3); - __pyx_t_2 = __Pyx_Import(__pyx_n_s_six, __pyx_t_1, 0); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_string_types); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_string_types, __pyx_t_1) < 0) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_PY3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_1); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_PY3, __pyx_t_1) < 0) __PYX_ERR(1, 17, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "sasl/saslwrapper.pyx":1 - * # Copyright 2015 Cloudera Inc. # <<<<<<<<<<<<<< - * # - * # Licensed under the Apache License, Version 2.0 (the "License"); - */ - __pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_GOTREF(__pyx_t_2); - if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error) - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "string.to_py":55 - * - * @cname("__pyx_convert_PyByteArray_string_to_py_std__in_string") - * cdef inline object __pyx_convert_PyByteArray_string_to_py_std__in_string(const string& s): # <<<<<<<<<<<<<< - * return __Pyx_PyByteArray_FromStringAndSize(s.data(), s.size()) - * - */ - - /*--- Wrapped vars code ---*/ - - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - if (__pyx_m) { - if (__pyx_d) { - __Pyx_AddTraceback("init sasl.saslwrapper", __pyx_clineno, __pyx_lineno, __pyx_filename); - } - Py_CLEAR(__pyx_m); - } else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_ImportError, "init sasl.saslwrapper"); - } - __pyx_L0:; - 
__Pyx_RefNannyFinishContext(); - #if CYTHON_PEP489_MULTI_PHASE_INIT - return (__pyx_m != NULL) ? 0 : -1; - #elif PY_MAJOR_VERSION >= 3 - return __pyx_m; - #else - return; - #endif -} - -/* --- Runtime support code --- */ -/* Refnanny */ -#if CYTHON_REFNANNY -static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { - PyObject *m = NULL, *p = NULL; - void *r = NULL; - m = PyImport_ImportModule(modname); - if (!m) goto end; - p = PyObject_GetAttrString(m, "RefNannyAPI"); - if (!p) goto end; - r = PyLong_AsVoidPtr(p); -end: - Py_XDECREF(p); - Py_XDECREF(m); - return (__Pyx_RefNannyAPIStruct *)r; -} -#endif - -/* PyObjectGetAttrStr */ -#if CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro)) - return tp->tp_getattro(obj, attr_name); -#if PY_MAJOR_VERSION < 3 - if (likely(tp->tp_getattr)) - return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); -#endif - return PyObject_GetAttr(obj, attr_name); -} -#endif - -/* GetBuiltinName */ -static PyObject *__Pyx_GetBuiltinName(PyObject *name) { - PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); - if (unlikely(!result)) { - PyErr_Format(PyExc_NameError, -#if PY_MAJOR_VERSION >= 3 - "name '%U' is not defined", name); -#else - "name '%.200s' is not defined", PyString_AS_STRING(name)); -#endif - } - return result; -} - -/* PyDictVersioning */ -#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS -static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0; -} -static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) { - PyObject **dictptr = NULL; - Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset; - if (offset) { -#if CYTHON_COMPILING_IN_CPYTHON - dictptr = (likely(offset > 0)) ? 
(PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj); -#else - dictptr = _PyObject_GetDictPtr(obj); -#endif - } - return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0; -} -static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) { - PyObject *dict = Py_TYPE(obj)->tp_dict; - if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict))) - return 0; - return obj_dict_version == __Pyx_get_object_dict_version(obj); -} -#endif - -/* GetModuleGlobalName */ -#if CYTHON_USE_DICT_VERSIONS -static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value) -#else -static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name) -#endif -{ - PyObject *result; -#if !CYTHON_AVOID_BORROWED_REFS -#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 - result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } else if (unlikely(PyErr_Occurred())) { - return NULL; - } -#else - result = PyDict_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } -#endif -#else - result = PyObject_GetItem(__pyx_d, name); - __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version) - if (likely(result)) { - return __Pyx_NewRef(result); - } - PyErr_Clear(); -#endif - return __Pyx_GetBuiltinName(name); -} - -/* PyCFunctionFastCall */ -#if CYTHON_FAST_PYCCALL -static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { - PyCFunctionObject *func = (PyCFunctionObject*)func_obj; - PyCFunction meth = PyCFunction_GET_FUNCTION(func); - PyObject *self = PyCFunction_GET_SELF(func); - int 
flags = PyCFunction_GET_FLAGS(func); - assert(PyCFunction_Check(func)); - assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS))); - assert(nargs >= 0); - assert(nargs == 0 || args != NULL); - /* _PyCFunction_FastCallDict() must not be called with an exception set, - because it may clear it (directly or indirectly) and so the - caller loses its exception */ - assert(!PyErr_Occurred()); - if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { - return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL); - } else { - return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs); - } -} -#endif - -/* PyFunctionFastCall */ -#if CYTHON_FAST_PYCALL -static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, - PyObject *globals) { - PyFrameObject *f; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject **fastlocals; - Py_ssize_t i; - PyObject *result; - assert(globals != NULL); - /* XXX Perhaps we should create a specialized - PyFrame_New() that doesn't take locals, but does - take builtins without sanity checking them. 
- */ - assert(tstate != NULL); - f = PyFrame_New(tstate, co, globals, NULL); - if (f == NULL) { - return NULL; - } - fastlocals = __Pyx_PyFrame_GetLocalsplus(f); - for (i = 0; i < na; i++) { - Py_INCREF(*args); - fastlocals[i] = *args++; - } - result = PyEval_EvalFrameEx(f,0); - ++tstate->recursion_depth; - Py_DECREF(f); - --tstate->recursion_depth; - return result; -} -#if 1 || PY_VERSION_HEX < 0x030600B1 -static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) { - PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); - PyObject *globals = PyFunction_GET_GLOBALS(func); - PyObject *argdefs = PyFunction_GET_DEFAULTS(func); - PyObject *closure; -#if PY_MAJOR_VERSION >= 3 - PyObject *kwdefs; -#endif - PyObject *kwtuple, **k; - PyObject **d; - Py_ssize_t nd; - Py_ssize_t nk; - PyObject *result; - assert(kwargs == NULL || PyDict_Check(kwargs)); - nk = kwargs ? PyDict_Size(kwargs) : 0; - if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { - return NULL; - } - if ( -#if PY_MAJOR_VERSION >= 3 - co->co_kwonlyargcount == 0 && -#endif - likely(kwargs == NULL || nk == 0) && - co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { - if (argdefs == NULL && co->co_argcount == nargs) { - result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); - goto done; - } - else if (nargs == 0 && argdefs != NULL - && co->co_argcount == Py_SIZE(argdefs)) { - /* function called with no arguments, but all parameters have - a default value: use default values as arguments .*/ - args = &PyTuple_GET_ITEM(argdefs, 0); - result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); - goto done; - } - } - if (kwargs != NULL) { - Py_ssize_t pos, i; - kwtuple = PyTuple_New(2 * nk); - if (kwtuple == NULL) { - result = NULL; - goto done; - } - k = &PyTuple_GET_ITEM(kwtuple, 0); - pos = i = 0; - while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { - Py_INCREF(k[i]); - Py_INCREF(k[i+1]); - i += 
2; - } - nk = i / 2; - } - else { - kwtuple = NULL; - k = NULL; - } - closure = PyFunction_GET_CLOSURE(func); -#if PY_MAJOR_VERSION >= 3 - kwdefs = PyFunction_GET_KW_DEFAULTS(func); -#endif - if (argdefs != NULL) { - d = &PyTuple_GET_ITEM(argdefs, 0); - nd = Py_SIZE(argdefs); - } - else { - d = NULL; - nd = 0; - } -#if PY_MAJOR_VERSION >= 3 - result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, kwdefs, closure); -#else - result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, - args, (int)nargs, - k, (int)nk, - d, (int)nd, closure); -#endif - Py_XDECREF(kwtuple); -done: - Py_LeaveRecursiveCall(); - return result; -} -#endif -#endif - -/* PyObjectCall */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { - PyObject *result; - ternaryfunc call = func->ob_type->tp_call; - if (unlikely(!call)) - return PyObject_Call(func, arg, kw); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = (*call)(func, arg, kw); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectCall2Args */ -static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) { - PyObject *args, *result = NULL; - #if CYTHON_FAST_PYCALL - if (PyFunction_Check(function)) { - PyObject *args[2] = {arg1, arg2}; - return __Pyx_PyFunction_FastCall(function, args, 2); - } - #endif - #if CYTHON_FAST_PYCCALL - if (__Pyx_PyFastCFunction_Check(function)) { - PyObject *args[2] = {arg1, arg2}; - return __Pyx_PyCFunction_FastCall(function, args, 2); - } - #endif - args = PyTuple_New(2); - if (unlikely(!args)) goto done; - Py_INCREF(arg1); - PyTuple_SET_ITEM(args, 0, arg1); - Py_INCREF(arg2); - PyTuple_SET_ITEM(args, 1, 
arg2); - Py_INCREF(function); - result = __Pyx_PyObject_Call(function, args, NULL); - Py_DECREF(args); - Py_DECREF(function); -done: - return result; -} - -/* PyObjectCallMethO */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { - PyObject *self, *result; - PyCFunction cfunc; - cfunc = PyCFunction_GET_FUNCTION(func); - self = PyCFunction_GET_SELF(func); - if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) - return NULL; - result = cfunc(self, arg); - Py_LeaveRecursiveCall(); - if (unlikely(!result) && unlikely(!PyErr_Occurred())) { - PyErr_SetString( - PyExc_SystemError, - "NULL result without error in PyObject_Call"); - } - return result; -} -#endif - -/* PyObjectCallOneArg */ -#if CYTHON_COMPILING_IN_CPYTHON -static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_New(1); - if (unlikely(!args)) return NULL; - Py_INCREF(arg); - PyTuple_SET_ITEM(args, 0, arg); - result = __Pyx_PyObject_Call(func, args, NULL); - Py_DECREF(args); - return result; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { -#if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCall(func, &arg, 1); - } -#endif - if (likely(PyCFunction_Check(func))) { - if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { - return __Pyx_PyObject_CallMethO(func, arg); -#if CYTHON_FAST_PYCCALL - } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { - return __Pyx_PyCFunction_FastCall(func, &arg, 1); -#endif - } - } - return __Pyx__PyObject_CallOneArg(func, arg); -} -#else -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { - PyObject *result; - PyObject *args = PyTuple_Pack(1, arg); - if (unlikely(!args)) return NULL; - result = __Pyx_PyObject_Call(func, args, NULL); - Py_DECREF(args); - return result; -} -#endif - -/* 
PyErrFetchRestore */ -#if CYTHON_FAST_THREAD_STATE -static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - tmp_type = tstate->curexc_type; - tmp_value = tstate->curexc_value; - tmp_tb = tstate->curexc_traceback; - tstate->curexc_type = type; - tstate->curexc_value = value; - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} -static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { - *type = tstate->curexc_type; - *value = tstate->curexc_value; - *tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -} -#endif - -/* WriteUnraisableException */ -static void __Pyx_WriteUnraisable(const char *name, CYTHON_UNUSED int clineno, - CYTHON_UNUSED int lineno, CYTHON_UNUSED const char *filename, - int full_traceback, CYTHON_UNUSED int nogil) { - PyObject *old_exc, *old_val, *old_tb; - PyObject *ctx; - __Pyx_PyThreadState_declare -#ifdef WITH_THREAD - PyGILState_STATE state; - if (nogil) - state = PyGILState_Ensure(); -#ifdef _MSC_VER - else state = (PyGILState_STATE)-1; -#endif -#endif - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&old_exc, &old_val, &old_tb); - if (full_traceback) { - Py_XINCREF(old_exc); - Py_XINCREF(old_val); - Py_XINCREF(old_tb); - __Pyx_ErrRestore(old_exc, old_val, old_tb); - PyErr_PrintEx(1); - } - #if PY_MAJOR_VERSION < 3 - ctx = PyString_FromString(name); - #else - ctx = PyUnicode_FromString(name); - #endif - __Pyx_ErrRestore(old_exc, old_val, old_tb); - if (!ctx) { - PyErr_WriteUnraisable(Py_None); - } else { - PyErr_WriteUnraisable(ctx); - Py_DECREF(ctx); - } -#ifdef WITH_THREAD - if (nogil) - PyGILState_Release(state); -#endif -} - -/* RaiseArgTupleInvalid */ -static void __Pyx_RaiseArgtupleInvalid( - const char* func_name, - int exact, - Py_ssize_t 
num_min, - Py_ssize_t num_max, - Py_ssize_t num_found) -{ - Py_ssize_t num_expected; - const char *more_or_less; - if (num_found < num_min) { - num_expected = num_min; - more_or_less = "at least"; - } else { - num_expected = num_max; - more_or_less = "at most"; - } - if (exact) { - more_or_less = "exactly"; - } - PyErr_Format(PyExc_TypeError, - "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", - func_name, more_or_less, num_expected, - (num_expected == 1) ? "" : "s", num_found); -} - -/* RaiseDoubleKeywords */ -static void __Pyx_RaiseDoubleKeywordsError( - const char* func_name, - PyObject* kw_name) -{ - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION >= 3 - "%s() got multiple values for keyword argument '%U'", func_name, kw_name); - #else - "%s() got multiple values for keyword argument '%s'", func_name, - PyString_AsString(kw_name)); - #endif -} - -/* ParseKeywords */ -static int __Pyx_ParseOptionalKeywords( - PyObject *kwds, - PyObject **argnames[], - PyObject *kwds2, - PyObject *values[], - Py_ssize_t num_pos_args, - const char* function_name) -{ - PyObject *key = 0, *value = 0; - Py_ssize_t pos = 0; - PyObject*** name; - PyObject*** first_kw_arg = argnames + num_pos_args; - while (PyDict_Next(kwds, &pos, &key, &value)) { - name = first_kw_arg; - while (*name && (**name != key)) name++; - if (*name) { - values[name-argnames] = value; - continue; - } - name = first_kw_arg; - #if PY_MAJOR_VERSION < 3 - if (likely(PyString_Check(key))) { - while (*name) { - if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) - && _PyString_Eq(**name, key)) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - if ((**argname == key) || ( - (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) - && _PyString_Eq(**argname, key))) { - goto 
arg_passed_twice; - } - argname++; - } - } - } else - #endif - if (likely(PyUnicode_Check(key))) { - while (*name) { - int cmp = (**name == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : - #endif - PyUnicode_Compare(**name, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) { - values[name-argnames] = value; - break; - } - name++; - } - if (*name) continue; - else { - PyObject*** argname = argnames; - while (argname != first_kw_arg) { - int cmp = (**argname == key) ? 0 : - #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 - (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 : - #endif - PyUnicode_Compare(**argname, key); - if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; - if (cmp == 0) goto arg_passed_twice; - argname++; - } - } - } else - goto invalid_keyword_type; - if (kwds2) { - if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; - } else { - goto invalid_keyword; - } - } - return 0; -arg_passed_twice: - __Pyx_RaiseDoubleKeywordsError(function_name, key); - goto bad; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%.200s() keywords must be strings", function_name); - goto bad; -invalid_keyword: - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION < 3 - "%.200s() got an unexpected keyword argument '%.200s'", - function_name, PyString_AsString(key)); - #else - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif -bad: - return -1; -} - -/* PyObjectCallNoArg */ -#if CYTHON_COMPILING_IN_CPYTHON -static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { -#if CYTHON_FAST_PYCALL - if (PyFunction_Check(func)) { - return __Pyx_PyFunction_FastCall(func, NULL, 0); - } -#endif -#ifdef __Pyx_CyFunction_USED - if (likely(PyCFunction_Check(func) || __Pyx_CyFunction_Check(func))) -#else - if (likely(PyCFunction_Check(func))) -#endif - { - if 
(likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { - return __Pyx_PyObject_CallMethO(func, NULL); - } - } - return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL); -} -#endif - -/* RaiseException */ -#if PY_MAJOR_VERSION < 3 -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, - CYTHON_UNUSED PyObject *cause) { - __Pyx_PyThreadState_declare - Py_XINCREF(type); - if (!value || value == Py_None) - value = NULL; - else - Py_INCREF(value); - if (!tb || tb == Py_None) - tb = NULL; - else { - Py_INCREF(tb); - if (!PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto raise_error; - } - } - if (PyType_Check(type)) { -#if CYTHON_COMPILING_IN_PYPY - if (!value) { - Py_INCREF(Py_None); - value = Py_None; - } -#endif - PyErr_NormalizeException(&type, &value, &tb); - } else { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto raise_error; - } - value = type; - type = (PyObject*) Py_TYPE(type); - Py_INCREF(type); - if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto raise_error; - } - } - __Pyx_PyThreadState_assign - __Pyx_ErrRestore(type, value, tb); - return; -raise_error: - Py_XDECREF(value); - Py_XDECREF(type); - Py_XDECREF(tb); - return; -} -#else -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { - PyObject* owned_instance = NULL; - if (tb == Py_None) { - tb = 0; - } else if (tb && !PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto bad; - } - if (value == Py_None) - value = 0; - if (PyExceptionInstance_Check(type)) { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto bad; - } - value = type; - type = (PyObject*) Py_TYPE(value); - 
} else if (PyExceptionClass_Check(type)) { - PyObject *instance_class = NULL; - if (value && PyExceptionInstance_Check(value)) { - instance_class = (PyObject*) Py_TYPE(value); - if (instance_class != type) { - int is_subclass = PyObject_IsSubclass(instance_class, type); - if (!is_subclass) { - instance_class = NULL; - } else if (unlikely(is_subclass == -1)) { - goto bad; - } else { - type = instance_class; - } - } - } - if (!instance_class) { - PyObject *args; - if (!value) - args = PyTuple_New(0); - else if (PyTuple_Check(value)) { - Py_INCREF(value); - args = value; - } else - args = PyTuple_Pack(1, value); - if (!args) - goto bad; - owned_instance = PyObject_Call(type, args, NULL); - Py_DECREF(args); - if (!owned_instance) - goto bad; - value = owned_instance; - if (!PyExceptionInstance_Check(value)) { - PyErr_Format(PyExc_TypeError, - "calling %R should have returned an instance of " - "BaseException, not %R", - type, Py_TYPE(value)); - goto bad; - } - } - } else { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto bad; - } - if (cause) { - PyObject *fixed_cause; - if (cause == Py_None) { - fixed_cause = NULL; - } else if (PyExceptionClass_Check(cause)) { - fixed_cause = PyObject_CallObject(cause, NULL); - if (fixed_cause == NULL) - goto bad; - } else if (PyExceptionInstance_Check(cause)) { - fixed_cause = cause; - Py_INCREF(fixed_cause); - } else { - PyErr_SetString(PyExc_TypeError, - "exception causes must derive from " - "BaseException"); - goto bad; - } - PyException_SetCause(value, fixed_cause); - } - PyErr_SetObject(type, value); - if (tb) { -#if CYTHON_COMPILING_IN_PYPY - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); - Py_INCREF(tb); - PyErr_Restore(tmp_type, tmp_value, tb); - Py_XDECREF(tmp_tb); -#else - PyThreadState *tstate = __Pyx_PyThreadState_Current; - PyObject* tmp_tb = tstate->curexc_traceback; - if (tb != tmp_tb) { - Py_INCREF(tb); - 
tstate->curexc_traceback = tb; - Py_XDECREF(tmp_tb); - } -#endif - } -bad: - Py_XDECREF(owned_instance); - return; -} -#endif - -/* PyObject_GenericGetAttrNoDict */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) { - PyErr_Format(PyExc_AttributeError, -#if PY_MAJOR_VERSION >= 3 - "'%.50s' object has no attribute '%U'", - tp->tp_name, attr_name); -#else - "'%.50s' object has no attribute '%.400s'", - tp->tp_name, PyString_AS_STRING(attr_name)); -#endif - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) { - PyObject *descr; - PyTypeObject *tp = Py_TYPE(obj); - if (unlikely(!PyString_Check(attr_name))) { - return PyObject_GenericGetAttr(obj, attr_name); - } - assert(!tp->tp_dictoffset); - descr = _PyType_Lookup(tp, attr_name); - if (unlikely(!descr)) { - return __Pyx_RaiseGenericGetAttributeError(tp, attr_name); - } - Py_INCREF(descr); - #if PY_MAJOR_VERSION < 3 - if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS))) - #endif - { - descrgetfunc f = Py_TYPE(descr)->tp_descr_get; - if (unlikely(f)) { - PyObject *res = f(descr, obj, (PyObject *)tp); - Py_DECREF(descr); - return res; - } - } - return descr; -} -#endif - -/* PyObject_GenericGetAttr */ -#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000 -static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) { - if (unlikely(Py_TYPE(obj)->tp_dictoffset)) { - return PyObject_GenericGetAttr(obj, attr_name); - } - return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name); -} -#endif - -/* SetVTable */ -static int __Pyx_SetVtable(PyObject *dict, void *vtable) { -#if PY_VERSION_HEX >= 0x02070000 - PyObject *ob = PyCapsule_New(vtable, 0, 0); -#else - PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); -#endif - if (!ob) - goto bad; - if 
(PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) - goto bad; - Py_DECREF(ob); - return 0; -bad: - Py_XDECREF(ob); - return -1; -} - -/* PyErrExceptionMatches */ -#if CYTHON_FAST_THREAD_STATE -static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; icurexc_type; - if (exc_type == err) return 1; - if (unlikely(!exc_type)) return 0; - if (unlikely(PyTuple_Check(err))) - return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err); - return __Pyx_PyErr_GivenExceptionMatches(exc_type, err); -} -#endif - -/* PyObjectGetAttrStrNoError */ -static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) { - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError))) - __Pyx_PyErr_Clear(); -} -static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) { - PyObject *result; -#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1 - PyTypeObject* tp = Py_TYPE(obj); - if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) { - return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1); - } -#endif - result = __Pyx_PyObject_GetAttrStr(obj, attr_name); - if (unlikely(!result)) { - __Pyx_PyObject_GetAttrStr_ClearAttributeError(); - } - return result; -} - -/* SetupReduce */ -static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { - int ret; - PyObject *name_attr; - name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name); - if (likely(name_attr)) { - ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); - } else { - ret = -1; - } - if (unlikely(ret < 0)) { - PyErr_Clear(); - ret = 0; - } - Py_XDECREF(name_attr); - return ret; -} -static int __Pyx_setup_reduce(PyObject* type_obj) { - int ret = 0; - PyObject *object_reduce = NULL; - PyObject *object_reduce_ex = NULL; - PyObject *reduce = NULL; - PyObject 
*reduce_ex = NULL; - PyObject *reduce_cython = NULL; - PyObject *setstate = NULL; - PyObject *setstate_cython = NULL; -#if CYTHON_USE_PYTYPE_LOOKUP - if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD; -#else - if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD; -#endif -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#else - object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD; -#endif - reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD; - if (reduce_ex == object_reduce_ex) { -#if CYTHON_USE_PYTYPE_LOOKUP - object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#else - object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD; -#endif - reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD; - if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { - reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython); - if (likely(reduce_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (reduce == object_reduce || PyErr_Occurred()) { - goto __PYX_BAD; - } - setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate); - if (!setstate) PyErr_Clear(); - if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { - setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython); - if 
(likely(setstate_cython)) { - ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD; - } else if (!setstate || PyErr_Occurred()) { - goto __PYX_BAD; - } - } - PyType_Modified((PyTypeObject*)type_obj); - } - } - goto __PYX_GOOD; -__PYX_BAD: - if (!PyErr_Occurred()) - PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); - ret = -1; -__PYX_GOOD: -#if !CYTHON_USE_PYTYPE_LOOKUP - Py_XDECREF(object_reduce); - Py_XDECREF(object_reduce_ex); -#endif - Py_XDECREF(reduce); - Py_XDECREF(reduce_ex); - Py_XDECREF(reduce_cython); - Py_XDECREF(setstate); - Py_XDECREF(setstate_cython); - return ret; -} - -/* Import */ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { - PyObject *empty_list = 0; - PyObject *module = 0; - PyObject *global_dict = 0; - PyObject *empty_dict = 0; - PyObject *list; - #if PY_MAJOR_VERSION < 3 - PyObject *py_import; - py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); - if (!py_import) - goto bad; - #endif - if (from_list) - list = from_list; - else { - empty_list = PyList_New(0); - if (!empty_list) - goto bad; - list = empty_list; - } - global_dict = PyModule_GetDict(__pyx_m); - if (!global_dict) - goto bad; - empty_dict = PyDict_New(); - if (!empty_dict) - goto bad; - { - #if PY_MAJOR_VERSION >= 3 - if (level == -1) { - if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) { - module = PyImport_ImportModuleLevelObject( - name, global_dict, empty_dict, list, 1); - if (!module) { - if (!PyErr_ExceptionMatches(PyExc_ImportError)) - goto bad; - PyErr_Clear(); - } - } - level = 0; - } - #endif - if (!module) { - #if PY_MAJOR_VERSION < 3 - PyObject *py_level = PyInt_FromLong(level); - if (!py_level) - goto bad; - module = PyObject_CallFunctionObjArgs(py_import, - 
name, global_dict, empty_dict, list, py_level, (PyObject *)NULL); - Py_DECREF(py_level); - #else - module = PyImport_ImportModuleLevelObject( - name, global_dict, empty_dict, list, level); - #endif - } - } -bad: - #if PY_MAJOR_VERSION < 3 - Py_XDECREF(py_import); - #endif - Py_XDECREF(empty_list); - Py_XDECREF(empty_dict); - return module; -} - -/* ImportFrom */ -static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { - PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); - if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { - PyErr_Format(PyExc_ImportError, - #if PY_MAJOR_VERSION < 3 - "cannot import name %.230s", PyString_AS_STRING(name)); - #else - "cannot import name %S", name); - #endif - } - return value; -} - -/* CLineInTraceback */ -#ifndef CYTHON_CLINE_IN_TRACEBACK -static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) { - PyObject *use_cline; - PyObject *ptype, *pvalue, *ptraceback; -#if CYTHON_COMPILING_IN_CPYTHON - PyObject **cython_runtime_dict; -#endif - if (unlikely(!__pyx_cython_runtime)) { - return c_line; - } - __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback); -#if CYTHON_COMPILING_IN_CPYTHON - cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); - if (likely(cython_runtime_dict)) { - __PYX_PY_DICT_LOOKUP_IF_MODIFIED( - use_cline, *cython_runtime_dict, - __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback)) - } else -#endif - { - PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); - if (use_cline_obj) { - use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; - Py_DECREF(use_cline_obj); - } else { - PyErr_Clear(); - use_cline = NULL; - } - } - if (!use_cline) { - c_line = 0; - PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); - } - else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { - c_line = 0; - } - __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback); - return c_line; -} -#endif - -/* CodeObjectCache */ -static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { - int start = 0, mid = 0, end = count - 1; - if (end >= 0 && code_line > entries[end].code_line) { - return count; - } - while (start < end) { - mid = start + (end - start) / 2; - if (code_line < entries[mid].code_line) { - end = mid; - } else if (code_line > entries[mid].code_line) { - start = mid + 1; - } else { - return mid; - } - } - if (code_line <= entries[mid].code_line) { - return mid; - } else { - return mid + 1; - } -} -static PyCodeObject *__pyx_find_code_object(int code_line) { - PyCodeObject* code_object; - int pos; - if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { - return NULL; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { - return NULL; - } - code_object = __pyx_code_cache.entries[pos].code_object; - Py_INCREF(code_object); - return code_object; -} -static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { - int pos, i; - __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; - if (unlikely(!code_line)) { - return; - } - if (unlikely(!entries)) { - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); - if (likely(entries)) { - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = 64; - __pyx_code_cache.count = 1; - entries[0].code_line = 
code_line; - entries[0].code_object = code_object; - Py_INCREF(code_object); - } - return; - } - pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); - if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { - PyCodeObject* tmp = entries[pos].code_object; - entries[pos].code_object = code_object; - Py_DECREF(tmp); - return; - } - if (__pyx_code_cache.count == __pyx_code_cache.max_count) { - int new_max = __pyx_code_cache.max_count + 64; - entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( - __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry)); - if (unlikely(!entries)) { - return; - } - __pyx_code_cache.entries = entries; - __pyx_code_cache.max_count = new_max; - } - for (i=__pyx_code_cache.count; i>pos; i--) { - entries[i] = entries[i-1]; - } - entries[pos].code_line = code_line; - entries[pos].code_object = code_object; - __pyx_code_cache.count++; - Py_INCREF(code_object); -} - -/* AddTraceback */ -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" -static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( - const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyObject *py_srcfile = 0; - PyObject *py_funcname = 0; - #if PY_MAJOR_VERSION < 3 - py_srcfile = PyString_FromString(filename); - #else - py_srcfile = PyUnicode_FromString(filename); - #endif - if (!py_srcfile) goto bad; - if (c_line) { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - #else - py_funcname = PyUnicode_FromString(funcname); - #endif - } - if (!py_funcname) goto bad; - py_code = __Pyx_PyCode_New( - 0, - 0, - 0, - 0, - 0, - __pyx_empty_bytes, /*PyObject 
*code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - py_line, - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - Py_DECREF(py_srcfile); - Py_DECREF(py_funcname); - return py_code; -bad: - Py_XDECREF(py_srcfile); - Py_XDECREF(py_funcname); - return NULL; -} -static void __Pyx_AddTraceback(const char *funcname, int c_line, - int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - PyThreadState *tstate = __Pyx_PyThreadState_Current; - if (c_line) { - c_line = __Pyx_CLineForTraceback(tstate, c_line); - } - py_code = __pyx_find_code_object(c_line ? -c_line : py_line); - if (!py_code) { - py_code = __Pyx_CreateCodeObjectForTraceback( - funcname, c_line, py_line, filename); - if (!py_code) goto bad; - __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code); - } - py_frame = PyFrame_New( - tstate, /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - __pyx_d, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - __Pyx_PyFrame_SetLineNumber(py_frame, py_line); - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} - -/* CIntFromPyVerify */ -#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) -#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ - __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) -#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ - {\ - func_type value = func_value;\ - if (sizeof(target_type) < sizeof(func_type)) {\ - if (unlikely(value != (func_type) (target_type) value)) {\ - func_type zero = 0;\ - if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ - return (target_type) -1;\ - if (is_unsigned && unlikely(value < zero))\ - goto raise_neg_overflow;\ - else\ - goto raise_overflow;\ - }\ - }\ - return (target_type) value;\ - } - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { - const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(int) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(int) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(int) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = 
(int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(int), - little, !is_unsigned); - } -} - -/* CIntFromPy */ -static CYTHON_INLINE uint32_t __Pyx_PyInt_As_uint32_t(PyObject *x) { - const uint32_t neg_one = (uint32_t) ((uint32_t) 0 - (uint32_t) 1), const_zero = (uint32_t) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(uint32_t) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(uint32_t, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (uint32_t) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (uint32_t) 0; - case 1: __PYX_VERIFY_RETURN_INT(uint32_t, digit, digits[0]) - case 2: - if (8 * sizeof(uint32_t) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint32_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint32_t) >= 2 * PyLong_SHIFT) { - return (uint32_t) (((((uint32_t)digits[1]) << PyLong_SHIFT) | (uint32_t)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(uint32_t) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint32_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint32_t) >= 3 * PyLong_SHIFT) { - return (uint32_t) (((((((uint32_t)digits[2]) << PyLong_SHIFT) | (uint32_t)digits[1]) << PyLong_SHIFT) | (uint32_t)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(uint32_t) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - 
__PYX_VERIFY_RETURN_INT(uint32_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint32_t) >= 4 * PyLong_SHIFT) { - return (uint32_t) (((((((((uint32_t)digits[3]) << PyLong_SHIFT) | (uint32_t)digits[2]) << PyLong_SHIFT) | (uint32_t)digits[1]) << PyLong_SHIFT) | (uint32_t)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (uint32_t) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(uint32_t) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(uint32_t, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(uint32_t) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(uint32_t, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (uint32_t) 0; - case -1: __PYX_VERIFY_RETURN_INT(uint32_t, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(uint32_t, digit, +digits[0]) - case -2: - if (8 * sizeof(uint32_t) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint32_t, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint32_t) - 1 > 2 * PyLong_SHIFT) { - return (uint32_t) (((uint32_t)-1)*(((((uint32_t)digits[1]) << PyLong_SHIFT) | (uint32_t)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(uint32_t) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint32_t, unsigned long, (((((unsigned 
long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint32_t) - 1 > 2 * PyLong_SHIFT) { - return (uint32_t) ((((((uint32_t)digits[1]) << PyLong_SHIFT) | (uint32_t)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(uint32_t) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint32_t, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint32_t) - 1 > 3 * PyLong_SHIFT) { - return (uint32_t) (((uint32_t)-1)*(((((((uint32_t)digits[2]) << PyLong_SHIFT) | (uint32_t)digits[1]) << PyLong_SHIFT) | (uint32_t)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(uint32_t) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint32_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint32_t) - 1 > 3 * PyLong_SHIFT) { - return (uint32_t) ((((((((uint32_t)digits[2]) << PyLong_SHIFT) | (uint32_t)digits[1]) << PyLong_SHIFT) | (uint32_t)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(uint32_t) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint32_t, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint32_t) - 1 > 4 * PyLong_SHIFT) { - return (uint32_t) (((uint32_t)-1)*(((((((((uint32_t)digits[3]) << PyLong_SHIFT) | (uint32_t)digits[2]) << PyLong_SHIFT) | (uint32_t)digits[1]) << PyLong_SHIFT) | (uint32_t)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(uint32_t) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(uint32_t, 
unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(uint32_t) - 1 > 4 * PyLong_SHIFT) { - return (uint32_t) ((((((((((uint32_t)digits[3]) << PyLong_SHIFT) | (uint32_t)digits[2]) << PyLong_SHIFT) | (uint32_t)digits[1]) << PyLong_SHIFT) | (uint32_t)digits[0]))); - } - } - break; - } -#endif - if (sizeof(uint32_t) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(uint32_t, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(uint32_t) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(uint32_t, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - uint32_t val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (uint32_t) -1; - } - } else { - uint32_t val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (uint32_t) -1; - val = __Pyx_PyInt_As_uint32_t(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to uint32_t"); - return (uint32_t) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to uint32_t"); - return (uint32_t) -1; -} - -/* CIntToPy */ -static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { - const long neg_one = (long) 
((long) 0 - (long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; - if (is_unsigned) { - if (sizeof(long) < sizeof(long)) { - return PyInt_FromLong((long) value); - } else if (sizeof(long) <= sizeof(unsigned long)) { - return PyLong_FromUnsignedLong((unsigned long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); -#endif - } - } else { - if (sizeof(long) <= sizeof(long)) { - return PyInt_FromLong((long) value); -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - return PyLong_FromLongLong((PY_LONG_LONG) value); -#endif - } - } - { - int one = 1; int little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&value; - return _PyLong_FromByteArray(bytes, sizeof(long), - little, !is_unsigned); - } -} - -/* CIntFromPy */ -static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { - const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(long) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (long) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) - case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { - return (long) (((((long)digits[1]) << PyLong_SHIFT) 
| (long)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { - return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { - return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (long) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(long) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (long) 0; - case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) - case -2: - if (8 * sizeof(long) - 1 > 1 * 
PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(long) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -3: - if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(long) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | 
(unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(long) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { - return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); - } - } - break; - } -#endif - if (sizeof(long) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - long val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (long) -1; - } - } else { - long val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (long) -1; - val = __Pyx_PyInt_As_long(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - 
PyErr_SetString(PyExc_OverflowError, - "value too large to convert to long"); - return (long) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long) -1; -} - -/* CIntFromPy */ -static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { - const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0; - const int is_unsigned = neg_one > const_zero; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x))) { - if (sizeof(int) < sizeof(long)) { - __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) - } else { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - goto raise_neg_overflow; - } - return (int) val; - } - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (int) 0; - case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { - return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { - return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << 
PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { - return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); - } - } - break; - } -#endif -#if CYTHON_COMPILING_IN_CPYTHON - if (unlikely(Py_SIZE(x) < 0)) { - goto raise_neg_overflow; - } -#else - { - int result = PyObject_RichCompareBool(x, Py_False, Py_LT); - if (unlikely(result < 0)) - return (int) -1; - if (unlikely(result == 1)) - goto raise_neg_overflow; - } -#endif - if (sizeof(int) <= sizeof(unsigned long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) -#endif - } - } else { -#if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)x)->ob_digit; - switch (Py_SIZE(x)) { - case 0: return (int) 0; - case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) - case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) - case -2: - if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 2: - if (8 * sizeof(int) > 1 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - 
break; - case -3: - if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 3: - if (8 * sizeof(int) > 2 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case -4: - if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - case 4: - if (8 * sizeof(int) > 3 * PyLong_SHIFT) { - if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { - __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) - } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { - return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << 
PyLong_SHIFT) | (int)digits[0]))); - } - } - break; - } -#endif - if (sizeof(int) <= sizeof(long)) { - __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) -#ifdef HAVE_LONG_LONG - } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { - __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) -#endif - } - } - { -#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) - PyErr_SetString(PyExc_RuntimeError, - "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); -#else - int val; - PyObject *v = __Pyx_PyNumber_IntOrLong(x); - #if PY_MAJOR_VERSION < 3 - if (likely(v) && !PyLong_Check(v)) { - PyObject *tmp = v; - v = PyNumber_Long(tmp); - Py_DECREF(tmp); - } - #endif - if (likely(v)) { - int one = 1; int is_little = (int)*(unsigned char *)&one; - unsigned char *bytes = (unsigned char *)&val; - int ret = _PyLong_AsByteArray((PyLongObject *)v, - bytes, sizeof(val), - is_little, !is_unsigned); - Py_DECREF(v); - if (likely(!ret)) - return val; - } -#endif - return (int) -1; - } - } else { - int val; - PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); - if (!tmp) return (int) -1; - val = __Pyx_PyInt_As_int(tmp); - Py_DECREF(tmp); - return val; - } -raise_overflow: - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to int"); - return (int) -1; -raise_neg_overflow: - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to int"); - return (int) -1; -} - -/* FastTypeChecks */ -#if CYTHON_COMPILING_IN_CPYTHON -static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) { - while (a) { - a = a->tp_base; - if (a == b) - return 1; - } - return b == &PyBaseObject_Type; -} -static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) { - PyObject *mro; - if (a == b) return 1; - mro = a->tp_mro; - if (likely(mro)) { - Py_ssize_t i, n; - n = PyTuple_GET_SIZE(mro); - for (i = 0; i < n; i++) { - if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b) - return 1; - } - return 0; - } - return __Pyx_InBases(a, b); -} 
-#if PY_MAJOR_VERSION == 2 -static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) { - PyObject *exception, *value, *tb; - int res; - __Pyx_PyThreadState_declare - __Pyx_PyThreadState_assign - __Pyx_ErrFetch(&exception, &value, &tb); - res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0; - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - if (!res) { - res = PyObject_IsSubclass(err, exc_type2); - if (unlikely(res == -1)) { - PyErr_WriteUnraisable(err); - res = 0; - } - } - __Pyx_ErrRestore(exception, value, tb); - return res; -} -#else -static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) { - int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0; - if (!res) { - res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2); - } - return res; -} -#endif -static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) { - Py_ssize_t i, n; - assert(PyExceptionClass_Check(exc_type)); - n = PyTuple_GET_SIZE(tuple); -#if PY_MAJOR_VERSION >= 3 - for (i=0; ip) { - #if PY_MAJOR_VERSION < 3 - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - #else - if (t->is_unicode | t->is_str) { - if (t->intern) { - *t->p = PyUnicode_InternFromString(t->s); - } else if (t->encoding) { - *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); - } else { - *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); - } - } else { - *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); - } - #endif - if (!*t->p) - return -1; - if (PyObject_Hash(*t->p) == -1) - return -1; - ++t; - } - return 0; -} - -static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { - return 
__Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); -} -static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { - Py_ssize_t ignore; - return __Pyx_PyObject_AsStringAndSize(o, &ignore); -} -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT -#if !CYTHON_PEP393_ENABLED -static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - char* defenc_c; - PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); - if (!defenc) return NULL; - defenc_c = PyBytes_AS_STRING(defenc); -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - { - char* end = defenc_c + PyBytes_GET_SIZE(defenc); - char* c; - for (c = defenc_c; c < end; c++) { - if ((unsigned char) (*c) >= 128) { - PyUnicode_AsASCIIString(o); - return NULL; - } - } - } -#endif - *length = PyBytes_GET_SIZE(defenc); - return defenc_c; -} -#else -static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) { - if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL; -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - if (likely(PyUnicode_IS_ASCII(o))) { - *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); - } else { - PyUnicode_AsASCIIString(o); - return NULL; - } -#else - return PyUnicode_AsUTF8AndSize(o, length); -#endif -} -#endif -#endif -static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { -#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT - if ( -#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII - __Pyx_sys_getdefaultencoding_not_ascii && -#endif - PyUnicode_Check(o)) { - return __Pyx_PyUnicode_AsStringAndSize(o, length); - } else -#endif -#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) - if (PyByteArray_Check(o)) { - *length = PyByteArray_GET_SIZE(o); - return PyByteArray_AS_STRING(o); - } else -#endif - { - char* result; - int 
r = PyBytes_AsStringAndSize(o, &result, length); - if (unlikely(r < 0)) { - return NULL; - } else { - return result; - } - } -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} -static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) { - int retval; - if (unlikely(!x)) return -1; - retval = __Pyx_PyObject_IsTrue(x); - Py_DECREF(x); - return retval; -} -static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) { -#if PY_MAJOR_VERSION >= 3 - if (PyLong_Check(result)) { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "__int__ returned non-int (type %.200s). " - "The ability to return an instance of a strict subclass of int " - "is deprecated, and may be removed in a future version of Python.", - Py_TYPE(result)->tp_name)) { - Py_DECREF(result); - return NULL; - } - return result; - } -#endif - PyErr_Format(PyExc_TypeError, - "__%.4s__ returned non-%.4s (type %.200s)", - type_name, type_name, Py_TYPE(result)->tp_name); - Py_DECREF(result); - return NULL; -} -static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { -#if CYTHON_USE_TYPE_SLOTS - PyNumberMethods *m; -#endif - const char *name = NULL; - PyObject *res = NULL; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_Check(x) || PyLong_Check(x))) -#else - if (likely(PyLong_Check(x))) -#endif - return __Pyx_NewRef(x); -#if CYTHON_USE_TYPE_SLOTS - m = Py_TYPE(x)->tp_as_number; - #if PY_MAJOR_VERSION < 3 - if (m && m->nb_int) { - name = "int"; - res = m->nb_int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = m->nb_long(x); - } - #else - if (likely(m && m->nb_int)) { - name = "int"; - res = m->nb_int(x); - } - #endif -#else - if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) { - res = PyNumber_Int(x); - } -#endif - if (likely(res)) { -#if PY_MAJOR_VERSION < 3 - if 
(unlikely(!PyInt_Check(res) && !PyLong_Check(res))) { -#else - if (unlikely(!PyLong_CheckExact(res))) { -#endif - return __Pyx_PyNumber_IntOrLongWrongResultType(res, name); - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject *x; -#if PY_MAJOR_VERSION < 3 - if (likely(PyInt_CheckExact(b))) { - if (sizeof(Py_ssize_t) >= sizeof(long)) - return PyInt_AS_LONG(b); - else - return PyInt_AsSsize_t(b); - } -#endif - if (likely(PyLong_CheckExact(b))) { - #if CYTHON_USE_PYLONG_INTERNALS - const digit* digits = ((PyLongObject*)b)->ob_digit; - const Py_ssize_t size = Py_SIZE(b); - if (likely(__Pyx_sst_abs(size) <= 1)) { - ival = likely(size) ? digits[0] : 0; - if (size == -1) ival = -ival; - return ival; - } else { - switch (size) { - case 2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -2: - if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -3: - if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case 4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - case -4: - if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { - return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | 
(size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); - } - break; - } - } - #endif - return PyLong_AsSsize_t(b); - } - x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - return ival; -} -static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { - if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { - return __Pyx_PyIndex_AsSsize_t(o); -#if PY_MAJOR_VERSION < 3 - } else if (likely(PyInt_CheckExact(o))) { - return PyInt_AS_LONG(o); -#endif - } else { - Py_ssize_t ival; - PyObject *x; - x = PyNumber_Index(o); - if (!x) return -1; - ival = PyInt_AsLong(x); - Py_DECREF(x); - return ival; - } -} -static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { - return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); -} -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { - return PyInt_FromSize_t(ival); -} - - -#endif /* Py_PYTHON_H */ diff --git a/shell/ext-py/sasl-0.3.1/sasl/saslwrapper.h b/shell/ext-py/sasl-0.3.1/sasl/saslwrapper.h deleted file mode 100644 index 099ed2231..000000000 --- a/shell/ext-py/sasl-0.3.1/sasl/saslwrapper.h +++ /dev/null @@ -1,483 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. -*/ - -#include -#include -#include -#include -#include -#include -#include -#include - -using namespace std; - -namespace saslwrapper { - - class ClientImpl { - public: - ClientImpl() : conn(0), cbIndex(0), maxBufSize(65535), minSsf(0), maxSsf(65535), externalSsf(0), secret(0) {} - ~ClientImpl() { if (conn) sasl_dispose(&conn); conn = 0; } - - /** - * Set attributes to be used in authenticating the session. All attributes should be set - * before init() is called. - * - * @param key Name of attribute being set - * @param value Value of attribute being set - * @return true iff success. If false is returned, call getError() for error details. - * - * Available attribute keys: - * - * service - Name of the service being accessed - * username - User identity for authentication - * authname - User identity for authorization (if different from username) - * password - Password associated with username - * host - Fully qualified domain name of the server host - * maxbufsize - Maximum receive buffer size for the security layer - * minssf - Minimum acceptable security strength factor (integer) - * maxssf - Maximum acceptable security strength factor (integer) - * externalssf - Security strength factor supplied by external mechanism (i.e. SSL/TLS) - * externaluser - Authentication ID (of client) as established by external mechanism - */ - bool setAttr(const string& key, const string& value); - bool setAttr(const string& key, uint32_t value); - - /** - * Initialize the client object. This should be called after all of the properties have been set. - * - * @return true iff success. If false is returned, call getError() for error details. - */ - bool init(); - - /** - * Start the SASL exchange with the server. 
- * - * @param mechList List of mechanisms provided by the server - * @param chosen The mechanism chosen by the client - * @param initialResponse Initial block of data to send to the server - * - * @return true iff success. If false is returned, call getError() for error details. - */ - bool start(const string& mechList, string& chosen, string& initialResponse); - - /** - * Step the SASL handshake. - * - * @param challenge The challenge supplied by the server - * @param response (output) The response to be sent back to the server - * - * @return true iff success. If false is returned, call getError() for error details. - */ - bool step(const string& challenge, string& response); - - /** - * Encode data for secure transmission to the server. - * - * @param clearText Clear text data to be encrypted - * @param cipherText (output) Encrypted data to be transmitted - * - * @return true iff success. If false is returned, call getError() for error details. - */ - bool encode(const string& clearText, string& cipherText); - - /** - * Decode data received from the server. - * - * @param cipherText Encrypted data received from the server - * @param clearText (output) Decrypted clear text data - * - * @return true iff success. If false is returned, call getError() for error details. - */ - bool decode(const string& cipherText, string& clearText); - - /** - * Get the user identity (used for authentication) associated with this session. - * Note that this is particularly useful for single-sign-on mechanisms in which the - * username is not supplied by the application. - * - * @param userId (output) Authenticated user ID for this session. - */ - bool getUserId(string& userId); - - /** - * Get the security strength factor associated with this session. - * - * @param ssf (output) Negotiated SSF value. - */ - bool getSSF(int *ssf); - - /** - * Get error message for last error. - * This function will return the last error message then clear the error state. 
- * If there was no error or the error state has been cleared, this function will output - * an empty string. - * - * @param error Error message string - */ - void getError(string& error); - - private: - // Declare private copy constructor and assignment operator. Ensure that this - // class is non-copyable. - ClientImpl(const ClientImpl&); - const ClientImpl& operator=(const ClientImpl&); - - void addCallback(unsigned long id, void* proc); - void lastCallback() { addCallback(SASL_CB_LIST_END, 0); } - void setError(const string& context, int code, const string& text = "", const string& text2 = ""); - void interact(sasl_interact_t* prompt); - - static int cbName(void *context, int id, const char **result, unsigned *len); - static int cbPassword(sasl_conn_t *conn, void *context, int id, sasl_secret_t **psecret); - - static bool initialized; - sasl_conn_t* conn; - sasl_callback_t callbacks[8]; - int cbIndex; - string error; - string serviceName; - string userName; - string authName; - string password; - string hostName; - string externalUserName; - uint32_t maxBufSize; - uint32_t minSsf; - uint32_t maxSsf; - uint32_t externalSsf; - sasl_secret_t* secret; - }; -} - -using namespace saslwrapper; - -bool ClientImpl::initialized = false; - -bool ClientImpl::init() -{ - int result; - - if (!initialized) { - initialized = true; - result = sasl_client_init(0); - if (result != SASL_OK) { - setError("sasl_client_init", result, sasl_errstring(result, 0, 0)); - return false; - } - } - - - addCallback(SASL_CB_GETREALM, 0); - if (!userName.empty()) { - addCallback(SASL_CB_USER, (void*) cbName); - addCallback(SASL_CB_AUTHNAME, (void*) cbName); - - if (!password.empty()) - addCallback(SASL_CB_PASS, (void*) cbPassword); - else - addCallback(SASL_CB_PASS, 0); - } - lastCallback(); - - unsigned flags; - - flags = 0; - if (!authName.empty() && authName != userName) - flags |= SASL_NEED_PROXY; - - result = sasl_client_new(serviceName.c_str(), hostName.c_str(), 0, 0, callbacks, flags, 
&conn); - if (result != SASL_OK) { - setError("sasl_client_new", result, sasl_errstring(result, 0, 0)); - return false; - } - - sasl_security_properties_t secprops; - - secprops.min_ssf = minSsf; - secprops.max_ssf = maxSsf; - secprops.maxbufsize = maxBufSize; - secprops.property_names = 0; - secprops.property_values = 0; - secprops.security_flags = 0; - - result = sasl_setprop(conn, SASL_SEC_PROPS, &secprops); - if (result != SASL_OK) { - setError("sasl_setprop(SASL_SEC_PROPS)", result); - sasl_dispose(&conn); - conn = 0; - return false; - } - - if (!externalUserName.empty()) { - result = sasl_setprop(conn, SASL_AUTH_EXTERNAL, externalUserName.c_str()); - if (result != SASL_OK) { - setError("sasl_setprop(SASL_AUTH_EXTERNAL)", result); - sasl_dispose(&conn); - conn = 0; - return false; - } - - result = sasl_setprop(conn, SASL_SSF_EXTERNAL, &externalSsf); - if (result != SASL_OK) { - setError("sasl_setprop(SASL_SSF_EXTERNAL)", result); - sasl_dispose(&conn); - conn = 0; - return false; - } - } - - return true; -} - -bool ClientImpl::setAttr(const string& key, const string& value) -{ - if (key == "service") - serviceName = value; - else if (key == "username") - userName = value; - else if (key == "authname") - authName = value; - else if (key == "password") { - password = value; - free(secret); - secret = (sasl_secret_t*) malloc(sizeof(sasl_secret_t) + password.length()); - } - else if (key == "host") - hostName = value; - else if (key == "externaluser") - externalUserName = value; - else { - setError("setAttr", -1, "Unknown string attribute name", key); - return false; - } - - return true; -} - -bool ClientImpl::setAttr(const string& key, uint32_t value) -{ - if (key == "minssf") - minSsf = value; - else if (key == "maxssf") - maxSsf = value; - else if (key == "externalssf") - externalSsf = value; - else if (key == "maxbufsize") - maxBufSize = value; - else { - setError("setAttr", -1, "Unknown integer attribute name", key); - return false; - } - - return true; -} - 
-bool ClientImpl::start(const string& mechList, string& chosen, string& initialResponse) -{ - int result; - sasl_interact_t* prompt = 0; - const char* resp; - const char* mech; - unsigned int len; - - do { - result = sasl_client_start(conn, mechList.c_str(), &prompt, &resp, &len, &mech); - if (result == SASL_INTERACT) - interact(prompt); - } while (result == SASL_INTERACT); - if (result != SASL_OK && result != SASL_CONTINUE) { - setError("sasl_client_start", result); - return false; - } - - chosen = string(mech); - initialResponse = string(resp, len); - return true; -} - -bool ClientImpl::step(const string& challenge, string& response) -{ - int result; - sasl_interact_t* prompt = 0; - const char* resp; - unsigned int len; - - do { - result = sasl_client_step(conn, challenge.c_str(), challenge.size(), &prompt, &resp, &len); - if (result == SASL_INTERACT) - interact(prompt); - } while (result == SASL_INTERACT); - if (result != SASL_OK && result != SASL_CONTINUE) { - setError("sasl_client_step", result); - return false; - } - - response = string(resp, len); - return true; -} - -bool ClientImpl::encode(const string& clearText, string& cipherText) -{ - const char* output; - unsigned int outlen; - int result = sasl_encode(conn, clearText.c_str(), clearText.size(), &output, &outlen); - if (result != SASL_OK) { - setError("sasl_encode", result); - return false; - } - cipherText = string(output, outlen); - return true; -} - -bool ClientImpl::decode(const string& cipherText, string& clearText) -{ - const char* input = cipherText.c_str(); - unsigned int inLen = cipherText.size(); - unsigned int remaining = inLen; - const char* cursor = input; - const char* output; - unsigned int outlen; - - clearText = string(); - while (remaining > 0) { - unsigned int segmentLen = (remaining < maxBufSize) ? 
remaining : maxBufSize; - int result = sasl_decode(conn, cursor, segmentLen, &output, &outlen); - if (result != SASL_OK) { - setError("sasl_decode", result); - return false; - } - clearText = clearText + string(output, outlen); - cursor += segmentLen; - remaining -= segmentLen; - } - return true; -} - -bool ClientImpl::getUserId(string& userId) -{ - int result; - const char* operName; - - result = sasl_getprop(conn, SASL_USERNAME, (const void**) &operName); - if (result != SASL_OK) { - setError("sasl_getprop(SASL_USERNAME)", result); - return false; - } - - userId = string(operName); - return true; -} - -bool ClientImpl::getSSF(int *ssf) -{ - int result = sasl_getprop(conn, SASL_SSF, (const void **)&ssf); - if (result != SASL_OK) { - setError("sasl_getprop(SASL_SSF)", result); - return false; - } - - return true; -} - -void ClientImpl::getError(string& _error) -{ - _error = error; - error.clear(); -} - -void ClientImpl::addCallback(unsigned long id, void* proc) -{ - callbacks[cbIndex].id = id; - callbacks[cbIndex].proc = (int (*)()) proc; - callbacks[cbIndex].context = this; - cbIndex++; -} - -void ClientImpl::setError(const string& context, int code, const string& text, const string& text2) -{ - stringstream err; - string errtext; - - if (text.empty()) { - if (conn) { - errtext = sasl_errdetail(conn); - } else { - errtext = sasl_errstring(code, NULL, NULL); - } - } else { - errtext = text; - } - - err << "Error in " << context << " (" << code << ") " << errtext; - if (!text2.empty()) - err << " - " << text2; - error = err.str(); -} - -void ClientImpl::interact(sasl_interact_t* prompt) -{ - string output; - char* input; - - if (prompt->id == SASL_CB_PASS) { - string ppt(prompt->prompt); - ppt += ": "; - char* pass = getpass(ppt.c_str()); - output = string(pass); - } else { - cout << prompt->prompt; - if (prompt->defresult) - cout << " [" << prompt->defresult << "]"; - cout << ": "; - cin >> output; - } - prompt->result = output.c_str(); - prompt->len = 
output.length(); -} - -int ClientImpl::cbName(void *context, int id, const char **result, unsigned *len) -{ - ClientImpl* impl = (ClientImpl*) context; - - if (id == SASL_CB_USER || (id == SASL_CB_AUTHNAME && impl->authName.empty())) { - *result = impl->userName.c_str(); - //*len = impl->userName.length(); - } else if (id == SASL_CB_AUTHNAME) { - *result = impl->authName.c_str(); - //*len = impl->authName.length(); - } - - return SASL_OK; -} - -int ClientImpl::cbPassword(sasl_conn_t *conn, void *context, int id, sasl_secret_t **psecret) -{ - ClientImpl* impl = (ClientImpl*) context; - size_t length = impl->password.length(); - - if (id == SASL_CB_PASS) { - impl->secret->len = length; - ::memcpy(impl->secret->data, impl->password.c_str(), length); - } else - impl->secret->len = 0; - - *psecret = impl->secret; - return SASL_OK; -} diff --git a/shell/ext-py/sasl-0.3.1/sasl/saslwrapper.pyx b/shell/ext-py/sasl-0.3.1/sasl/saslwrapper.pyx deleted file mode 100644 index ef5ab7ee2..000000000 --- a/shell/ext-py/sasl-0.3.1/sasl/saslwrapper.pyx +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright 2015 Cloudera Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -#cython: language_level=3 - -from six import string_types, PY3 - -from libcpp cimport bool -from libc.stdint cimport uint32_t -from libcpp.string cimport string as string_t - -cdef extern from 'saslwrapper.h' namespace 'saslwrapper': - cdef cppclass ClientImpl: - ClientImpl() except + - bool setAttr(const string_t& key, const string_t& value) - bool setAttr(const string_t& key, uint32_t value) - bool init() - bool start(const string_t& mechList, string_t& chosen, string_t& initialResponse) - bool step(const string_t& challenge, string_t& response) - bool encode(const string_t& clearText, string_t& cipherText) - bool decode(const string_t& cipherText, string_t& clearText) - bool getUserId(string_t& userId) - bool getSSF(int *ssf) - void getError(string_t& error) - - -cpdef string_t to_bytes(bytes_or_str): - if PY3 and isinstance(bytes_or_str, string_types): - return bytes_or_str.encode('utf-8') - return bytes_or_str - - -cpdef to_string(bytes_or_str): - if isinstance(bytes_or_str, bytes): - return bytes_or_str.decode('utf-8') - return bytes_or_str - - -cdef class Client: - cdef ClientImpl _this - - cpdef setAttr(self, key, value): - if isinstance(value, int): - return self._this.setAttr(to_bytes(key), value) - elif isinstance(value, string_types): - return self._this.setAttr(to_bytes(key), to_bytes(value)) - - cpdef init(self): - return self._this.init() - - cpdef start(self, mech_list): - cdef string_t chosen - cdef string_t initial_response - success = self._this.start(to_bytes(mech_list), chosen, initial_response) - return (success, chosen, initial_response) - - cpdef step(self, challenge): - cdef string_t response - success = self._this.step(to_bytes(challenge), response) - return (success, response) - - cpdef encode(self, clear_text): - cdef string_t cipher_text - success = self._this.encode(to_bytes(clear_text), cipher_text) - return (success, cipher_text) - - cpdef decode(self, cipher_text): - cdef string_t clear_text - success = 
self._this.decode(to_bytes(cipher_text), clear_text) - return (success, clear_text) - - cpdef getUserId(self): - cdef string_t user_id - success = self._this.getUserId(user_id) - return (success, user_id) - - cpdef getSSF(self): - cdef int ssf - success = self._this.getSSF(&ssf) - return (success, ssf) - - cpdef getError(self): - cdef string_t error - self._this.getError(error) - return error diff --git a/shell/ext-py/sasl-0.3.1/setup.cfg b/shell/ext-py/sasl-0.3.1/setup.cfg deleted file mode 100644 index 8bfd5a12f..000000000 --- a/shell/ext-py/sasl-0.3.1/setup.cfg +++ /dev/null @@ -1,4 +0,0 @@ -[egg_info] -tag_build = -tag_date = 0 - diff --git a/shell/ext-py/sasl-0.3.1/setup.py b/shell/ext-py/sasl-0.3.1/setup.py deleted file mode 100644 index 85e40c982..000000000 --- a/shell/ext-py/sasl-0.3.1/setup.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2015 Cloudera Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# Parts of this file were taken from the pandas project -# (https://github.com/pandas-dev/pandas), which is permitted for use under -# the BSD 3-Clause License - -from distutils.sysconfig import get_config_var -from distutils.version import LooseVersion -import os -import platform -from setuptools import setup, Extension -import sys - - -# From https://github.com/pandas-dev/pandas/pull/24274: -# For mac, ensure extensions are built for macos 10.9 when compiling on a -# 10.9 system or above, overriding distuitls behaviour which is to target -# the version that python was built for. This may be overridden by setting -# MACOSX_DEPLOYMENT_TARGET before calling setup.py -if sys.platform == 'darwin': - if 'MACOSX_DEPLOYMENT_TARGET' not in os.environ: - current_system = LooseVersion(platform.mac_ver()[0]) - python_target = LooseVersion( - get_config_var('MACOSX_DEPLOYMENT_TARGET')) - if python_target < '10.9' and current_system >= '10.9': - os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.9' - - -sasl_module = Extension('sasl.saslwrapper', - sources=['sasl/saslwrapper.cpp'], - include_dirs=["sasl"], - libraries=["sasl2"], - language="c++") -setup(name='sasl', - version='0.3.1', - url="http://github.com/cloudera/python-sasl", - maintainer="Todd Lipcon", - maintainer_email="todd@cloudera.com", - description="""Cyrus-SASL bindings for Python""", - classifiers=[ - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 3', - ], - packages=['sasl'], - install_requires=['six'], - ext_modules=[sasl_module], - include_package_data=True, - license='Apache License, Version 2.0') diff --git a/shell/ext-py/six-1.14.0/CHANGES b/shell/ext-py/six-1.14.0/CHANGES deleted file mode 100644 index 63eb48ea3..000000000 --- a/shell/ext-py/six-1.14.0/CHANGES +++ /dev/null @@ -1,329 +0,0 @@ -Changelog for six -================= - -This file lists the changes in each six version. - -1.14.0 ------- - -- Issue #288, pull request #289: Add `six.assertNotRegex`. 
- -- Issue #317: `six.moves._dummy_thread` now points to the `_thread` module on - Python 3.9+. Python 3.7 and later requires threading and deprecated the - `_dummy_thread` module. - -- Issue #308, pull request #314: Remove support for Python 2.6 and Python 3.2. - -- Issue #250, issue #165, pull request #251: `six.wraps` now ignores missing - attributes. This follows the Python 3.2+ standard library behavior. - -1.13.0 ------- - -- Issue #298, pull request #299: Add `six.moves.dbm_ndbm`. - -- Issue #155: Add `six.moves.collections_abc`, which aliases the `collections` - module on Python 2-3.2 and the `collections.abc` on Python 3.3 and greater. - -- Pull request #304: Re-add distutils fallback in `setup.py`. - -- Pull request #305: On Python 3.7, `with_metaclass` supports classes using PEP - 560 features. - -1.12.0 ------- - -- Issue #259, pull request #260: `six.add_metaclass` now preserves - `__qualname__` from the original class. - -- Pull request #204: Add `six.ensure_binary`, `six.ensure_text`, and - `six.ensure_str`. - -1.11.0 ------- - -- Pull request #178: `with_metaclass` now properly proxies `__prepare__` to the - underlying metaclass. - -- Pull request #191: Allow `with_metaclass` to work with metaclasses implemented - in C. - -- Pull request #203: Add parse_http_list and parse_keqv_list to moved - urllib.request. - -- Pull request #172 and issue #171: Add unquote_to_bytes to moved urllib.parse. - -- Pull request #167: Add `six.moves.getoutput`. - -- Pull request #80: Add `six.moves.urllib_parse.splitvalue`. - -- Pull request #75: Add `six.moves.email_mime_image`. - -- Pull request #72: Avoid creating reference cycles through tracebacks in - `reraise`. - -1.10.0 ------- - -- Issue #122: Improve the performance of `six.int2byte` on Python 3. - -- Pull request #55 and issue #99: Don't add the `winreg` module to `six.moves` - on non-Windows platforms. - -- Pull request #60 and issue #108: Add `six.moves.getcwd` and - `six.moves.getcwdu`. 
- -- Pull request #64: Add `create_unbound_method` to create unbound methods. - -1.9.0 ------ - -- Issue #106: Support the `flush` parameter to `six.print_`. - -- Pull request #48 and issue #15: Add the `python_2_unicode_compatible` - decorator. - -- Pull request #57 and issue #50: Add several compatibility methods for unittest - assertions that were renamed between Python 2 and 3. - -- Issue #105 and pull request #58: Ensure `six.wraps` respects the *updated* and - *assigned* arguments. - -- Issue #102: Add `raise_from` to abstract out Python 3's raise from syntax. - -- Issue #97: Optimize `six.iterbytes` on Python 2. - -- Issue #98: Fix `six.moves` race condition in multi-threaded code. - -- Pull request #51: Add `six.view(keys|values|itmes)`, which provide dictionary - views on Python 2.7+. - -- Issue #112: `six.moves.reload_module` now uses the importlib module on - Python 3.4+. - -1.8.0 ------ - -- Issue #90: Add `six.moves.shlex_quote`. - -- Issue #59: Add `six.moves.intern`. - -- Add `six.urllib.parse.uses_(fragment|netloc|params|query|relative)`. - -- Issue #88: Fix add_metaclass when the class has `__slots__` containing - `__weakref__` or `__dict__`. - -- Issue #89: Make six use absolute imports. - -- Issue #85: Always accept *updated* and *assigned* arguments for `wraps()`. - -- Issue #86: In `reraise()`, instantiate the exception if the second argument is - `None`. - -- Pull request #45: Add `six.moves.email_mime_nonmultipart`. - -- Issue #81: Add `six.urllib.request.splittag` mapping. - -- Issue #80: Add `six.urllib.request.splituser` mapping. - -1.7.3 ------ - -- Issue #77: Fix import six on Python 3.4 with a custom loader. - -- Issue #74: `six.moves.xmlrpc_server` should map to `SimpleXMLRPCServer` on Python - 2 as documented not `xmlrpclib`. - -1.7.2 ------ - -- Issue #72: Fix installing on Python 2. - -1.7.1 ------ - -- Issue #71: Make the six.moves meta path importer handle reloading of the six - module gracefully. 
- -1.7.0 ------ - -- Pull request #30: Implement six.moves with a PEP 302 meta path hook. - -- Pull request #32: Add six.wraps, which is like functools.wraps but always sets - the __wrapped__ attribute. - -- Pull request #35: Improve add_metaclass, so that it doesn't end up inserting - another class into the hierarchy. - -- Pull request #34: Add import mappings for dummy_thread. - -- Pull request #33: Add import mappings for UserDict and UserList. - -- Pull request #31: Select the implementations of dictionary iterator routines - at import time for a 20% speed boost. - -1.6.1 ------ - -- Raise an AttributeError for six.moves.X when X is a module not available in - the current interpreter. - -1.6.0 ------ - -- Raise an AttributeError for every attribute of unimportable modules. - -- Issue #56: Make the fake modules six.moves puts into sys.modules appear not to - have a __path__ unless they are loaded. - -- Pull request #28: Add support for SplitResult. - -- Issue #55: Add move mapping for xmlrpc.server. - -- Pull request #29: Add move for urllib.parse.splitquery. - -1.5.2 ------ - -- Issue #53: Make the fake modules six.moves puts into sys.modules appear not to - have a __name__ unless they are loaded. - -1.5.1 ------ - -- Issue #51: Hack around the Django autoreloader after recent six.moves changes. - -1.5.0 ------ - -- Removed support for Python 2.4. This is because py.test no longer supports - 2.4. - -- Fix various import problems including issues #19 and #41. six.moves modules - are now lazy wrappers over the underlying modules instead of the actual - modules themselves. - -- Issue #49: Add six.moves mapping for tkinter.ttk. - -- Pull request #24: Add __dir__ special method to six.moves modules. - -- Issue #47: Fix add_metaclass on classes with a string for the __slots__ - variable. - -- Issue #44: Fix interpretation of backslashes on Python 2 in the u() function. - -- Pull request #21: Add import mapping for urllib's proxy_bypass function. 
- -- Issue #43: Add import mapping for the Python 2 xmlrpclib module. - -- Issue #39: Add import mapping for the Python 2 thread module. - -- Issue #40: Add import mapping for the Python 2 gdbm module. - -- Issue #35: On Python versions less than 2.7, print_ now encodes unicode - strings when outputing to standard streams. (Python 2.7 handles this - automatically.) - -1.4.1 ------ - -- Issue #32: urllib module wrappings don't work when six is not a toplevel file. - -1.4.0 ------ - -- Issue #31: Add six.moves mapping for UserString. - -- Pull request #12: Add six.add_metaclass, a decorator for adding a metaclass to - a class. - -- Add six.moves.zip_longest and six.moves.filterfalse, which correspond - respectively to itertools.izip_longest and itertools.ifilterfalse on Python 2 - and itertools.zip_longest and itertools.filterfalse on Python 3. - -- Issue #25: Add the unichr function, which returns a string for a Unicode - codepoint. - -- Issue #26: Add byte2int function, which complements int2byte. - -- Add a PY2 constant with obvious semantics. - -- Add helpers for indexing and iterating over bytes: iterbytes and indexbytes. - -- Add create_bound_method() wrapper. - -- Issue #23: Allow multiple base classes to be passed to with_metaclass. - -- Issue #24: Add six.moves.range alias. This exactly the same as the current - xrange alias. - -- Pull request #5: Create six.moves.urllib, which contains abstractions for a - bunch of things which are in urllib in Python 3 and spread out across urllib, - urllib2, and urlparse in Python 2. - -1.3.0 ------ - -- Issue #21: Add methods to access the closure and globals of a function. - -- In six.iter(items/keys/values/lists), passed keyword arguments through to the - underlying method. - -- Add six.iterlists(). - -- Issue #20: Fix tests if tkinter is not available. - -- Issue #17: Define callable to be builtin callable when it is available again - in Python 3.2+. 
- -- Issue #16: Rename Python 2 exec_'s arguments, so casually calling exec_ with - keyword arguments will raise. - -- Issue #14: Put the six.moves package in sys.modules based on the name six is - imported under. - -- Fix Jython detection. - -- Pull request #4: Add email_mime_multipart, email_mime_text, and - email_mime_base to six.moves. - -1.2.0 ------ - -- Issue #13: Make iterkeys/itervalues/iteritems return iterators on Python 3 - instead of iterables. - -- Issue #11: Fix maxsize support on Jython. - -- Add six.next() as an alias for six.advance_iterator(). - -- Use the builtin next() function for advance_iterator() where is available - (2.6+), not just Python 3. - -- Add the Iterator class for writing portable iterators. - -1.1.0 ------ - -- Add the int2byte function. - -- Add compatibility mappings for iterators over the keys, values, and items of a - dictionary. - -- Fix six.MAXSIZE on platforms where sizeof(long) != sizeof(Py_ssize_t). - -- Issue #3: Add six.moves mappings for filter, map, and zip. - -1.0.0 ------ - -- Issue #2: u() on Python 2.x now resolves unicode escapes. - -- Expose an API for adding mappings to six.moves. - -1.0 beta 1 ----------- - -- Reworked six into one .py file. This breaks imports. Please tell me if you - are interested in an import compatibility layer. diff --git a/shell/ext-py/six-1.14.0/CONTRIBUTORS b/shell/ext-py/six-1.14.0/CONTRIBUTORS deleted file mode 100644 index 4480c34b1..000000000 --- a/shell/ext-py/six-1.14.0/CONTRIBUTORS +++ /dev/null @@ -1,42 +0,0 @@ -The primary author and maintainer of six is Benjamin Peterson. He would like to -acknowledge the following people who submitted bug reports, pull requests, and -otherwise worked to improve six: - -Marc Abramowitz -immerrr again -Alexander Artemenko -Aymeric Augustin -Lee Ball -Ben Bariteau -Ned Batchelder -Wouter Bolsterlee -Brett Cannon -Jason R. 
Coombs -Julien Danjou -Ben Darnell -Ben Davis -Jon Dufresne -Tim Graham -Thomas Grainger -Max Grender-Jones -Joshua Harlow -Toshiki Kataoka -Hugo van Kemenade -Anselm Kruis -Ivan Levkivskyi -Alexander Lukanin -James Mills -Jordan Moldow -Berker Peksag -Sridhar Ratnakumar -Erik Rose -Mirko Rossini -Peter Ruibal -Miroslav Shubernetskiy -Eli Schwartz -Anthony Sottile -Jonathan Vanasco -Lucas Wiman -Jingxin Zhu - -If you think you belong on this list, please let me know! --Benjamin diff --git a/shell/ext-py/six-1.14.0/LICENSE b/shell/ext-py/six-1.14.0/LICENSE deleted file mode 100644 index de6633112..000000000 --- a/shell/ext-py/six-1.14.0/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright (c) 2010-2020 Benjamin Peterson - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/shell/ext-py/six-1.14.0/MANIFEST.in b/shell/ext-py/six-1.14.0/MANIFEST.in deleted file mode 100644 index b924e068e..000000000 --- a/shell/ext-py/six-1.14.0/MANIFEST.in +++ /dev/null @@ -1,6 +0,0 @@ -include CHANGES -include LICENSE -include test_six.py - -recursive-include documentation * -prune documentation/_build diff --git a/shell/ext-py/six-1.14.0/README.rst b/shell/ext-py/six-1.14.0/README.rst deleted file mode 100644 index 6339ba5d9..000000000 --- a/shell/ext-py/six-1.14.0/README.rst +++ /dev/null @@ -1,29 +0,0 @@ -.. image:: https://img.shields.io/pypi/v/six.svg - :target: https://pypi.org/project/six/ - :alt: six on PyPI - -.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master - :target: https://travis-ci.org/benjaminp/six - :alt: six on TravisCI - -.. image:: https://readthedocs.org/projects/six/badge/?version=latest - :target: https://six.readthedocs.io/ - :alt: six's documentation on Read the Docs - -.. image:: https://img.shields.io/badge/license-MIT-green.svg - :target: https://github.com/benjaminp/six/blob/master/LICENSE - :alt: MIT License badge - -Six is a Python 2 and 3 compatibility library. It provides utility functions -for smoothing over the differences between the Python versions with the goal of -writing Python code that is compatible on both Python versions. See the -documentation for more information on what is provided. - -Six supports Python 2.7 and 3.3+. It is contained in only one Python -file, so it can be easily copied into your project. (The copyright and license -notice must be retained.) - -Online documentation is at https://six.readthedocs.io/. - -Bugs can be reported to https://github.com/benjaminp/six. The code can also -be found there. 
diff --git a/shell/ext-py/six-1.14.0/setup.cfg b/shell/ext-py/six-1.14.0/setup.cfg deleted file mode 100644 index 317e016c8..000000000 --- a/shell/ext-py/six-1.14.0/setup.cfg +++ /dev/null @@ -1,20 +0,0 @@ -[bdist_wheel] -universal = 1 - -[flake8] -max-line-length = 100 -ignore = F821 - -[metadata] -license_file = LICENSE - -[tool:pytest] -minversion=2.2.0 -pep8ignore = - documentation/*.py ALL - test_six.py ALL - -flakes-ignore = - documentation/*.py ALL - test_six.py ALL - six.py UndefinedName diff --git a/shell/ext-py/six-1.14.0/setup.py b/shell/ext-py/six-1.14.0/setup.py deleted file mode 100644 index d90958b69..000000000 --- a/shell/ext-py/six-1.14.0/setup.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (c) 2010-2020 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -from __future__ import with_statement - -# Six is a dependency of setuptools, so using setuptools creates a -# circular dependency when building a Python stack from source. 
We -# therefore allow falling back to distutils to install six. -try: - from setuptools import setup -except ImportError: - from distutils.core import setup - -import six - -six_classifiers = [ - "Development Status :: 5 - Production/Stable", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 3", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Topic :: Software Development :: Libraries", - "Topic :: Utilities", -] - -with open("README.rst", "r") as fp: - six_long_description = fp.read() - -setup(name="six", - version=six.__version__, - author="Benjamin Peterson", - author_email="benjamin@python.org", - url="https://github.com/benjaminp/six", - tests_require=["pytest"], - py_modules=["six"], - description="Python 2 and 3 compatibility utilities", - long_description=six_long_description, - license="MIT", - classifiers=six_classifiers, - python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*", - ) diff --git a/shell/ext-py/six-1.14.0/six.py b/shell/ext-py/six-1.14.0/six.py deleted file mode 100644 index 5fe9f8e14..000000000 --- a/shell/ext-py/six-1.14.0/six.py +++ /dev/null @@ -1,980 +0,0 @@ -# Copyright (c) 2010-2020 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -"""Utilities for writing code that runs on Python 2 and 3""" - -from __future__ import absolute_import - -import functools -import itertools -import operator -import sys -import types - -__author__ = "Benjamin Peterson " -__version__ = "1.14.0" - - -# Useful for very coarse version differentiation. -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 -PY34 = sys.version_info[0:2] >= (3, 4) - -if PY3: - string_types = str, - integer_types = int, - class_types = type, - text_type = str - binary_type = bytes - - MAXSIZE = sys.maxsize -else: - string_types = basestring, - integer_types = (int, long) - class_types = (type, types.ClassType) - text_type = unicode - binary_type = str - - if sys.platform.startswith("java"): - # Jython always uses 32 bits. - MAXSIZE = int((1 << 31) - 1) - else: - # It's possible to have sizeof(long) != sizeof(Py_ssize_t). - class X(object): - - def __len__(self): - return 1 << 31 - try: - len(X()) - except OverflowError: - # 32-bit - MAXSIZE = int((1 << 31) - 1) - else: - # 64-bit - MAXSIZE = int((1 << 63) - 1) - del X - - -def _add_doc(func, doc): - """Add documentation to a function.""" - func.__doc__ = doc - - -def _import_module(name): - """Import module, returning the module after the last dot.""" - __import__(name) - return sys.modules[name] - - -class _LazyDescr(object): - - def __init__(self, name): - self.name = name - - def __get__(self, obj, tp): - result = self._resolve() - setattr(obj, self.name, result) # Invokes __set__. 
- try: - # This is a bit ugly, but it avoids running this again by - # removing this descriptor. - delattr(obj.__class__, self.name) - except AttributeError: - pass - return result - - -class MovedModule(_LazyDescr): - - def __init__(self, name, old, new=None): - super(MovedModule, self).__init__(name) - if PY3: - if new is None: - new = name - self.mod = new - else: - self.mod = old - - def _resolve(self): - return _import_module(self.mod) - - def __getattr__(self, attr): - _module = self._resolve() - value = getattr(_module, attr) - setattr(self, attr, value) - return value - - -class _LazyModule(types.ModuleType): - - def __init__(self, name): - super(_LazyModule, self).__init__(name) - self.__doc__ = self.__class__.__doc__ - - def __dir__(self): - attrs = ["__doc__", "__name__"] - attrs += [attr.name for attr in self._moved_attributes] - return attrs - - # Subclasses should override this - _moved_attributes = [] - - -class MovedAttribute(_LazyDescr): - - def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): - super(MovedAttribute, self).__init__(name) - if PY3: - if new_mod is None: - new_mod = name - self.mod = new_mod - if new_attr is None: - if old_attr is None: - new_attr = name - else: - new_attr = old_attr - self.attr = new_attr - else: - self.mod = old_mod - if old_attr is None: - old_attr = name - self.attr = old_attr - - def _resolve(self): - module = _import_module(self.mod) - return getattr(module, self.attr) - - -class _SixMetaPathImporter(object): - - """ - A meta path importer to import six.moves and its submodules. - - This class implements a PEP302 finder and loader. It should be compatible - with Python 2.5 and all existing versions of Python3 - """ - - def __init__(self, six_module_name): - self.name = six_module_name - self.known_modules = {} - - def _add_module(self, mod, *fullnames): - for fullname in fullnames: - self.known_modules[self.name + "." 
+ fullname] = mod - - def _get_module(self, fullname): - return self.known_modules[self.name + "." + fullname] - - def find_module(self, fullname, path=None): - if fullname in self.known_modules: - return self - return None - - def __get_module(self, fullname): - try: - return self.known_modules[fullname] - except KeyError: - raise ImportError("This loader does not know module " + fullname) - - def load_module(self, fullname): - try: - # in case of a reload - return sys.modules[fullname] - except KeyError: - pass - mod = self.__get_module(fullname) - if isinstance(mod, MovedModule): - mod = mod._resolve() - else: - mod.__loader__ = self - sys.modules[fullname] = mod - return mod - - def is_package(self, fullname): - """ - Return true, if the named module is a package. - - We need this method to get correct spec objects with - Python 3.4 (see PEP451) - """ - return hasattr(self.__get_module(fullname), "__path__") - - def get_code(self, fullname): - """Return None - - Required, if is_package is implemented""" - self.__get_module(fullname) # eventually raises ImportError - return None - get_source = get_code # same as get_code - -_importer = _SixMetaPathImporter(__name__) - - -class _MovedItems(_LazyModule): - - """Lazy loading of moved objects""" - __path__ = [] # mark as package - - -_moved_attributes = [ - MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), - MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), - MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), - MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), - MovedAttribute("intern", "__builtin__", "sys"), - MovedAttribute("map", "itertools", "builtins", "imap", "map"), - MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), - MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), - MovedAttribute("getoutput", "commands", "subprocess"), - MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), - 
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), - MovedAttribute("reduce", "__builtin__", "functools"), - MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), - MovedAttribute("StringIO", "StringIO", "io"), - MovedAttribute("UserDict", "UserDict", "collections"), - MovedAttribute("UserList", "UserList", "collections"), - MovedAttribute("UserString", "UserString", "collections"), - MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), - MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), - MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), - MovedModule("builtins", "__builtin__"), - MovedModule("configparser", "ConfigParser"), - MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), - MovedModule("copyreg", "copy_reg"), - MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), - MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), - MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), - MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), - MovedModule("http_cookies", "Cookie", "http.cookies"), - MovedModule("html_entities", "htmlentitydefs", "html.entities"), - MovedModule("html_parser", "HTMLParser", "html.parser"), - MovedModule("http_client", "httplib", "http.client"), - MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), - MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), - MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), - MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), - MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), - MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), - MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), - MovedModule("SimpleHTTPServer", "SimpleHTTPServer", 
"http.server"), - MovedModule("cPickle", "cPickle", "pickle"), - MovedModule("queue", "Queue"), - MovedModule("reprlib", "repr"), - MovedModule("socketserver", "SocketServer"), - MovedModule("_thread", "thread", "_thread"), - MovedModule("tkinter", "Tkinter"), - MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), - MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), - MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), - MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), - MovedModule("tkinter_tix", "Tix", "tkinter.tix"), - MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), - MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), - MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), - MovedModule("tkinter_colorchooser", "tkColorChooser", - "tkinter.colorchooser"), - MovedModule("tkinter_commondialog", "tkCommonDialog", - "tkinter.commondialog"), - MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), - MovedModule("tkinter_font", "tkFont", "tkinter.font"), - MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), - MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", - "tkinter.simpledialog"), - MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), - MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), - MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), - MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), - MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), - MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), -] -# Add windows specific modules. -if sys.platform == "win32": - _moved_attributes += [ - MovedModule("winreg", "_winreg"), - ] - -for attr in _moved_attributes: - setattr(_MovedItems, attr.name, attr) - if isinstance(attr, MovedModule): - _importer._add_module(attr, "moves." 
+ attr.name) -del attr - -_MovedItems._moved_attributes = _moved_attributes - -moves = _MovedItems(__name__ + ".moves") -_importer._add_module(moves, "moves") - - -class Module_six_moves_urllib_parse(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_parse""" - - -_urllib_parse_moved_attributes = [ - MovedAttribute("ParseResult", "urlparse", "urllib.parse"), - MovedAttribute("SplitResult", "urlparse", "urllib.parse"), - MovedAttribute("parse_qs", "urlparse", "urllib.parse"), - MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), - MovedAttribute("urldefrag", "urlparse", "urllib.parse"), - MovedAttribute("urljoin", "urlparse", "urllib.parse"), - MovedAttribute("urlparse", "urlparse", "urllib.parse"), - MovedAttribute("urlsplit", "urlparse", "urllib.parse"), - MovedAttribute("urlunparse", "urlparse", "urllib.parse"), - MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), - MovedAttribute("quote", "urllib", "urllib.parse"), - MovedAttribute("quote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote", "urllib", "urllib.parse"), - MovedAttribute("unquote_plus", "urllib", "urllib.parse"), - MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), - MovedAttribute("urlencode", "urllib", "urllib.parse"), - MovedAttribute("splitquery", "urllib", "urllib.parse"), - MovedAttribute("splittag", "urllib", "urllib.parse"), - MovedAttribute("splituser", "urllib", "urllib.parse"), - MovedAttribute("splitvalue", "urllib", "urllib.parse"), - MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), - MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), - MovedAttribute("uses_params", "urlparse", "urllib.parse"), - MovedAttribute("uses_query", "urlparse", "urllib.parse"), - MovedAttribute("uses_relative", "urlparse", "urllib.parse"), -] -for attr in _urllib_parse_moved_attributes: - setattr(Module_six_moves_urllib_parse, attr.name, attr) -del attr - -Module_six_moves_urllib_parse._moved_attributes = 
_urllib_parse_moved_attributes - -_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), - "moves.urllib_parse", "moves.urllib.parse") - - -class Module_six_moves_urllib_error(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_error""" - - -_urllib_error_moved_attributes = [ - MovedAttribute("URLError", "urllib2", "urllib.error"), - MovedAttribute("HTTPError", "urllib2", "urllib.error"), - MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), -] -for attr in _urllib_error_moved_attributes: - setattr(Module_six_moves_urllib_error, attr.name, attr) -del attr - -Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes - -_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), - "moves.urllib_error", "moves.urllib.error") - - -class Module_six_moves_urllib_request(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_request""" - - -_urllib_request_moved_attributes = [ - MovedAttribute("urlopen", "urllib2", "urllib.request"), - MovedAttribute("install_opener", "urllib2", "urllib.request"), - MovedAttribute("build_opener", "urllib2", "urllib.request"), - MovedAttribute("pathname2url", "urllib", "urllib.request"), - MovedAttribute("url2pathname", "urllib", "urllib.request"), - MovedAttribute("getproxies", "urllib", "urllib.request"), - MovedAttribute("Request", "urllib2", "urllib.request"), - MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), - MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), - MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), - MovedAttribute("BaseHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), - MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), - 
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), - MovedAttribute("FileHandler", "urllib2", "urllib.request"), - MovedAttribute("FTPHandler", "urllib2", "urllib.request"), - MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), - MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), - MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), - MovedAttribute("urlretrieve", "urllib", "urllib.request"), - MovedAttribute("urlcleanup", "urllib", "urllib.request"), - MovedAttribute("URLopener", "urllib", "urllib.request"), - MovedAttribute("FancyURLopener", "urllib", "urllib.request"), - MovedAttribute("proxy_bypass", "urllib", "urllib.request"), - MovedAttribute("parse_http_list", "urllib2", "urllib.request"), - MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), -] -for attr in _urllib_request_moved_attributes: - setattr(Module_six_moves_urllib_request, attr.name, attr) -del attr - -Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes - -_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), - "moves.urllib_request", "moves.urllib.request") - - -class Module_six_moves_urllib_response(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_response""" - - -_urllib_response_moved_attributes = [ - MovedAttribute("addbase", "urllib", "urllib.response"), - MovedAttribute("addclosehook", "urllib", "urllib.response"), - MovedAttribute("addinfo", 
"urllib", "urllib.response"), - MovedAttribute("addinfourl", "urllib", "urllib.response"), -] -for attr in _urllib_response_moved_attributes: - setattr(Module_six_moves_urllib_response, attr.name, attr) -del attr - -Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes - -_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), - "moves.urllib_response", "moves.urllib.response") - - -class Module_six_moves_urllib_robotparser(_LazyModule): - - """Lazy loading of moved objects in six.moves.urllib_robotparser""" - - -_urllib_robotparser_moved_attributes = [ - MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), -] -for attr in _urllib_robotparser_moved_attributes: - setattr(Module_six_moves_urllib_robotparser, attr.name, attr) -del attr - -Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes - -_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), - "moves.urllib_robotparser", "moves.urllib.robotparser") - - -class Module_six_moves_urllib(types.ModuleType): - - """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" - __path__ = [] # mark as package - parse = _importer._get_module("moves.urllib_parse") - error = _importer._get_module("moves.urllib_error") - request = _importer._get_module("moves.urllib_request") - response = _importer._get_module("moves.urllib_response") - robotparser = _importer._get_module("moves.urllib_robotparser") - - def __dir__(self): - return ['parse', 'error', 'request', 'response', 'robotparser'] - -_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), - "moves.urllib") - - -def add_move(move): - """Add an item to six.moves.""" - setattr(_MovedItems, move.name, move) - - -def remove_move(name): - """Remove item from six.moves.""" - try: - delattr(_MovedItems, name) - except AttributeError: - try: - del moves.__dict__[name] - 
except KeyError: - raise AttributeError("no such move, %r" % (name,)) - - -if PY3: - _meth_func = "__func__" - _meth_self = "__self__" - - _func_closure = "__closure__" - _func_code = "__code__" - _func_defaults = "__defaults__" - _func_globals = "__globals__" -else: - _meth_func = "im_func" - _meth_self = "im_self" - - _func_closure = "func_closure" - _func_code = "func_code" - _func_defaults = "func_defaults" - _func_globals = "func_globals" - - -try: - advance_iterator = next -except NameError: - def advance_iterator(it): - return it.next() -next = advance_iterator - - -try: - callable = callable -except NameError: - def callable(obj): - return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) - - -if PY3: - def get_unbound_function(unbound): - return unbound - - create_bound_method = types.MethodType - - def create_unbound_method(func, cls): - return func - - Iterator = object -else: - def get_unbound_function(unbound): - return unbound.im_func - - def create_bound_method(func, obj): - return types.MethodType(func, obj, obj.__class__) - - def create_unbound_method(func, cls): - return types.MethodType(func, None, cls) - - class Iterator(object): - - def next(self): - return type(self).__next__(self) - - callable = callable -_add_doc(get_unbound_function, - """Get the function out of a possibly unbound function""") - - -get_method_function = operator.attrgetter(_meth_func) -get_method_self = operator.attrgetter(_meth_self) -get_function_closure = operator.attrgetter(_func_closure) -get_function_code = operator.attrgetter(_func_code) -get_function_defaults = operator.attrgetter(_func_defaults) -get_function_globals = operator.attrgetter(_func_globals) - - -if PY3: - def iterkeys(d, **kw): - return iter(d.keys(**kw)) - - def itervalues(d, **kw): - return iter(d.values(**kw)) - - def iteritems(d, **kw): - return iter(d.items(**kw)) - - def iterlists(d, **kw): - return iter(d.lists(**kw)) - - viewkeys = operator.methodcaller("keys") - - viewvalues = 
operator.methodcaller("values") - - viewitems = operator.methodcaller("items") -else: - def iterkeys(d, **kw): - return d.iterkeys(**kw) - - def itervalues(d, **kw): - return d.itervalues(**kw) - - def iteritems(d, **kw): - return d.iteritems(**kw) - - def iterlists(d, **kw): - return d.iterlists(**kw) - - viewkeys = operator.methodcaller("viewkeys") - - viewvalues = operator.methodcaller("viewvalues") - - viewitems = operator.methodcaller("viewitems") - -_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") -_add_doc(itervalues, "Return an iterator over the values of a dictionary.") -_add_doc(iteritems, - "Return an iterator over the (key, value) pairs of a dictionary.") -_add_doc(iterlists, - "Return an iterator over the (key, [values]) pairs of a dictionary.") - - -if PY3: - def b(s): - return s.encode("latin-1") - - def u(s): - return s - unichr = chr - import struct - int2byte = struct.Struct(">B").pack - del struct - byte2int = operator.itemgetter(0) - indexbytes = operator.getitem - iterbytes = iter - import io - StringIO = io.StringIO - BytesIO = io.BytesIO - del io - _assertCountEqual = "assertCountEqual" - if sys.version_info[1] <= 1: - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - _assertNotRegex = "assertNotRegexpMatches" - else: - _assertRaisesRegex = "assertRaisesRegex" - _assertRegex = "assertRegex" - _assertNotRegex = "assertNotRegex" -else: - def b(s): - return s - # Workaround for standalone backslash - - def u(s): - return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") - unichr = unichr - int2byte = chr - - def byte2int(bs): - return ord(bs[0]) - - def indexbytes(buf, i): - return ord(buf[i]) - iterbytes = functools.partial(itertools.imap, ord) - import StringIO - StringIO = BytesIO = StringIO.StringIO - _assertCountEqual = "assertItemsEqual" - _assertRaisesRegex = "assertRaisesRegexp" - _assertRegex = "assertRegexpMatches" - _assertNotRegex = "assertNotRegexpMatches" -_add_doc(b, 
"""Byte literal""") -_add_doc(u, """Text literal""") - - -def assertCountEqual(self, *args, **kwargs): - return getattr(self, _assertCountEqual)(*args, **kwargs) - - -def assertRaisesRegex(self, *args, **kwargs): - return getattr(self, _assertRaisesRegex)(*args, **kwargs) - - -def assertRegex(self, *args, **kwargs): - return getattr(self, _assertRegex)(*args, **kwargs) - - -def assertNotRegex(self, *args, **kwargs): - return getattr(self, _assertNotRegex)(*args, **kwargs) - - -if PY3: - exec_ = getattr(moves.builtins, "exec") - - def reraise(tp, value, tb=None): - try: - if value is None: - value = tp() - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - finally: - value = None - tb = None - -else: - def exec_(_code_, _globs_=None, _locs_=None): - """Execute code in a namespace.""" - if _globs_ is None: - frame = sys._getframe(1) - _globs_ = frame.f_globals - if _locs_ is None: - _locs_ = frame.f_locals - del frame - elif _locs_ is None: - _locs_ = _globs_ - exec("""exec _code_ in _globs_, _locs_""") - - exec_("""def reraise(tp, value, tb=None): - try: - raise tp, value, tb - finally: - tb = None -""") - - -if sys.version_info[:2] > (3,): - exec_("""def raise_from(value, from_value): - try: - raise value from from_value - finally: - value = None -""") -else: - def raise_from(value, from_value): - raise value - - -print_ = getattr(moves.builtins, "print", None) -if print_ is None: - def print_(*args, **kwargs): - """The new-style print function for Python 2.4 and 2.5.""" - fp = kwargs.pop("file", sys.stdout) - if fp is None: - return - - def write(data): - if not isinstance(data, basestring): - data = str(data) - # If the file has an encoding, encode unicode with it. 
- if (isinstance(fp, file) and - isinstance(data, unicode) and - fp.encoding is not None): - errors = getattr(fp, "errors", None) - if errors is None: - errors = "strict" - data = data.encode(fp.encoding, errors) - fp.write(data) - want_unicode = False - sep = kwargs.pop("sep", None) - if sep is not None: - if isinstance(sep, unicode): - want_unicode = True - elif not isinstance(sep, str): - raise TypeError("sep must be None or a string") - end = kwargs.pop("end", None) - if end is not None: - if isinstance(end, unicode): - want_unicode = True - elif not isinstance(end, str): - raise TypeError("end must be None or a string") - if kwargs: - raise TypeError("invalid keyword arguments to print()") - if not want_unicode: - for arg in args: - if isinstance(arg, unicode): - want_unicode = True - break - if want_unicode: - newline = unicode("\n") - space = unicode(" ") - else: - newline = "\n" - space = " " - if sep is None: - sep = space - if end is None: - end = newline - for i, arg in enumerate(args): - if i: - write(sep) - write(arg) - write(end) -if sys.version_info[:2] < (3, 3): - _print = print_ - - def print_(*args, **kwargs): - fp = kwargs.get("file", sys.stdout) - flush = kwargs.pop("flush", False) - _print(*args, **kwargs) - if flush and fp is not None: - fp.flush() - -_add_doc(reraise, """Reraise an exception.""") - -if sys.version_info[0:2] < (3, 4): - # This does exactly the same what the :func:`py3:functools.update_wrapper` - # function does on Python versions after 3.2. It sets the ``__wrapped__`` - # attribute on ``wrapper`` object and it doesn't raise an error if any of - # the attributes mentioned in ``assigned`` and ``updated`` are missing on - # ``wrapped`` object. 
- def _update_wrapper(wrapper, wrapped, - assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - for attr in assigned: - try: - value = getattr(wrapped, attr) - except AttributeError: - continue - else: - setattr(wrapper, attr, value) - for attr in updated: - getattr(wrapper, attr).update(getattr(wrapped, attr, {})) - wrapper.__wrapped__ = wrapped - return wrapper - _update_wrapper.__doc__ = functools.update_wrapper.__doc__ - - def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, - updated=functools.WRAPPER_UPDATES): - return functools.partial(_update_wrapper, wrapped=wrapped, - assigned=assigned, updated=updated) - wraps.__doc__ = functools.wraps.__doc__ - -else: - wraps = functools.wraps - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass.""" - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(type): - - def __new__(cls, name, this_bases, d): - if sys.version_info[:2] >= (3, 7): - # This version introduced PEP 560 that requires a bit - # of extra care (we mimic what is done by __build_class__). 
- resolved_bases = types.resolve_bases(bases) - if resolved_bases is not bases: - d['__orig_bases__'] = bases - else: - resolved_bases = bases - return meta(name, resolved_bases, d) - - @classmethod - def __prepare__(cls, name, this_bases): - return meta.__prepare__(name, bases) - return type.__new__(metaclass, 'temporary_class', (), {}) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - if hasattr(cls, '__qualname__'): - orig_vars['__qualname__'] = cls.__qualname__ - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -def ensure_binary(s, encoding='utf-8', errors='strict'): - """Coerce **s** to six.binary_type. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> encoded to `bytes` - - `bytes` -> `bytes` - """ - if isinstance(s, text_type): - return s.encode(encoding, errors) - elif isinstance(s, binary_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - -def ensure_str(s, encoding='utf-8', errors='strict'): - """Coerce *s* to `str`. - - For Python 2: - - `unicode` -> encoded to `str` - - `str` -> `str` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if not isinstance(s, (text_type, binary_type)): - raise TypeError("not expecting type '%s'" % type(s)) - if PY2 and isinstance(s, text_type): - s = s.encode(encoding, errors) - elif PY3 and isinstance(s, binary_type): - s = s.decode(encoding, errors) - return s - - -def ensure_text(s, encoding='utf-8', errors='strict'): - """Coerce *s* to six.text_type. 
- - For Python 2: - - `unicode` -> `unicode` - - `str` -> `unicode` - - For Python 3: - - `str` -> `str` - - `bytes` -> decoded to `str` - """ - if isinstance(s, binary_type): - return s.decode(encoding, errors) - elif isinstance(s, text_type): - return s - else: - raise TypeError("not expecting type '%s'" % type(s)) - - -def python_2_unicode_compatible(klass): - """ - A class decorator that defines __unicode__ and __str__ methods under Python 2. - Under Python 3 it does nothing. - - To support Python 2 and 3 with a single code base, define a __str__ method - returning text and apply this decorator to the class. - """ - if PY2: - if '__str__' not in klass.__dict__: - raise ValueError("@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % - klass.__name__) - klass.__unicode__ = klass.__str__ - klass.__str__ = lambda self: self.__unicode__().encode('utf-8') - return klass - - -# Complete the moves implementation. -# This code is at the end of this module to speed up module loading. -# Turn this module into a package. -__path__ = [] # required for PEP 302 and PEP 451 -__package__ = __name__ # see PEP 366 @ReservedAssignment -if globals().get("__spec__") is not None: - __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable -# Remove other six meta path importers, since they cause problems. This can -# happen if six is removed from sys.modules and then reloaded. (Setuptools does -# this for some reason.) -if sys.meta_path: - for i, importer in enumerate(sys.meta_path): - # Here's some real nastiness: Another "instance" of the six module might - # be floating around. Therefore, we can't use isinstance() to check for - # the six meta path importer, since the other six instance will have - # inserted an importer with different class. 
- if (type(importer).__name__ == "_SixMetaPathImporter" and - importer.name == __name__): - del sys.meta_path[i] - break - del i, importer -# Finally, add the importer to the meta path import hook. -sys.meta_path.append(_importer) diff --git a/shell/ext-py/six-1.14.0/test_six.py b/shell/ext-py/six-1.14.0/test_six.py deleted file mode 100644 index 7b8b03b5e..000000000 --- a/shell/ext-py/six-1.14.0/test_six.py +++ /dev/null @@ -1,1041 +0,0 @@ -# Copyright (c) 2010-2020 Benjamin Peterson -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
- -import operator -import sys -import types -import unittest -import abc - -import pytest - -import six - - -def test_add_doc(): - def f(): - """Icky doc""" - pass - six._add_doc(f, """New doc""") - assert f.__doc__ == "New doc" - - -def test_import_module(): - from logging import handlers - m = six._import_module("logging.handlers") - assert m is handlers - - -def test_integer_types(): - assert isinstance(1, six.integer_types) - assert isinstance(-1, six.integer_types) - assert isinstance(six.MAXSIZE + 23, six.integer_types) - assert not isinstance(.1, six.integer_types) - - -def test_string_types(): - assert isinstance("hi", six.string_types) - assert isinstance(six.u("hi"), six.string_types) - assert issubclass(six.text_type, six.string_types) - - -def test_class_types(): - class X: - pass - class Y(object): - pass - assert isinstance(X, six.class_types) - assert isinstance(Y, six.class_types) - assert not isinstance(X(), six.class_types) - - -def test_text_type(): - assert type(six.u("hi")) is six.text_type - - -def test_binary_type(): - assert type(six.b("hi")) is six.binary_type - - -def test_MAXSIZE(): - try: - # This shouldn't raise an overflow error. - six.MAXSIZE.__index__() - except AttributeError: - # Before Python 2.6. 
- pass - pytest.raises( - (ValueError, OverflowError), - operator.mul, [None], six.MAXSIZE + 1) - - -def test_lazy(): - if six.PY3: - html_name = "html.parser" - else: - html_name = "HTMLParser" - assert html_name not in sys.modules - mod = six.moves.html_parser - assert sys.modules[html_name] is mod - assert "htmlparser" not in six._MovedItems.__dict__ - - -try: - import _tkinter -except ImportError: - have_tkinter = False -else: - have_tkinter = True - -have_gdbm = True -try: - import gdbm -except ImportError: - try: - import dbm.gnu - except ImportError: - have_gdbm = False - -@pytest.mark.parametrize("item_name", - [item.name for item in six._moved_attributes]) -def test_move_items(item_name): - """Ensure that everything loads correctly.""" - try: - item = getattr(six.moves, item_name) - if isinstance(item, types.ModuleType): - __import__("six.moves." + item_name) - except ImportError: - if item_name == "winreg" and not sys.platform.startswith("win"): - pytest.skip("Windows only module") - if item_name.startswith("tkinter"): - if not have_tkinter: - pytest.skip("requires tkinter") - if item_name.startswith("dbm_gnu") and not have_gdbm: - pytest.skip("requires gdbm") - raise - assert item_name in dir(six.moves) - - -@pytest.mark.parametrize("item_name", - [item.name for item in six._urllib_parse_moved_attributes]) -def test_move_items_urllib_parse(item_name): - """Ensure that everything loads correctly.""" - assert item_name in dir(six.moves.urllib.parse) - getattr(six.moves.urllib.parse, item_name) - - -@pytest.mark.parametrize("item_name", - [item.name for item in six._urllib_error_moved_attributes]) -def test_move_items_urllib_error(item_name): - """Ensure that everything loads correctly.""" - assert item_name in dir(six.moves.urllib.error) - getattr(six.moves.urllib.error, item_name) - - -@pytest.mark.parametrize("item_name", - [item.name for item in six._urllib_request_moved_attributes]) -def test_move_items_urllib_request(item_name): - """Ensure that 
everything loads correctly.""" - assert item_name in dir(six.moves.urllib.request) - getattr(six.moves.urllib.request, item_name) - - -@pytest.mark.parametrize("item_name", - [item.name for item in six._urllib_response_moved_attributes]) -def test_move_items_urllib_response(item_name): - """Ensure that everything loads correctly.""" - assert item_name in dir(six.moves.urllib.response) - getattr(six.moves.urllib.response, item_name) - - -@pytest.mark.parametrize("item_name", - [item.name for item in six._urllib_robotparser_moved_attributes]) -def test_move_items_urllib_robotparser(item_name): - """Ensure that everything loads correctly.""" - assert item_name in dir(six.moves.urllib.robotparser) - getattr(six.moves.urllib.robotparser, item_name) - - -def test_import_moves_error_1(): - from six.moves.urllib.parse import urljoin - from six import moves - # In 1.4.1: AttributeError: 'Module_six_moves_urllib_parse' object has no attribute 'urljoin' - assert moves.urllib.parse.urljoin - - -def test_import_moves_error_2(): - from six import moves - assert moves.urllib.parse.urljoin - # In 1.4.1: ImportError: cannot import name urljoin - from six.moves.urllib.parse import urljoin - - -def test_import_moves_error_3(): - from six.moves.urllib.parse import urljoin - # In 1.4.1: ImportError: cannot import name urljoin - from six.moves.urllib_parse import urljoin - - -def test_from_imports(): - from six.moves.queue import Queue - assert isinstance(Queue, six.class_types) - from six.moves.configparser import ConfigParser - assert isinstance(ConfigParser, six.class_types) - - -def test_filter(): - from six.moves import filter - f = filter(lambda x: x % 2, range(10)) - assert six.advance_iterator(f) == 1 - - -def test_filter_false(): - from six.moves import filterfalse - f = filterfalse(lambda x: x % 3, range(10)) - assert six.advance_iterator(f) == 0 - assert six.advance_iterator(f) == 3 - assert six.advance_iterator(f) == 6 - -def test_map(): - from six.moves import map - assert 
six.advance_iterator(map(lambda x: x + 1, range(2))) == 1 - - -def test_getoutput(): - from six.moves import getoutput - output = getoutput('echo "foo"') - assert output == 'foo' - - -def test_zip(): - from six.moves import zip - assert six.advance_iterator(zip(range(2), range(2))) == (0, 0) - - -def test_zip_longest(): - from six.moves import zip_longest - it = zip_longest(range(2), range(1)) - - assert six.advance_iterator(it) == (0, 0) - assert six.advance_iterator(it) == (1, None) - - -class TestCustomizedMoves: - - def teardown_method(self, meth): - try: - del six._MovedItems.spam - except AttributeError: - pass - try: - del six.moves.__dict__["spam"] - except KeyError: - pass - - - def test_moved_attribute(self): - attr = six.MovedAttribute("spam", "foo", "bar") - if six.PY3: - assert attr.mod == "bar" - else: - assert attr.mod == "foo" - assert attr.attr == "spam" - attr = six.MovedAttribute("spam", "foo", "bar", "lemma") - assert attr.attr == "lemma" - attr = six.MovedAttribute("spam", "foo", "bar", "lemma", "theorm") - if six.PY3: - assert attr.attr == "theorm" - else: - assert attr.attr == "lemma" - - - def test_moved_module(self): - attr = six.MovedModule("spam", "foo") - if six.PY3: - assert attr.mod == "spam" - else: - assert attr.mod == "foo" - attr = six.MovedModule("spam", "foo", "bar") - if six.PY3: - assert attr.mod == "bar" - else: - assert attr.mod == "foo" - - - def test_custom_move_module(self): - attr = six.MovedModule("spam", "six", "six") - six.add_move(attr) - six.remove_move("spam") - assert not hasattr(six.moves, "spam") - attr = six.MovedModule("spam", "six", "six") - six.add_move(attr) - from six.moves import spam - assert spam is six - six.remove_move("spam") - assert not hasattr(six.moves, "spam") - - - def test_custom_move_attribute(self): - attr = six.MovedAttribute("spam", "six", "six", "u", "u") - six.add_move(attr) - six.remove_move("spam") - assert not hasattr(six.moves, "spam") - attr = six.MovedAttribute("spam", "six", "six", 
"u", "u") - six.add_move(attr) - from six.moves import spam - assert spam is six.u - six.remove_move("spam") - assert not hasattr(six.moves, "spam") - - - def test_empty_remove(self): - pytest.raises(AttributeError, six.remove_move, "eggs") - - -def test_get_unbound_function(): - class X(object): - def m(self): - pass - assert six.get_unbound_function(X.m) is X.__dict__["m"] - - -def test_get_method_self(): - class X(object): - def m(self): - pass - x = X() - assert six.get_method_self(x.m) is x - pytest.raises(AttributeError, six.get_method_self, 42) - - -def test_get_method_function(): - class X(object): - def m(self): - pass - x = X() - assert six.get_method_function(x.m) is X.__dict__["m"] - pytest.raises(AttributeError, six.get_method_function, hasattr) - - -def test_get_function_closure(): - def f(): - x = 42 - def g(): - return x - return g - cell = six.get_function_closure(f())[0] - assert type(cell).__name__ == "cell" - - -def test_get_function_code(): - def f(): - pass - assert isinstance(six.get_function_code(f), types.CodeType) - if not hasattr(sys, "pypy_version_info"): - pytest.raises(AttributeError, six.get_function_code, hasattr) - - -def test_get_function_defaults(): - def f(x, y=3, b=4): - pass - assert six.get_function_defaults(f) == (3, 4) - - -def test_get_function_globals(): - def f(): - pass - assert six.get_function_globals(f) is globals() - - -def test_dictionary_iterators(monkeypatch): - def stock_method_name(iterwhat): - """Given a method suffix like "lists" or "values", return the name - of the dict method that delivers those on the version of Python - we're running in.""" - if six.PY3: - return iterwhat - return 'iter' + iterwhat - - class MyDict(dict): - if not six.PY3: - def lists(self, **kw): - return [1, 2, 3] - def iterlists(self, **kw): - return iter([1, 2, 3]) - f = MyDict.iterlists - del MyDict.iterlists - setattr(MyDict, stock_method_name('lists'), f) - - d = MyDict(zip(range(10), reversed(range(10)))) - for name in "keys", 
"values", "items", "lists": - meth = getattr(six, "iter" + name) - it = meth(d) - assert not isinstance(it, list) - assert list(it) == list(getattr(d, name)()) - pytest.raises(StopIteration, six.advance_iterator, it) - record = [] - def with_kw(*args, **kw): - record.append(kw["kw"]) - return old(*args) - old = getattr(MyDict, stock_method_name(name)) - monkeypatch.setattr(MyDict, stock_method_name(name), with_kw) - meth(d, kw=42) - assert record == [42] - monkeypatch.undo() - - -def test_dictionary_views(): - d = dict(zip(range(10), (range(11, 20)))) - for name in "keys", "values", "items": - meth = getattr(six, "view" + name) - view = meth(d) - assert set(view) == set(getattr(d, name)()) - - -def test_advance_iterator(): - assert six.next is six.advance_iterator - l = [1, 2] - it = iter(l) - assert six.next(it) == 1 - assert six.next(it) == 2 - pytest.raises(StopIteration, six.next, it) - pytest.raises(StopIteration, six.next, it) - - -def test_iterator(): - class myiter(six.Iterator): - def __next__(self): - return 13 - assert six.advance_iterator(myiter()) == 13 - class myitersub(myiter): - def __next__(self): - return 14 - assert six.advance_iterator(myitersub()) == 14 - - -def test_callable(): - class X: - def __call__(self): - pass - def method(self): - pass - assert six.callable(X) - assert six.callable(X()) - assert six.callable(test_callable) - assert six.callable(hasattr) - assert six.callable(X.method) - assert six.callable(X().method) - assert not six.callable(4) - assert not six.callable("string") - - -def test_create_bound_method(): - class X(object): - pass - def f(self): - return self - x = X() - b = six.create_bound_method(f, x) - assert isinstance(b, types.MethodType) - assert b() is x - - -def test_create_unbound_method(): - class X(object): - pass - - def f(self): - return self - u = six.create_unbound_method(f, X) - pytest.raises(TypeError, u) - if six.PY2: - assert isinstance(u, types.MethodType) - x = X() - assert f(x) is x - - -if six.PY3: 
- - def test_b(): - data = six.b("\xff") - assert isinstance(data, bytes) - assert len(data) == 1 - assert data == bytes([255]) - - - def test_u(): - s = six.u("hi \u0439 \U00000439 \\ \\\\ \n") - assert isinstance(s, str) - assert s == "hi \u0439 \U00000439 \\ \\\\ \n" - -else: - - def test_b(): - data = six.b("\xff") - assert isinstance(data, str) - assert len(data) == 1 - assert data == "\xff" - - - def test_u(): - s = six.u("hi \u0439 \U00000439 \\ \\\\ \n") - assert isinstance(s, unicode) - assert s == "hi \xd0\xb9 \xd0\xb9 \\ \\\\ \n".decode("utf8") - - -def test_u_escapes(): - s = six.u("\u1234") - assert len(s) == 1 - - -def test_unichr(): - assert six.u("\u1234") == six.unichr(0x1234) - assert type(six.u("\u1234")) is type(six.unichr(0x1234)) - - -def test_int2byte(): - assert six.int2byte(3) == six.b("\x03") - pytest.raises(Exception, six.int2byte, 256) - - -def test_byte2int(): - assert six.byte2int(six.b("\x03")) == 3 - assert six.byte2int(six.b("\x03\x04")) == 3 - pytest.raises(IndexError, six.byte2int, six.b("")) - - -def test_bytesindex(): - assert six.indexbytes(six.b("hello"), 3) == ord("l") - - -def test_bytesiter(): - it = six.iterbytes(six.b("hi")) - assert six.next(it) == ord("h") - assert six.next(it) == ord("i") - pytest.raises(StopIteration, six.next, it) - - -def test_StringIO(): - fp = six.StringIO() - fp.write(six.u("hello")) - assert fp.getvalue() == six.u("hello") - - -def test_BytesIO(): - fp = six.BytesIO() - fp.write(six.b("hello")) - assert fp.getvalue() == six.b("hello") - - -def test_exec_(): - def f(): - l = [] - six.exec_("l.append(1)") - assert l == [1] - f() - ns = {} - six.exec_("x = 42", ns) - assert ns["x"] == 42 - glob = {} - loc = {} - six.exec_("global y; y = 42; x = 12", glob, loc) - assert glob["y"] == 42 - assert "x" not in glob - assert loc["x"] == 12 - assert "y" not in loc - - -def test_reraise(): - def get_next(tb): - if six.PY3: - return tb.tb_next.tb_next - else: - return tb.tb_next - e = Exception("blah") - 
try: - raise e - except Exception: - tp, val, tb = sys.exc_info() - try: - six.reraise(tp, val, tb) - except Exception: - tp2, value2, tb2 = sys.exc_info() - assert tp2 is Exception - assert value2 is e - assert tb is get_next(tb2) - try: - six.reraise(tp, val) - except Exception: - tp2, value2, tb2 = sys.exc_info() - assert tp2 is Exception - assert value2 is e - assert tb2 is not tb - try: - six.reraise(tp, val, tb2) - except Exception: - tp2, value2, tb3 = sys.exc_info() - assert tp2 is Exception - assert value2 is e - assert get_next(tb3) is tb2 - try: - six.reraise(tp, None, tb) - except Exception: - tp2, value2, tb2 = sys.exc_info() - assert tp2 is Exception - assert value2 is not val - assert isinstance(value2, Exception) - assert tb is get_next(tb2) - - -def test_raise_from(): - try: - try: - raise Exception("blah") - except Exception: - ctx = sys.exc_info()[1] - f = Exception("foo") - six.raise_from(f, None) - except Exception: - tp, val, tb = sys.exc_info() - if sys.version_info[:2] > (3, 0): - # We should have done a raise f from None equivalent. - assert val.__cause__ is None - assert val.__context__ is ctx - # And that should suppress the context on the exception. - assert val.__suppress_context__ - # For all versions the outer exception should have raised successfully. - assert str(val) == "foo" - - -def test_print_(): - save = sys.stdout - out = sys.stdout = six.moves.StringIO() - try: - six.print_("Hello,", "person!") - finally: - sys.stdout = save - assert out.getvalue() == "Hello, person!\n" - out = six.StringIO() - six.print_("Hello,", "person!", file=out) - assert out.getvalue() == "Hello, person!\n" - out = six.StringIO() - six.print_("Hello,", "person!", file=out, end="") - assert out.getvalue() == "Hello, person!" 
- out = six.StringIO() - six.print_("Hello,", "person!", file=out, sep="X") - assert out.getvalue() == "Hello,Xperson!\n" - out = six.StringIO() - six.print_(six.u("Hello,"), six.u("person!"), file=out) - result = out.getvalue() - assert isinstance(result, six.text_type) - assert result == six.u("Hello, person!\n") - six.print_("Hello", file=None) # This works. - out = six.StringIO() - six.print_(None, file=out) - assert out.getvalue() == "None\n" - class FlushableStringIO(six.StringIO): - def __init__(self): - six.StringIO.__init__(self) - self.flushed = False - def flush(self): - self.flushed = True - out = FlushableStringIO() - six.print_("Hello", file=out) - assert not out.flushed - six.print_("Hello", file=out, flush=True) - assert out.flushed - - -def test_print_exceptions(): - pytest.raises(TypeError, six.print_, x=3) - pytest.raises(TypeError, six.print_, end=3) - pytest.raises(TypeError, six.print_, sep=42) - - -def test_with_metaclass(): - class Meta(type): - pass - class X(six.with_metaclass(Meta)): - pass - assert type(X) is Meta - assert issubclass(X, object) - class Base(object): - pass - class X(six.with_metaclass(Meta, Base)): - pass - assert type(X) is Meta - assert issubclass(X, Base) - class Base2(object): - pass - class X(six.with_metaclass(Meta, Base, Base2)): - pass - assert type(X) is Meta - assert issubclass(X, Base) - assert issubclass(X, Base2) - assert X.__mro__ == (X, Base, Base2, object) - class X(six.with_metaclass(Meta)): - pass - class MetaSub(Meta): - pass - class Y(six.with_metaclass(MetaSub, X)): - pass - assert type(Y) is MetaSub - assert Y.__mro__ == (Y, X, object) - - -def test_with_metaclass_typing(): - try: - import typing - except ImportError: - pytest.skip("typing module required") - class Meta(type): - pass - if sys.version_info[:2] < (3, 7): - # Generics with custom metaclasses were broken on older versions. 
- class Meta(Meta, typing.GenericMeta): - pass - T = typing.TypeVar('T') - class G(six.with_metaclass(Meta, typing.Generic[T])): - pass - class GA(six.with_metaclass(abc.ABCMeta, typing.Generic[T])): - pass - assert isinstance(G, Meta) - assert isinstance(GA, abc.ABCMeta) - assert G[int] is not G[G[int]] - assert GA[int] is not GA[GA[int]] - assert G.__bases__ == (typing.Generic,) - assert G.__orig_bases__ == (typing.Generic[T],) - - -@pytest.mark.skipif("sys.version_info[:2] < (3, 7)") -def test_with_metaclass_pep_560(): - class Meta(type): - pass - class A: - pass - class B: - pass - class Fake: - def __mro_entries__(self, bases): - return (A, B) - fake = Fake() - class G(six.with_metaclass(Meta, fake)): - pass - class GA(six.with_metaclass(abc.ABCMeta, fake)): - pass - assert isinstance(G, Meta) - assert isinstance(GA, abc.ABCMeta) - assert G.__bases__ == (A, B) - assert G.__orig_bases__ == (fake,) - - -@pytest.mark.skipif("sys.version_info[:2] < (3, 0)") -def test_with_metaclass_prepare(): - """Test that with_metaclass causes Meta.__prepare__ to be called with the correct arguments.""" - - class MyDict(dict): - pass - - class Meta(type): - - @classmethod - def __prepare__(cls, name, bases): - namespace = MyDict(super().__prepare__(name, bases), cls=cls, bases=bases) - namespace['namespace'] = namespace - return namespace - - class Base(object): - pass - - bases = (Base,) - - class X(six.with_metaclass(Meta, *bases)): - pass - - assert getattr(X, 'cls', type) is Meta - assert getattr(X, 'bases', ()) == bases - assert isinstance(getattr(X, 'namespace', {}), MyDict) - - -def test_wraps(): - def f(g): - @six.wraps(g) - def w(): - return 42 - return w - def k(): - pass - original_k = k - k = f(f(k)) - assert hasattr(k, '__wrapped__') - k = k.__wrapped__ - assert hasattr(k, '__wrapped__') - k = k.__wrapped__ - assert k is original_k - assert not hasattr(k, '__wrapped__') - - def f(g, assign, update): - def w(): - return 42 - w.glue = {"foo": "bar"} - w.xyzzy = 
{"qux": "quux"} - return six.wraps(g, assign, update)(w) - k.glue = {"melon": "egg"} - k.turnip = 43 - k = f(k, ["turnip", "baz"], ["glue", "xyzzy"]) - assert k.__name__ == "w" - assert k.turnip == 43 - assert not hasattr(k, "baz") - assert k.glue == {"melon": "egg", "foo": "bar"} - assert k.xyzzy == {"qux": "quux"} - - -def test_wraps_raises_on_missing_updated_field_on_wrapper(): - """Ensure six.wraps doesn't ignore missing attrs wrapper. - - Because that's what happens in Py3's functools.update_wrapper. - """ - def wrapped(): - pass - - def wrapper(): - pass - - with pytest.raises(AttributeError, match='has no attribute.*xyzzy'): - six.wraps(wrapped, [], ['xyzzy'])(wrapper) - - - -def test_add_metaclass(): - class Meta(type): - pass - class X: - "success" - X = six.add_metaclass(Meta)(X) - assert type(X) is Meta - assert issubclass(X, object) - assert X.__module__ == __name__ - assert X.__doc__ == "success" - class Base(object): - pass - class X(Base): - pass - X = six.add_metaclass(Meta)(X) - assert type(X) is Meta - assert issubclass(X, Base) - class Base2(object): - pass - class X(Base, Base2): - pass - X = six.add_metaclass(Meta)(X) - assert type(X) is Meta - assert issubclass(X, Base) - assert issubclass(X, Base2) - - # Test a second-generation subclass of a type. - class Meta1(type): - m1 = "m1" - class Meta2(Meta1): - m2 = "m2" - class Base: - b = "b" - Base = six.add_metaclass(Meta1)(Base) - class X(Base): - x = "x" - X = six.add_metaclass(Meta2)(X) - assert type(X) is Meta2 - assert issubclass(X, Base) - assert type(Base) is Meta1 - assert "__dict__" not in vars(X) - instance = X() - instance.attr = "test" - assert vars(instance) == {"attr": "test"} - assert instance.b == Base.b - assert instance.x == X.x - - # Test a class with slots. 
- class MySlots(object): - __slots__ = ["a", "b"] - MySlots = six.add_metaclass(Meta1)(MySlots) - - assert MySlots.__slots__ == ["a", "b"] - instance = MySlots() - instance.a = "foo" - pytest.raises(AttributeError, setattr, instance, "c", "baz") - - # Test a class with string for slots. - class MyStringSlots(object): - __slots__ = "ab" - MyStringSlots = six.add_metaclass(Meta1)(MyStringSlots) - assert MyStringSlots.__slots__ == "ab" - instance = MyStringSlots() - instance.ab = "foo" - pytest.raises(AttributeError, setattr, instance, "a", "baz") - pytest.raises(AttributeError, setattr, instance, "b", "baz") - - class MySlotsWeakref(object): - __slots__ = "__weakref__", - MySlotsWeakref = six.add_metaclass(Meta)(MySlotsWeakref) - assert type(MySlotsWeakref) is Meta - - -@pytest.mark.skipif("sys.version_info[:2] < (3, 3)") -def test_add_metaclass_nested(): - # Regression test for https://github.com/benjaminp/six/issues/259 - class Meta(type): - pass - - class A: - class B: pass - - expected = 'test_add_metaclass_nested..A.B' - - assert A.B.__qualname__ == expected - - class A: - @six.add_metaclass(Meta) - class B: pass - - assert A.B.__qualname__ == expected - - -def test_assertCountEqual(): - class TestAssertCountEqual(unittest.TestCase): - def test(self): - with self.assertRaises(AssertionError): - six.assertCountEqual(self, (1, 2), [3, 4, 5]) - - six.assertCountEqual(self, (1, 2), [2, 1]) - - TestAssertCountEqual('test').test() - - -def test_assertRegex(): - class TestAssertRegex(unittest.TestCase): - def test(self): - with self.assertRaises(AssertionError): - six.assertRegex(self, 'test', r'^a') - - six.assertRegex(self, 'test', r'^t') - - TestAssertRegex('test').test() - - -def test_assertNotRegex(): - class TestAssertNotRegex(unittest.TestCase): - def test(self): - with self.assertRaises(AssertionError): - six.assertNotRegex(self, 'test', r'^t') - - six.assertNotRegex(self, 'test', r'^a') - - TestAssertNotRegex('test').test() - - -def test_assertRaisesRegex(): - 
class TestAssertRaisesRegex(unittest.TestCase): - def test(self): - with six.assertRaisesRegex(self, AssertionError, '^Foo'): - raise AssertionError('Foo') - - with self.assertRaises(AssertionError): - with six.assertRaisesRegex(self, AssertionError, r'^Foo'): - raise AssertionError('Bar') - - TestAssertRaisesRegex('test').test() - - -def test_python_2_unicode_compatible(): - @six.python_2_unicode_compatible - class MyTest(object): - def __str__(self): - return six.u('hello') - - def __bytes__(self): - return six.b('hello') - - my_test = MyTest() - - if six.PY2: - assert str(my_test) == six.b("hello") - assert unicode(my_test) == six.u("hello") - elif six.PY3: - assert bytes(my_test) == six.b("hello") - assert str(my_test) == six.u("hello") - - assert getattr(six.moves.builtins, 'bytes', str)(my_test) == six.b("hello") - - -class EnsureTests: - - # grinning face emoji - UNICODE_EMOJI = six.u("\U0001F600") - BINARY_EMOJI = b"\xf0\x9f\x98\x80" - - def test_ensure_binary_raise_type_error(self): - with pytest.raises(TypeError): - six.ensure_str(8) - - def test_errors_and_encoding(self): - six.ensure_binary(self.UNICODE_EMOJI, encoding='latin-1', errors='ignore') - with pytest.raises(UnicodeEncodeError): - six.ensure_binary(self.UNICODE_EMOJI, encoding='latin-1', errors='strict') - - def test_ensure_binary_raise(self): - converted_unicode = six.ensure_binary(self.UNICODE_EMOJI, encoding='utf-8', errors='strict') - converted_binary = six.ensure_binary(self.BINARY_EMOJI, encoding="utf-8", errors='strict') - if six.PY2: - # PY2: unicode -> str - assert converted_unicode == self.BINARY_EMOJI and isinstance(converted_unicode, str) - # PY2: str -> str - assert converted_binary == self.BINARY_EMOJI and isinstance(converted_binary, str) - else: - # PY3: str -> bytes - assert converted_unicode == self.BINARY_EMOJI and isinstance(converted_unicode, bytes) - # PY3: bytes -> bytes - assert converted_binary == self.BINARY_EMOJI and isinstance(converted_binary, bytes) - - def 
test_ensure_str(self): - converted_unicode = six.ensure_str(self.UNICODE_EMOJI, encoding='utf-8', errors='strict') - converted_binary = six.ensure_str(self.BINARY_EMOJI, encoding="utf-8", errors='strict') - if six.PY2: - # PY2: unicode -> str - assert converted_unicode == self.BINARY_EMOJI and isinstance(converted_unicode, str) - # PY2: str -> str - assert converted_binary == self.BINARY_EMOJI and isinstance(converted_binary, str) - else: - # PY3: str -> str - assert converted_unicode == self.UNICODE_EMOJI and isinstance(converted_unicode, str) - # PY3: bytes -> str - assert converted_binary == self.UNICODE_EMOJI and isinstance(converted_unicode, str) - - def test_ensure_text(self): - converted_unicode = six.ensure_text(self.UNICODE_EMOJI, encoding='utf-8', errors='strict') - converted_binary = six.ensure_text(self.BINARY_EMOJI, encoding="utf-8", errors='strict') - if six.PY2: - # PY2: unicode -> unicode - assert converted_unicode == self.UNICODE_EMOJI and isinstance(converted_unicode, unicode) - # PY2: str -> unicode - assert converted_binary == self.UNICODE_EMOJI and isinstance(converted_unicode, unicode) - else: - # PY3: str -> str - assert converted_unicode == self.UNICODE_EMOJI and isinstance(converted_unicode, str) - # PY3: bytes -> str - assert converted_binary == self.UNICODE_EMOJI and isinstance(converted_unicode, str) diff --git a/shell/ext-py/six-1.14.0/tox.ini b/shell/ext-py/six-1.14.0/tox.ini deleted file mode 100644 index a1e346762..000000000 --- a/shell/ext-py/six-1.14.0/tox.ini +++ /dev/null @@ -1,11 +0,0 @@ -[tox] -envlist=py27,py33,py34,py35,py36,py37,py38,pypy,flake8 - -[testenv] -deps= pytest -commands= python -m pytest -rfsxX {posargs} - -[testenv:flake8] -basepython=python -deps=flake8 -commands= flake8 six.py diff --git a/shell/ext-py/sqlparse-0.3.1/AUTHORS b/shell/ext-py/sqlparse-0.3.1/AUTHORS deleted file mode 100644 index 24f3fda19..000000000 --- a/shell/ext-py/sqlparse-0.3.1/AUTHORS +++ /dev/null @@ -1,67 +0,0 @@ -python-sqlparse is 
written and maintained by Andi Albrecht . - -This module contains code (namely the lexer and filter mechanism) from -the pygments project that was written by Georg Brandl. - -This module contains code (Python 2/3 compatibility) from the six -project: https://bitbucket.org/gutworth/six. - -Alphabetical list of contributors: -* Adam Greenhall -* Alexander Beedie -* Alexey Malyshev -* andrew deryabin -* Andrew Tipton -* atronah -* casey -* Cauê Beloni -* circld -* Corey Zumar -* Cristian Orellana -* Dag Wieers -* Darik Gamble -* Demetrio92 -* Dennis Taylor -* Dvořák Václav -* Florian Bauer -* Fredy Wijaya -* Gavin Wahl -* Ian Robertson -* JacekPliszka -* Jesús Leganés Combarro "Piranna" -* Johannes Hoff -* John Bodley -* Jon Dufresne -* Josh Soref -* Kevin Jing Qiu -* koljonen -* Likai Liu -* mathilde.oustlant -* Michael Schuller -* Mike Amy -* mulos -* Oleg Broytman -* Patrick Schemitz -* Pi Delport -* Prudhvi Vatala -* quest -* Robert Nix -* Rocky Meza -* Romain Rigaux -* Rowan Seymour -* Ryan Wooden -* saaj -* Shen Longxing -* Sjoerd Job Postmus -* Soloman Weng -* spigwitmer -* Tao Wang -* Tenghuan -* Tim Graham -* Victor Hahn -* Victor Uriarte -* Ville Skyttä -* vthriller -* wayne.wuw -* William Ivanski -* Yago Riveiro diff --git a/shell/ext-py/sqlparse-0.3.1/CHANGELOG b/shell/ext-py/sqlparse-0.3.1/CHANGELOG deleted file mode 100644 index 6eca53e1b..000000000 --- a/shell/ext-py/sqlparse-0.3.1/CHANGELOG +++ /dev/null @@ -1,522 +0,0 @@ -Release 0.3.1 (Feb 29, 2020) ----------------------------- - -Enhancements - -* Add HQL keywords (pr475, by matwalk). -* Add support for time zone casts (issue489). -* Enhance formatting of AS keyword (issue507, by john-bodley). -* Stabilize grouping engine when parsing invalid SQL statements. - -Bug Fixes - -* Fix splitting of SQL with multiple statements inside - parentheses (issue485, pr486 by win39). -* Correctly identify NULLS FIRST / NULLS LAST as keywords (issue487). 
-* Fix splitting of SQL statements that contain dollar signs in - identifiers (issue491). -* Remove support for parsing double slash comments introduced in - 0.3.0 (issue456) as it had some side-effects with other dialects and - doesn't seem to be widely used (issue476). -* Restrict detection of alias names to objects that acutally could - have an alias (issue455, adopted some parts of pr509 by john-bodley). -* Fix parsing of date/time literals (issue438, by vashek). -* Fix initialization of TokenList (issue499, pr505 by john-bodley). -* Fix parsing of LIKE (issue493, pr525 by dbczumar). -* Improve parsing of identifiers (pr527 by liulk). - - -Release 0.3.0 (Mar 11, 2019) ----------------------------- - -Notable Changes - -* Remove support for Python 3.3. - -Enhancements - -* New formatting option "--indent_after_first" (pr345, by johshoff). -* New formatting option "--indent_columns" (pr393, by digitalarbeiter). -* Add UPSERT keyword (issue408). -* Strip multiple whitespace within parentheses (issue473, by john-bodley). -* Support double slash (//) comments (issue456, by theianrobertson). -* Support for Calcite temporal keywords (pr468, by john-bodley). - -Bug Fixes - -* Fix occasional IndexError (pr390, by circld, issue313). -* Fix incorrect splitting of strings containing new lines (pr396, by fredyw). -* Fix reindent issue for parenthesis (issue427, by fredyw). -* Fix from( parsing issue (issue446, by fredyw) . -* Fix for get_real_name() to return correct name (issue369, by fredyw). -* Wrap function params when wrap_after is set (pr398, by soloman1124). -* Fix parsing of "WHEN name" clauses (pr418, by andrew deryabin). -* Add missing EXPLAIN keyword (issue421). -* Fix issue with strip_comments causing a syntax error (issue425, by fredyw). -* Fix formatting on INSERT which caused staircase effect on values (issue329, - by fredyw). -* Avoid formatting of psql commands (issue469). - -Internal Changes - -* Unify handling of GROUP BY/ORDER BY (pr457, by john-bodley). 
-* Remove unnecessary compat shim for bytes (pr453, by jdufresne). - - -Release 0.2.4 (Sep 27, 2017) ----------------------------- - -Enhancements - -* Add more keywords for MySQL table options (pr328, pr333, by phdru). -* Add more PL/pgSQL keywords (pr357, by Demetrio92). -* Improve parsing of floats (pr330, by atronah). - -Bug Fixes - -* Fix parsing of MySQL table names starting with digits (issue337). -* Fix detection of identifiers using comparisons (issue327). -* Fix parsing of UNION ALL after WHERE (issue349). -* Fix handling of semicolon in assignments (issue359, issue358). - - - -Release 0.2.3 (Mar 02, 2017) ----------------------------- - -Enhancements - -* New command line option "--encoding" (by twang2218, pr317). -* Support CONCURRENTLY keyword (issue322, by rowanseymour). - -Bug Fixes - -* Fix some edge-cases when parsing invalid SQL statements. -* Fix indentation of LIMIT (by romainr, issue320). -* Fix parsing of INTO keyword (issue324). - -Internal Changes - -* Several improvements regarding encodings. - - -Release 0.2.2 (Oct 22, 2016) ----------------------------- - -Enhancements - -* Add comma_first option: When splitting list "comma first" notation - is used (issue141). - -Bug Fixes - -* Fix parsing of incomplete AS (issue284, by vmuriart). -* Fix parsing of Oracle names containing dollars (issue291). -* Fix parsing of UNION ALL (issue294). -* Fix grouping of identifiers containing typecasts (issue297). -* Add Changelog to sdist again (issue302). - -Internal Changes - -* `is_whitespace` and `is_group` changed into properties - - -Release 0.2.1 (Aug 13, 2016) ----------------------------- - -Notable Changes - -* PostgreSQL: Function bodys are parsed as literal string. Previously - sqlparse assumed that all function bodys are parsable psql - strings (see issue277). - -Bug Fixes - -* Fix a regression to parse streams again (issue273, reported and - test case by gmccreight). 
-* Improve Python 2/3 compatibility when using parsestream (issue190, - by phdru). -* Improve splitting of PostgreSQL functions (issue277). - - -Release 0.2.0 (Jul 20, 2016) ----------------------------- - -IMPORTANT: The supported Python versions have changed with this release. -sqlparse 0.2.x supports Python 2.7 and Python >= 3.3. - -Thanks to the many contributors for writing bug reports and working -on pull requests who made this version possible! - -Internal Changes - -* sqlparse.SQLParseError was removed from top-level module and moved to - sqlparse.exceptions. -* sqlparse.sql.Token.to_unicode was removed. -* The signature of a filter's process method has changed from - process(stack, stream) -> to process(stream). Stack was never used at - all. -* Lots of code cleanups and modernization (thanks esp. to vmuriart!). -* Improved grouping performance. (sjoerdjob) - -Enhancements - -* Support WHILE loops (issue215, by shenlongxing). -* Better support for CTEs (issue217, by Andrew Tipton). -* Recognize USING as a keyword more consistently (issue236, by koljonen). -* Improve alignment of columns (issue207, issue235, by vmuriat). -* Add wrap_after option for better alignment when formatting - lists (issue248, by Dennis Taylor). -* Add reindent-aligned option for alternate formatting (Adam Greenhall) -* Improved grouping of operations (issue211, by vmuriat). - -Bug Fixes - -* Leading whitespaces are now removed when format() is called with - strip_whitespace=True (issue213, by shenlongxing). -* Fix typo in keywords list (issue229, by cbeloni). -* Fix parsing of functions in comparisons (issue230, by saaj). -* Fix grouping of identifiers (issue233). -* Fix parsing of CREATE TABLE statements (issue242, by Tenghuan). -* Minor bug fixes (issue101). -* Improve formatting of CASE WHEN constructs (issue164, by vmuriat). - - -Release 0.1.19 (Mar 07, 2016) ------------------------------ - -Bug Fixes - -* Fix IndexError when statement contains WITH clauses (issue205). 
- - -Release 0.1.18 (Oct 25, 2015) ------------------------------ - -Bug Fixes - -* Remove universal wheel support, added in 0.1.17 by mistake. - - -Release 0.1.17 (Oct 24, 2015) ------------------------------ - -Enhancements - -* Speed up parsing of large SQL statements (pull request: issue201, fixes the - following issues: issue199, issue135, issue62, issue41, by Ryan Wooden). - -Bug Fixes - -* Fix another splitter bug regarding DECLARE (issue194). - -Misc - -* Packages on PyPI are signed from now on. - - -Release 0.1.16 (Jul 26, 2015) ------------------------------ - -Bug Fixes - -* Fix a regression in get_alias() introduced in 0.1.15 (issue185). -* Fix a bug in the splitter regarding DECLARE (issue193). -* sqlformat command line tool doesn't duplicate newlines anymore (issue191). -* Don't mix up MySQL comments starting with hash and MSSQL - temp tables (issue192). -* Statement.get_type() now ignores comments at the beginning of - a statement (issue186). - - -Release 0.1.15 (Apr 15, 2015) ------------------------------ - -Bug Fixes - -* Fix a regression for identifiers with square bracktes - notation (issue153, by darikg). -* Add missing SQL types (issue154, issue155, issue156, by jukebox). -* Fix parsing of multi-line comments (issue172, by JacekPliszka). -* Fix parsing of escaped backslashes (issue174, by caseyching). -* Fix parsing of identifiers starting with underscore (issue175). -* Fix misinterpretation of IN keyword (issue183). - -Enhancements - -* Improve formatting of HAVING statements. -* Improve parsing of inline comments (issue163). -* Group comments to parent object (issue128, issue160). -* Add double precision builtin (issue169, by darikg). -* Add support for square bracket array indexing (issue170, issue176, - issue177 by darikg). -* Improve grouping of aliased elements (issue167, by darikg). -* Support comments starting with '#' character (issue178). 
- - -Release 0.1.14 (Nov 30, 2014) ------------------------------ - -Bug Fixes - -* Floats in UPDATE statements are now handled correctly (issue145). -* Properly handle string literals in comparisons (issue148, change proposed - by aadis). -* Fix indentation when using tabs (issue146). - -Enhancements - -* Improved formatting in list when newlines precede commas (issue140). - - -Release 0.1.13 (Oct 09, 2014) ------------------------------ - -Bug Fixes - -* Fix a regression in handling of NULL keywords introduced in 0.1.12. - - -Release 0.1.12 (Sep 20, 2014) ------------------------------ - -Bug Fixes - -* Fix handling of NULL keywords in aliased identifiers. -* Fix SerializerUnicode to split unquoted newlines (issue131, by Michael Schuller). -* Fix handling of modulo operators without spaces (by gavinwahl). - -Enhancements - -* Improve parsing of identifier lists containing placeholders. -* Speed up query parsing of unquoted lines (by Michael Schuller). - - -Release 0.1.11 (Feb 07, 2014) ------------------------------ - -Bug Fixes - -* Fix incorrect parsing of string literals containing line breaks (issue118). -* Fix typo in keywords, add MERGE, COLLECT keywords (issue122/124, - by Cristian Orellana). -* Improve parsing of string literals in columns. -* Fix parsing and formatting of statements containing EXCEPT keyword. -* Fix Function.get_parameters() (issue126/127, by spigwitmer). - -Enhancements - -* Classify DML keywords (issue116, by Victor Hahn). -* Add missing FOREACH keyword. -* Grouping of BEGIN/END blocks. - -Other - -* Python 2.5 isn't automatically tested anymore, neither Travis nor Tox - still support it out of the box. - - -Release 0.1.10 (Nov 02, 2013) ------------------------------ - -Bug Fixes - -* Removed buffered reading again, it obviously causes wrong parsing in some rare - cases (issue114). -* Fix regression in setup.py introduced 10 months ago (issue115). - -Enhancements - -* Improved support for JOINs, by Alexander Beedie. 
- - -Release 0.1.9 (Sep 28, 2013) ----------------------------- - -Bug Fixes - -* Fix an regression introduced in 0.1.5 where sqlparse didn't properly - distinguished between single and double quoted strings when tagging - identifier (issue111). - -Enhancements - -* New option to truncate long string literals when formatting. -* Scientific numbers are pares correctly (issue107). -* Support for arithmetic expressions (issue109, issue106; by prudhvi). - - -Release 0.1.8 (Jun 29, 2013) ----------------------------- - -Bug Fixes - -* Whitespaces within certain keywords are now allowed (issue97, patch proposed - by xcombelle). - -Enhancements - -* Improve parsing of assignments in UPDATE statements (issue90). -* Add STRAIGHT_JOIN statement (by Yago Riveiro). -* Function.get_parameters() now returns the parameter if only one parameter is - given (issue94, by wayne.wuw). -* sqlparse.split() now removes leading and trailing whitespaces from split - statements. -* Add USE as keyword token (by mulos). -* Improve parsing of PEP249-style placeholders (issue103). - - -Release 0.1.7 (Apr 06, 2013) ----------------------------- - -Bug Fixes - -* Fix Python 3 compatibility of sqlformat script (by Pi Delport). -* Fix parsing of SQL statements that contain binary data (by Alexey - Malyshev). -* Fix a bug where keywords were identified as aliased identifiers in - invalid SQL statements. -* Fix parsing of identifier lists where identifiers are keywords too - (issue10). - -Enhancements - -* Top-level API functions now accept encoding keyword to parse - statements in certain encodings more reliable (issue20). -* Improve parsing speed when SQL contains CLOBs or BLOBs (issue86). -* Improve formatting of ORDER BY clauses (issue89). -* Formatter now tries to detect runaway indentations caused by - parsing errors or invalid SQL statements. When re-indenting such - statements the formatter flips back to column 0 before going crazy. - -Other - -* Documentation updates. 
- - -Release 0.1.6 (Jan 01, 2013) ----------------------------- - -sqlparse is now compatible with Python 3 without any patches. The -Python 3 version is generated during install by 2to3. You'll need -distribute to install sqlparse for Python 3. - -Bug Fixes - -* Fix parsing error with dollar-quoted procedure bodies (issue83). - -Other - -* Documentation updates. -* Test suite now uses tox and pytest. -* py3k fixes (by vthriller). -* py3k fixes in setup.py (by Florian Bauer). -* setup.py now requires distribute (by Florian Bauer). - - -Release 0.1.5 (Nov 13, 2012) ----------------------------- - -Bug Fixes - -* Improve handling of quoted identifiers (issue78). -* Improve grouping and formatting of identifiers with operators (issue53). -* Improve grouping and formatting of concatenated strings (issue53). -* Improve handling of varchar() (by Mike Amy). -* Clean up handling of various SQL elements. -* Switch to pytest and clean up tests. -* Several minor fixes. - -Other - -* Deprecate sqlparse.SQLParseError. Please use - sqlparse.exceptions.SQLParseError instead. -* Add caching to speed up processing. -* Add experimental filters for token processing. -* Add sqlformat.parsestream (by quest). - - -Release 0.1.4 (Apr 20, 2012) ----------------------------- - -Bug Fixes - -* Avoid "stair case" effects when identifiers, functions, - placeholders or keywords are mixed in identifier lists (issue45, - issue49, issue52) and when asterisks are used as operators - (issue58). -* Make keyword detection more restrict (issue47). -* Improve handling of CASE statements (issue46). -* Fix statement splitting when parsing recursive statements (issue57, - thanks to piranna). -* Fix for negative numbers (issue56, thanks to kevinjqiu). -* Pretty format comments in identifier lists (issue59). -* Several minor bug fixes and improvements. - - -Release 0.1.3 (Jul 29, 2011) ----------------------------- - -Bug Fixes - -* Improve parsing of floats (thanks to Kris). 
-* When formatting a statement a space before LIMIT was removed (issue35). -* Fix strip_comments flag (issue38, reported by ooberm...@gmail.com). -* Avoid parsing names as keywords (issue39, reported by djo...@taket.org). -* Make sure identifier lists in subselects are grouped (issue40, - reported by djo...@taket.org). -* Split statements with IF as functions correctly (issue33 and - issue29, reported by charles....@unige.ch). -* Relax detection of keywords, esp. when used as function names - (issue36, nyuhu...@gmail.com). -* Don't treat single characters as keywords (issue32). -* Improve parsing of stand-alone comments (issue26). -* Detection of placeholders in paramterized queries (issue22, - reported by Glyph Lefkowitz). -* Add parsing of MS Access column names with braces (issue27, - reported by frankz...@gmail.com). - -Other - -* Replace Django by Flask in App Engine frontend (issue11). - - -Release 0.1.2 (Nov 23, 2010) ----------------------------- - -Bug Fixes - -* Fixed incorrect detection of keyword fragments embed in names (issue7, - reported and initial patch by andyboyko). -* Stricter detection of identifier aliases (issue8, reported by estama). -* WHERE grouping consumed closing parenthesis (issue9, reported by estama). -* Fixed an issue with trailing whitespaces (reported by Kris). -* Better detection of escaped single quotes (issue13, reported by - Martin Brochhaus, patch by bluemaro with test case by Dan Carley). -* Ignore identifier in double-quotes when changing cases (issue 21). -* Lots of minor fixes targeting encoding, indentation, statement - parsing and more (issues 12, 14, 15, 16, 18, 19). -* Code cleanup with a pinch of refactoring. - - -Release 0.1.1 (May 6, 2009) ---------------------------- - -Bug Fixes - -* Lexers preserves original line breaks (issue1). -* Improved identifier parsing: backtick quotes, wildcards, T-SQL variables - prefixed with @. -* Improved parsing of identifier lists (issue2). 
-* Recursive recognition of AS (issue4) and CASE. -* Improved support for UPDATE statements. - -Other - -* Code cleanup and better test coverage. - - -Release 0.1.0 (Apr 8, 2009) ---------------------------- - -Initial release. diff --git a/shell/ext-py/sqlparse-0.3.1/LICENSE b/shell/ext-py/sqlparse-0.3.1/LICENSE deleted file mode 100644 index de414c557..000000000 --- a/shell/ext-py/sqlparse-0.3.1/LICENSE +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2016, Andi Albrecht -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - * Neither the name of the authors nor the names of its contributors may be - used to endorse or promote products derived from this software without - specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/shell/ext-py/sqlparse-0.3.1/MANIFEST.in b/shell/ext-py/sqlparse-0.3.1/MANIFEST.in deleted file mode 100644 index 8043b359f..000000000 --- a/shell/ext-py/sqlparse-0.3.1/MANIFEST.in +++ /dev/null @@ -1,11 +0,0 @@ -recursive-include docs source/* -include docs/sqlformat.1 -include docs/Makefile -recursive-include tests *.py *.sql -include LICENSE -include TODO -include AUTHORS -include CHANGELOG -include Makefile -include setup.cfg -include tox.ini diff --git a/shell/ext-py/sqlparse-0.3.1/README.rst b/shell/ext-py/sqlparse-0.3.1/README.rst deleted file mode 100644 index 2e38ebc6c..000000000 --- a/shell/ext-py/sqlparse-0.3.1/README.rst +++ /dev/null @@ -1,77 +0,0 @@ -python-sqlparse - Parse SQL statements -====================================== - -|buildstatus|_ -|coverage|_ - -.. docincludebegin - -sqlparse is a non-validating SQL parser for Python. -It provides support for parsing, splitting and formatting SQL statements. - -The module is compatible with Python 2.7 and Python 3 (>= 3.4) -and released under the terms of the `New BSD license -`_. - -.. note:: - - Support for Python<3.4 (including 2.x) will be dropped soon. - -Visit the project page at https://github.com/andialbrecht/sqlparse for -further information about this project. - - -Quick Start ------------ - -.. code-block:: sh - - $ pip install sqlparse - -.. 
code-block:: python - - >>> import sqlparse - - >>> # Split a string containing two SQL statements: - >>> raw = 'select * from foo; select * from bar;' - >>> statements = sqlparse.split(raw) - >>> statements - ['select * from foo;', 'select * from bar;'] - - >>> # Format the first statement and print it out: - >>> first = statements[0] - >>> print(sqlparse.format(first, reindent=True, keyword_case='upper')) - SELECT * - FROM foo; - - >>> # Parsing a SQL statement: - >>> parsed = sqlparse.parse('select * from foo')[0] - >>> parsed.tokens - [, , >> - -Links ------ - -Project page - https://github.com/andialbrecht/sqlparse - -Bug tracker - https://github.com/andialbrecht/sqlparse/issues - -Documentation - https://sqlparse.readthedocs.io/ - -Online Demo - https://sqlformat.org/ - - -sqlparse is licensed under the BSD license. - -Parts of the code are based on pygments written by Georg Brandl and others. -pygments-Homepage: http://pygments.org/ - -.. |buildstatus| image:: https://secure.travis-ci.org/andialbrecht/sqlparse.png?branch=master -.. _buildstatus: https://travis-ci.org/#!/andialbrecht/sqlparse -.. |coverage| image:: https://coveralls.io/repos/andialbrecht/sqlparse/badge.svg?branch=master&service=github -.. _coverage: https://coveralls.io/github/andialbrecht/sqlparse?branch=master diff --git a/shell/ext-py/sqlparse-0.3.1/TODO b/shell/ext-py/sqlparse-0.3.1/TODO deleted file mode 100644 index cbbe7fff9..000000000 --- a/shell/ext-py/sqlparse-0.3.1/TODO +++ /dev/null @@ -1,5 +0,0 @@ -* See - https://groups.google.com/d/msg/sqlparse/huz9lKXt0Lc/11ybIKPJWbUJ - for some interesting hints and suggestions. -* Provide a function to replace tokens. See this thread: https://groups.google.com/d/msg/sqlparse/5xmBL2UKqX4/ZX9z_peve-AJ -* Document filter stack and processing phases. 
diff --git a/shell/ext-py/sqlparse-0.3.1/setup.cfg b/shell/ext-py/sqlparse-0.3.1/setup.cfg deleted file mode 100644 index c4dee1dff..000000000 --- a/shell/ext-py/sqlparse-0.3.1/setup.cfg +++ /dev/null @@ -1,20 +0,0 @@ -[bdist_wheel] -universal = 1 - -[metadata] -license_file = LICENSE - -[tool:pytest] -xfail_strict = True - -[flake8] -exclude = - sqlparse/compat.py -ignore = - W503, - E731 - -[coverage:run] -branch = False -omit = - sqlparse/__main__.py diff --git a/shell/ext-py/sqlparse-0.3.1/setup.py b/shell/ext-py/sqlparse-0.3.1/setup.py deleted file mode 100644 index 3de94e7ba..000000000 --- a/shell/ext-py/sqlparse-0.3.1/setup.py +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This setup script is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -import re - -from setuptools import setup, find_packages - - -def get_version(): - """Parse __init__.py for version number instead of importing the file.""" - VERSIONFILE = 'sqlparse/__init__.py' - VSRE = r'^__version__ = [\'"]([^\'"]*)[\'"]' - with open(VERSIONFILE) as f: - verstrline = f.read() - mo = re.search(VSRE, verstrline, re.M) - if mo: - return mo.group(1) - raise RuntimeError('Unable to find version in {fn}'.format(fn=VERSIONFILE)) - - -LONG_DESCRIPTION = """ -``sqlparse`` is a non-validating SQL parser module. -It provides support for parsing, splitting and formatting SQL statements. - -Visit the `project page `_ for -additional information and documentation. 
- -**Example Usage** - - -Splitting SQL statements:: - - >>> import sqlparse - >>> sqlparse.split('select * from foo; select * from bar;') - [u'select * from foo; ', u'select * from bar;'] - - -Formatting statements:: - - >>> sql = 'select * from foo where id in (select id from bar);' - >>> print(sqlparse.format(sql, reindent=True, keyword_case='upper')) - SELECT * - FROM foo - WHERE id IN - (SELECT id - FROM bar); - - -Parsing:: - - >>> sql = 'select * from someschema.mytable where id = 1' - >>> res = sqlparse.parse(sql) - >>> res - (,) - >>> stmt = res[0] - >>> str(stmt) # converting it back to unicode - 'select * from someschema.mytable where id = 1' - >>> # This is how the internal representation looks like: - >>> stmt.tokens - (, - , - , - , - , - , - , - , - ) - -""" - -setup( - name='sqlparse', - version=get_version(), - author='Andi Albrecht', - author_email='albrecht.andi@gmail.com', - url='https://github.com/andialbrecht/sqlparse', - description='Non-validating SQL parser', - long_description=LONG_DESCRIPTION, - license='BSD', - python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: BSD License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Topic :: Database', - 'Topic :: Software Development', - ], - packages=find_packages(exclude=('tests',)), - entry_points={ - 'console_scripts': [ - 'sqlformat = sqlparse.__main__:main', - ] - }, -) diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/__init__.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/__init__.py deleted 
file mode 100644 index f7e7aa6f9..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/__init__.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -"""Parse SQL statements.""" - -# Setup namespace -from sqlparse import sql -from sqlparse import cli -from sqlparse import engine -from sqlparse import tokens -from sqlparse import filters -from sqlparse import formatter - -from sqlparse.compat import text_type - -__version__ = '0.3.1' -__all__ = ['engine', 'filters', 'formatter', 'sql', 'tokens', 'cli'] - - -def parse(sql, encoding=None): - """Parse sql and return a list of statements. - - :param sql: A string containing one or more SQL statements. - :param encoding: The encoding of the statement (optional). - :returns: A tuple of :class:`~sqlparse.sql.Statement` instances. - """ - return tuple(parsestream(sql, encoding)) - - -def parsestream(stream, encoding=None): - """Parses sql statements from file-like object. - - :param stream: A file-like object. - :param encoding: The encoding of the stream contents (optional). - :returns: A generator of :class:`~sqlparse.sql.Statement` instances. - """ - stack = engine.FilterStack() - stack.enable_grouping() - return stack.run(stream, encoding) - - -def format(sql, encoding=None, **options): - """Format *sql* according to *options*. - - Available options are documented in :ref:`formatting`. - - In addition to the formatting options this function accepts the - keyword "encoding" which determines the encoding of the statement. - - :returns: The formatted SQL statement as string. 
- """ - stack = engine.FilterStack() - options = formatter.validate_options(options) - stack = formatter.build_filter_stack(stack, options) - stack.postprocess.append(filters.SerializerUnicode()) - return u''.join(stack.run(sql, encoding)) - - -def split(sql, encoding=None): - """Split *sql* into single statements. - - :param sql: A string containing one or more SQL statements. - :param encoding: The encoding of the statement (optional). - :returns: A list of strings. - """ - stack = engine.FilterStack() - return [text_type(stmt).strip() for stmt in stack.run(sql, encoding)] diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/__main__.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/__main__.py deleted file mode 100644 index 867d75d5f..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/__main__.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -"""Entrypoint module for `python -m sqlparse`. - -Why does this file exist, and why __main__? 
For more info, read: -- https://www.python.org/dev/peps/pep-0338/ -- https://docs.python.org/2/using/cmdline.html#cmdoption-m -- https://docs.python.org/3/using/cmdline.html#cmdoption-m -""" - -import sys - -from sqlparse.cli import main - -if __name__ == '__main__': - sys.exit(main()) diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/cli.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/cli.py deleted file mode 100755 index 25555a591..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/cli.py +++ /dev/null @@ -1,202 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -"""Module that contains the command line app. - -Why does this file exist, and why not put this in __main__? - You might be tempted to import things from __main__ later, but that will - cause problems: the code will get executed twice: - - When you run `python -m sqlparse` python will execute - ``__main__.py`` as a script. That means there won't be any - ``sqlparse.__main__`` in ``sys.modules``. - - When you import __main__ it will get executed again (as a module) because - there's no ``sqlparse.__main__`` in ``sys.modules``. - Also see (1) from http://click.pocoo.org/5/setuptools/#setuptools-integration -""" - -import argparse -import sys -from io import TextIOWrapper -from codecs import open, getreader - -import sqlparse -from sqlparse.compat import PY2 -from sqlparse.exceptions import SQLParseError - - -# TODO: Add CLI Tests -# TODO: Simplify formatter by using argparse `type` arguments -def create_parser(): - _CASE_CHOICES = ['upper', 'lower', 'capitalize'] - - parser = argparse.ArgumentParser( - prog='sqlformat', - description='Format FILE according to OPTIONS. 
Use "-" as FILE ' - 'to read from stdin.', - usage='%(prog)s [OPTIONS] FILE, ...', - ) - - parser.add_argument('filename') - - parser.add_argument( - '-o', '--outfile', - dest='outfile', - metavar='FILE', - help='write output to FILE (defaults to stdout)') - - parser.add_argument( - '--version', - action='version', - version=sqlparse.__version__) - - group = parser.add_argument_group('Formatting Options') - - group.add_argument( - '-k', '--keywords', - metavar='CHOICE', - dest='keyword_case', - choices=_CASE_CHOICES, - help='change case of keywords, CHOICE is one of {0}'.format( - ', '.join('"{0}"'.format(x) for x in _CASE_CHOICES))) - - group.add_argument( - '-i', '--identifiers', - metavar='CHOICE', - dest='identifier_case', - choices=_CASE_CHOICES, - help='change case of identifiers, CHOICE is one of {0}'.format( - ', '.join('"{0}"'.format(x) for x in _CASE_CHOICES))) - - group.add_argument( - '-l', '--language', - metavar='LANG', - dest='output_format', - choices=['python', 'php'], - help='output a snippet in programming language LANG, ' - 'choices are "python", "php"') - - group.add_argument( - '--strip-comments', - dest='strip_comments', - action='store_true', - default=False, - help='remove comments') - - group.add_argument( - '-r', '--reindent', - dest='reindent', - action='store_true', - default=False, - help='reindent statements') - - group.add_argument( - '--indent_width', - dest='indent_width', - default=2, - type=int, - help='indentation width (defaults to 2 spaces)') - - group.add_argument( - '--indent_after_first', - dest='indent_after_first', - action='store_true', - default=False, - help='indent after first line of statement (e.g. 
SELECT)') - - group.add_argument( - '--indent_columns', - dest='indent_columns', - action='store_true', - default=False, - help='indent all columns by indent_width instead of keyword length') - - group.add_argument( - '-a', '--reindent_aligned', - action='store_true', - default=False, - help='reindent statements to aligned format') - - group.add_argument( - '-s', '--use_space_around_operators', - action='store_true', - default=False, - help='place spaces around mathematical operators') - - group.add_argument( - '--wrap_after', - dest='wrap_after', - default=0, - type=int, - help='Column after which lists should be wrapped') - - group.add_argument( - '--comma_first', - dest='comma_first', - default=False, - type=bool, - help='Insert linebreak before comma (default False)') - - group.add_argument( - '--encoding', - dest='encoding', - default='utf-8', - help='Specify the input encoding (default utf-8)') - - return parser - - -def _error(msg): - """Print msg and optionally exit with return code exit_.""" - sys.stderr.write(u'[ERROR] {0}\n'.format(msg)) - return 1 - - -def main(args=None): - parser = create_parser() - args = parser.parse_args(args) - - if args.filename == '-': # read from stdin - if PY2: - data = getreader(args.encoding)(sys.stdin).read() - else: - wrapper = TextIOWrapper(sys.stdin.buffer, encoding=args.encoding) - try: - data = wrapper.read() - finally: - wrapper.detach() - else: - try: - with open(args.filename, 'r', args.encoding) as f: - data = ''.join(f.readlines()) - except IOError as e: - return _error( - u'Failed to read {0}: {1}'.format(args.filename, e)) - - close_stream = False - if args.outfile: - try: - stream = open(args.outfile, 'w', args.encoding) - close_stream = True - except IOError as e: - return _error(u'Failed to open {0}: {1}'.format(args.outfile, e)) - else: - stream = sys.stdout - - formatter_opts = vars(args) - try: - formatter_opts = sqlparse.formatter.validate_options(formatter_opts) - except SQLParseError as e: - return 
_error(u'Invalid options: {0}'.format(e)) - - s = sqlparse.format(data, **formatter_opts) - stream.write(s) - stream.flush() - if close_stream: - stream.close() - return 0 diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/compat.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/compat.py deleted file mode 100644 index d2214bed6..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/compat.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -"""Python 2/3 compatibility. - -This module only exists to avoid a dependency on six -for very trivial stuff. We only need to take care of -string types, buffers and metaclasses. - -Parts of the code is copied directly from six: -https://bitbucket.org/gutworth/six -""" - -import sys -from io import TextIOBase - -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 - - -if PY3: - def unicode_compatible(cls): - return cls - - text_type = str - string_types = (str,) - from io import StringIO - file_types = (StringIO, TextIOBase) - - -elif PY2: - def unicode_compatible(cls): - cls.__unicode__ = cls.__str__ - cls.__str__ = lambda x: x.__unicode__().encode('utf-8') - return cls - - text_type = unicode - string_types = (str, unicode,) - from StringIO import StringIO - file_types = (file, StringIO, TextIOBase) diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/engine/__init__.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/engine/__init__.py deleted file mode 100644 index 0b3f3eb52..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/engine/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -from sqlparse.engine 
import grouping -from sqlparse.engine.filter_stack import FilterStack -from sqlparse.engine.statement_splitter import StatementSplitter - -__all__ = [ - 'grouping', - 'FilterStack', - 'StatementSplitter', -] diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/engine/filter_stack.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/engine/filter_stack.py deleted file mode 100644 index fc77fd644..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/engine/filter_stack.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -"""filter""" - -from sqlparse import lexer -from sqlparse.engine import grouping -from sqlparse.engine.statement_splitter import StatementSplitter - - -class FilterStack(object): - def __init__(self): - self.preprocess = [] - self.stmtprocess = [] - self.postprocess = [] - self._grouping = False - - def enable_grouping(self): - self._grouping = True - - def run(self, sql, encoding=None): - stream = lexer.tokenize(sql, encoding) - # Process token stream - for filter_ in self.preprocess: - stream = filter_.process(stream) - - stream = StatementSplitter().process(stream) - - # Output: Stream processed Statements - for stmt in stream: - if self._grouping: - stmt = grouping.group(stmt) - - for filter_ in self.stmtprocess: - filter_.process(stmt) - - for filter_ in self.postprocess: - stmt = filter_.process(stmt) - - yield stmt diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/engine/grouping.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/engine/grouping.py deleted file mode 100644 index daaffb0bd..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/engine/grouping.py +++ /dev/null @@ -1,453 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the 
BSD License: https://opensource.org/licenses/BSD-3-Clause - -from sqlparse import sql -from sqlparse import tokens as T -from sqlparse.utils import recurse, imt - -T_NUMERICAL = (T.Number, T.Number.Integer, T.Number.Float) -T_STRING = (T.String, T.String.Single, T.String.Symbol) -T_NAME = (T.Name, T.Name.Placeholder) - - -def _group_matching(tlist, cls): - """Groups Tokens that have beginning and end.""" - opens = [] - tidx_offset = 0 - for idx, token in enumerate(list(tlist)): - tidx = idx - tidx_offset - - if token.is_whitespace: - # ~50% of tokens will be whitespace. Will checking early - # for them avoid 3 comparisons, but then add 1 more comparison - # for the other ~50% of tokens... - continue - - if token.is_group and not isinstance(token, cls): - # Check inside previously grouped (i.e. parenthesis) if group - # of different type is inside (i.e., case). though ideally should - # should check for all open/close tokens at once to avoid recursion - _group_matching(token, cls) - continue - - if token.match(*cls.M_OPEN): - opens.append(tidx) - - elif token.match(*cls.M_CLOSE): - try: - open_idx = opens.pop() - except IndexError: - # this indicates invalid sql and unbalanced tokens. 
- # instead of break, continue in case other "valid" groups exist - continue - close_idx = tidx - tlist.group_tokens(cls, open_idx, close_idx) - tidx_offset += close_idx - open_idx - - -def group_brackets(tlist): - _group_matching(tlist, sql.SquareBrackets) - - -def group_parenthesis(tlist): - _group_matching(tlist, sql.Parenthesis) - - -def group_case(tlist): - _group_matching(tlist, sql.Case) - - -def group_if(tlist): - _group_matching(tlist, sql.If) - - -def group_for(tlist): - _group_matching(tlist, sql.For) - - -def group_begin(tlist): - _group_matching(tlist, sql.Begin) - - -def group_typecasts(tlist): - def match(token): - return token.match(T.Punctuation, '::') - - def valid(token): - return token is not None - - def post(tlist, pidx, tidx, nidx): - return pidx, nidx - - valid_prev = valid_next = valid - _group(tlist, sql.Identifier, match, valid_prev, valid_next, post) - - -def group_tzcasts(tlist): - def match(token): - return token.ttype == T.Keyword.TZCast - - def valid(token): - return token is not None - - def post(tlist, pidx, tidx, nidx): - return pidx, nidx - - _group(tlist, sql.Identifier, match, valid, valid, post) - - -def group_typed_literal(tlist): - # definitely not complete, see e.g.: - # https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/interval-literal-syntax - # https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/interval-literals - # https://www.postgresql.org/docs/9.1/datatype-datetime.html - # https://www.postgresql.org/docs/9.1/functions-datetime.html - def match(token): - return imt(token, m=sql.TypedLiteral.M_OPEN) - - def match_to_extend(token): - return isinstance(token, sql.TypedLiteral) - - def valid_prev(token): - return token is not None - - def valid_next(token): - return token is not None and token.match(*sql.TypedLiteral.M_CLOSE) - - def valid_final(token): - return token is not None and token.match(*sql.TypedLiteral.M_EXTEND) - - def post(tlist, pidx, tidx, nidx): - return tidx, nidx - - 
_group(tlist, sql.TypedLiteral, match, valid_prev, valid_next, - post, extend=False) - _group(tlist, sql.TypedLiteral, match_to_extend, valid_prev, valid_final, - post, extend=True) - - -def group_period(tlist): - def match(token): - return token.match(T.Punctuation, '.') - - def valid_prev(token): - sqlcls = sql.SquareBrackets, sql.Identifier - ttypes = T.Name, T.String.Symbol - return imt(token, i=sqlcls, t=ttypes) - - def valid_next(token): - # issue261, allow invalid next token - return True - - def post(tlist, pidx, tidx, nidx): - # next_ validation is being performed here. issue261 - sqlcls = sql.SquareBrackets, sql.Function - ttypes = T.Name, T.String.Symbol, T.Wildcard - next_ = tlist[nidx] if nidx is not None else None - valid_next = imt(next_, i=sqlcls, t=ttypes) - - return (pidx, nidx) if valid_next else (pidx, tidx) - - _group(tlist, sql.Identifier, match, valid_prev, valid_next, post) - - -def group_as(tlist): - def match(token): - return token.is_keyword and token.normalized == 'AS' - - def valid_prev(token): - return token.normalized == 'NULL' or not token.is_keyword - - def valid_next(token): - ttypes = T.DML, T.DDL, T.CTE - return not imt(token, t=ttypes) and token is not None - - def post(tlist, pidx, tidx, nidx): - return pidx, nidx - - _group(tlist, sql.Identifier, match, valid_prev, valid_next, post) - - -def group_assignment(tlist): - def match(token): - return token.match(T.Assignment, ':=') - - def valid(token): - return token is not None and token.ttype not in (T.Keyword) - - def post(tlist, pidx, tidx, nidx): - m_semicolon = T.Punctuation, ';' - snidx, _ = tlist.token_next_by(m=m_semicolon, idx=nidx) - nidx = snidx or nidx - return pidx, nidx - - valid_prev = valid_next = valid - _group(tlist, sql.Assignment, match, valid_prev, valid_next, post) - - -def group_comparison(tlist): - sqlcls = (sql.Parenthesis, sql.Function, sql.Identifier, - sql.Operation) - ttypes = T_NUMERICAL + T_STRING + T_NAME - - def match(token): - return token.ttype 
== T.Operator.Comparison - - def valid(token): - if imt(token, t=ttypes, i=sqlcls): - return True - elif token and token.is_keyword and token.normalized == 'NULL': - return True - else: - return False - - def post(tlist, pidx, tidx, nidx): - return pidx, nidx - - valid_prev = valid_next = valid - _group(tlist, sql.Comparison, match, - valid_prev, valid_next, post, extend=False) - - -@recurse(sql.Identifier) -def group_identifier(tlist): - ttypes = (T.String.Symbol, T.Name) - - tidx, token = tlist.token_next_by(t=ttypes) - while token: - tlist.group_tokens(sql.Identifier, tidx, tidx) - tidx, token = tlist.token_next_by(t=ttypes, idx=tidx) - - -def group_arrays(tlist): - sqlcls = sql.SquareBrackets, sql.Identifier, sql.Function - ttypes = T.Name, T.String.Symbol - - def match(token): - return isinstance(token, sql.SquareBrackets) - - def valid_prev(token): - return imt(token, i=sqlcls, t=ttypes) - - def valid_next(token): - return True - - def post(tlist, pidx, tidx, nidx): - return pidx, tidx - - _group(tlist, sql.Identifier, match, - valid_prev, valid_next, post, extend=True, recurse=False) - - -def group_operator(tlist): - ttypes = T_NUMERICAL + T_STRING + T_NAME - sqlcls = (sql.SquareBrackets, sql.Parenthesis, sql.Function, - sql.Identifier, sql.Operation, sql.TypedLiteral) - - def match(token): - return imt(token, t=(T.Operator, T.Wildcard)) - - def valid(token): - return imt(token, i=sqlcls, t=ttypes) \ - or (token and token.match( - T.Keyword, - ('CURRENT_DATE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP'))) - - def post(tlist, pidx, tidx, nidx): - tlist[tidx].ttype = T.Operator - return pidx, nidx - - valid_prev = valid_next = valid - _group(tlist, sql.Operation, match, - valid_prev, valid_next, post, extend=False) - - -def group_identifier_list(tlist): - m_role = T.Keyword, ('null', 'role') - sqlcls = (sql.Function, sql.Case, sql.Identifier, sql.Comparison, - sql.IdentifierList, sql.Operation) - ttypes = (T_NUMERICAL + T_STRING + T_NAME - + (T.Keyword, T.Comment, 
T.Wildcard)) - - def match(token): - return token.match(T.Punctuation, ',') - - def valid(token): - return imt(token, i=sqlcls, m=m_role, t=ttypes) - - def post(tlist, pidx, tidx, nidx): - return pidx, nidx - - valid_prev = valid_next = valid - _group(tlist, sql.IdentifierList, match, - valid_prev, valid_next, post, extend=True) - - -@recurse(sql.Comment) -def group_comments(tlist): - tidx, token = tlist.token_next_by(t=T.Comment) - while token: - eidx, end = tlist.token_not_matching( - lambda tk: imt(tk, t=T.Comment) or tk.is_whitespace, idx=tidx) - if end is not None: - eidx, end = tlist.token_prev(eidx, skip_ws=False) - tlist.group_tokens(sql.Comment, tidx, eidx) - - tidx, token = tlist.token_next_by(t=T.Comment, idx=tidx) - - -@recurse(sql.Where) -def group_where(tlist): - tidx, token = tlist.token_next_by(m=sql.Where.M_OPEN) - while token: - eidx, end = tlist.token_next_by(m=sql.Where.M_CLOSE, idx=tidx) - - if end is None: - end = tlist._groupable_tokens[-1] - else: - end = tlist.tokens[eidx - 1] - # TODO: convert this to eidx instead of end token. 
- # i think above values are len(tlist) and eidx-1 - eidx = tlist.token_index(end) - tlist.group_tokens(sql.Where, tidx, eidx) - tidx, token = tlist.token_next_by(m=sql.Where.M_OPEN, idx=tidx) - - -@recurse() -def group_aliased(tlist): - I_ALIAS = (sql.Parenthesis, sql.Function, sql.Case, sql.Identifier, - sql.Operation, sql.Comparison) - - tidx, token = tlist.token_next_by(i=I_ALIAS, t=T.Number) - while token: - nidx, next_ = tlist.token_next(tidx) - if isinstance(next_, sql.Identifier): - tlist.group_tokens(sql.Identifier, tidx, nidx, extend=True) - tidx, token = tlist.token_next_by(i=I_ALIAS, t=T.Number, idx=tidx) - - -@recurse(sql.Function) -def group_functions(tlist): - has_create = False - has_table = False - for tmp_token in tlist.tokens: - if tmp_token.value == 'CREATE': - has_create = True - if tmp_token.value == 'TABLE': - has_table = True - if has_create and has_table: - return - - tidx, token = tlist.token_next_by(t=T.Name) - while token: - nidx, next_ = tlist.token_next(tidx) - if isinstance(next_, sql.Parenthesis): - tlist.group_tokens(sql.Function, tidx, nidx) - tidx, token = tlist.token_next_by(t=T.Name, idx=tidx) - - -def group_order(tlist): - """Group together Identifier and Asc/Desc token""" - tidx, token = tlist.token_next_by(t=T.Keyword.Order) - while token: - pidx, prev_ = tlist.token_prev(tidx) - if imt(prev_, i=sql.Identifier, t=T.Number): - tlist.group_tokens(sql.Identifier, pidx, tidx) - tidx = pidx - tidx, token = tlist.token_next_by(t=T.Keyword.Order, idx=tidx) - - -@recurse() -def align_comments(tlist): - tidx, token = tlist.token_next_by(i=sql.Comment) - while token: - pidx, prev_ = tlist.token_prev(tidx) - if isinstance(prev_, sql.TokenList): - tlist.group_tokens(sql.TokenList, pidx, tidx, extend=True) - tidx = pidx - tidx, token = tlist.token_next_by(i=sql.Comment, idx=tidx) - - -def group_values(tlist): - tidx, token = tlist.token_next_by(m=(T.Keyword, 'VALUES')) - start_idx = tidx - end_idx = -1 - while token: - if 
isinstance(token, sql.Parenthesis): - end_idx = tidx - tidx, token = tlist.token_next(tidx) - if end_idx != -1: - tlist.group_tokens(sql.Values, start_idx, end_idx, extend=True) - - -def group(stmt): - for func in [ - group_comments, - - # _group_matching - group_brackets, - group_parenthesis, - group_case, - group_if, - group_for, - group_begin, - - group_functions, - group_where, - group_period, - group_arrays, - group_identifier, - group_order, - group_typecasts, - group_tzcasts, - group_typed_literal, - group_operator, - group_comparison, - group_as, - group_aliased, - group_assignment, - - align_comments, - group_identifier_list, - group_values, - ]: - func(stmt) - return stmt - - -def _group(tlist, cls, match, - valid_prev=lambda t: True, - valid_next=lambda t: True, - post=None, - extend=True, - recurse=True - ): - """Groups together tokens that are joined by a middle token. i.e. x < y""" - - tidx_offset = 0 - pidx, prev_ = None, None - for idx, token in enumerate(list(tlist)): - tidx = idx - tidx_offset - - if token.is_whitespace: - continue - - if recurse and token.is_group and not isinstance(token, cls): - _group(token, cls, match, valid_prev, valid_next, post, extend) - - if match(token): - nidx, next_ = tlist.token_next(tidx) - if prev_ and valid_prev(prev_) and valid_next(next_): - from_idx, to_idx = post(tlist, pidx, tidx, nidx) - grp = tlist.group_tokens(cls, from_idx, to_idx, extend=extend) - - tidx_offset += to_idx - from_idx - pidx, prev_ = from_idx, grp - continue - - pidx, prev_ = tidx, token diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/engine/statement_splitter.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/engine/statement_splitter.py deleted file mode 100644 index 1e9af3cf3..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/engine/statement_splitter.py +++ /dev/null @@ -1,108 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released 
under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -from sqlparse import sql, tokens as T - - -class StatementSplitter(object): - """Filter that split stream at individual statements""" - - def __init__(self): - self._reset() - - def _reset(self): - """Set the filter attributes to its default values""" - self._in_declare = False - self._is_create = False - self._begin_depth = 0 - - self.consume_ws = False - self.tokens = [] - self.level = 0 - - def _change_splitlevel(self, ttype, value): - """Get the new split level (increase, decrease or remain equal)""" - - # parenthesis increase/decrease a level - if ttype is T.Punctuation and value == '(': - return 1 - elif ttype is T.Punctuation and value == ')': - return -1 - elif ttype not in T.Keyword: # if normal token return - return 0 - - # Everything after here is ttype = T.Keyword - # Also to note, once entered an If statement you are done and basically - # returning - unified = value.upper() - - # three keywords begin with CREATE, but only one of them is DDL - # DDL Create though can contain more words such as "or replace" - if ttype is T.Keyword.DDL and unified.startswith('CREATE'): - self._is_create = True - return 0 - - # can have nested declare inside of being... - if unified == 'DECLARE' and self._is_create and self._begin_depth == 0: - self._in_declare = True - return 1 - - if unified == 'BEGIN': - self._begin_depth += 1 - if self._is_create: - # FIXME(andi): This makes no sense. - return 1 - return 0 - - # Should this respect a preceding BEGIN? - # In CASE ... WHEN ... END this results in a split level -1. - # Would having multiple CASE WHEN END and a Assignment Operator - # cause the statement to cut off prematurely? 
- if unified == 'END': - self._begin_depth = max(0, self._begin_depth - 1) - return -1 - - if (unified in ('IF', 'FOR', 'WHILE') - and self._is_create and self._begin_depth > 0): - return 1 - - if unified in ('END IF', 'END FOR', 'END WHILE'): - return -1 - - # Default - return 0 - - def process(self, stream): - """Process the stream""" - EOS_TTYPE = T.Whitespace, T.Comment.Single - - # Run over all stream tokens - for ttype, value in stream: - # Yield token if we finished a statement and there's no whitespaces - # It will count newline token as a non whitespace. In this context - # whitespace ignores newlines. - # why don't multi line comments also count? - if self.consume_ws and ttype not in EOS_TTYPE: - yield sql.Statement(self.tokens) - - # Reset filter and prepare to process next statement - self._reset() - - # Change current split level (increase, decrease or remain equal) - self.level += self._change_splitlevel(ttype, value) - - # Append the token to the current statement - self.tokens.append(sql.Token(ttype, value)) - - # Check if we get the end of a statement - if self.level <= 0 and ttype is T.Punctuation and value == ';': - self.consume_ws = True - - # Yield pending statement (if any) - if self.tokens: - yield sql.Statement(self.tokens) diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/exceptions.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/exceptions.py deleted file mode 100644 index 01e60f7b0..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/exceptions.py +++ /dev/null @@ -1,13 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -"""Exceptions used in this package.""" - - -class SQLParseError(Exception): - """Base class for exceptions in this module.""" diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/__init__.py 
b/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/__init__.py deleted file mode 100644 index c60d84d79..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -from sqlparse.filters.others import SerializerUnicode -from sqlparse.filters.others import StripCommentsFilter -from sqlparse.filters.others import StripWhitespaceFilter -from sqlparse.filters.others import SpacesAroundOperatorsFilter - -from sqlparse.filters.output import OutputPHPFilter -from sqlparse.filters.output import OutputPythonFilter - -from sqlparse.filters.tokens import KeywordCaseFilter -from sqlparse.filters.tokens import IdentifierCaseFilter -from sqlparse.filters.tokens import TruncateStringFilter - -from sqlparse.filters.reindent import ReindentFilter -from sqlparse.filters.right_margin import RightMarginFilter -from sqlparse.filters.aligned_indent import AlignedIndentFilter - -__all__ = [ - 'SerializerUnicode', - 'StripCommentsFilter', - 'StripWhitespaceFilter', - 'SpacesAroundOperatorsFilter', - - 'OutputPHPFilter', - 'OutputPythonFilter', - - 'KeywordCaseFilter', - 'IdentifierCaseFilter', - 'TruncateStringFilter', - - 'ReindentFilter', - 'RightMarginFilter', - 'AlignedIndentFilter', -] diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/aligned_indent.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/aligned_indent.py deleted file mode 100644 index 85b11e587..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/aligned_indent.py +++ /dev/null @@ -1,138 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -from sqlparse 
import sql, tokens as T -from sqlparse.compat import text_type -from sqlparse.utils import offset, indent - - -class AlignedIndentFilter(object): - join_words = (r'((LEFT\s+|RIGHT\s+|FULL\s+)?' - r'(INNER\s+|OUTER\s+|STRAIGHT\s+)?|' - r'(CROSS\s+|NATURAL\s+)?)?JOIN\b') - by_words = r'(GROUP|ORDER)\s+BY\b' - split_words = ('FROM', - join_words, 'ON', by_words, - 'WHERE', 'AND', 'OR', - 'HAVING', 'LIMIT', - 'UNION', 'VALUES', - 'SET', 'BETWEEN', 'EXCEPT') - - def __init__(self, char=' ', n='\n'): - self.n = n - self.offset = 0 - self.indent = 0 - self.char = char - self._max_kwd_len = len('select') - - def nl(self, offset=1): - # offset = 1 represent a single space after SELECT - offset = -len(offset) if not isinstance(offset, int) else offset - # add two for the space and parenthesis - indent = self.indent * (2 + self._max_kwd_len) - - return sql.Token(T.Whitespace, self.n + self.char * ( - self._max_kwd_len + offset + indent + self.offset)) - - def _process_statement(self, tlist): - if len(tlist.tokens) > 0 and tlist.tokens[0].is_whitespace \ - and self.indent == 0: - tlist.tokens.pop(0) - - # process the main query body - self._process(sql.TokenList(tlist.tokens)) - - def _process_parenthesis(self, tlist): - # if this isn't a subquery, don't re-indent - _, token = tlist.token_next_by(m=(T.DML, 'SELECT')) - if token is not None: - with indent(self): - tlist.insert_after(tlist[0], self.nl('SELECT')) - # process the inside of the parenthesis - self._process_default(tlist) - - # de-indent last parenthesis - tlist.insert_before(tlist[-1], self.nl()) - - def _process_identifierlist(self, tlist): - # columns being selected - identifiers = list(tlist.get_identifiers()) - identifiers.pop(0) - [tlist.insert_before(token, self.nl()) for token in identifiers] - self._process_default(tlist) - - def _process_case(self, tlist): - offset_ = len('case ') + len('when ') - cases = tlist.get_cases(skip_ws=True) - # align the end as well - end_token = tlist.token_next_by(m=(T.Keyword, 
'END'))[1] - cases.append((None, [end_token])) - - condition_width = [len(' '.join(map(text_type, cond))) if cond else 0 - for cond, _ in cases] - max_cond_width = max(condition_width) - - for i, (cond, value) in enumerate(cases): - # cond is None when 'else or end' - stmt = cond[0] if cond else value[0] - - if i > 0: - tlist.insert_before(stmt, self.nl( - offset_ - len(text_type(stmt)))) - if cond: - ws = sql.Token(T.Whitespace, self.char * ( - max_cond_width - condition_width[i])) - tlist.insert_after(cond[-1], ws) - - def _next_token(self, tlist, idx=-1): - split_words = T.Keyword, self.split_words, True - tidx, token = tlist.token_next_by(m=split_words, idx=idx) - # treat "BETWEEN x and y" as a single statement - if token and token.normalized == 'BETWEEN': - tidx, token = self._next_token(tlist, tidx) - if token and token.normalized == 'AND': - tidx, token = self._next_token(tlist, tidx) - return tidx, token - - def _split_kwds(self, tlist): - tidx, token = self._next_token(tlist) - while token: - # joins, group/order by are special case. only consider the first - # word as aligner - if ( - token.match(T.Keyword, self.join_words, regex=True) - or token.match(T.Keyword, self.by_words, regex=True) - ): - token_indent = token.value.split()[0] - else: - token_indent = text_type(token) - tlist.insert_before(token, self.nl(token_indent)) - tidx += 1 - tidx, token = self._next_token(tlist, tidx) - - def _process_default(self, tlist): - self._split_kwds(tlist) - # process any sub-sub statements - for sgroup in tlist.get_sublists(): - idx = tlist.token_index(sgroup) - pidx, prev_ = tlist.token_prev(idx) - # HACK: make "group/order by" work. Longer than max_len. 
- offset_ = 3 if ( - prev_ and prev_.match(T.Keyword, self.by_words, regex=True) - ) else 0 - with offset(self, offset_): - self._process(sgroup) - - def _process(self, tlist): - func_name = '_process_{cls}'.format(cls=type(tlist).__name__) - func = getattr(self, func_name.lower(), self._process_default) - func(tlist) - - def process(self, stmt): - self._process(stmt) - return stmt diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/others.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/others.py deleted file mode 100644 index 52b861705..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/others.py +++ /dev/null @@ -1,125 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -from sqlparse import sql, tokens as T -from sqlparse.utils import split_unquoted_newlines - - -class StripCommentsFilter(object): - @staticmethod - def _process(tlist): - def get_next_comment(): - # TODO(andi) Comment types should be unified, see related issue38 - return tlist.token_next_by(i=sql.Comment, t=T.Comment) - - tidx, token = get_next_comment() - while token: - pidx, prev_ = tlist.token_prev(tidx, skip_ws=False) - nidx, next_ = tlist.token_next(tidx, skip_ws=False) - # Replace by whitespace if prev and next exist and if they're not - # whitespaces. This doesn't apply if prev or next is a parenthesis. - if (prev_ is None or next_ is None - or prev_.is_whitespace or prev_.match(T.Punctuation, '(') - or next_.is_whitespace or next_.match(T.Punctuation, ')')): - # Insert a whitespace to ensure the following SQL produces - # a valid SQL (see #425). 
For example: - # - # Before: select a--comment\nfrom foo - # After: select a from foo - if prev_ is not None and next_ is None: - tlist.tokens.insert(tidx, sql.Token(T.Whitespace, ' ')) - tlist.tokens.remove(token) - else: - tlist.tokens[tidx] = sql.Token(T.Whitespace, ' ') - - tidx, token = get_next_comment() - - def process(self, stmt): - [self.process(sgroup) for sgroup in stmt.get_sublists()] - StripCommentsFilter._process(stmt) - return stmt - - -class StripWhitespaceFilter(object): - def _stripws(self, tlist): - func_name = '_stripws_{cls}'.format(cls=type(tlist).__name__) - func = getattr(self, func_name.lower(), self._stripws_default) - func(tlist) - - @staticmethod - def _stripws_default(tlist): - last_was_ws = False - is_first_char = True - for token in tlist.tokens: - if token.is_whitespace: - token.value = '' if last_was_ws or is_first_char else ' ' - last_was_ws = token.is_whitespace - is_first_char = False - - def _stripws_identifierlist(self, tlist): - # Removes newlines before commas, see issue140 - last_nl = None - for token in list(tlist.tokens): - if last_nl and token.ttype is T.Punctuation and token.value == ',': - tlist.tokens.remove(last_nl) - last_nl = token if token.is_whitespace else None - - # next_ = tlist.token_next(token, skip_ws=False) - # if (next_ and not next_.is_whitespace and - # token.ttype is T.Punctuation and token.value == ','): - # tlist.insert_after(token, sql.Token(T.Whitespace, ' ')) - return self._stripws_default(tlist) - - def _stripws_parenthesis(self, tlist): - while tlist.tokens[1].is_whitespace: - tlist.tokens.pop(1) - while tlist.tokens[-2].is_whitespace: - tlist.tokens.pop(-2) - self._stripws_default(tlist) - - def process(self, stmt, depth=0): - [self.process(sgroup, depth + 1) for sgroup in stmt.get_sublists()] - self._stripws(stmt) - if depth == 0 and stmt.tokens and stmt.tokens[-1].is_whitespace: - stmt.tokens.pop(-1) - return stmt - - -class SpacesAroundOperatorsFilter(object): - @staticmethod - def 
_process(tlist): - - ttypes = (T.Operator, T.Comparison) - tidx, token = tlist.token_next_by(t=ttypes) - while token: - nidx, next_ = tlist.token_next(tidx, skip_ws=False) - if next_ and next_.ttype != T.Whitespace: - tlist.insert_after(tidx, sql.Token(T.Whitespace, ' ')) - - pidx, prev_ = tlist.token_prev(tidx, skip_ws=False) - if prev_ and prev_.ttype != T.Whitespace: - tlist.insert_before(tidx, sql.Token(T.Whitespace, ' ')) - tidx += 1 # has to shift since token inserted before it - - # assert tlist.token_index(token) == tidx - tidx, token = tlist.token_next_by(t=ttypes, idx=tidx) - - def process(self, stmt): - [self.process(sgroup) for sgroup in stmt.get_sublists()] - SpacesAroundOperatorsFilter._process(stmt) - return stmt - - -# --------------------------- -# postprocess - -class SerializerUnicode(object): - @staticmethod - def process(stmt): - lines = split_unquoted_newlines(stmt) - return '\n'.join(line.rstrip() for line in lines) diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/output.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/output.py deleted file mode 100644 index 3fbc46d13..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/output.py +++ /dev/null @@ -1,124 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -from sqlparse import sql, tokens as T -from sqlparse.compat import text_type - - -class OutputFilter(object): - varname_prefix = '' - - def __init__(self, varname='sql'): - self.varname = self.varname_prefix + varname - self.count = 0 - - def _process(self, stream, varname, has_nl): - raise NotImplementedError - - def process(self, stmt): - self.count += 1 - if self.count > 1: - varname = u'{f.varname}{f.count}'.format(f=self) - else: - varname = self.varname - - has_nl = len(text_type(stmt).strip().splitlines()) > 1 - stmt.tokens = 
self._process(stmt.tokens, varname, has_nl) - return stmt - - -class OutputPythonFilter(OutputFilter): - def _process(self, stream, varname, has_nl): - # SQL query assignation to varname - if self.count > 1: - yield sql.Token(T.Whitespace, '\n') - yield sql.Token(T.Name, varname) - yield sql.Token(T.Whitespace, ' ') - yield sql.Token(T.Operator, '=') - yield sql.Token(T.Whitespace, ' ') - if has_nl: - yield sql.Token(T.Operator, '(') - yield sql.Token(T.Text, "'") - - # Print the tokens on the quote - for token in stream: - # Token is a new line separator - if token.is_whitespace and '\n' in token.value: - # Close quote and add a new line - yield sql.Token(T.Text, " '") - yield sql.Token(T.Whitespace, '\n') - - # Quote header on secondary lines - yield sql.Token(T.Whitespace, ' ' * (len(varname) + 4)) - yield sql.Token(T.Text, "'") - - # Indentation - after_lb = token.value.split('\n', 1)[1] - if after_lb: - yield sql.Token(T.Whitespace, after_lb) - continue - - # Token has escape chars - elif "'" in token.value: - token.value = token.value.replace("'", "\\'") - - # Put the token - yield sql.Token(T.Text, token.value) - - # Close quote - yield sql.Token(T.Text, "'") - if has_nl: - yield sql.Token(T.Operator, ')') - - -class OutputPHPFilter(OutputFilter): - varname_prefix = '$' - - def _process(self, stream, varname, has_nl): - # SQL query assignation to varname (quote header) - if self.count > 1: - yield sql.Token(T.Whitespace, '\n') - yield sql.Token(T.Name, varname) - yield sql.Token(T.Whitespace, ' ') - if has_nl: - yield sql.Token(T.Whitespace, ' ') - yield sql.Token(T.Operator, '=') - yield sql.Token(T.Whitespace, ' ') - yield sql.Token(T.Text, '"') - - # Print the tokens on the quote - for token in stream: - # Token is a new line separator - if token.is_whitespace and '\n' in token.value: - # Close quote and add a new line - yield sql.Token(T.Text, ' ";') - yield sql.Token(T.Whitespace, '\n') - - # Quote header on secondary lines - yield sql.Token(T.Name, 
varname) - yield sql.Token(T.Whitespace, ' ') - yield sql.Token(T.Operator, '.=') - yield sql.Token(T.Whitespace, ' ') - yield sql.Token(T.Text, '"') - - # Indentation - after_lb = token.value.split('\n', 1)[1] - if after_lb: - yield sql.Token(T.Whitespace, after_lb) - continue - - # Token has escape chars - elif '"' in token.value: - token.value = token.value.replace('"', '\\"') - - # Put the token - yield sql.Token(T.Text, token.value) - - # Close quote - yield sql.Token(T.Text, '"') - yield sql.Token(T.Punctuation, ';') diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/reindent.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/reindent.py deleted file mode 100644 index acec8ca4b..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/reindent.py +++ /dev/null @@ -1,241 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -from sqlparse import sql, tokens as T -from sqlparse.compat import text_type -from sqlparse.utils import offset, indent - - -class ReindentFilter(object): - def __init__(self, width=2, char=' ', wrap_after=0, n='\n', - comma_first=False, indent_after_first=False, - indent_columns=False): - self.n = n - self.width = width - self.char = char - self.indent = 1 if indent_after_first else 0 - self.offset = 0 - self.wrap_after = wrap_after - self.comma_first = comma_first - self.indent_columns = indent_columns - self._curr_stmt = None - self._last_stmt = None - self._last_func = None - - def _flatten_up_to_token(self, token): - """Yields all tokens up to token but excluding current.""" - if token.is_group: - token = next(token.flatten()) - - for t in self._curr_stmt.flatten(): - if t == token: - break - yield t - - @property - def leading_ws(self): - return self.offset + self.indent * self.width - - def _get_offset(self, token): - raw = 
u''.join(map(text_type, self._flatten_up_to_token(token))) - line = (raw or '\n').splitlines()[-1] - # Now take current offset into account and return relative offset. - return len(line) - len(self.char * self.leading_ws) - - def nl(self, offset=0): - return sql.Token( - T.Whitespace, - self.n + self.char * max(0, self.leading_ws + offset)) - - def _next_token(self, tlist, idx=-1): - split_words = ('FROM', 'STRAIGHT_JOIN$', 'JOIN$', 'AND', 'OR', - 'GROUP BY', 'ORDER BY', 'UNION', 'VALUES', - 'SET', 'BETWEEN', 'EXCEPT', 'HAVING', 'LIMIT') - m_split = T.Keyword, split_words, True - tidx, token = tlist.token_next_by(m=m_split, idx=idx) - - if token and token.normalized == 'BETWEEN': - tidx, token = self._next_token(tlist, tidx) - - if token and token.normalized == 'AND': - tidx, token = self._next_token(tlist, tidx) - - return tidx, token - - def _split_kwds(self, tlist): - tidx, token = self._next_token(tlist) - while token: - pidx, prev_ = tlist.token_prev(tidx, skip_ws=False) - uprev = text_type(prev_) - - if prev_ and prev_.is_whitespace: - del tlist.tokens[pidx] - tidx -= 1 - - if not (uprev.endswith('\n') or uprev.endswith('\r')): - tlist.insert_before(tidx, self.nl()) - tidx += 1 - - tidx, token = self._next_token(tlist, tidx) - - def _split_statements(self, tlist): - ttypes = T.Keyword.DML, T.Keyword.DDL - tidx, token = tlist.token_next_by(t=ttypes) - while token: - pidx, prev_ = tlist.token_prev(tidx, skip_ws=False) - if prev_ and prev_.is_whitespace: - del tlist.tokens[pidx] - tidx -= 1 - # only break if it's not the first token - if prev_: - tlist.insert_before(tidx, self.nl()) - tidx += 1 - tidx, token = tlist.token_next_by(t=ttypes, idx=tidx) - - def _process(self, tlist): - func_name = '_process_{cls}'.format(cls=type(tlist).__name__) - func = getattr(self, func_name.lower(), self._process_default) - func(tlist) - - def _process_where(self, tlist): - tidx, token = tlist.token_next_by(m=(T.Keyword, 'WHERE')) - # issue121, errors in statement fixed?? 
- tlist.insert_before(tidx, self.nl()) - - with indent(self): - self._process_default(tlist) - - def _process_parenthesis(self, tlist): - ttypes = T.Keyword.DML, T.Keyword.DDL - _, is_dml_dll = tlist.token_next_by(t=ttypes) - fidx, first = tlist.token_next_by(m=sql.Parenthesis.M_OPEN) - - with indent(self, 1 if is_dml_dll else 0): - tlist.tokens.insert(0, self.nl()) if is_dml_dll else None - with offset(self, self._get_offset(first) + 1): - self._process_default(tlist, not is_dml_dll) - - def _process_function(self, tlist): - self._last_func = tlist[0] - self._process_default(tlist) - - def _process_identifierlist(self, tlist): - identifiers = list(tlist.get_identifiers()) - if self.indent_columns: - first = next(identifiers[0].flatten()) - num_offset = 1 if self.char == '\t' else self.width - else: - first = next(identifiers.pop(0).flatten()) - num_offset = 1 if self.char == '\t' else self._get_offset(first) - - if not tlist.within(sql.Function) and not tlist.within(sql.Values): - with offset(self, num_offset): - position = 0 - for token in identifiers: - # Add 1 for the "," separator - position += len(token.value) + 1 - if position > (self.wrap_after - self.offset): - adjust = 0 - if self.comma_first: - adjust = -2 - _, comma = tlist.token_prev( - tlist.token_index(token)) - if comma is None: - continue - token = comma - tlist.insert_before(token, self.nl(offset=adjust)) - if self.comma_first: - _, ws = tlist.token_next( - tlist.token_index(token), skip_ws=False) - if (ws is not None - and ws.ttype is not T.Text.Whitespace): - tlist.insert_after( - token, sql.Token(T.Whitespace, ' ')) - position = 0 - else: - # ensure whitespace - for token in tlist: - _, next_ws = tlist.token_next( - tlist.token_index(token), skip_ws=False) - if token.value == ',' and not next_ws.is_whitespace: - tlist.insert_after( - token, sql.Token(T.Whitespace, ' ')) - - end_at = self.offset + sum(len(i.value) + 1 for i in identifiers) - adjusted_offset = 0 - if (self.wrap_after > 0 - and 
end_at > (self.wrap_after - self.offset) - and self._last_func): - adjusted_offset = -len(self._last_func.value) - 1 - - with offset(self, adjusted_offset), indent(self): - if adjusted_offset < 0: - tlist.insert_before(identifiers[0], self.nl()) - position = 0 - for token in identifiers: - # Add 1 for the "," separator - position += len(token.value) + 1 - if (self.wrap_after > 0 - and position > (self.wrap_after - self.offset)): - adjust = 0 - tlist.insert_before(token, self.nl(offset=adjust)) - position = 0 - self._process_default(tlist) - - def _process_case(self, tlist): - iterable = iter(tlist.get_cases()) - cond, _ = next(iterable) - first = next(cond[0].flatten()) - - with offset(self, self._get_offset(tlist[0])): - with offset(self, self._get_offset(first)): - for cond, value in iterable: - token = value[0] if cond is None else cond[0] - tlist.insert_before(token, self.nl()) - - # Line breaks on group level are done. let's add an offset of - # len "when ", "then ", "else " - with offset(self, len("WHEN ")): - self._process_default(tlist) - end_idx, end = tlist.token_next_by(m=sql.Case.M_CLOSE) - if end_idx is not None: - tlist.insert_before(end_idx, self.nl()) - - def _process_values(self, tlist): - tlist.insert_before(0, self.nl()) - tidx, token = tlist.token_next_by(i=sql.Parenthesis) - first_token = token - while token: - ptidx, ptoken = tlist.token_next_by(m=(T.Punctuation, ','), - idx=tidx) - if ptoken: - if self.comma_first: - adjust = -2 - offset = self._get_offset(first_token) + adjust - tlist.insert_before(ptoken, self.nl(offset)) - else: - tlist.insert_after(ptoken, - self.nl(self._get_offset(token))) - tidx, token = tlist.token_next_by(i=sql.Parenthesis, idx=tidx) - - def _process_default(self, tlist, stmts=True): - self._split_statements(tlist) if stmts else None - self._split_kwds(tlist) - for sgroup in tlist.get_sublists(): - self._process(sgroup) - - def process(self, stmt): - self._curr_stmt = stmt - self._process(stmt) - - if self._last_stmt 
is not None: - nl = '\n' if text_type(self._last_stmt).endswith('\n') else '\n\n' - stmt.tokens.insert(0, sql.Token(T.Whitespace, nl)) - - self._last_stmt = stmt - return stmt diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/right_margin.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/right_margin.py deleted file mode 100644 index 165813896..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/right_margin.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -import re - -from sqlparse import sql, tokens as T -from sqlparse.compat import text_type - - -# FIXME: Doesn't work -class RightMarginFilter(object): - keep_together = ( - # sql.TypeCast, sql.Identifier, sql.Alias, - ) - - def __init__(self, width=79): - self.width = width - self.line = '' - - def _process(self, group, stream): - for token in stream: - if token.is_whitespace and '\n' in token.value: - if token.value.endswith('\n'): - self.line = '' - else: - self.line = token.value.splitlines()[-1] - elif token.is_group and type(token) not in self.keep_together: - token.tokens = self._process(token, token.tokens) - else: - val = text_type(token) - if len(self.line) + len(val) > self.width: - match = re.search(r'^ +', self.line) - if match is not None: - indent = match.group() - else: - indent = '' - yield sql.Token(T.Whitespace, '\n{0}'.format(indent)) - self.line = indent - self.line += val - yield token - - def process(self, group): - # return - # group.tokens = self._process(group, group.tokens) - raise NotImplementedError diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/tokens.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/tokens.py deleted file mode 100644 index 93182b194..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/filters/tokens.py +++ /dev/null @@ -1,61 
+0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -from sqlparse import tokens as T -from sqlparse.compat import text_type - - -class _CaseFilter(object): - ttype = None - - def __init__(self, case=None): - case = case or 'upper' - self.convert = getattr(text_type, case) - - def process(self, stream): - for ttype, value in stream: - if ttype in self.ttype: - value = self.convert(value) - yield ttype, value - - -class KeywordCaseFilter(_CaseFilter): - ttype = T.Keyword - - -class IdentifierCaseFilter(_CaseFilter): - ttype = T.Name, T.String.Symbol - - def process(self, stream): - for ttype, value in stream: - if ttype in self.ttype and value.strip()[0] != '"': - value = self.convert(value) - yield ttype, value - - -class TruncateStringFilter(object): - def __init__(self, width, char): - self.width = width - self.char = char - - def process(self, stream): - for ttype, value in stream: - if ttype != T.Literal.String.Single: - yield ttype, value - continue - - if value[:2] == "''": - inner = value[2:-2] - quote = "''" - else: - inner = value[1:-1] - quote = "'" - - if len(inner) > self.width: - value = ''.join((quote, inner[:self.width], self.char, quote)) - yield ttype, value diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/formatter.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/formatter.py deleted file mode 100644 index 89627596c..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/formatter.py +++ /dev/null @@ -1,199 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -"""SQL formatter""" - -from sqlparse import filters -from sqlparse.exceptions import SQLParseError - - -def 
validate_options(options): - """Validates options.""" - kwcase = options.get('keyword_case') - if kwcase not in [None, 'upper', 'lower', 'capitalize']: - raise SQLParseError('Invalid value for keyword_case: ' - '{0!r}'.format(kwcase)) - - idcase = options.get('identifier_case') - if idcase not in [None, 'upper', 'lower', 'capitalize']: - raise SQLParseError('Invalid value for identifier_case: ' - '{0!r}'.format(idcase)) - - ofrmt = options.get('output_format') - if ofrmt not in [None, 'sql', 'python', 'php']: - raise SQLParseError('Unknown output format: ' - '{0!r}'.format(ofrmt)) - - strip_comments = options.get('strip_comments', False) - if strip_comments not in [True, False]: - raise SQLParseError('Invalid value for strip_comments: ' - '{0!r}'.format(strip_comments)) - - space_around_operators = options.get('use_space_around_operators', False) - if space_around_operators not in [True, False]: - raise SQLParseError('Invalid value for use_space_around_operators: ' - '{0!r}'.format(space_around_operators)) - - strip_ws = options.get('strip_whitespace', False) - if strip_ws not in [True, False]: - raise SQLParseError('Invalid value for strip_whitespace: ' - '{0!r}'.format(strip_ws)) - - truncate_strings = options.get('truncate_strings') - if truncate_strings is not None: - try: - truncate_strings = int(truncate_strings) - except (ValueError, TypeError): - raise SQLParseError('Invalid value for truncate_strings: ' - '{0!r}'.format(truncate_strings)) - if truncate_strings <= 1: - raise SQLParseError('Invalid value for truncate_strings: ' - '{0!r}'.format(truncate_strings)) - options['truncate_strings'] = truncate_strings - options['truncate_char'] = options.get('truncate_char', '[...]') - - indent_columns = options.get('indent_columns', False) - if indent_columns not in [True, False]: - raise SQLParseError('Invalid value for indent_columns: ' - '{0!r}'.format(indent_columns)) - elif indent_columns: - options['reindent'] = True # enforce reindent - 
options['indent_columns'] = indent_columns - - reindent = options.get('reindent', False) - if reindent not in [True, False]: - raise SQLParseError('Invalid value for reindent: ' - '{0!r}'.format(reindent)) - elif reindent: - options['strip_whitespace'] = True - - reindent_aligned = options.get('reindent_aligned', False) - if reindent_aligned not in [True, False]: - raise SQLParseError('Invalid value for reindent_aligned: ' - '{0!r}'.format(reindent)) - elif reindent_aligned: - options['strip_whitespace'] = True - - indent_after_first = options.get('indent_after_first', False) - if indent_after_first not in [True, False]: - raise SQLParseError('Invalid value for indent_after_first: ' - '{0!r}'.format(indent_after_first)) - options['indent_after_first'] = indent_after_first - - indent_tabs = options.get('indent_tabs', False) - if indent_tabs not in [True, False]: - raise SQLParseError('Invalid value for indent_tabs: ' - '{0!r}'.format(indent_tabs)) - elif indent_tabs: - options['indent_char'] = '\t' - else: - options['indent_char'] = ' ' - - indent_width = options.get('indent_width', 2) - try: - indent_width = int(indent_width) - except (TypeError, ValueError): - raise SQLParseError('indent_width requires an integer') - if indent_width < 1: - raise SQLParseError('indent_width requires a positive integer') - options['indent_width'] = indent_width - - wrap_after = options.get('wrap_after', 0) - try: - wrap_after = int(wrap_after) - except (TypeError, ValueError): - raise SQLParseError('wrap_after requires an integer') - if wrap_after < 0: - raise SQLParseError('wrap_after requires a positive integer') - options['wrap_after'] = wrap_after - - comma_first = options.get('comma_first', False) - if comma_first not in [True, False]: - raise SQLParseError('comma_first requires a boolean value') - options['comma_first'] = comma_first - - right_margin = options.get('right_margin') - if right_margin is not None: - try: - right_margin = int(right_margin) - except (TypeError, 
ValueError): - raise SQLParseError('right_margin requires an integer') - if right_margin < 10: - raise SQLParseError('right_margin requires an integer > 10') - options['right_margin'] = right_margin - - return options - - -def build_filter_stack(stack, options): - """Setup and return a filter stack. - - Args: - stack: :class:`~sqlparse.filters.FilterStack` instance - options: Dictionary with options validated by validate_options. - """ - # Token filter - if options.get('keyword_case'): - stack.preprocess.append( - filters.KeywordCaseFilter(options['keyword_case'])) - - if options.get('identifier_case'): - stack.preprocess.append( - filters.IdentifierCaseFilter(options['identifier_case'])) - - if options.get('truncate_strings'): - stack.preprocess.append(filters.TruncateStringFilter( - width=options['truncate_strings'], char=options['truncate_char'])) - - if options.get('use_space_around_operators', False): - stack.enable_grouping() - stack.stmtprocess.append(filters.SpacesAroundOperatorsFilter()) - - # After grouping - if options.get('strip_comments'): - stack.enable_grouping() - stack.stmtprocess.append(filters.StripCommentsFilter()) - - if options.get('strip_whitespace') or options.get('reindent'): - stack.enable_grouping() - stack.stmtprocess.append(filters.StripWhitespaceFilter()) - - if options.get('reindent'): - stack.enable_grouping() - stack.stmtprocess.append( - filters.ReindentFilter( - char=options['indent_char'], - width=options['indent_width'], - indent_after_first=options['indent_after_first'], - indent_columns=options['indent_columns'], - wrap_after=options['wrap_after'], - comma_first=options['comma_first'])) - - if options.get('reindent_aligned', False): - stack.enable_grouping() - stack.stmtprocess.append( - filters.AlignedIndentFilter(char=options['indent_char'])) - - if options.get('right_margin'): - stack.enable_grouping() - stack.stmtprocess.append( - filters.RightMarginFilter(width=options['right_margin'])) - - # Serializer - if 
options.get('output_format'): - frmt = options['output_format'] - if frmt.lower() == 'php': - fltr = filters.OutputPHPFilter() - elif frmt.lower() == 'python': - fltr = filters.OutputPythonFilter() - else: - fltr = None - if fltr is not None: - stack.postprocess.append(fltr) - - return stack diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/keywords.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/keywords.py deleted file mode 100644 index 9c37e5024..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/keywords.py +++ /dev/null @@ -1,955 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -import re - -from sqlparse import tokens - - -def is_keyword(value): - val = value.upper() - return (KEYWORDS_COMMON.get(val) - or KEYWORDS_ORACLE.get(val) - or KEYWORDS_PLPGSQL.get(val) - or KEYWORDS_HQL.get(val) - or KEYWORDS.get(val, tokens.Name)), value - - -SQL_REGEX = { - 'root': [ - (r'(--|# )\+.*?(\r\n|\r|\n|$)', tokens.Comment.Single.Hint), - (r'/\*\+[\s\S]*?\*/', tokens.Comment.Multiline.Hint), - - (r'(--|# ).*?(\r\n|\r|\n|$)', tokens.Comment.Single), - (r'/\*[\s\S]*?\*/', tokens.Comment.Multiline), - - (r'(\r\n|\r|\n)', tokens.Newline), - (r'\s+?', tokens.Whitespace), - - (r':=', tokens.Assignment), - (r'::', tokens.Punctuation), - - (r'\*', tokens.Wildcard), - - (r"`(``|[^`])*`", tokens.Name), - (r"´(´´|[^´])*´", tokens.Name), - (r'((?=~!]+', tokens.Operator.Comparison), - (r'[+/@#%^&|`?^-]+', tokens.Operator), - ]} - -FLAGS = re.IGNORECASE | re.UNICODE -SQL_REGEX = [(re.compile(rx, FLAGS).match, tt) for rx, tt in SQL_REGEX['root']] - -KEYWORDS = { - 'ABORT': tokens.Keyword, - 'ABS': tokens.Keyword, - 'ABSOLUTE': tokens.Keyword, - 'ACCESS': tokens.Keyword, - 'ADA': tokens.Keyword, - 'ADD': tokens.Keyword, - 'ADMIN': tokens.Keyword, - 'AFTER': tokens.Keyword, - 'AGGREGATE': tokens.Keyword, - 
'ALIAS': tokens.Keyword, - 'ALL': tokens.Keyword, - 'ALLOCATE': tokens.Keyword, - 'ANALYSE': tokens.Keyword, - 'ANALYZE': tokens.Keyword, - 'ANY': tokens.Keyword, - 'ARRAYLEN': tokens.Keyword, - 'ARE': tokens.Keyword, - 'ASC': tokens.Keyword.Order, - 'ASENSITIVE': tokens.Keyword, - 'ASSERTION': tokens.Keyword, - 'ASSIGNMENT': tokens.Keyword, - 'ASYMMETRIC': tokens.Keyword, - 'AT': tokens.Keyword, - 'ATOMIC': tokens.Keyword, - 'AUDIT': tokens.Keyword, - 'AUTHORIZATION': tokens.Keyword, - 'AUTO_INCREMENT': tokens.Keyword, - 'AVG': tokens.Keyword, - - 'BACKWARD': tokens.Keyword, - 'BEFORE': tokens.Keyword, - 'BEGIN': tokens.Keyword, - 'BETWEEN': tokens.Keyword, - 'BITVAR': tokens.Keyword, - 'BIT_LENGTH': tokens.Keyword, - 'BOTH': tokens.Keyword, - 'BREADTH': tokens.Keyword, - - # 'C': tokens.Keyword, # most likely this is an alias - 'CACHE': tokens.Keyword, - 'CALL': tokens.Keyword, - 'CALLED': tokens.Keyword, - 'CARDINALITY': tokens.Keyword, - 'CASCADE': tokens.Keyword, - 'CASCADED': tokens.Keyword, - 'CAST': tokens.Keyword, - 'CATALOG': tokens.Keyword, - 'CATALOG_NAME': tokens.Keyword, - 'CHAIN': tokens.Keyword, - 'CHARACTERISTICS': tokens.Keyword, - 'CHARACTER_LENGTH': tokens.Keyword, - 'CHARACTER_SET_CATALOG': tokens.Keyword, - 'CHARACTER_SET_NAME': tokens.Keyword, - 'CHARACTER_SET_SCHEMA': tokens.Keyword, - 'CHAR_LENGTH': tokens.Keyword, - 'CHARSET': tokens.Keyword, - 'CHECK': tokens.Keyword, - 'CHECKED': tokens.Keyword, - 'CHECKPOINT': tokens.Keyword, - 'CLASS': tokens.Keyword, - 'CLASS_ORIGIN': tokens.Keyword, - 'CLOB': tokens.Keyword, - 'CLOSE': tokens.Keyword, - 'CLUSTER': tokens.Keyword, - 'COALESCE': tokens.Keyword, - 'COBOL': tokens.Keyword, - 'COLLATE': tokens.Keyword, - 'COLLATION': tokens.Keyword, - 'COLLATION_CATALOG': tokens.Keyword, - 'COLLATION_NAME': tokens.Keyword, - 'COLLATION_SCHEMA': tokens.Keyword, - 'COLLECT': tokens.Keyword, - 'COLUMN': tokens.Keyword, - 'COLUMN_NAME': tokens.Keyword, - 'COMPRESS': tokens.Keyword, - 'COMMAND_FUNCTION': 
tokens.Keyword, - 'COMMAND_FUNCTION_CODE': tokens.Keyword, - 'COMMENT': tokens.Keyword, - 'COMMIT': tokens.Keyword.DML, - 'COMMITTED': tokens.Keyword, - 'COMPLETION': tokens.Keyword, - 'CONCURRENTLY': tokens.Keyword, - 'CONDITION_NUMBER': tokens.Keyword, - 'CONNECT': tokens.Keyword, - 'CONNECTION': tokens.Keyword, - 'CONNECTION_NAME': tokens.Keyword, - 'CONSTRAINT': tokens.Keyword, - 'CONSTRAINTS': tokens.Keyword, - 'CONSTRAINT_CATALOG': tokens.Keyword, - 'CONSTRAINT_NAME': tokens.Keyword, - 'CONSTRAINT_SCHEMA': tokens.Keyword, - 'CONSTRUCTOR': tokens.Keyword, - 'CONTAINS': tokens.Keyword, - 'CONTINUE': tokens.Keyword, - 'CONVERSION': tokens.Keyword, - 'CONVERT': tokens.Keyword, - 'COPY': tokens.Keyword, - 'CORRESPONDING': tokens.Keyword, - 'COUNT': tokens.Keyword, - 'CREATEDB': tokens.Keyword, - 'CREATEUSER': tokens.Keyword, - 'CROSS': tokens.Keyword, - 'CUBE': tokens.Keyword, - 'CURRENT': tokens.Keyword, - 'CURRENT_DATE': tokens.Keyword, - 'CURRENT_PATH': tokens.Keyword, - 'CURRENT_ROLE': tokens.Keyword, - 'CURRENT_TIME': tokens.Keyword, - 'CURRENT_TIMESTAMP': tokens.Keyword, - 'CURRENT_USER': tokens.Keyword, - 'CURSOR': tokens.Keyword, - 'CURSOR_NAME': tokens.Keyword, - 'CYCLE': tokens.Keyword, - - 'DATA': tokens.Keyword, - 'DATABASE': tokens.Keyword, - 'DATETIME_INTERVAL_CODE': tokens.Keyword, - 'DATETIME_INTERVAL_PRECISION': tokens.Keyword, - 'DAY': tokens.Keyword, - 'DEALLOCATE': tokens.Keyword, - 'DECLARE': tokens.Keyword, - 'DEFAULT': tokens.Keyword, - 'DEFAULTS': tokens.Keyword, - 'DEFERRABLE': tokens.Keyword, - 'DEFERRED': tokens.Keyword, - 'DEFINED': tokens.Keyword, - 'DEFINER': tokens.Keyword, - 'DELIMITER': tokens.Keyword, - 'DELIMITERS': tokens.Keyword, - 'DEREF': tokens.Keyword, - 'DESC': tokens.Keyword.Order, - 'DESCRIBE': tokens.Keyword, - 'DESCRIPTOR': tokens.Keyword, - 'DESTROY': tokens.Keyword, - 'DESTRUCTOR': tokens.Keyword, - 'DETERMINISTIC': tokens.Keyword, - 'DIAGNOSTICS': tokens.Keyword, - 'DICTIONARY': tokens.Keyword, - 'DISABLE': 
tokens.Keyword, - 'DISCONNECT': tokens.Keyword, - 'DISPATCH': tokens.Keyword, - 'DO': tokens.Keyword, - 'DOMAIN': tokens.Keyword, - 'DYNAMIC': tokens.Keyword, - 'DYNAMIC_FUNCTION': tokens.Keyword, - 'DYNAMIC_FUNCTION_CODE': tokens.Keyword, - - 'EACH': tokens.Keyword, - 'ENABLE': tokens.Keyword, - 'ENCODING': tokens.Keyword, - 'ENCRYPTED': tokens.Keyword, - 'END-EXEC': tokens.Keyword, - 'ENGINE': tokens.Keyword, - 'EQUALS': tokens.Keyword, - 'ESCAPE': tokens.Keyword, - 'EVERY': tokens.Keyword, - 'EXCEPT': tokens.Keyword, - 'EXCEPTION': tokens.Keyword, - 'EXCLUDING': tokens.Keyword, - 'EXCLUSIVE': tokens.Keyword, - 'EXEC': tokens.Keyword, - 'EXECUTE': tokens.Keyword, - 'EXISTING': tokens.Keyword, - 'EXISTS': tokens.Keyword, - 'EXPLAIN': tokens.Keyword, - 'EXTERNAL': tokens.Keyword, - 'EXTRACT': tokens.Keyword, - - 'FALSE': tokens.Keyword, - 'FETCH': tokens.Keyword, - 'FILE': tokens.Keyword, - 'FINAL': tokens.Keyword, - 'FIRST': tokens.Keyword, - 'FORCE': tokens.Keyword, - 'FOREACH': tokens.Keyword, - 'FOREIGN': tokens.Keyword, - 'FORTRAN': tokens.Keyword, - 'FORWARD': tokens.Keyword, - 'FOUND': tokens.Keyword, - 'FREE': tokens.Keyword, - 'FREEZE': tokens.Keyword, - 'FULL': tokens.Keyword, - 'FUNCTION': tokens.Keyword, - - # 'G': tokens.Keyword, - 'GENERAL': tokens.Keyword, - 'GENERATED': tokens.Keyword, - 'GET': tokens.Keyword, - 'GLOBAL': tokens.Keyword, - 'GO': tokens.Keyword, - 'GOTO': tokens.Keyword, - 'GRANT': tokens.Keyword, - 'GRANTED': tokens.Keyword, - 'GROUPING': tokens.Keyword, - - 'HANDLER': tokens.Keyword, - 'HAVING': tokens.Keyword, - 'HIERARCHY': tokens.Keyword, - 'HOLD': tokens.Keyword, - 'HOUR': tokens.Keyword, - 'HOST': tokens.Keyword, - - 'IDENTIFIED': tokens.Keyword, - 'IDENTITY': tokens.Keyword, - 'IGNORE': tokens.Keyword, - 'ILIKE': tokens.Keyword, - 'IMMEDIATE': tokens.Keyword, - 'IMMUTABLE': tokens.Keyword, - - 'IMPLEMENTATION': tokens.Keyword, - 'IMPLICIT': tokens.Keyword, - 'INCLUDING': tokens.Keyword, - 'INCREMENT': tokens.Keyword, - 
'INDEX': tokens.Keyword, - - 'INDITCATOR': tokens.Keyword, - 'INFIX': tokens.Keyword, - 'INHERITS': tokens.Keyword, - 'INITIAL': tokens.Keyword, - 'INITIALIZE': tokens.Keyword, - 'INITIALLY': tokens.Keyword, - 'INOUT': tokens.Keyword, - 'INPUT': tokens.Keyword, - 'INSENSITIVE': tokens.Keyword, - 'INSTANTIABLE': tokens.Keyword, - 'INSTEAD': tokens.Keyword, - 'INTERSECT': tokens.Keyword, - 'INTO': tokens.Keyword, - 'INVOKER': tokens.Keyword, - 'IS': tokens.Keyword, - 'ISNULL': tokens.Keyword, - 'ISOLATION': tokens.Keyword, - 'ITERATE': tokens.Keyword, - - # 'K': tokens.Keyword, - 'KEY': tokens.Keyword, - 'KEY_MEMBER': tokens.Keyword, - 'KEY_TYPE': tokens.Keyword, - - 'LANCOMPILER': tokens.Keyword, - 'LANGUAGE': tokens.Keyword, - 'LARGE': tokens.Keyword, - 'LAST': tokens.Keyword, - 'LATERAL': tokens.Keyword, - 'LEADING': tokens.Keyword, - 'LENGTH': tokens.Keyword, - 'LESS': tokens.Keyword, - 'LEVEL': tokens.Keyword, - 'LIMIT': tokens.Keyword, - 'LISTEN': tokens.Keyword, - 'LOAD': tokens.Keyword, - 'LOCAL': tokens.Keyword, - 'LOCALTIME': tokens.Keyword, - 'LOCALTIMESTAMP': tokens.Keyword, - 'LOCATION': tokens.Keyword, - 'LOCATOR': tokens.Keyword, - 'LOCK': tokens.Keyword, - 'LOWER': tokens.Keyword, - - # 'M': tokens.Keyword, - 'MAP': tokens.Keyword, - 'MATCH': tokens.Keyword, - 'MAXEXTENTS': tokens.Keyword, - 'MAXVALUE': tokens.Keyword, - 'MESSAGE_LENGTH': tokens.Keyword, - 'MESSAGE_OCTET_LENGTH': tokens.Keyword, - 'MESSAGE_TEXT': tokens.Keyword, - 'METHOD': tokens.Keyword, - 'MINUTE': tokens.Keyword, - 'MINUS': tokens.Keyword, - 'MINVALUE': tokens.Keyword, - 'MOD': tokens.Keyword, - 'MODE': tokens.Keyword, - 'MODIFIES': tokens.Keyword, - 'MODIFY': tokens.Keyword, - 'MONTH': tokens.Keyword, - 'MORE': tokens.Keyword, - 'MOVE': tokens.Keyword, - 'MUMPS': tokens.Keyword, - - 'NAMES': tokens.Keyword, - 'NATIONAL': tokens.Keyword, - 'NATURAL': tokens.Keyword, - 'NCHAR': tokens.Keyword, - 'NCLOB': tokens.Keyword, - 'NEW': tokens.Keyword, - 'NEXT': tokens.Keyword, - 'NO': 
tokens.Keyword, - 'NOAUDIT': tokens.Keyword, - 'NOCOMPRESS': tokens.Keyword, - 'NOCREATEDB': tokens.Keyword, - 'NOCREATEUSER': tokens.Keyword, - 'NONE': tokens.Keyword, - 'NOT': tokens.Keyword, - 'NOTFOUND': tokens.Keyword, - 'NOTHING': tokens.Keyword, - 'NOTIFY': tokens.Keyword, - 'NOTNULL': tokens.Keyword, - 'NOWAIT': tokens.Keyword, - 'NULL': tokens.Keyword, - 'NULLABLE': tokens.Keyword, - 'NULLIF': tokens.Keyword, - - 'OBJECT': tokens.Keyword, - 'OCTET_LENGTH': tokens.Keyword, - 'OF': tokens.Keyword, - 'OFF': tokens.Keyword, - 'OFFLINE': tokens.Keyword, - 'OFFSET': tokens.Keyword, - 'OIDS': tokens.Keyword, - 'OLD': tokens.Keyword, - 'ONLINE': tokens.Keyword, - 'ONLY': tokens.Keyword, - 'OPEN': tokens.Keyword, - 'OPERATION': tokens.Keyword, - 'OPERATOR': tokens.Keyword, - 'OPTION': tokens.Keyword, - 'OPTIONS': tokens.Keyword, - 'ORDINALITY': tokens.Keyword, - 'OUT': tokens.Keyword, - 'OUTPUT': tokens.Keyword, - 'OVERLAPS': tokens.Keyword, - 'OVERLAY': tokens.Keyword, - 'OVERRIDING': tokens.Keyword, - 'OWNER': tokens.Keyword, - - 'QUARTER': tokens.Keyword, - - 'PAD': tokens.Keyword, - 'PARAMETER': tokens.Keyword, - 'PARAMETERS': tokens.Keyword, - 'PARAMETER_MODE': tokens.Keyword, - 'PARAMETER_NAME': tokens.Keyword, - 'PARAMETER_ORDINAL_POSITION': tokens.Keyword, - 'PARAMETER_SPECIFIC_CATALOG': tokens.Keyword, - 'PARAMETER_SPECIFIC_NAME': tokens.Keyword, - 'PARAMETER_SPECIFIC_SCHEMA': tokens.Keyword, - 'PARTIAL': tokens.Keyword, - 'PASCAL': tokens.Keyword, - 'PCTFREE': tokens.Keyword, - 'PENDANT': tokens.Keyword, - 'PLACING': tokens.Keyword, - 'PLI': tokens.Keyword, - 'POSITION': tokens.Keyword, - 'POSTFIX': tokens.Keyword, - 'PRECISION': tokens.Keyword, - 'PREFIX': tokens.Keyword, - 'PREORDER': tokens.Keyword, - 'PREPARE': tokens.Keyword, - 'PRESERVE': tokens.Keyword, - 'PRIMARY': tokens.Keyword, - 'PRIOR': tokens.Keyword, - 'PRIVILEGES': tokens.Keyword, - 'PROCEDURAL': tokens.Keyword, - 'PROCEDURE': tokens.Keyword, - 'PUBLIC': tokens.Keyword, - - 'RAISE': 
tokens.Keyword, - 'RAW': tokens.Keyword, - 'READ': tokens.Keyword, - 'READS': tokens.Keyword, - 'RECHECK': tokens.Keyword, - 'RECURSIVE': tokens.Keyword, - 'REF': tokens.Keyword, - 'REFERENCES': tokens.Keyword, - 'REFERENCING': tokens.Keyword, - 'REINDEX': tokens.Keyword, - 'RELATIVE': tokens.Keyword, - 'RENAME': tokens.Keyword, - 'REPEATABLE': tokens.Keyword, - 'RESET': tokens.Keyword, - 'RESOURCE': tokens.Keyword, - 'RESTART': tokens.Keyword, - 'RESTRICT': tokens.Keyword, - 'RESULT': tokens.Keyword, - 'RETURN': tokens.Keyword, - 'RETURNED_LENGTH': tokens.Keyword, - 'RETURNED_OCTET_LENGTH': tokens.Keyword, - 'RETURNED_SQLSTATE': tokens.Keyword, - 'RETURNING': tokens.Keyword, - 'RETURNS': tokens.Keyword, - 'REVOKE': tokens.Keyword, - 'RIGHT': tokens.Keyword, - 'ROLE': tokens.Keyword, - 'ROLLBACK': tokens.Keyword.DML, - 'ROLLUP': tokens.Keyword, - 'ROUTINE': tokens.Keyword, - 'ROUTINE_CATALOG': tokens.Keyword, - 'ROUTINE_NAME': tokens.Keyword, - 'ROUTINE_SCHEMA': tokens.Keyword, - 'ROW': tokens.Keyword, - 'ROWS': tokens.Keyword, - 'ROW_COUNT': tokens.Keyword, - 'RULE': tokens.Keyword, - - 'SAVE_POINT': tokens.Keyword, - 'SCALE': tokens.Keyword, - 'SCHEMA': tokens.Keyword, - 'SCHEMA_NAME': tokens.Keyword, - 'SCOPE': tokens.Keyword, - 'SCROLL': tokens.Keyword, - 'SEARCH': tokens.Keyword, - 'SECOND': tokens.Keyword, - 'SECURITY': tokens.Keyword, - 'SELF': tokens.Keyword, - 'SENSITIVE': tokens.Keyword, - 'SEQUENCE': tokens.Keyword, - 'SERIALIZABLE': tokens.Keyword, - 'SERVER_NAME': tokens.Keyword, - 'SESSION': tokens.Keyword, - 'SESSION_USER': tokens.Keyword, - 'SETOF': tokens.Keyword, - 'SETS': tokens.Keyword, - 'SHARE': tokens.Keyword, - 'SHOW': tokens.Keyword, - 'SIMILAR': tokens.Keyword, - 'SIMPLE': tokens.Keyword, - 'SIZE': tokens.Keyword, - 'SOME': tokens.Keyword, - 'SOURCE': tokens.Keyword, - 'SPACE': tokens.Keyword, - 'SPECIFIC': tokens.Keyword, - 'SPECIFICTYPE': tokens.Keyword, - 'SPECIFIC_NAME': tokens.Keyword, - 'SQL': tokens.Keyword, - 'SQLBUF': 
tokens.Keyword, - 'SQLCODE': tokens.Keyword, - 'SQLERROR': tokens.Keyword, - 'SQLEXCEPTION': tokens.Keyword, - 'SQLSTATE': tokens.Keyword, - 'SQLWARNING': tokens.Keyword, - 'STABLE': tokens.Keyword, - 'START': tokens.Keyword.DML, - # 'STATE': tokens.Keyword, - 'STATEMENT': tokens.Keyword, - 'STATIC': tokens.Keyword, - 'STATISTICS': tokens.Keyword, - 'STDIN': tokens.Keyword, - 'STDOUT': tokens.Keyword, - 'STORAGE': tokens.Keyword, - 'STRICT': tokens.Keyword, - 'STRUCTURE': tokens.Keyword, - 'STYPE': tokens.Keyword, - 'SUBCLASS_ORIGIN': tokens.Keyword, - 'SUBLIST': tokens.Keyword, - 'SUBSTRING': tokens.Keyword, - 'SUCCESSFUL': tokens.Keyword, - 'SUM': tokens.Keyword, - 'SYMMETRIC': tokens.Keyword, - 'SYNONYM': tokens.Keyword, - 'SYSID': tokens.Keyword, - 'SYSTEM': tokens.Keyword, - 'SYSTEM_USER': tokens.Keyword, - - 'TABLE': tokens.Keyword, - 'TABLE_NAME': tokens.Keyword, - 'TEMP': tokens.Keyword, - 'TEMPLATE': tokens.Keyword, - 'TEMPORARY': tokens.Keyword, - 'TERMINATE': tokens.Keyword, - 'THAN': tokens.Keyword, - 'TIMESTAMP': tokens.Keyword, - 'TIMEZONE_HOUR': tokens.Keyword, - 'TIMEZONE_MINUTE': tokens.Keyword, - 'TO': tokens.Keyword, - 'TOAST': tokens.Keyword, - 'TRAILING': tokens.Keyword, - 'TRANSATION': tokens.Keyword, - 'TRANSACTIONS_COMMITTED': tokens.Keyword, - 'TRANSACTIONS_ROLLED_BACK': tokens.Keyword, - 'TRANSATION_ACTIVE': tokens.Keyword, - 'TRANSFORM': tokens.Keyword, - 'TRANSFORMS': tokens.Keyword, - 'TRANSLATE': tokens.Keyword, - 'TRANSLATION': tokens.Keyword, - 'TREAT': tokens.Keyword, - 'TRIGGER': tokens.Keyword, - 'TRIGGER_CATALOG': tokens.Keyword, - 'TRIGGER_NAME': tokens.Keyword, - 'TRIGGER_SCHEMA': tokens.Keyword, - 'TRIM': tokens.Keyword, - 'TRUE': tokens.Keyword, - 'TRUNCATE': tokens.Keyword, - 'TRUSTED': tokens.Keyword, - 'TYPE': tokens.Keyword, - - 'UID': tokens.Keyword, - 'UNCOMMITTED': tokens.Keyword, - 'UNDER': tokens.Keyword, - 'UNENCRYPTED': tokens.Keyword, - 'UNION': tokens.Keyword, - 'UNIQUE': tokens.Keyword, - 'UNKNOWN': 
tokens.Keyword, - 'UNLISTEN': tokens.Keyword, - 'UNNAMED': tokens.Keyword, - 'UNNEST': tokens.Keyword, - 'UNTIL': tokens.Keyword, - 'UPPER': tokens.Keyword, - 'USAGE': tokens.Keyword, - 'USE': tokens.Keyword, - 'USER': tokens.Keyword, - 'USER_DEFINED_TYPE_CATALOG': tokens.Keyword, - 'USER_DEFINED_TYPE_NAME': tokens.Keyword, - 'USER_DEFINED_TYPE_SCHEMA': tokens.Keyword, - 'USING': tokens.Keyword, - - 'VACUUM': tokens.Keyword, - 'VALID': tokens.Keyword, - 'VALIDATE': tokens.Keyword, - 'VALIDATOR': tokens.Keyword, - 'VALUES': tokens.Keyword, - 'VARIABLE': tokens.Keyword, - 'VERBOSE': tokens.Keyword, - 'VERSION': tokens.Keyword, - 'VIEW': tokens.Keyword, - 'VOLATILE': tokens.Keyword, - - 'WEEK': tokens.Keyword, - 'WHENEVER': tokens.Keyword, - 'WITH': tokens.Keyword.CTE, - 'WITHOUT': tokens.Keyword, - 'WORK': tokens.Keyword, - 'WRITE': tokens.Keyword, - - 'YEAR': tokens.Keyword, - - 'ZONE': tokens.Keyword, - - # Name.Builtin - 'ARRAY': tokens.Name.Builtin, - 'BIGINT': tokens.Name.Builtin, - 'BINARY': tokens.Name.Builtin, - 'BIT': tokens.Name.Builtin, - 'BLOB': tokens.Name.Builtin, - 'BOOLEAN': tokens.Name.Builtin, - 'CHAR': tokens.Name.Builtin, - 'CHARACTER': tokens.Name.Builtin, - 'DATE': tokens.Name.Builtin, - 'DEC': tokens.Name.Builtin, - 'DECIMAL': tokens.Name.Builtin, - 'FILE_TYPE': tokens.Name.Builtin, - 'FLOAT': tokens.Name.Builtin, - 'INT': tokens.Name.Builtin, - 'INT8': tokens.Name.Builtin, - 'INTEGER': tokens.Name.Builtin, - 'INTERVAL': tokens.Name.Builtin, - 'LONG': tokens.Name.Builtin, - 'NATURALN': tokens.Name.Builtin, - 'NVARCHAR': tokens.Name.Builtin, - 'NUMBER': tokens.Name.Builtin, - 'NUMERIC': tokens.Name.Builtin, - 'PLS_INTEGER': tokens.Name.Builtin, - 'POSITIVE': tokens.Name.Builtin, - 'POSITIVEN': tokens.Name.Builtin, - 'REAL': tokens.Name.Builtin, - 'ROWID': tokens.Name.Builtin, - 'ROWLABEL': tokens.Name.Builtin, - 'ROWNUM': tokens.Name.Builtin, - 'SERIAL': tokens.Name.Builtin, - 'SERIAL8': tokens.Name.Builtin, - 'SIGNED': tokens.Name.Builtin, - 
'SIGNTYPE': tokens.Name.Builtin, - 'SIMPLE_DOUBLE': tokens.Name.Builtin, - 'SIMPLE_FLOAT': tokens.Name.Builtin, - 'SIMPLE_INTEGER': tokens.Name.Builtin, - 'SMALLINT': tokens.Name.Builtin, - 'SYS_REFCURSOR': tokens.Name.Builtin, - 'SYSDATE': tokens.Name, - 'TEXT': tokens.Name.Builtin, - 'TINYINT': tokens.Name.Builtin, - 'UNSIGNED': tokens.Name.Builtin, - 'UROWID': tokens.Name.Builtin, - 'UTL_FILE': tokens.Name.Builtin, - 'VARCHAR': tokens.Name.Builtin, - 'VARCHAR2': tokens.Name.Builtin, - 'VARYING': tokens.Name.Builtin, -} - -KEYWORDS_COMMON = { - 'SELECT': tokens.Keyword.DML, - 'INSERT': tokens.Keyword.DML, - 'DELETE': tokens.Keyword.DML, - 'UPDATE': tokens.Keyword.DML, - 'UPSERT': tokens.Keyword.DML, - 'REPLACE': tokens.Keyword.DML, - 'MERGE': tokens.Keyword.DML, - 'DROP': tokens.Keyword.DDL, - 'CREATE': tokens.Keyword.DDL, - 'ALTER': tokens.Keyword.DDL, - - 'WHERE': tokens.Keyword, - 'FROM': tokens.Keyword, - 'INNER': tokens.Keyword, - 'JOIN': tokens.Keyword, - 'STRAIGHT_JOIN': tokens.Keyword, - 'AND': tokens.Keyword, - 'OR': tokens.Keyword, - 'LIKE': tokens.Keyword, - 'ON': tokens.Keyword, - 'IN': tokens.Keyword, - 'SET': tokens.Keyword, - - 'BY': tokens.Keyword, - 'GROUP': tokens.Keyword, - 'ORDER': tokens.Keyword, - 'LEFT': tokens.Keyword, - 'OUTER': tokens.Keyword, - 'FULL': tokens.Keyword, - - 'IF': tokens.Keyword, - 'END': tokens.Keyword, - 'THEN': tokens.Keyword, - 'LOOP': tokens.Keyword, - 'AS': tokens.Keyword, - 'ELSE': tokens.Keyword, - 'FOR': tokens.Keyword, - 'WHILE': tokens.Keyword, - - 'CASE': tokens.Keyword, - 'WHEN': tokens.Keyword, - 'MIN': tokens.Keyword, - 'MAX': tokens.Keyword, - 'DISTINCT': tokens.Keyword, -} - -KEYWORDS_ORACLE = { - 'ARCHIVE': tokens.Keyword, - 'ARCHIVELOG': tokens.Keyword, - - 'BACKUP': tokens.Keyword, - 'BECOME': tokens.Keyword, - 'BLOCK': tokens.Keyword, - 'BODY': tokens.Keyword, - - 'CANCEL': tokens.Keyword, - 'CHANGE': tokens.Keyword, - 'COMPILE': tokens.Keyword, - 'CONTENTS': tokens.Keyword, - 'CONTROLFILE': 
tokens.Keyword, - - 'DATAFILE': tokens.Keyword, - 'DBA': tokens.Keyword, - 'DISMOUNT': tokens.Keyword, - 'DOUBLE': tokens.Keyword, - 'DUMP': tokens.Keyword, - - 'EVENTS': tokens.Keyword, - 'EXCEPTIONS': tokens.Keyword, - 'EXPLAIN': tokens.Keyword, - 'EXTENT': tokens.Keyword, - 'EXTERNALLY': tokens.Keyword, - - 'FLUSH': tokens.Keyword, - 'FREELIST': tokens.Keyword, - 'FREELISTS': tokens.Keyword, - - # groups seems too common as table name - # 'GROUPS': tokens.Keyword, - - 'INDICATOR': tokens.Keyword, - 'INITRANS': tokens.Keyword, - 'INSTANCE': tokens.Keyword, - - 'LAYER': tokens.Keyword, - 'LINK': tokens.Keyword, - 'LISTS': tokens.Keyword, - 'LOGFILE': tokens.Keyword, - - 'MANAGE': tokens.Keyword, - 'MANUAL': tokens.Keyword, - 'MAXDATAFILES': tokens.Keyword, - 'MAXINSTANCES': tokens.Keyword, - 'MAXLOGFILES': tokens.Keyword, - 'MAXLOGHISTORY': tokens.Keyword, - 'MAXLOGMEMBERS': tokens.Keyword, - 'MAXTRANS': tokens.Keyword, - 'MINEXTENTS': tokens.Keyword, - 'MODULE': tokens.Keyword, - 'MOUNT': tokens.Keyword, - - 'NOARCHIVELOG': tokens.Keyword, - 'NOCACHE': tokens.Keyword, - 'NOCYCLE': tokens.Keyword, - 'NOMAXVALUE': tokens.Keyword, - 'NOMINVALUE': tokens.Keyword, - 'NOORDER': tokens.Keyword, - 'NORESETLOGS': tokens.Keyword, - 'NORMAL': tokens.Keyword, - 'NOSORT': tokens.Keyword, - - 'OPTIMAL': tokens.Keyword, - 'OWN': tokens.Keyword, - - 'PACKAGE': tokens.Keyword, - 'PARALLEL': tokens.Keyword, - 'PCTINCREASE': tokens.Keyword, - 'PCTUSED': tokens.Keyword, - 'PLAN': tokens.Keyword, - 'PRIVATE': tokens.Keyword, - 'PROFILE': tokens.Keyword, - - 'QUOTA': tokens.Keyword, - - 'RECOVER': tokens.Keyword, - 'RESETLOGS': tokens.Keyword, - 'RESTRICTED': tokens.Keyword, - 'REUSE': tokens.Keyword, - 'ROLES': tokens.Keyword, - - 'SAVEPOINT': tokens.Keyword, - 'SCN': tokens.Keyword, - 'SECTION': tokens.Keyword, - 'SEGMENT': tokens.Keyword, - 'SHARED': tokens.Keyword, - 'SNAPSHOT': tokens.Keyword, - 'SORT': tokens.Keyword, - 'STATEMENT_ID': tokens.Keyword, - 'STOP': tokens.Keyword, - 
'SWITCH': tokens.Keyword, - - 'TABLES': tokens.Keyword, - 'TABLESPACE': tokens.Keyword, - 'THREAD': tokens.Keyword, - 'TIME': tokens.Keyword, - 'TRACING': tokens.Keyword, - 'TRANSACTION': tokens.Keyword, - 'TRIGGERS': tokens.Keyword, - - 'UNLIMITED': tokens.Keyword, - 'UNLOCK': tokens.Keyword, -} - -# PostgreSQL Syntax -KEYWORDS_PLPGSQL = { - 'PARTITION': tokens.Keyword, - 'OVER': tokens.Keyword, - 'PERFORM': tokens.Keyword, - 'NOTICE': tokens.Keyword, - 'PLPGSQL': tokens.Keyword, - 'INHERIT': tokens.Keyword, - 'INDEXES': tokens.Keyword, - - 'BYTEA': tokens.Keyword, - 'BIGSERIAL': tokens.Keyword, - 'BIT VARYING': tokens.Keyword, - 'BOX': tokens.Keyword, - 'CHARACTER': tokens.Keyword, - 'CHARACTER VARYING': tokens.Keyword, - 'CIDR': tokens.Keyword, - 'CIRCLE': tokens.Keyword, - 'DOUBLE PRECISION': tokens.Keyword, - 'INET': tokens.Keyword, - 'JSON': tokens.Keyword, - 'JSONB': tokens.Keyword, - 'LINE': tokens.Keyword, - 'LSEG': tokens.Keyword, - 'MACADDR': tokens.Keyword, - 'MONEY': tokens.Keyword, - 'PATH': tokens.Keyword, - 'PG_LSN': tokens.Keyword, - 'POINT': tokens.Keyword, - 'POLYGON': tokens.Keyword, - 'SMALLSERIAL': tokens.Keyword, - 'TSQUERY': tokens.Keyword, - 'TSVECTOR': tokens.Keyword, - 'TXID_SNAPSHOT': tokens.Keyword, - 'UUID': tokens.Keyword, - 'XML': tokens.Keyword, - - 'FOR': tokens.Keyword, - 'IN': tokens.Keyword, - 'LOOP': tokens.Keyword, -} - -# Hive Syntax -KEYWORDS_HQL = { - 'EXPLODE': tokens.Keyword, - 'DIRECTORY': tokens.Keyword, - 'DISTRIBUTE': tokens.Keyword, - 'INCLUDE': tokens.Keyword, - 'LOCATE': tokens.Keyword, - 'OVERWRITE': tokens.Keyword, - 'POSEXPLODE': tokens.Keyword, - - 'ARRAY_CONTAINS': tokens.Keyword, - 'CMP': tokens.Keyword, - 'COLLECT_LIST': tokens.Keyword, - 'CONCAT': tokens.Keyword, - 'CONDITION': tokens.Keyword, - 'DATE_ADD': tokens.Keyword, - 'DATE_SUB': tokens.Keyword, - 'DECODE': tokens.Keyword, - 'DBMS_OUTPUT': tokens.Keyword, - 'ELEMENTS': tokens.Keyword, - 'EXCHANGE': tokens.Keyword, - 'EXTENDED': tokens.Keyword, - 
'FLOOR': tokens.Keyword, - 'FOLLOWING': tokens.Keyword, - 'FROM_UNIXTIME': tokens.Keyword, - 'FTP': tokens.Keyword, - 'HOUR': tokens.Keyword, - 'INLINE': tokens.Keyword, - 'INSTR': tokens.Keyword, - 'LEN': tokens.Keyword, - 'MAXELEMENT': tokens.Keyword, - 'MAXINDEX': tokens.Keyword, - 'MAX_PART_DATE': tokens.Keyword, - 'MAX_PART_INT': tokens.Keyword, - 'MAX_PART_STRING': tokens.Keyword, - 'MINELEMENT': tokens.Keyword, - 'MININDEX': tokens.Keyword, - 'MIN_PART_DATE': tokens.Keyword, - 'MIN_PART_INT': tokens.Keyword, - 'MIN_PART_STRING': tokens.Keyword, - 'NOW': tokens.Keyword, - 'NVL': tokens.Keyword, - 'NVL2': tokens.Keyword, - 'PARSE_URL_TUPLE': tokens.Keyword, - 'PART_LOC': tokens.Keyword, - 'PART_COUNT': tokens.Keyword, - 'PART_COUNT_BY': tokens.Keyword, - 'PRINT': tokens.Keyword, - 'PUT_LINE': tokens.Keyword, - 'RANGE': tokens.Keyword, - 'REDUCE': tokens.Keyword, - 'REGEXP_REPLACE': tokens.Keyword, - 'RESIGNAL': tokens.Keyword, - 'RTRIM': tokens.Keyword, - 'SIGN': tokens.Keyword, - 'SIGNAL': tokens.Keyword, - 'SIN': tokens.Keyword, - 'SPLIT': tokens.Keyword, - 'SQRT': tokens.Keyword, - 'STACK': tokens.Keyword, - 'STR': tokens.Keyword, - 'SUBSTR': tokens.Keyword, - 'SUMMARY': tokens.Keyword, - 'TBLPROPERTIES': tokens.Keyword, - 'TIMESTAMP_ISO': tokens.Keyword, - 'TO_CHAR': tokens.Keyword, - 'TO_DATE': tokens.Keyword, - 'TO_TIMESTAMP': tokens.Keyword, - 'TRUNC': tokens.Keyword, - 'UNBOUNDED': tokens.Keyword, - 'UNIQUEJOIN': tokens.Keyword, - 'UNIX_TIMESTAMP': tokens.Keyword, - 'UTC_TIMESTAMP': tokens.Keyword, - 'VIEWS': tokens.Keyword, - - 'EXIT': tokens.Keyword, - 'BREAK': tokens.Keyword, - 'LEAVE': tokens.Keyword, -} diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/lexer.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/lexer.py deleted file mode 100644 index fd007a4c8..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/lexer.py +++ /dev/null @@ -1,82 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This 
module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -"""SQL Lexer""" - -# This code is based on the SqlLexer in pygments. -# http://pygments.org/ -# It's separated from the rest of pygments to increase performance -# and to allow some customizations. - -from sqlparse import tokens -from sqlparse.keywords import SQL_REGEX -from sqlparse.compat import text_type, file_types -from sqlparse.utils import consume - - -class Lexer(object): - """Lexer - Empty class. Leaving for backwards-compatibility - """ - - @staticmethod - def get_tokens(text, encoding=None): - """ - Return an iterable of (tokentype, value) pairs generated from - `text`. If `unfiltered` is set to `True`, the filtering mechanism - is bypassed even if filters are defined. - - Also preprocess the text, i.e. expand tabs and strip it if - wanted and applies registered filters. - - Split ``text`` into (tokentype, text) pairs. - - ``stack`` is the initial stack (default: ``['root']``) - """ - if isinstance(text, file_types): - text = text.read() - - if isinstance(text, text_type): - pass - elif isinstance(text, bytes): - if encoding: - text = text.decode(encoding) - else: - try: - text = text.decode('utf-8') - except UnicodeDecodeError: - text = text.decode('unicode-escape') - else: - raise TypeError(u"Expected text or file-like object, got {!r}". - format(type(text))) - - iterable = enumerate(text) - for pos, char in iterable: - for rexmatch, action in SQL_REGEX: - m = rexmatch(text, pos) - - if not m: - continue - elif isinstance(action, tokens._TokenType): - yield action, m.group() - elif callable(action): - yield action(m.group()) - - consume(iterable, m.end() - pos - 1) - break - else: - yield tokens.Error, char - - -def tokenize(sql, encoding=None): - """Tokenize sql. - - Tokenize *sql* using the :class:`Lexer` and return a 2-tuple stream - of ``(token type, value)`` items. 
- """ - return Lexer().get_tokens(sql, encoding) diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/sql.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/sql.py deleted file mode 100644 index a942bcd6c..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/sql.py +++ /dev/null @@ -1,650 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -"""This module contains classes representing syntactical elements of SQL.""" -from __future__ import print_function - -import re - -from sqlparse import tokens as T -from sqlparse.compat import string_types, text_type, unicode_compatible -from sqlparse.utils import imt, remove_quotes - - -class NameAliasMixin: - """Implements get_real_name and get_alias.""" - - def get_real_name(self): - """Returns the real name (object name) of this identifier.""" - # a.b - dot_idx, _ = self.token_next_by(m=(T.Punctuation, '.')) - return self._get_first_name(dot_idx, real_name=True) - - def get_alias(self): - """Returns the alias for this identifier or ``None``.""" - - # "name AS alias" - kw_idx, kw = self.token_next_by(m=(T.Keyword, 'AS')) - if kw is not None: - return self._get_first_name(kw_idx + 1, keywords=True) - - # "name alias" or "complicated column expression alias" - _, ws = self.token_next_by(t=T.Whitespace) - if len(self.tokens) > 2 and ws is not None: - return self._get_first_name(reverse=True) - - -@unicode_compatible -class Token(object): - """Base class for all other classes in this module. - - It represents a single token and has two instance attributes: - ``value`` is the unchanged value of the token and ``ttype`` is - the type of the token. 
- """ - - __slots__ = ('value', 'ttype', 'parent', 'normalized', 'is_keyword', - 'is_group', 'is_whitespace') - - def __init__(self, ttype, value): - value = text_type(value) - self.value = value - self.ttype = ttype - self.parent = None - self.is_group = False - self.is_keyword = ttype in T.Keyword - self.is_whitespace = self.ttype in T.Whitespace - self.normalized = value.upper() if self.is_keyword else value - - def __str__(self): - return self.value - - # Pending tokenlist __len__ bug fix - # def __len__(self): - # return len(self.value) - - def __repr__(self): - cls = self._get_repr_name() - value = self._get_repr_value() - - q = u'"' if value.startswith("'") and value.endswith("'") else u"'" - return u"<{cls} {q}{value}{q} at 0x{id:2X}>".format( - id=id(self), **locals()) - - def _get_repr_name(self): - return str(self.ttype).split('.')[-1] - - def _get_repr_value(self): - raw = text_type(self) - if len(raw) > 7: - raw = raw[:6] + '...' - return re.sub(r'\s+', ' ', raw) - - def flatten(self): - """Resolve subgroups.""" - yield self - - def match(self, ttype, values, regex=False): - """Checks whether the token matches the given arguments. - - *ttype* is a token type. If this token doesn't match the given token - type. - *values* is a list of possible values for this token. The values - are OR'ed together so if only one of the values matches ``True`` - is returned. Except for keyword tokens the comparison is - case-sensitive. For convenience it's OK to pass in a single string. - If *regex* is ``True`` (default is ``False``) the given values are - treated as regular expressions. 
- """ - type_matched = self.ttype is ttype - if not type_matched or values is None: - return type_matched - - if isinstance(values, string_types): - values = (values,) - - if regex: - # TODO: Add test for regex with is_keyboard = false - flag = re.IGNORECASE if self.is_keyword else 0 - values = (re.compile(v, flag) for v in values) - - for pattern in values: - if pattern.search(self.normalized): - return True - return False - - if self.is_keyword: - values = (v.upper() for v in values) - - return self.normalized in values - - def within(self, group_cls): - """Returns ``True`` if this token is within *group_cls*. - - Use this method for example to check if an identifier is within - a function: ``t.within(sql.Function)``. - """ - parent = self.parent - while parent: - if isinstance(parent, group_cls): - return True - parent = parent.parent - return False - - def is_child_of(self, other): - """Returns ``True`` if this token is a direct child of *other*.""" - return self.parent == other - - def has_ancestor(self, other): - """Returns ``True`` if *other* is in this tokens ancestry.""" - parent = self.parent - while parent: - if parent == other: - return True - parent = parent.parent - return False - - -@unicode_compatible -class TokenList(Token): - """A group of tokens. - - It has an additional instance attribute ``tokens`` which holds a - list of child-tokens. 
- """ - - __slots__ = 'tokens' - - def __init__(self, tokens=None): - self.tokens = tokens or [] - [setattr(token, 'parent', self) for token in self.tokens] - super(TokenList, self).__init__(None, text_type(self)) - self.is_group = True - - def __str__(self): - return u''.join(token.value for token in self.flatten()) - - # weird bug - # def __len__(self): - # return len(self.tokens) - - def __iter__(self): - return iter(self.tokens) - - def __getitem__(self, item): - return self.tokens[item] - - def _get_repr_name(self): - return type(self).__name__ - - def _pprint_tree(self, max_depth=None, depth=0, f=None, _pre=''): - """Pretty-print the object tree.""" - token_count = len(self.tokens) - for idx, token in enumerate(self.tokens): - cls = token._get_repr_name() - value = token._get_repr_value() - - last = idx == (token_count - 1) - pre = u'`- ' if last else u'|- ' - - q = u'"' if value.startswith("'") and value.endswith("'") else u"'" - print(u"{_pre}{pre}{idx} {cls} {q}{value}{q}" - .format(**locals()), file=f) - - if token.is_group and (max_depth is None or depth < max_depth): - parent_pre = u' ' if last else u'| ' - token._pprint_tree(max_depth, depth + 1, f, _pre + parent_pre) - - def get_token_at_offset(self, offset): - """Returns the token that is on position offset.""" - idx = 0 - for token in self.flatten(): - end = idx + len(token.value) - if idx <= offset < end: - return token - idx = end - - def flatten(self): - """Generator yielding ungrouped tokens. - - This method is recursively called for all child tokens. 
- """ - for token in self.tokens: - if token.is_group: - for item in token.flatten(): - yield item - else: - yield token - - def get_sublists(self): - for token in self.tokens: - if token.is_group: - yield token - - @property - def _groupable_tokens(self): - return self.tokens - - def _token_matching(self, funcs, start=0, end=None, reverse=False): - """next token that match functions""" - if start is None: - return None - - if not isinstance(funcs, (list, tuple)): - funcs = (funcs,) - - if reverse: - assert end is None - for idx in range(start - 2, -1, -1): - token = self.tokens[idx] - for func in funcs: - if func(token): - return idx, token - else: - for idx, token in enumerate(self.tokens[start:end], start=start): - for func in funcs: - if func(token): - return idx, token - return None, None - - def token_first(self, skip_ws=True, skip_cm=False): - """Returns the first child token. - - If *skip_ws* is ``True`` (the default), whitespace - tokens are ignored. - - if *skip_cm* is ``True`` (default: ``False``), comments are - ignored too. - """ - # this on is inconsistent, using Comment instead of T.Comment... - def matcher(tk): - return not ((skip_ws and tk.is_whitespace) - or (skip_cm and imt(tk, t=T.Comment, i=Comment))) - return self._token_matching(matcher)[1] - - def token_next_by(self, i=None, m=None, t=None, idx=-1, end=None): - idx += 1 - return self._token_matching(lambda tk: imt(tk, i, m, t), idx, end) - - def token_not_matching(self, funcs, idx): - funcs = (funcs,) if not isinstance(funcs, (list, tuple)) else funcs - funcs = [lambda tk: not func(tk) for func in funcs] - return self._token_matching(funcs, idx) - - def token_matching(self, funcs, idx): - return self._token_matching(funcs, idx)[1] - - def token_prev(self, idx, skip_ws=True, skip_cm=False): - """Returns the previous token relative to *idx*. - - If *skip_ws* is ``True`` (the default) whitespace tokens are ignored. - If *skip_cm* is ``True`` comments are ignored. 
- ``None`` is returned if there's no previous token. - """ - return self.token_next(idx, skip_ws, skip_cm, _reverse=True) - - # TODO: May need to re-add default value to idx - def token_next(self, idx, skip_ws=True, skip_cm=False, _reverse=False): - """Returns the next token relative to *idx*. - - If *skip_ws* is ``True`` (the default) whitespace tokens are ignored. - If *skip_cm* is ``True`` comments are ignored. - ``None`` is returned if there's no next token. - """ - if idx is None: - return None, None - idx += 1 # alot of code usage current pre-compensates for this - - def matcher(tk): - return not ((skip_ws and tk.is_whitespace) - or (skip_cm and imt(tk, t=T.Comment, i=Comment))) - return self._token_matching(matcher, idx, reverse=_reverse) - - def token_index(self, token, start=0): - """Return list index of token.""" - start = start if isinstance(start, int) else self.token_index(start) - return start + self.tokens[start:].index(token) - - def group_tokens(self, grp_cls, start, end, include_end=True, - extend=False): - """Replace tokens by an instance of *grp_cls*.""" - start_idx = start - start = self.tokens[start_idx] - - end_idx = end + include_end - - # will be needed later for new group_clauses - # while skip_ws and tokens and tokens[-1].is_whitespace: - # tokens = tokens[:-1] - - if extend and isinstance(start, grp_cls): - subtokens = self.tokens[start_idx + 1:end_idx] - - grp = start - grp.tokens.extend(subtokens) - del self.tokens[start_idx + 1:end_idx] - grp.value = text_type(start) - else: - subtokens = self.tokens[start_idx:end_idx] - grp = grp_cls(subtokens) - self.tokens[start_idx:end_idx] = [grp] - grp.parent = self - - for token in subtokens: - token.parent = grp - - return grp - - def insert_before(self, where, token): - """Inserts *token* before *where*.""" - if not isinstance(where, int): - where = self.token_index(where) - token.parent = self - self.tokens.insert(where, token) - - def insert_after(self, where, token, skip_ws=True): - 
"""Inserts *token* after *where*.""" - if not isinstance(where, int): - where = self.token_index(where) - nidx, next_ = self.token_next(where, skip_ws=skip_ws) - token.parent = self - if next_ is None: - self.tokens.append(token) - else: - self.tokens.insert(nidx, token) - - def has_alias(self): - """Returns ``True`` if an alias is present.""" - return self.get_alias() is not None - - def get_alias(self): - """Returns the alias for this identifier or ``None``.""" - return None - - def get_name(self): - """Returns the name of this identifier. - - This is either it's alias or it's real name. The returned valued can - be considered as the name under which the object corresponding to - this identifier is known within the current statement. - """ - return self.get_alias() or self.get_real_name() - - def get_real_name(self): - """Returns the real name (object name) of this identifier.""" - return None - - def get_parent_name(self): - """Return name of the parent object if any. - - A parent object is identified by the first occurring dot. - """ - dot_idx, _ = self.token_next_by(m=(T.Punctuation, '.')) - _, prev_ = self.token_prev(dot_idx) - return remove_quotes(prev_.value) if prev_ is not None else None - - def _get_first_name(self, idx=None, reverse=False, keywords=False, - real_name=False): - """Returns the name of the first token with a name""" - - tokens = self.tokens[idx:] if idx else self.tokens - tokens = reversed(tokens) if reverse else tokens - types = [T.Name, T.Wildcard, T.String.Symbol] - - if keywords: - types.append(T.Keyword) - - for token in tokens: - if token.ttype in types: - return remove_quotes(token.value) - elif isinstance(token, (Identifier, Function)): - return token.get_real_name() if real_name else token.get_name() - - -class Statement(TokenList): - """Represents a SQL statement.""" - - def get_type(self): - """Returns the type of a statement. - - The returned value is a string holding an upper-cased reprint of - the first DML or DDL keyword. 
If the first token in this group - isn't a DML or DDL keyword "UNKNOWN" is returned. - - Whitespaces and comments at the beginning of the statement - are ignored. - """ - first_token = self.token_first(skip_cm=True) - if first_token is None: - # An "empty" statement that either has not tokens at all - # or only whitespace tokens. - return 'UNKNOWN' - - elif first_token.ttype in (T.Keyword.DML, T.Keyword.DDL): - return first_token.normalized - - elif first_token.ttype == T.Keyword.CTE: - # The WITH keyword should be followed by either an Identifier or - # an IdentifierList containing the CTE definitions; the actual - # DML keyword (e.g. SELECT, INSERT) will follow next. - fidx = self.token_index(first_token) - tidx, token = self.token_next(fidx, skip_ws=True) - if isinstance(token, (Identifier, IdentifierList)): - _, dml_keyword = self.token_next(tidx, skip_ws=True) - - if dml_keyword is not None \ - and dml_keyword.ttype == T.Keyword.DML: - return dml_keyword.normalized - - # Hmm, probably invalid syntax, so return unknown. - return 'UNKNOWN' - - -class Identifier(NameAliasMixin, TokenList): - """Represents an identifier. - - Identifiers may have aliases or typecasts. 
- """ - - def is_wildcard(self): - """Return ``True`` if this identifier contains a wildcard.""" - _, token = self.token_next_by(t=T.Wildcard) - return token is not None - - def get_typecast(self): - """Returns the typecast or ``None`` of this object as a string.""" - midx, marker = self.token_next_by(m=(T.Punctuation, '::')) - nidx, next_ = self.token_next(midx, skip_ws=False) - return next_.value if next_ else None - - def get_ordering(self): - """Returns the ordering or ``None`` as uppercase string.""" - _, ordering = self.token_next_by(t=T.Keyword.Order) - return ordering.normalized if ordering else None - - def get_array_indices(self): - """Returns an iterator of index token lists""" - - for token in self.tokens: - if isinstance(token, SquareBrackets): - # Use [1:-1] index to discard the square brackets - yield token.tokens[1:-1] - - -class IdentifierList(TokenList): - """A list of :class:`~sqlparse.sql.Identifier`\'s.""" - - def get_identifiers(self): - """Returns the identifiers. - - Whitespaces and punctuations are not included in this generator. 
- """ - for token in self.tokens: - if not (token.is_whitespace or token.match(T.Punctuation, ',')): - yield token - - -class TypedLiteral(TokenList): - """A typed literal, such as "date '2001-09-28'" or "interval '2 hours'".""" - M_OPEN = [(T.Name.Builtin, None), (T.Keyword, "TIMESTAMP")] - M_CLOSE = T.String.Single, None - M_EXTEND = T.Keyword, ("DAY", "HOUR", "MINUTE", "MONTH", "SECOND", "YEAR") - - -class Parenthesis(TokenList): - """Tokens between parenthesis.""" - M_OPEN = T.Punctuation, '(' - M_CLOSE = T.Punctuation, ')' - - @property - def _groupable_tokens(self): - return self.tokens[1:-1] - - -class SquareBrackets(TokenList): - """Tokens between square brackets""" - M_OPEN = T.Punctuation, '[' - M_CLOSE = T.Punctuation, ']' - - @property - def _groupable_tokens(self): - return self.tokens[1:-1] - - -class Assignment(TokenList): - """An assignment like 'var := val;'""" - - -class If(TokenList): - """An 'if' clause with possible 'else if' or 'else' parts.""" - M_OPEN = T.Keyword, 'IF' - M_CLOSE = T.Keyword, 'END IF' - - -class For(TokenList): - """A 'FOR' loop.""" - M_OPEN = T.Keyword, ('FOR', 'FOREACH') - M_CLOSE = T.Keyword, 'END LOOP' - - -class Comparison(TokenList): - """A comparison used for example in WHERE clauses.""" - - @property - def left(self): - return self.tokens[0] - - @property - def right(self): - return self.tokens[-1] - - -class Comment(TokenList): - """A comment.""" - - def is_multiline(self): - return self.tokens and self.tokens[0].ttype == T.Comment.Multiline - - -class Where(TokenList): - """A WHERE clause.""" - M_OPEN = T.Keyword, 'WHERE' - M_CLOSE = T.Keyword, ( - 'ORDER BY', 'GROUP BY', 'LIMIT', 'UNION', 'UNION ALL', 'EXCEPT', - 'HAVING', 'RETURNING', 'INTO') - - -class Having(TokenList): - """A HAVING clause.""" - M_OPEN = T.Keyword, 'HAVING' - M_CLOSE = T.Keyword, ('ORDER BY', 'LIMIT') - - -class Case(TokenList): - """A CASE statement with one or more WHEN and possibly an ELSE part.""" - M_OPEN = T.Keyword, 'CASE' - M_CLOSE = 
T.Keyword, 'END' - - def get_cases(self, skip_ws=False): - """Returns a list of 2-tuples (condition, value). - - If an ELSE exists condition is None. - """ - CONDITION = 1 - VALUE = 2 - - ret = [] - mode = CONDITION - - for token in self.tokens: - # Set mode from the current statement - if token.match(T.Keyword, 'CASE'): - continue - - elif skip_ws and token.ttype in T.Whitespace: - continue - - elif token.match(T.Keyword, 'WHEN'): - ret.append(([], [])) - mode = CONDITION - - elif token.match(T.Keyword, 'THEN'): - mode = VALUE - - elif token.match(T.Keyword, 'ELSE'): - ret.append((None, [])) - mode = VALUE - - elif token.match(T.Keyword, 'END'): - mode = None - - # First condition without preceding WHEN - if mode and not ret: - ret.append(([], [])) - - # Append token depending of the current mode - if mode == CONDITION: - ret[-1][0].append(token) - - elif mode == VALUE: - ret[-1][1].append(token) - - # Return cases list - return ret - - -class Function(NameAliasMixin, TokenList): - """A function or procedure call.""" - - def get_parameters(self): - """Return a list of parameters.""" - parenthesis = self.tokens[-1] - for token in parenthesis.tokens: - if isinstance(token, IdentifierList): - return token.get_identifiers() - elif imt(token, i=(Function, Identifier), t=T.Literal): - return [token, ] - return [] - - -class Begin(TokenList): - """A BEGIN/END block.""" - M_OPEN = T.Keyword, 'BEGIN' - M_CLOSE = T.Keyword, 'END' - - -class Operation(TokenList): - """Grouping of operations""" - - -class Values(TokenList): - """Grouping of values""" - - -class Command(TokenList): - """Grouping of CLI commands.""" diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/tokens.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/tokens.py deleted file mode 100644 index eefc0b498..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/tokens.py +++ /dev/null @@ -1,69 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part 
of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause -# -# The Token implementation is based on pygment's token system written -# by Georg Brandl. -# http://pygments.org/ - -"""Tokens""" - - -class _TokenType(tuple): - parent = None - - def __contains__(self, item): - return item is not None and (self is item or item[:len(self)] == self) - - def __getattr__(self, name): - new = _TokenType(self + (name,)) - setattr(self, name, new) - new.parent = self - return new - - def __repr__(self): - # self can be False only if its the `root` i.e. Token itself - return 'Token' + ('.' if self else '') + '.'.join(self) - - -Token = _TokenType() - -# Special token types -Text = Token.Text -Whitespace = Text.Whitespace -Newline = Whitespace.Newline -Error = Token.Error -# Text that doesn't belong to this lexer (e.g. HTML in PHP) -Other = Token.Other - -# Common token types for source code -Keyword = Token.Keyword -Name = Token.Name -Literal = Token.Literal -String = Literal.String -Number = Literal.Number -Punctuation = Token.Punctuation -Operator = Token.Operator -Comparison = Operator.Comparison -Wildcard = Token.Wildcard -Comment = Token.Comment -Assignment = Token.Assignment - -# Generic types for non-source code -Generic = Token.Generic -Command = Generic.Command - -# String and some others are not direct children of Token. 
-# alias them: -Token.Token = Token -Token.String = String -Token.Number = Number - -# SQL specific tokens -DML = Keyword.DML -DDL = Keyword.DDL -CTE = Keyword.CTE diff --git a/shell/ext-py/sqlparse-0.3.1/sqlparse/utils.py b/shell/ext-py/sqlparse-0.3.1/sqlparse/utils.py deleted file mode 100644 index 3283274df..000000000 --- a/shell/ext-py/sqlparse-0.3.1/sqlparse/utils.py +++ /dev/null @@ -1,123 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2009-2018 the sqlparse authors and contributors -# -# -# This module is part of python-sqlparse and is released under -# the BSD License: https://opensource.org/licenses/BSD-3-Clause - -import itertools -import re -from collections import deque -from contextlib import contextmanager -from sqlparse.compat import text_type - -# This regular expression replaces the home-cooked parser that was here before. -# It is much faster, but requires an extra post-processing step to get the -# desired results (that are compatible with what you would expect from the -# str.splitlines() method). -# -# It matches groups of characters: newlines, quoted strings, or unquoted text, -# and splits on that basis. The post-processing step puts those back together -# into the actual lines of SQL. -SPLIT_REGEX = re.compile(r""" -( - (?: # Start of non-capturing group - (?:\r\n|\r|\n) | # Match any single newline, or - [^\r\n'"]+ | # Match any character series without quotes or - # newlines, or - "(?:[^"\\]|\\.)*" | # Match double-quoted strings, or - '(?:[^'\\]|\\.)*' # Match single quoted strings - ) -) -""", re.VERBOSE) - -LINE_MATCH = re.compile(r'(\r\n|\r|\n)') - - -def split_unquoted_newlines(stmt): - """Split a string on all unquoted newlines. 
- - Unlike str.splitlines(), this will ignore CR/LF/CR+LF if the requisite - character is inside of a string.""" - text = text_type(stmt) - lines = SPLIT_REGEX.split(text) - outputlines = [''] - for line in lines: - if not line: - continue - elif LINE_MATCH.match(line): - outputlines.append('') - else: - outputlines[-1] += line - return outputlines - - -def remove_quotes(val): - """Helper that removes surrounding quotes from strings.""" - if val is None: - return - if val[0] in ('"', "'") and val[0] == val[-1]: - val = val[1:-1] - return val - - -def recurse(*cls): - """Function decorator to help with recursion - - :param cls: Classes to not recurse over - :return: function - """ - def wrap(f): - def wrapped_f(tlist): - for sgroup in tlist.get_sublists(): - if not isinstance(sgroup, cls): - wrapped_f(sgroup) - f(tlist) - - return wrapped_f - - return wrap - - -def imt(token, i=None, m=None, t=None): - """Helper function to simplify comparisons Instance, Match and TokenType - :param token: - :param i: Class or Tuple/List of Classes - :param m: Tuple of TokenType & Value. Can be list of Tuple for multiple - :param t: TokenType or Tuple/List of TokenTypes - :return: bool - """ - clss = i - types = [t, ] if t and not isinstance(t, list) else t - mpatterns = [m, ] if m and not isinstance(m, list) else m - - if token is None: - return False - elif clss and isinstance(token, clss): - return True - elif mpatterns and any(token.match(*pattern) for pattern in mpatterns): - return True - elif types and any(token.ttype in ttype for ttype in types): - return True - else: - return False - - -def consume(iterator, n): - """Advance the iterator n-steps ahead. 
If n is none, consume entirely.""" - deque(itertools.islice(iterator, n), maxlen=0) - - -@contextmanager -def offset(filter_, n=0): - filter_.offset += n - yield - filter_.offset -= n - - -@contextmanager -def indent(filter_, n=1): - filter_.indent += n - yield - filter_.indent -= n diff --git a/shell/ext-py/sqlparse-0.3.1/tests/__init__.py b/shell/ext-py/sqlparse-0.3.1/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/shell/ext-py/sqlparse-0.3.1/tests/conftest.py b/shell/ext-py/sqlparse-0.3.1/tests/conftest.py deleted file mode 100644 index f2473a43d..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/conftest.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- - -"""Helpers for testing.""" - -import io -import os - -import pytest - -DIR_PATH = os.path.dirname(__file__) -FILES_DIR = os.path.join(DIR_PATH, 'files') - - -@pytest.fixture() -def filepath(): - """Returns full file path for test files.""" - - def make_filepath(filename): - # https://stackoverflow.com/questions/18011902/py-test-pass-a-parameter-to-a-fixture-function - # Alternate solution is to use parametrization `indirect=True` - # https://stackoverflow.com/questions/18011902/py-test-pass-a-parameter-to-a-fixture-function/33879151#33879151 - # Syntax is noisy and requires specific variable names - return os.path.join(FILES_DIR, filename) - - return make_filepath - - -@pytest.fixture() -def load_file(filepath): - """Opens filename with encoding and return its contents.""" - - def make_load_file(filename, encoding='utf-8'): - # https://stackoverflow.com/questions/18011902/py-test-pass-a-parameter-to-a-fixture-function - # Alternate solution is to use parametrization `indirect=True` - # https://stackoverflow.com/questions/18011902/py-test-pass-a-parameter-to-a-fixture-function/33879151#33879151 - # Syntax is noisy and requires specific variable names - # And seems to be limited to only 1 argument. 
- with io.open(filepath(filename), encoding=encoding) as f: - return f.read().strip() - - return make_load_file - - -@pytest.fixture() -def get_stream(filepath): - def make_stream(filename, encoding='utf-8'): - return io.open(filepath(filename), encoding=encoding) - - return make_stream diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/_Make_DirEntry.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/_Make_DirEntry.sql deleted file mode 100644 index e877bf123..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/files/_Make_DirEntry.sql +++ /dev/null @@ -1,6 +0,0 @@ --- Make a new dir entry --- and return its inode - - -INSERT INTO dir_entries(type) - VALUES(:type) \ No newline at end of file diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/begintag.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/begintag.sql deleted file mode 100644 index 699b36596..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/files/begintag.sql +++ /dev/null @@ -1,4 +0,0 @@ -begin; -update foo - set bar = 1; -commit; \ No newline at end of file diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/begintag_2.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/begintag_2.sql deleted file mode 100644 index 0de26d666..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/files/begintag_2.sql +++ /dev/null @@ -1,13 +0,0 @@ -CREATE TRIGGER IF NOT EXISTS remove_if_it_was_the_last_file_link --- Delete the direntry when is removed it's last static link - AFTER DELETE ON links - WHEN NOT EXISTS - ( - SELECT * FROM links - WHERE child_entry = OLD.child_entry - LIMIT 1 - ) -BEGIN - DELETE FROM dir_entries - WHERE dir_entries.inode = OLD.child_entry; -END; \ No newline at end of file diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/dashcomment.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/dashcomment.sql deleted file mode 100644 index 0d5ac62c6..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/files/dashcomment.sql +++ /dev/null @@ -1,5 +0,0 @@ -select * from user; ---select * from host; -select * from user; 
-select * -- foo; -from foo; \ No newline at end of file diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/encoding_gbk.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/encoding_gbk.sql deleted file mode 100644 index a61322965..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/files/encoding_gbk.sql +++ /dev/null @@ -1,3 +0,0 @@ -select * -from foo -where bar = '²»ÒÔÎïϲ£¬²»ÒÔ¼º±¯' \ No newline at end of file diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/encoding_utf8.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/encoding_utf8.sql deleted file mode 100644 index 26e7ad45f..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/files/encoding_utf8.sql +++ /dev/null @@ -1,3 +0,0 @@ -select * -from foo -where bar = 'é½å¤©å¤§åœ£.カラフルãªé›².사랑해요' \ No newline at end of file diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/function.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/function.sql deleted file mode 100644 index d19227f0f..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/files/function.sql +++ /dev/null @@ -1,13 +0,0 @@ -CREATE OR REPLACE FUNCTION foo( - p_in1 VARCHAR - , p_in2 INTEGER -) RETURNS INTEGER AS - - DECLARE - v_foo INTEGER; - BEGIN - SELECT * - FROM foo - INTO v_foo; - RETURN v_foo.id; - END; \ No newline at end of file diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/function_psql.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/function_psql.sql deleted file mode 100644 index e485f7a12..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/files/function_psql.sql +++ /dev/null @@ -1,72 +0,0 @@ -CREATE OR REPLACE FUNCTION public.delete_data ( - p_tabelle VARCHAR - , p_key VARCHAR - , p_value INTEGER -) RETURNS INTEGER AS -$$ -DECLARE - p_retval INTEGER; - v_constraint RECORD; - v_count INTEGER; - v_data RECORD; - v_fieldname VARCHAR; - v_sql VARCHAR; - v_key VARCHAR; - v_value INTEGER; -BEGIN - v_sql := 'SELECT COUNT(*) FROM ' || p_tabelle || ' WHERE ' || p_key || ' = ' || p_value; - --RAISE NOTICE '%', v_sql; - EXECUTE v_sql INTO v_count; - IF 
v_count::integer != 0 THEN - SELECT att.attname - INTO v_key - FROM pg_attribute att - LEFT JOIN pg_constraint con ON con.conrelid = att.attrelid - AND con.conkey[1] = att.attnum - AND con.contype = 'p', pg_type typ, pg_class rel, pg_namespace ns - WHERE att.attrelid = rel.oid - AND att.attnum > 0 - AND typ.oid = att.atttypid - AND att.attisdropped = false - AND rel.relname = p_tabelle - AND con.conkey[1] = 1 - AND ns.oid = rel.relnamespace - AND ns.nspname = 'public' - ORDER BY att.attnum; - v_sql := 'SELECT ' || v_key || ' AS id FROM ' || p_tabelle || ' WHERE ' || p_key || ' = ' || p_value; - FOR v_data IN EXECUTE v_sql - LOOP - --RAISE NOTICE ' -> % %', p_tabelle, v_data.id; - FOR v_constraint IN SELECT t.constraint_name - , t.constraint_type - , t.table_name - , c.column_name - FROM public.v_table_constraints t - , public.v_constraint_columns c - WHERE t.constraint_name = c.constraint_name - AND t.constraint_type = 'FOREIGN KEY' - AND c.table_name = p_tabelle - AND t.table_schema = 'public' - AND c.table_schema = 'public' - LOOP - v_fieldname := substring(v_constraint.constraint_name from 1 for length(v_constraint.constraint_name) - length(v_constraint.column_name) - 1); - IF (v_constraint.table_name = p_tabelle) AND (p_value = v_data.id) THEN - --RAISE NOTICE 'Skip (Selbstverweis)'; - CONTINUE; - ELSE - PERFORM delete_data(v_constraint.table_name::varchar, v_fieldname::varchar, v_data.id::integer); - END IF; - END LOOP; - END LOOP; - v_sql := 'DELETE FROM ' || p_tabelle || ' WHERE ' || p_key || ' = ' || p_value; - --RAISE NOTICE '%', v_sql; - EXECUTE v_sql; - p_retval := 1; - ELSE - --RAISE NOTICE ' -> Keine Sätze gefunden'; - p_retval := 0; - END IF; - RETURN p_retval; -END; -$$ -LANGUAGE plpgsql; \ No newline at end of file diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/function_psql2.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/function_psql2.sql deleted file mode 100644 index b5d494cc4..000000000 --- 
a/shell/ext-py/sqlparse-0.3.1/tests/files/function_psql2.sql +++ /dev/null @@ -1,7 +0,0 @@ -CREATE OR REPLACE FUNCTION update_something() RETURNS void AS -$body$ -BEGIN - raise notice 'foo'; -END; -$body$ -LANGUAGE 'plpgsql' VOLATILE CALLED ON NULL INPUT SECURITY INVOKER; \ No newline at end of file diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/function_psql3.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/function_psql3.sql deleted file mode 100644 index b25d81892..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/files/function_psql3.sql +++ /dev/null @@ -1,8 +0,0 @@ -CREATE OR REPLACE FUNCTION foo() RETURNS integer AS -$body$ -DECLARE -BEGIN - select * from foo; -END; -$body$ -LANGUAGE 'plpgsql' VOLATILE CALLED ON NULL INPUT SECURITY INVOKER; \ No newline at end of file diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/function_psql4.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/function_psql4.sql deleted file mode 100644 index 02900a6f2..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/files/function_psql4.sql +++ /dev/null @@ -1,12 +0,0 @@ -CREATE FUNCTION doubledollarinbody(var1 text) RETURNS text -/* see issue277 */ -LANGUAGE plpgsql -AS $_$ -DECLARE - str text; - BEGIN - str = $$'foo'$$||var1; - execute 'select '||str into str; - return str; - END -$_$; diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/huge_select.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/huge_select.sql deleted file mode 100644 index ab39823b6..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/files/huge_select.sql +++ /dev/null @@ -1 +0,0 @@ -select case when i = 0 then 1 else 0 end as col0, case when i = 1 then 1 else 1 end as col1, case when i = 2 then 1 else 2 end as col2, case when i = 3 then 1 else 3 end as col3, case when i = 4 then 1 else 4 end as col4, case when i = 5 then 1 else 5 end as col5, case when i = 6 then 1 else 6 end as col6, case when i = 7 then 1 else 7 end as col7, case when i = 8 then 1 else 8 end as col8, case when i = 9 then 1 else 9 end as col9, 
case when i = 10 then 1 else 10 end as col10, case when i = 11 then 1 else 11 end as col11, case when i = 12 then 1 else 12 end as col12, case when i = 13 then 1 else 13 end as col13, case when i = 14 then 1 else 14 end as col14, case when i = 15 then 1 else 15 end as col15, case when i = 16 then 1 else 16 end as col16, case when i = 17 then 1 else 17 end as col17, case when i = 18 then 1 else 18 end as col18, case when i = 19 then 1 else 19 end as col19, case when i = 20 then 1 else 20 end as col20, case when i = 21 then 1 else 21 end as col21, case when i = 22 then 1 else 22 end as col22 from foo UNION select case when i = 0 then 1 else 0 end as col0, case when i = 1 then 1 else 1 end as col1, case when i = 2 then 1 else 2 end as col2, case when i = 3 then 1 else 3 end as col3, case when i = 4 then 1 else 4 end as col4, case when i = 5 then 1 else 5 end as col5, case when i = 6 then 1 else 6 end as col6, case when i = 7 then 1 else 7 end as col7, case when i = 8 then 1 else 8 end as col8, case when i = 9 then 1 else 9 end as col9, case when i = 10 then 1 else 10 end as col10, case when i = 11 then 1 else 11 end as col11, case when i = 12 then 1 else 12 end as col12, case when i = 13 then 1 else 13 end as col13, case when i = 14 then 1 else 14 end as col14, case when i = 15 then 1 else 15 end as col15, case when i = 16 then 1 else 16 end as col16, case when i = 17 then 1 else 17 end as col17, case when i = 18 then 1 else 18 end as col18, case when i = 19 then 1 else 19 end as col19, case when i = 20 then 1 else 20 end as col20, case when i = 21 then 1 else 21 end as col21, case when i = 22 then 1 else 22 end as col22 from foo UNION select case when i = 0 then 1 else 0 end as col0, case when i = 1 then 1 else 1 end as col1, case when i = 2 then 1 else 2 end as col2, case when i = 3 then 1 else 3 end as col3, case when i = 4 then 1 else 4 end as col4, case when i = 5 then 1 else 5 end as col5, case when i = 6 then 1 else 6 end as col6, case when i = 7 then 1 else 7 
end as col7, case when i = 8 then 1 else 8 end as col8, case when i = 9 then 1 else 9 end as col9, case when i = 10 then 1 else 10 end as col10, case when i = 11 then 1 else 11 end as col11, case when i = 12 then 1 else 12 end as col12, case when i = 13 then 1 else 13 end as col13, case when i = 14 then 1 else 14 end as col14, case when i = 15 then 1 else 15 end as col15, case when i = 16 then 1 else 16 end as col16, case when i = 17 then 1 else 17 end as col17, case when i = 18 then 1 else 18 end as col18, case when i = 19 then 1 else 19 end as col19, case when i = 20 then 1 else 20 end as col20, case when i = 21 then 1 else 21 end as col21, case when i = 22 then 1 else 22 end as col22 from foo UNION select case when i = 0 then 1 else 0 end as col0, case when i = 1 then 1 else 1 end as col1, case when i = 2 then 1 else 2 end as col2, case when i = 3 then 1 else 3 end as col3, case when i = 4 then 1 else 4 end as col4, case when i = 5 then 1 else 5 end as col5, case when i = 6 then 1 else 6 end as col6, case when i = 7 then 1 else 7 end as col7, case when i = 8 then 1 else 8 end as col8, case when i = 9 then 1 else 9 end as col9, case when i = 10 then 1 else 10 end as col10, case when i = 11 then 1 else 11 end as col11, case when i = 12 then 1 else 12 end as col12, case when i = 13 then 1 else 13 end as col13, case when i = 14 then 1 else 14 end as col14, case when i = 15 then 1 else 15 end as col15, case when i = 16 then 1 else 16 end as col16, case when i = 17 then 1 else 17 end as col17, case when i = 18 then 1 else 18 end as col18, case when i = 19 then 1 else 19 end as col19, case when i = 20 then 1 else 20 end as col20, case when i = 21 then 1 else 21 end as col21, case when i = 22 then 1 else 22 end as col22 from foo UNION select case when i = 0 then 1 else 0 end as col0, case when i = 1 then 1 else 1 end as col1, case when i = 2 then 1 else 2 end as col2, case when i = 3 then 1 else 3 end as col3, case when i = 4 then 1 else 4 end as col4, case when i = 5 
then 1 else 5 end as col5, case when i = 6 then 1 else 6 end as col6, case when i = 7 then 1 else 7 end as col7, case when i = 8 then 1 else 8 end as col8, case when i = 9 then 1 else 9 end as col9, case when i = 10 then 1 else 10 end as col10, case when i = 11 then 1 else 11 end as col11, case when i = 12 then 1 else 12 end as col12, case when i = 13 then 1 else 13 end as col13, case when i = 14 then 1 else 14 end as col14, case when i = 15 then 1 else 15 end as col15, case when i = 16 then 1 else 16 end as col16, case when i = 17 then 1 else 17 end as col17, case when i = 18 then 1 else 18 end as col18, case when i = 19 then 1 else 19 end as col19, case when i = 20 then 1 else 20 end as col20, case when i = 21 then 1 else 21 end as col21, case when i = 22 then 1 else 22 end as col22 from foo UNION select case when i = 0 then 1 else 0 end as col0, case when i = 1 then 1 else 1 end as col1, case when i = 2 then 1 else 2 end as col2, case when i = 3 then 1 else 3 end as col3, case when i = 4 then 1 else 4 end as col4, case when i = 5 then 1 else 5 end as col5, case when i = 6 then 1 else 6 end as col6, case when i = 7 then 1 else 7 end as col7, case when i = 8 then 1 else 8 end as col8, case when i = 9 then 1 else 9 end as col9, case when i = 10 then 1 else 10 end as col10, case when i = 11 then 1 else 11 end as col11, case when i = 12 then 1 else 12 end as col12, case when i = 13 then 1 else 13 end as col13, case when i = 14 then 1 else 14 end as col14, case when i = 15 then 1 else 15 end as col15, case when i = 16 then 1 else 16 end as col16, case when i = 17 then 1 else 17 end as col17, case when i = 18 then 1 else 18 end as col18, case when i = 19 then 1 else 19 end as col19, case when i = 20 then 1 else 20 end as col20, case when i = 21 then 1 else 21 end as col21, case when i = 22 then 1 else 22 end as col22 from foo UNION select case when i = 0 then 1 else 0 end as col0, case when i = 1 then 1 else 1 end as col1, case when i = 2 then 1 else 2 end as col2, 
case when i = 3 then 1 else 3 end as col3, case when i = 4 then 1 else 4 end as col4, case when i = 5 then 1 else 5 end as col5, case when i = 6 then 1 else 6 end as col6, case when i = 7 then 1 else 7 end as col7, case when i = 8 then 1 else 8 end as col8, case when i = 9 then 1 else 9 end as col9, case when i = 10 then 1 else 10 end as col10, case when i = 11 then 1 else 11 end as col11, case when i = 12 then 1 else 12 end as col12, case when i = 13 then 1 else 13 end as col13, case when i = 14 then 1 else 14 end as col14, case when i = 15 then 1 else 15 end as col15, case when i = 16 then 1 else 16 end as col16, case when i = 17 then 1 else 17 end as col17, case when i = 18 then 1 else 18 end as col18, case when i = 19 then 1 else 19 end as col19, case when i = 20 then 1 else 20 end as col20, case when i = 21 then 1 else 21 end as col21, case when i = 22 then 1 else 22 end as col22 from foo UNION select case when i = 0 then 1 else 0 end as col0, case when i = 1 then 1 else 1 end as col1, case when i = 2 then 1 else 2 end as col2, case when i = 3 then 1 else 3 end as col3, case when i = 4 then 1 else 4 end as col4, case when i = 5 then 1 else 5 end as col5, case when i = 6 then 1 else 6 end as col6, case when i = 7 then 1 else 7 end as col7, case when i = 8 then 1 else 8 end as col8, case when i = 9 then 1 else 9 end as col9, case when i = 10 then 1 else 10 end as col10, case when i = 11 then 1 else 11 end as col11, case when i = 12 then 1 else 12 end as col12, case when i = 13 then 1 else 13 end as col13, case when i = 14 then 1 else 14 end as col14, case when i = 15 then 1 else 15 end as col15, case when i = 16 then 1 else 16 end as col16, case when i = 17 then 1 else 17 end as col17, case when i = 18 then 1 else 18 end as col18, case when i = 19 then 1 else 19 end as col19, case when i = 20 then 1 else 20 end as col20, case when i = 21 then 1 else 21 end as col21, case when i = 22 then 1 else 22 end as col22 from foo UNION select case when i = 0 then 1 else 0 
end as col0, case when i = 1 then 1 else 1 end as col1, case when i = 2 then 1 else 2 end as col2, case when i = 3 then 1 else 3 end as col3, case when i = 4 then 1 else 4 end as col4, case when i = 5 then 1 else 5 end as col5, case when i = 6 then 1 else 6 end as col6, case when i = 7 then 1 else 7 end as col7, case when i = 8 then 1 else 8 end as col8, case when i = 9 then 1 else 9 end as col9, case when i = 10 then 1 else 10 end as col10, case when i = 11 then 1 else 11 end as col11, case when i = 12 then 1 else 12 end as col12, case when i = 13 then 1 else 13 end as col13, case when i = 14 then 1 else 14 end as col14, case when i = 15 then 1 else 15 end as col15, case when i = 16 then 1 else 16 end as col16, case when i = 17 then 1 else 17 end as col17, case when i = 18 then 1 else 18 end as col18, case when i = 19 then 1 else 19 end as col19, case when i = 20 then 1 else 20 end as col20, case when i = 21 then 1 else 21 end as col21, case when i = 22 then 1 else 22 end as col22 from foo \ No newline at end of file diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/stream.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/stream.sql deleted file mode 100644 index c1b73b03a..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/files/stream.sql +++ /dev/null @@ -1,2 +0,0 @@ --- this file is streamed in -insert into foo diff --git a/shell/ext-py/sqlparse-0.3.1/tests/files/test_cp1251.sql b/shell/ext-py/sqlparse-0.3.1/tests/files/test_cp1251.sql deleted file mode 100644 index 6c0228b7d..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/files/test_cp1251.sql +++ /dev/null @@ -1 +0,0 @@ -insert into foo values (1); -- Ïåñíÿ ïðî íàäåæäó diff --git a/shell/ext-py/sqlparse-0.3.1/tests/test_cli.py b/shell/ext-py/sqlparse-0.3.1/tests/test_cli.py deleted file mode 100644 index 5f1ea0e51..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/test_cli.py +++ /dev/null @@ -1,145 +0,0 @@ -# -*- coding: utf-8 -*- - -import subprocess -import sys - -import pytest - -import sqlparse - - 
-def test_cli_main_empty(): - with pytest.raises(SystemExit): - sqlparse.cli.main([]) - - -def test_parser_empty(): - with pytest.raises(SystemExit): - parser = sqlparse.cli.create_parser() - parser.parse_args([]) - - -def test_main_help(): - # Call with the --help option as a basic sanity check. - with pytest.raises(SystemExit) as exinfo: - sqlparse.cli.main(["--help", ]) - assert exinfo.value.code == 0 - - -def test_valid_args(filepath): - # test doesn't abort - path = filepath('function.sql') - assert sqlparse.cli.main([path, '-r']) is not None - - -def test_invalid_choice(filepath): - path = filepath('function.sql') - with pytest.raises(SystemExit): - sqlparse.cli.main([path, '-l', 'Spanish']) - - -def test_invalid_args(filepath, capsys): - path = filepath('function.sql') - sqlparse.cli.main([path, '-r', '--indent_width', '0']) - _, err = capsys.readouterr() - assert err == ("[ERROR] Invalid options: indent_width requires " - "a positive integer\n") - - -def test_invalid_infile(filepath, capsys): - path = filepath('missing.sql') - sqlparse.cli.main([path, '-r']) - _, err = capsys.readouterr() - assert err[:22] == "[ERROR] Failed to read" - - -def test_invalid_outfile(filepath, capsys): - path = filepath('function.sql') - outpath = filepath('/missing/function.sql') - sqlparse.cli.main([path, '-r', '-o', outpath]) - _, err = capsys.readouterr() - assert err[:22] == "[ERROR] Failed to open" - - -def test_stdout(filepath, load_file, capsys): - path = filepath('begintag.sql') - expected = load_file('begintag.sql') - sqlparse.cli.main([path]) - out, _ = capsys.readouterr() - assert out == expected - - -def test_script(): - # Call with the --help option as a basic sanity check. 
- cmd = "{0:s} -m sqlparse.cli --help".format(sys.executable) - assert subprocess.call(cmd.split()) == 0 - - -def test_encoding_utf8_stdout(filepath, load_file, capfd): - path = filepath('encoding_utf8.sql') - expected = load_file('encoding_utf8.sql', 'utf-8') - sys.stdout.encoding = 'utf-8' - sqlparse.cli.main([path]) - out, _ = capfd.readouterr() - assert out == expected - - -def test_encoding_utf8_output_file(filepath, load_file, tmpdir): - in_path = filepath('encoding_utf8.sql') - expected = load_file('encoding_utf8.sql', 'utf-8') - out_path = tmpdir.dirname + '/encoding_utf8.out.sql' - sqlparse.cli.main([in_path, '-o', out_path]) - out = load_file(out_path, 'utf-8') - assert out == expected - - -def test_encoding_gbk_stdout(filepath, load_file, capfd): - path = filepath('encoding_gbk.sql') - expected = load_file('encoding_gbk.sql', 'gbk') - sys.stdout.encoding = 'gbk' - sqlparse.cli.main([path, '--encoding', 'gbk']) - out, _ = capfd.readouterr() - assert out == expected - - -def test_encoding_gbk_output_file(filepath, load_file, tmpdir): - in_path = filepath('encoding_gbk.sql') - expected = load_file('encoding_gbk.sql', 'gbk') - out_path = tmpdir.dirname + '/encoding_gbk.out.sql' - sqlparse.cli.main([in_path, '--encoding', 'gbk', '-o', out_path]) - out = load_file(out_path, 'gbk') - assert out == expected - - -def test_encoding_stdin_utf8(filepath, load_file, capfd): - path = filepath('encoding_utf8.sql') - expected = load_file('encoding_utf8.sql', 'utf-8') - old_stdin = sys.stdin - with open(path, 'r') as f: - sys.stdin = f - sys.stdout.encoding = 'utf-8' - sqlparse.cli.main(['-']) - sys.stdin = old_stdin - out, _ = capfd.readouterr() - assert out == expected - - -def test_encoding_stdin_gbk(filepath, load_file, capfd): - path = filepath('encoding_gbk.sql') - expected = load_file('encoding_gbk.sql', 'gbk') - old_stdin = sys.stdin - with open(path, 'r') as stream: - sys.stdin = stream - sys.stdout.encoding = 'gbk' - sqlparse.cli.main(['-', '--encoding', 
'gbk']) - sys.stdin = old_stdin - out, _ = capfd.readouterr() - assert out == expected - - -def test_encoding(filepath, capsys): - path = filepath('test_cp1251.sql') - expected = u'insert into foo values (1); -- ПеÑÐ½Ñ Ð¿Ñ€Ð¾ надежду\n' - sqlparse.cli.main([path, '--encoding=cp1251']) - out, _ = capsys.readouterr() - assert out == expected diff --git a/shell/ext-py/sqlparse-0.3.1/tests/test_format.py b/shell/ext-py/sqlparse-0.3.1/tests/test_format.py deleted file mode 100644 index 811e0833c..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/test_format.py +++ /dev/null @@ -1,709 +0,0 @@ -# -*- coding: utf-8 -*- - -import pytest - -import sqlparse -from sqlparse.exceptions import SQLParseError - - -class TestFormat(object): - def test_keywordcase(self): - sql = 'select * from bar; -- select foo\n' - res = sqlparse.format(sql, keyword_case='upper') - assert res == 'SELECT * FROM bar; -- select foo\n' - res = sqlparse.format(sql, keyword_case='capitalize') - assert res == 'Select * From bar; -- select foo\n' - res = sqlparse.format(sql.upper(), keyword_case='lower') - assert res == 'select * from BAR; -- SELECT FOO\n' - - def test_keywordcase_invalid_option(self): - sql = 'select * from bar; -- select foo\n' - with pytest.raises(SQLParseError): - sqlparse.format(sql, keyword_case='foo') - - def test_identifiercase(self): - sql = 'select * from bar; -- select foo\n' - res = sqlparse.format(sql, identifier_case='upper') - assert res == 'select * from BAR; -- select foo\n' - res = sqlparse.format(sql, identifier_case='capitalize') - assert res == 'select * from Bar; -- select foo\n' - res = sqlparse.format(sql.upper(), identifier_case='lower') - assert res == 'SELECT * FROM bar; -- SELECT FOO\n' - - def test_identifiercase_invalid_option(self): - sql = 'select * from bar; -- select foo\n' - with pytest.raises(SQLParseError): - sqlparse.format(sql, identifier_case='foo') - - def test_identifiercase_quotes(self): - sql = 'select * from "foo"."bar"' - res = 
sqlparse.format(sql, identifier_case="upper") - assert res == 'select * from "foo"."bar"' - - def test_strip_comments_single(self): - sql = 'select *-- statement starts here\nfrom foo' - res = sqlparse.format(sql, strip_comments=True) - assert res == 'select * from foo' - sql = 'select * -- statement starts here\nfrom foo' - res = sqlparse.format(sql, strip_comments=True) - assert res == 'select * from foo' - sql = 'select-- foo\nfrom -- bar\nwhere' - res = sqlparse.format(sql, strip_comments=True) - assert res == 'select from where' - sql = 'select *-- statement starts here\n\nfrom foo' - res = sqlparse.format(sql, strip_comments=True) - assert res == 'select * from foo' - sql = 'select * from foo-- statement starts here\nwhere' - res = sqlparse.format(sql, strip_comments=True) - assert res == 'select * from foo where' - sql = 'select a-- statement starts here\nfrom foo' - res = sqlparse.format(sql, strip_comments=True) - assert res == 'select a from foo' - sql = '--comment\nselect a-- statement starts here\n' \ - 'from foo--comment\nf' - res = sqlparse.format(sql, strip_comments=True) - assert res == 'select a from foo f' - - def test_strip_comments_invalid_option(self): - sql = 'select-- foo\nfrom -- bar\nwhere' - with pytest.raises(SQLParseError): - sqlparse.format(sql, strip_comments=None) - - def test_strip_comments_multi(self): - sql = '/* sql starts here */\nselect' - res = sqlparse.format(sql, strip_comments=True) - assert res == 'select' - sql = '/* sql starts here */ select' - res = sqlparse.format(sql, strip_comments=True) - assert res == 'select' - sql = '/*\n * sql starts here\n */\nselect' - res = sqlparse.format(sql, strip_comments=True) - assert res == 'select' - sql = 'select (/* sql starts here */ select 2)' - res = sqlparse.format(sql, strip_comments=True) - assert res == 'select (select 2)' - sql = 'select (/* sql /* starts here */ select 2)' - res = sqlparse.format(sql, strip_comments=True) - assert res == 'select (select 2)' - - def 
test_strip_ws(self): - f = lambda sql: sqlparse.format(sql, strip_whitespace=True) - s = 'select\n* from foo\n\twhere ( 1 = 2 )\n' - assert f(s) == 'select * from foo where (1 = 2)' - s = 'select -- foo\nfrom bar\n' - assert f(s) == 'select -- foo\nfrom bar' - - def test_strip_ws_invalid_option(self): - s = 'select -- foo\nfrom bar\n' - with pytest.raises(SQLParseError): - sqlparse.format(s, strip_whitespace=None) - - def test_preserve_ws(self): - # preserve at least one whitespace after subgroups - f = lambda sql: sqlparse.format(sql, strip_whitespace=True) - s = 'select\n* /* foo */ from bar ' - assert f(s) == 'select * /* foo */ from bar' - - def test_notransform_of_quoted_crlf(self): - # Make sure that CR/CR+LF characters inside string literals don't get - # affected by the formatter. - - s1 = "SELECT some_column LIKE 'value\r'" - s2 = "SELECT some_column LIKE 'value\r'\r\nWHERE id = 1\n" - s3 = "SELECT some_column LIKE 'value\\'\r' WHERE id = 1\r" - s4 = "SELECT some_column LIKE 'value\\\\\\'\r' WHERE id = 1\r\n" - - f = lambda x: sqlparse.format(x) - - # Because of the use of - assert f(s1) == "SELECT some_column LIKE 'value\r'" - assert f(s2) == "SELECT some_column LIKE 'value\r'\nWHERE id = 1\n" - assert f(s3) == "SELECT some_column LIKE 'value\\'\r' WHERE id = 1\n" - assert (f(s4) - == "SELECT some_column LIKE 'value\\\\\\'\r' WHERE id = 1\n") - - -class TestFormatReindentAligned(object): - @staticmethod - def formatter(sql): - return sqlparse.format(sql, reindent_aligned=True) - - def test_basic(self): - sql = """ - select a, b as bb,c from table - join (select a * 2 as a from new_table) other - on table.a = other.a - where c is true - and b between 3 and 4 - or d is 'blue' - limit 10 - """ - - assert self.formatter(sql) == '\n'.join([ - 'select a,', - ' b as bb,', - ' c', - ' from table', - ' join (', - ' select a * 2 as a', - ' from new_table', - ' ) other', - ' on table.a = other.a', - ' where c is true', - ' and b between 3 and 4', - " or d is 
'blue'", - ' limit 10']) - - def test_joins(self): - sql = """ - select * from a - join b on a.one = b.one - left join c on c.two = a.two and c.three = a.three - full outer join d on d.three = a.three - cross join e on e.four = a.four - join f using (one, two, three) - """ - assert self.formatter(sql) == '\n'.join([ - 'select *', - ' from a', - ' join b', - ' on a.one = b.one', - ' left join c', - ' on c.two = a.two', - ' and c.three = a.three', - ' full outer join d', - ' on d.three = a.three', - ' cross join e', - ' on e.four = a.four', - ' join f using (one, two, three)']) - - def test_case_statement(self): - sql = """ - select a, - case when a = 0 - then 1 - when bb = 1 then 1 - when c = 2 then 2 - else 0 end as d, - extra_col - from table - where c is true - and b between 3 and 4 - """ - assert self.formatter(sql) == '\n'.join([ - 'select a,', - ' case when a = 0 then 1', - ' when bb = 1 then 1', - ' when c = 2 then 2', - ' else 0', - ' end as d,', - ' extra_col', - ' from table', - ' where c is true', - ' and b between 3 and 4']) - - def test_case_statement_with_between(self): - sql = """ - select a, - case when a = 0 - then 1 - when bb = 1 then 1 - when c = 2 then 2 - when d between 3 and 5 then 3 - else 0 end as d, - extra_col - from table - where c is true - and b between 3 and 4 - """ - assert self.formatter(sql) == '\n'.join([ - 'select a,', - ' case when a = 0 then 1', - ' when bb = 1 then 1', - ' when c = 2 then 2', - ' when d between 3 and 5 then 3', - ' else 0', - ' end as d,', - ' extra_col', - ' from table', - ' where c is true', - ' and b between 3 and 4']) - - def test_group_by(self): - sql = """ - select a, b, c, sum(x) as sum_x, count(y) as cnt_y - from table - group by a,b,c - having sum(x) > 1 - and count(y) > 5 - order by 3,2,1 - """ - assert self.formatter(sql) == '\n'.join([ - 'select a,', - ' b,', - ' c,', - ' sum(x) as sum_x,', - ' count(y) as cnt_y', - ' from table', - ' group by a,', - ' b,', - ' c', - 'having sum(x) > 1', - ' and 
count(y) > 5', - ' order by 3,', - ' 2,', - ' 1']) - - def test_group_by_subquery(self): - # TODO: add subquery alias when test_identifier_list_subquery fixed - sql = """ - select *, sum_b + 2 as mod_sum - from ( - select a, sum(b) as sum_b - from table - group by a,z) - order by 1,2 - """ - assert self.formatter(sql) == '\n'.join([ - 'select *,', - ' sum_b + 2 as mod_sum', - ' from (', - ' select a,', - ' sum(b) as sum_b', - ' from table', - ' group by a,', - ' z', - ' )', - ' order by 1,', - ' 2']) - - def test_window_functions(self): - sql = """ - select a, - SUM(a) OVER (PARTITION BY b ORDER BY c ROWS - BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as sum_a, - ROW_NUMBER() OVER - (PARTITION BY b, c ORDER BY d DESC) as row_num - from table""" - assert self.formatter(sql) == '\n'.join([ - 'select a,', - ' SUM(a) OVER (PARTITION BY b ORDER BY c ROWS ' - 'BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as sum_a,', - ' ROW_NUMBER() OVER ' - '(PARTITION BY b, c ORDER BY d DESC) as row_num', - ' from table']) - - -class TestSpacesAroundOperators(object): - @staticmethod - def formatter(sql): - return sqlparse.format(sql, use_space_around_operators=True) - - def test_basic(self): - sql = ('select a+b as d from table ' - 'where (c-d)%2= 1 and e> 3.0/4 and z^2 <100') - assert self.formatter(sql) == ( - 'select a + b as d from table ' - 'where (c - d) % 2 = 1 and e > 3.0 / 4 and z ^ 2 < 100') - - def test_bools(self): - sql = 'select * from table where a &&b or c||d' - assert self.formatter( - sql) == 'select * from table where a && b or c || d' - - def test_nested(self): - sql = 'select *, case when a-b then c end from table' - assert self.formatter( - sql) == 'select *, case when a - b then c end from table' - - def test_wildcard_vs_mult(self): - sql = 'select a*b-c from table' - assert self.formatter(sql) == 'select a * b - c from table' - - -class TestFormatReindent(object): - def test_option(self): - with pytest.raises(SQLParseError): - sqlparse.format('foo', reindent=2) 
- with pytest.raises(SQLParseError): - sqlparse.format('foo', indent_tabs=2) - with pytest.raises(SQLParseError): - sqlparse.format('foo', reindent=True, indent_width='foo') - with pytest.raises(SQLParseError): - sqlparse.format('foo', reindent=True, indent_width=-12) - with pytest.raises(SQLParseError): - sqlparse.format('foo', reindent=True, wrap_after='foo') - with pytest.raises(SQLParseError): - sqlparse.format('foo', reindent=True, wrap_after=-12) - with pytest.raises(SQLParseError): - sqlparse.format('foo', reindent=True, comma_first='foo') - - def test_stmts(self): - f = lambda sql: sqlparse.format(sql, reindent=True) - s = 'select foo; select bar' - assert f(s) == 'select foo;\n\nselect bar' - s = 'select foo' - assert f(s) == 'select foo' - s = 'select foo; -- test\n select bar' - assert f(s) == 'select foo; -- test\n\nselect bar' - - def test_keywords(self): - f = lambda sql: sqlparse.format(sql, reindent=True) - s = 'select * from foo union select * from bar;' - assert f(s) == '\n'.join([ - 'select *', - 'from foo', - 'union', - 'select *', - 'from bar;']) - - def test_keywords_between(self): - # issue 14 - # don't break AND after BETWEEN - f = lambda sql: sqlparse.format(sql, reindent=True) - s = 'and foo between 1 and 2 and bar = 3' - assert f(s) == '\n'.join([ - '', - 'and foo between 1 and 2', - 'and bar = 3']) - - def test_parenthesis(self): - f = lambda sql: sqlparse.format(sql, reindent=True) - s = 'select count(*) from (select * from foo);' - assert f(s) == '\n'.join([ - 'select count(*)', - 'from', - ' (select *', - ' from foo);']) - assert f("select f(1)") == 'select f(1)' - assert f("select f( 1 )") == 'select f(1)' - assert f("select f(\n\n\n1\n\n\n)") == 'select f(1)' - assert f("select f(\n\n\n 1 \n\n\n)") == 'select f(1)' - assert f("select f(\n\n\n 1 \n\n\n)") == 'select f(1)' - - def test_where(self): - f = lambda sql: sqlparse.format(sql, reindent=True) - s = 'select * from foo where bar = 1 and baz = 2 or bzz = 3;' - assert f(s) == 
'\n'.join([ - 'select *', - 'from foo', - 'where bar = 1', - ' and baz = 2', - ' or bzz = 3;']) - - s = 'select * from foo where bar = 1 and (baz = 2 or bzz = 3);' - assert f(s) == '\n'.join([ - 'select *', - 'from foo', - 'where bar = 1', - ' and (baz = 2', - ' or bzz = 3);']) - - def test_join(self): - f = lambda sql: sqlparse.format(sql, reindent=True) - s = 'select * from foo join bar on 1 = 2' - assert f(s) == '\n'.join([ - 'select *', - 'from foo', - 'join bar on 1 = 2']) - s = 'select * from foo inner join bar on 1 = 2' - assert f(s) == '\n'.join([ - 'select *', - 'from foo', - 'inner join bar on 1 = 2']) - s = 'select * from foo left outer join bar on 1 = 2' - assert f(s) == '\n'.join([ - 'select *', - 'from foo', - 'left outer join bar on 1 = 2']) - s = 'select * from foo straight_join bar on 1 = 2' - assert f(s) == '\n'.join([ - 'select *', - 'from foo', - 'straight_join bar on 1 = 2']) - - def test_identifier_list(self): - f = lambda sql: sqlparse.format(sql, reindent=True) - s = 'select foo, bar, baz from table1, table2 where 1 = 2' - assert f(s) == '\n'.join([ - 'select foo,', - ' bar,', - ' baz', - 'from table1,', - ' table2', - 'where 1 = 2']) - s = 'select a.*, b.id from a, b' - assert f(s) == '\n'.join([ - 'select a.*,', - ' b.id', - 'from a,', - ' b']) - - def test_identifier_list_with_wrap_after(self): - f = lambda sql: sqlparse.format(sql, reindent=True, wrap_after=14) - s = 'select foo, bar, baz from table1, table2 where 1 = 2' - assert f(s) == '\n'.join([ - 'select foo, bar,', - ' baz', - 'from table1, table2', - 'where 1 = 2']) - - def test_identifier_list_comment_first(self): - f = lambda sql: sqlparse.format(sql, reindent=True, comma_first=True) - # not the 3: It cleans up whitespace too! 
- s = 'select foo, bar, baz from table where foo in (1, 2,3)' - assert f(s) == '\n'.join([ - 'select foo', - ' , bar', - ' , baz', - 'from table', - 'where foo in (1', - ' , 2', - ' , 3)']) - - def test_identifier_list_with_functions(self): - f = lambda sql: sqlparse.format(sql, reindent=True) - s = ("select 'abc' as foo, coalesce(col1, col2)||col3 as bar," - "col3 from my_table") - assert f(s) == '\n'.join([ - "select 'abc' as foo,", - " coalesce(col1, col2)||col3 as bar,", - " col3", - "from my_table"]) - - def test_long_identifier_list_with_functions(self): - f = lambda sql: sqlparse.format(sql, reindent=True, wrap_after=30) - s = ("select 'abc' as foo, json_build_object('a', a," - "'b', b, 'c', c, 'd', d, 'e', e) as col2" - "col3 from my_table") - assert f(s) == '\n'.join([ - "select 'abc' as foo,", - " json_build_object('a',", - " a, 'b', b, 'c', c, 'd', d,", - " 'e', e) as col2col3", - "from my_table"]) - - def test_case(self): - f = lambda sql: sqlparse.format(sql, reindent=True) - s = 'case when foo = 1 then 2 when foo = 3 then 4 else 5 end' - assert f(s) == '\n'.join([ - 'case', - ' when foo = 1 then 2', - ' when foo = 3 then 4', - ' else 5', - 'end']) - - def test_case2(self): - f = lambda sql: sqlparse.format(sql, reindent=True) - s = 'case(foo) when bar = 1 then 2 else 3 end' - assert f(s) == '\n'.join([ - 'case(foo)', - ' when bar = 1 then 2', - ' else 3', - 'end']) - - def test_nested_identifier_list(self): - # issue4 - f = lambda sql: sqlparse.format(sql, reindent=True) - s = '(foo as bar, bar1, bar2 as bar3, b4 as b5)' - assert f(s) == '\n'.join([ - '(foo as bar,', - ' bar1,', - ' bar2 as bar3,', - ' b4 as b5)']) - - def test_duplicate_linebreaks(self): - # issue3 - f = lambda sql: sqlparse.format(sql, reindent=True) - s = 'select c1 -- column1\nfrom foo' - assert f(s) == '\n'.join([ - 'select c1 -- column1', - 'from foo']) - s = 'select c1 -- column1\nfrom foo' - r = sqlparse.format(s, reindent=True, strip_comments=True) - assert r == '\n'.join([ - 
'select c1', - 'from foo']) - s = 'select c1\nfrom foo\norder by c1' - assert f(s) == '\n'.join([ - 'select c1', - 'from foo', - 'order by c1']) - s = 'select c1 from t1 where (c1 = 1) order by c1' - assert f(s) == '\n'.join([ - 'select c1', - 'from t1', - 'where (c1 = 1)', - 'order by c1']) - - def test_keywordfunctions(self): - # issue36 - f = lambda sql: sqlparse.format(sql, reindent=True) - s = 'select max(a) b, foo, bar' - assert f(s) == '\n'.join([ - 'select max(a) b,', - ' foo,', - ' bar']) - - def test_identifier_and_functions(self): - # issue45 - f = lambda sql: sqlparse.format(sql, reindent=True) - s = 'select foo.bar, nvl(1) from dual' - assert f(s) == '\n'.join([ - 'select foo.bar,', - ' nvl(1)', - 'from dual']) - - def test_insert_values(self): - # issue 329 - f = lambda sql: sqlparse.format(sql, reindent=True) - s = 'insert into foo values (1, 2)' - assert f(s) == '\n'.join([ - 'insert into foo', - 'values (1, 2)']) - - s = 'insert into foo values (1, 2), (3, 4), (5, 6)' - assert f(s) == '\n'.join([ - 'insert into foo', - 'values (1, 2),', - ' (3, 4),', - ' (5, 6)']) - - s = 'insert into foo(a, b) values (1, 2), (3, 4), (5, 6)' - assert f(s) == '\n'.join([ - 'insert into foo(a, b)', - 'values (1, 2),', - ' (3, 4),', - ' (5, 6)']) - - f = lambda sql: sqlparse.format(sql, reindent=True, - comma_first=True) - s = 'insert into foo values (1, 2)' - assert f(s) == '\n'.join([ - 'insert into foo', - 'values (1, 2)']) - - s = 'insert into foo values (1, 2), (3, 4), (5, 6)' - assert f(s) == '\n'.join([ - 'insert into foo', - 'values (1, 2)', - ' , (3, 4)', - ' , (5, 6)']) - - s = 'insert into foo(a, b) values (1, 2), (3, 4), (5, 6)' - assert f(s) == '\n'.join([ - 'insert into foo(a, b)', - 'values (1, 2)', - ' , (3, 4)', - ' , (5, 6)']) - - -class TestOutputFormat(object): - def test_python(self): - sql = 'select * from foo;' - f = lambda sql: sqlparse.format(sql, output_format='python') - assert f(sql) == "sql = 'select * from foo;'" - f = lambda sql: 
sqlparse.format(sql, output_format='python', - reindent=True) - assert f(sql) == '\n'.join([ - "sql = ('select * '", - " 'from foo;')"]) - - def test_python_multiple_statements(self): - sql = 'select * from foo; select 1 from dual' - f = lambda sql: sqlparse.format(sql, output_format='python') - assert f(sql) == '\n'.join([ - "sql = 'select * from foo; '", - "sql2 = 'select 1 from dual'"]) - - @pytest.mark.xfail(reason="Needs fixing") - def test_python_multiple_statements_with_formatting(self): - sql = 'select * from foo; select 1 from dual' - f = lambda sql: sqlparse.format(sql, output_format='python', - reindent=True) - assert f(sql) == '\n'.join([ - "sql = ('select * '", - " 'from foo;')", - "sql2 = ('select 1 '", - " 'from dual')"]) - - def test_php(self): - sql = 'select * from foo;' - f = lambda sql: sqlparse.format(sql, output_format='php') - assert f(sql) == '$sql = "select * from foo;";' - f = lambda sql: sqlparse.format(sql, output_format='php', - reindent=True) - assert f(sql) == '\n'.join([ - '$sql = "select * ";', - '$sql .= "from foo;";']) - - def test_sql(self): - # "sql" is an allowed option but has no effect - sql = 'select * from foo;' - f = lambda sql: sqlparse.format(sql, output_format='sql') - assert f(sql) == 'select * from foo;' - - def test_invalid_option(self): - sql = 'select * from foo;' - with pytest.raises(SQLParseError): - sqlparse.format(sql, output_format='foo') - - -def test_format_column_ordering(): - # issue89 - sql = 'select * from foo order by c1 desc, c2, c3;' - formatted = sqlparse.format(sql, reindent=True) - expected = '\n'.join([ - 'select *', - 'from foo', - 'order by c1 desc,', - ' c2,', - ' c3;']) - assert formatted == expected - - -def test_truncate_strings(): - sql = "update foo set value = '{0}';".format('x' * 1000) - formatted = sqlparse.format(sql, truncate_strings=10) - assert formatted == "update foo set value = 'xxxxxxxxxx[...]';" - formatted = sqlparse.format(sql, truncate_strings=3, truncate_char='YYY') - 
assert formatted == "update foo set value = 'xxxYYY';" - - -@pytest.mark.parametrize('option', ['bar', -1, 0]) -def test_truncate_strings_invalid_option2(option): - with pytest.raises(SQLParseError): - sqlparse.format('foo', truncate_strings=option) - - -@pytest.mark.parametrize('sql', [ - 'select verrrylongcolumn from foo', - 'select "verrrylongcolumn" from "foo"']) -def test_truncate_strings_doesnt_truncate_identifiers(sql): - formatted = sqlparse.format(sql, truncate_strings=2) - assert formatted == sql - - -def test_having_produces_newline(): - sql = ('select * from foo, bar where bar.id = foo.bar_id ' - 'having sum(bar.value) > 100') - formatted = sqlparse.format(sql, reindent=True) - expected = [ - 'select *', - 'from foo,', - ' bar', - 'where bar.id = foo.bar_id', - 'having sum(bar.value) > 100'] - assert formatted == '\n'.join(expected) - - -@pytest.mark.parametrize('right_margin', ['ten', 2]) -def test_format_right_margin_invalid_option(right_margin): - with pytest.raises(SQLParseError): - sqlparse.format('foo', right_margin=right_margin) - - -@pytest.mark.xfail(reason="Needs fixing") -def test_format_right_margin(): - # TODO: Needs better test, only raises exception right now - sqlparse.format('foo', right_margin="79") diff --git a/shell/ext-py/sqlparse-0.3.1/tests/test_grouping.py b/shell/ext-py/sqlparse-0.3.1/tests/test_grouping.py deleted file mode 100644 index a147063b5..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/test_grouping.py +++ /dev/null @@ -1,642 +0,0 @@ -# -*- coding: utf-8 -*- - -import pytest - -import sqlparse -from sqlparse import sql, tokens as T - - -def test_grouping_parenthesis(): - s = 'select (select (x3) x2) and (y2) bar' - parsed = sqlparse.parse(s)[0] - assert str(parsed) == s - assert len(parsed.tokens) == 7 - assert isinstance(parsed.tokens[2], sql.Parenthesis) - assert isinstance(parsed.tokens[-1], sql.Identifier) - assert len(parsed.tokens[2].tokens) == 5 - assert isinstance(parsed.tokens[2].tokens[3], sql.Identifier) - 
assert isinstance(parsed.tokens[2].tokens[3].tokens[0], sql.Parenthesis) - assert len(parsed.tokens[2].tokens[3].tokens) == 3 - - -def test_grouping_comments(): - s = '/*\n * foo\n */ \n bar' - parsed = sqlparse.parse(s)[0] - assert str(parsed) == s - assert len(parsed.tokens) == 2 - - -@pytest.mark.parametrize('s', ['foo := 1;', 'foo := 1']) -def test_grouping_assignment(s): - parsed = sqlparse.parse(s)[0] - assert len(parsed.tokens) == 1 - assert isinstance(parsed.tokens[0], sql.Assignment) - - -@pytest.mark.parametrize('s', ["x > DATE '2020-01-01'", "x > TIMESTAMP '2020-01-01 00:00:00'"]) -def test_grouping_typed_literal(s): - parsed = sqlparse.parse(s)[0] - assert isinstance(parsed[4], sql.TypedLiteral) - - -@pytest.mark.parametrize('s, a, b', [ - ('select a from b where c < d + e', sql.Identifier, sql.Identifier), - ('select a from b where c < d + interval \'1 day\'', sql.Identifier, sql.TypedLiteral), - ('select a from b where c < d + interval \'6\' month', sql.Identifier, sql.TypedLiteral), - ('select a from b where c < current_timestamp - interval \'1 day\'', sql.Token, sql.TypedLiteral), -]) -def test_compare_expr(s, a, b): - parsed = sqlparse.parse(s)[0] - assert str(parsed) == s - assert isinstance(parsed.tokens[2], sql.Identifier) - assert isinstance(parsed.tokens[6], sql.Identifier) - assert isinstance(parsed.tokens[8], sql.Where) - assert len(parsed.tokens) == 9 - where = parsed.tokens[8] - assert isinstance(where.tokens[2], sql.Comparison) - assert len(where.tokens) == 3 - comparison = where.tokens[2] - assert isinstance(comparison.tokens[0], sql.Identifier) - assert comparison.tokens[2].ttype is T.Operator.Comparison - assert isinstance(comparison.tokens[4], sql.Operation) - assert len(comparison.tokens) == 5 - operation = comparison.tokens[4] - assert isinstance(operation.tokens[0], a) - assert operation.tokens[2].ttype is T.Operator - assert isinstance(operation.tokens[4], b) - assert len(operation.tokens) == 5 - - -def 
test_grouping_identifiers(): - s = 'select foo.bar from "myscheme"."table" where fail. order' - parsed = sqlparse.parse(s)[0] - assert str(parsed) == s - assert isinstance(parsed.tokens[2], sql.Identifier) - assert isinstance(parsed.tokens[6], sql.Identifier) - assert isinstance(parsed.tokens[8], sql.Where) - s = 'select * from foo where foo.id = 1' - parsed = sqlparse.parse(s)[0] - assert str(parsed) == s - assert isinstance(parsed.tokens[-1].tokens[-1].tokens[0], sql.Identifier) - s = 'select * from (select "foo"."id" from foo)' - parsed = sqlparse.parse(s)[0] - assert str(parsed) == s - assert isinstance(parsed.tokens[-1].tokens[3], sql.Identifier) - - for s in ["INSERT INTO `test` VALUES('foo', 'bar');", - "INSERT INTO `test` VALUES(1, 2), (3, 4), (5, 6);", - "INSERT INTO `test(a, b)` VALUES(1, 2), (3, 4), (5, 6);"]: - parsed = sqlparse.parse(s)[0] - types = [l.ttype for l in parsed.tokens if not l.is_whitespace] - assert types == [T.DML, T.Keyword, None, None, T.Punctuation] - assert isinstance(parsed.tokens[6], sql.Values) - - s = "select 1.0*(a+b) as col, sum(c)/sum(d) from myschema.mytable" - parsed = sqlparse.parse(s)[0] - assert len(parsed.tokens) == 7 - assert isinstance(parsed.tokens[2], sql.IdentifierList) - assert len(parsed.tokens[2].tokens) == 4 - identifiers = list(parsed.tokens[2].get_identifiers()) - assert len(identifiers) == 2 - assert identifiers[0].get_alias() == "col" - - -@pytest.mark.parametrize('s', [ - '1 as f', - 'foo as f', - 'foo f', - '1/2 as f', - '1/2 f', - '1<2 as f', # issue327 - '1<2 f', -]) -def test_simple_identifiers(s): - parsed = sqlparse.parse(s)[0] - assert isinstance(parsed.tokens[0], sql.Identifier) - - -@pytest.mark.parametrize('s', [ - 'foo, bar', - 'sum(a), sum(b)', - 'sum(a) as x, b as y', - 'sum(a)::integer, b', - 'sum(a)/count(b) as x, y', - 'sum(a)::integer as x, y', - 'sum(a)::integer/count(b) as x, y', # issue297 -]) -def test_group_identifier_list(s): - parsed = sqlparse.parse(s)[0] - assert 
isinstance(parsed.tokens[0], sql.IdentifierList) - - -def test_grouping_identifier_wildcard(): - p = sqlparse.parse('a.*, b.id')[0] - assert isinstance(p.tokens[0], sql.IdentifierList) - assert isinstance(p.tokens[0].tokens[0], sql.Identifier) - assert isinstance(p.tokens[0].tokens[-1], sql.Identifier) - - -def test_grouping_identifier_name_wildcard(): - p = sqlparse.parse('a.*')[0] - t = p.tokens[0] - assert t.get_name() == '*' - assert t.is_wildcard() is True - - -def test_grouping_identifier_invalid(): - p = sqlparse.parse('a.')[0] - assert isinstance(p.tokens[0], sql.Identifier) - assert p.tokens[0].has_alias() is False - assert p.tokens[0].get_name() is None - assert p.tokens[0].get_real_name() is None - assert p.tokens[0].get_parent_name() == 'a' - - -def test_grouping_identifier_invalid_in_middle(): - # issue261 - s = 'SELECT foo. FROM foo' - p = sqlparse.parse(s)[0] - assert isinstance(p[2], sql.Identifier) - assert p[2][1].ttype == T.Punctuation - assert p[3].ttype == T.Whitespace - assert str(p[2]) == 'foo.' 
- -@pytest.mark.parametrize('s', ['foo as (select *)', 'foo as(select *)']) -def test_grouping_identifer_as(s): - # issue507 - p = sqlparse.parse(s)[0] - assert isinstance(p.tokens[0], sql.Identifier) - token = p.tokens[0].tokens[2] - assert token.ttype == T.Keyword - assert token.normalized == 'AS' - -def test_grouping_identifier_as_invalid(): - # issue8 - p = sqlparse.parse('foo as select *')[0] - assert len(p.tokens), 5 - assert isinstance(p.tokens[0], sql.Identifier) - assert len(p.tokens[0].tokens) == 1 - assert p.tokens[2].ttype == T.Keyword - - -def test_grouping_identifier_function(): - p = sqlparse.parse('foo() as bar')[0] - assert isinstance(p.tokens[0], sql.Identifier) - assert isinstance(p.tokens[0].tokens[0], sql.Function) - p = sqlparse.parse('foo()||col2 bar')[0] - assert isinstance(p.tokens[0], sql.Identifier) - assert isinstance(p.tokens[0].tokens[0], sql.Operation) - assert isinstance(p.tokens[0].tokens[0].tokens[0], sql.Function) - - -@pytest.mark.parametrize('s', ['foo+100', 'foo + 100', 'foo*100']) -def test_grouping_operation(s): - p = sqlparse.parse(s)[0] - assert isinstance(p.tokens[0], sql.Operation) - - -def test_grouping_identifier_list(): - p = sqlparse.parse('a, b, c')[0] - assert isinstance(p.tokens[0], sql.IdentifierList) - p = sqlparse.parse('(a, b, c)')[0] - assert isinstance(p.tokens[0].tokens[1], sql.IdentifierList) - - -def test_grouping_identifier_list_subquery(): - """identifier lists should still work in subqueries with aliases""" - p = sqlparse.parse("select * from (" - "select a, b + c as d from table) sub")[0] - subquery = p.tokens[-1].tokens[0] - idx, iden_list = subquery.token_next_by(i=sql.IdentifierList) - assert iden_list is not None - # all the identifiers should be within the IdentifierList - _, ilist = subquery.token_next_by(i=sql.Identifier, idx=idx) - assert ilist is None - - -def test_grouping_identifier_list_case(): - p = sqlparse.parse('a, case when 1 then 2 else 3 end as b, c')[0] - assert 
isinstance(p.tokens[0], sql.IdentifierList) - p = sqlparse.parse('(a, case when 1 then 2 else 3 end as b, c)')[0] - assert isinstance(p.tokens[0].tokens[1], sql.IdentifierList) - - -def test_grouping_identifier_list_other(): - # issue2 - p = sqlparse.parse("select *, null, 1, 'foo', bar from mytable, x")[0] - assert isinstance(p.tokens[2], sql.IdentifierList) - assert len(p.tokens[2].tokens) == 13 - - -def test_grouping_identifier_list_with_inline_comments(): - # issue163 - p = sqlparse.parse('foo /* a comment */, bar')[0] - assert isinstance(p.tokens[0], sql.IdentifierList) - assert isinstance(p.tokens[0].tokens[0], sql.Identifier) - assert isinstance(p.tokens[0].tokens[3], sql.Identifier) - - -def test_grouping_identifiers_with_operators(): - p = sqlparse.parse('a+b as c from table where (d-e)%2= 1')[0] - assert len([x for x in p.flatten() if x.ttype == T.Name]) == 5 - - -def test_grouping_identifier_list_with_order(): - # issue101 - p = sqlparse.parse('1, 2 desc, 3')[0] - assert isinstance(p.tokens[0], sql.IdentifierList) - assert isinstance(p.tokens[0].tokens[3], sql.Identifier) - assert str(p.tokens[0].tokens[3]) == '2 desc' - - -def test_grouping_where(): - s = 'select * from foo where bar = 1 order by id desc' - p = sqlparse.parse(s)[0] - assert str(p) == s - assert len(p.tokens) == 12 - - s = 'select x from (select y from foo where bar = 1) z' - p = sqlparse.parse(s)[0] - assert str(p) == s - assert isinstance(p.tokens[-1].tokens[0].tokens[-2], sql.Where) - - -@pytest.mark.parametrize('s', ( - 'select 1 where 1 = 2 union select 2', - 'select 1 where 1 = 2 union all select 2', -)) -def test_grouping_where_union(s): - p = sqlparse.parse(s)[0] - assert p.tokens[5].value.startswith('union') - - -def test_returning_kw_ends_where_clause(): - s = 'delete from foo where x > y returning z' - p = sqlparse.parse(s)[0] - assert isinstance(p.tokens[6], sql.Where) - assert p.tokens[7].ttype == T.Keyword - assert p.tokens[7].value == 'returning' - - -def 
test_into_kw_ends_where_clause(): # issue324 - s = 'select * from foo where a = 1 into baz' - p = sqlparse.parse(s)[0] - assert isinstance(p.tokens[8], sql.Where) - assert p.tokens[9].ttype == T.Keyword - assert p.tokens[9].value == 'into' - - -@pytest.mark.parametrize('sql, expected', [ - # note: typecast needs to be 2nd token for this test - ('select foo::integer from bar', 'integer'), - ('select (current_database())::information_schema.sql_identifier', - 'information_schema.sql_identifier'), -]) -def test_grouping_typecast(sql, expected): - p = sqlparse.parse(sql)[0] - assert p.tokens[2].get_typecast() == expected - - -def test_grouping_alias(): - s = 'select foo as bar from mytable' - p = sqlparse.parse(s)[0] - assert str(p) == s - assert p.tokens[2].get_real_name() == 'foo' - assert p.tokens[2].get_alias() == 'bar' - s = 'select foo from mytable t1' - p = sqlparse.parse(s)[0] - assert str(p) == s - assert p.tokens[6].get_real_name() == 'mytable' - assert p.tokens[6].get_alias() == 't1' - s = 'select foo::integer as bar from mytable' - p = sqlparse.parse(s)[0] - assert str(p) == s - assert p.tokens[2].get_alias() == 'bar' - s = ('SELECT DISTINCT ' - '(current_database())::information_schema.sql_identifier AS view') - p = sqlparse.parse(s)[0] - assert str(p) == s - assert p.tokens[4].get_alias() == 'view' - - -def test_grouping_alias_case(): - # see issue46 - p = sqlparse.parse('CASE WHEN 1 THEN 2 ELSE 3 END foo')[0] - assert len(p.tokens) == 1 - assert p.tokens[0].get_alias() == 'foo' - - -def test_grouping_subquery_no_parens(): - # Not totally sure if this is the right approach... - # When a THEN clause contains a subquery w/o parenthesis around it *and* - # a WHERE condition, the WHERE grouper consumes END too. - # This takes makes sure that it doesn't fail. 
- p = sqlparse.parse('CASE WHEN 1 THEN select 2 where foo = 1 end')[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Case) - - -@pytest.mark.parametrize('s', ['foo.bar', 'x, y', 'x > y', 'x / y']) -def test_grouping_alias_returns_none(s): - # see issue185 and issue445 - p = sqlparse.parse(s)[0] - assert len(p.tokens) == 1 - assert p.tokens[0].get_alias() is None - - -def test_grouping_idlist_function(): - # see issue10 too - p = sqlparse.parse('foo(1) x, bar')[0] - assert isinstance(p.tokens[0], sql.IdentifierList) - - -def test_grouping_comparison_exclude(): - # make sure operators are not handled too lazy - p = sqlparse.parse('(=)')[0] - assert isinstance(p.tokens[0], sql.Parenthesis) - assert not isinstance(p.tokens[0].tokens[1], sql.Comparison) - p = sqlparse.parse('(a=1)')[0] - assert isinstance(p.tokens[0].tokens[1], sql.Comparison) - p = sqlparse.parse('(a>=1)')[0] - assert isinstance(p.tokens[0].tokens[1], sql.Comparison) - - -def test_grouping_function(): - p = sqlparse.parse('foo()')[0] - assert isinstance(p.tokens[0], sql.Function) - p = sqlparse.parse('foo(null, bar)')[0] - assert isinstance(p.tokens[0], sql.Function) - assert len(list(p.tokens[0].get_parameters())) == 2 - - -def test_grouping_function_not_in(): - # issue183 - p = sqlparse.parse('in(1, 2)')[0] - assert len(p.tokens) == 2 - assert p.tokens[0].ttype == T.Keyword - assert isinstance(p.tokens[1], sql.Parenthesis) - - -def test_grouping_varchar(): - p = sqlparse.parse('"text" Varchar(50) NOT NULL')[0] - assert isinstance(p.tokens[2], sql.Function) - - -def test_statement_get_type(): - def f(sql): - return sqlparse.parse(sql)[0] - - assert f('select * from foo').get_type() == 'SELECT' - assert f('update foo').get_type() == 'UPDATE' - assert f(' update foo').get_type() == 'UPDATE' - assert f('\nupdate foo').get_type() == 'UPDATE' - assert f('foo').get_type() == 'UNKNOWN' - # Statements that have a whitespace after the closing semicolon - # are parsed as two statements where 
later only consists of the - # trailing whitespace. - assert f('\n').get_type() == 'UNKNOWN' - - -def test_identifier_with_operators(): - # issue 53 - p = sqlparse.parse('foo||bar')[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Operation) - # again with whitespaces - p = sqlparse.parse('foo || bar')[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Operation) - - -def test_identifier_with_op_trailing_ws(): - # make sure trailing whitespace isn't grouped with identifier - p = sqlparse.parse('foo || bar ')[0] - assert len(p.tokens) == 2 - assert isinstance(p.tokens[0], sql.Operation) - assert p.tokens[1].ttype is T.Whitespace - - -def test_identifier_with_string_literals(): - p = sqlparse.parse("foo + 'bar'")[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Operation) - - -# This test seems to be wrong. It was introduced when fixing #53, but #111 -# showed that this shouldn't be an identifier at all. I'm leaving this -# commented in the source for a while. 
-# def test_identifier_string_concat(): -# p = sqlparse.parse("'foo' || bar")[0] -# assert len(p.tokens) == 1 -# assert isinstance(p.tokens[0], sql.Identifier) - - -def test_identifier_consumes_ordering(): - # issue89 - p = sqlparse.parse('select * from foo order by c1 desc, c2, c3')[0] - assert isinstance(p.tokens[-1], sql.IdentifierList) - ids = list(p.tokens[-1].get_identifiers()) - assert len(ids) == 3 - assert ids[0].get_name() == 'c1' - assert ids[0].get_ordering() == 'DESC' - assert ids[1].get_name() == 'c2' - assert ids[1].get_ordering() is None - - -def test_comparison_with_keywords(): - # issue90 - # in fact these are assignments, but for now we don't distinguish them - p = sqlparse.parse('foo = NULL')[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Comparison) - assert len(p.tokens[0].tokens) == 5 - assert p.tokens[0].left.value == 'foo' - assert p.tokens[0].right.value == 'NULL' - # make sure it's case-insensitive - p = sqlparse.parse('foo = null')[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Comparison) - - -def test_comparison_with_floats(): - # issue145 - p = sqlparse.parse('foo = 25.5')[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Comparison) - assert len(p.tokens[0].tokens) == 5 - assert p.tokens[0].left.value == 'foo' - assert p.tokens[0].right.value == '25.5' - - -def test_comparison_with_parenthesis(): - # issue23 - p = sqlparse.parse('(3 + 4) = 7')[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Comparison) - comp = p.tokens[0] - assert isinstance(comp.left, sql.Parenthesis) - assert comp.right.ttype is T.Number.Integer - - -@pytest.mark.parametrize('operator', ( - '=', '!=', '>', '<', '<=', '>=', '~', '~~', '!~~', - 'LIKE', 'NOT LIKE', 'ILIKE', 'NOT ILIKE', -)) -def test_comparison_with_strings(operator): - # issue148 - p = sqlparse.parse("foo {0} 'bar'".format(operator))[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Comparison) - assert 
p.tokens[0].right.value == "'bar'" - assert p.tokens[0].right.ttype == T.String.Single - - -def test_like_and_ilike_comparison(): - def validate_where_clause(where_clause, expected_tokens): - assert len(where_clause.tokens) == len(expected_tokens) - for where_token, expected_token in zip(where_clause, expected_tokens): - expected_ttype, expected_value = expected_token - if where_token.ttype is not None: - assert where_token.match(expected_ttype, expected_value, regex=True) - else: - # Certain tokens, such as comparison tokens, do not define a ttype that can be - # matched against. For these tokens, we ensure that the token instance is of - # the expected type and has a value conforming to specified regular expression - import re - assert (isinstance(where_token, expected_ttype) - and re.match(expected_value, where_token.value)) - - [p1] = sqlparse.parse("select * from mytable where mytable.mycolumn LIKE 'expr%' limit 5;") - [p1_where] = [token for token in p1 if isinstance(token, sql.Where)] - validate_where_clause(p1_where, [ - (T.Keyword, "where"), - (T.Whitespace, None), - (sql.Comparison, r"mytable.mycolumn LIKE.*"), - (T.Whitespace, None), - ]) - - [p2] = sqlparse.parse( - "select * from mytable where mycolumn NOT ILIKE '-expr' group by othercolumn;") - [p2_where] = [token for token in p2 if isinstance(token, sql.Where)] - validate_where_clause(p2_where, [ - (T.Keyword, "where"), - (T.Whitespace, None), - (sql.Comparison, r"mycolumn NOT ILIKE.*"), - (T.Whitespace, None), - ]) - - -def test_comparison_with_functions(): - # issue230 - p = sqlparse.parse('foo = DATE(bar.baz)')[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Comparison) - assert len(p.tokens[0].tokens) == 5 - assert p.tokens[0].left.value == 'foo' - assert p.tokens[0].right.value == 'DATE(bar.baz)' - - p = sqlparse.parse('DATE(foo.bar) = DATE(bar.baz)')[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Comparison) - assert len(p.tokens[0].tokens) == 5 - assert 
p.tokens[0].left.value == 'DATE(foo.bar)' - assert p.tokens[0].right.value == 'DATE(bar.baz)' - - p = sqlparse.parse('DATE(foo.bar) = bar.baz')[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Comparison) - assert len(p.tokens[0].tokens) == 5 - assert p.tokens[0].left.value == 'DATE(foo.bar)' - assert p.tokens[0].right.value == 'bar.baz' - - -@pytest.mark.parametrize('start', ['FOR', 'FOREACH']) -def test_forloops(start): - p = sqlparse.parse('{0} foo in bar LOOP foobar END LOOP'.format(start))[0] - assert (len(p.tokens)) == 1 - assert isinstance(p.tokens[0], sql.For) - - -def test_nested_for(): - p = sqlparse.parse('FOR foo LOOP FOR bar LOOP END LOOP END LOOP')[0] - assert len(p.tokens) == 1 - for1 = p.tokens[0] - assert for1.tokens[0].value == 'FOR' - assert for1.tokens[-1].value == 'END LOOP' - for2 = for1.tokens[6] - assert isinstance(for2, sql.For) - assert for2.tokens[0].value == 'FOR' - assert for2.tokens[-1].value == 'END LOOP' - - -def test_begin(): - p = sqlparse.parse('BEGIN foo END')[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Begin) - - -def test_keyword_followed_by_parenthesis(): - p = sqlparse.parse('USING(somecol')[0] - assert len(p.tokens) == 3 - assert p.tokens[0].ttype == T.Keyword - assert p.tokens[1].ttype == T.Punctuation - - -def test_nested_begin(): - p = sqlparse.parse('BEGIN foo BEGIN bar END END')[0] - assert len(p.tokens) == 1 - outer = p.tokens[0] - assert outer.tokens[0].value == 'BEGIN' - assert outer.tokens[-1].value == 'END' - inner = outer.tokens[4] - assert inner.tokens[0].value == 'BEGIN' - assert inner.tokens[-1].value == 'END' - assert isinstance(inner, sql.Begin) - - -def test_aliased_column_without_as(): - p = sqlparse.parse('foo bar')[0].tokens - assert len(p) == 1 - assert p[0].get_real_name() == 'foo' - assert p[0].get_alias() == 'bar' - - p = sqlparse.parse('foo.bar baz')[0].tokens[0] - assert p.get_parent_name() == 'foo' - assert p.get_real_name() == 'bar' - assert p.get_alias() 
== 'baz' - - -def test_qualified_function(): - p = sqlparse.parse('foo()')[0].tokens[0] - assert p.get_parent_name() is None - assert p.get_real_name() == 'foo' - - p = sqlparse.parse('foo.bar()')[0].tokens[0] - assert p.get_parent_name() == 'foo' - assert p.get_real_name() == 'bar' - - -def test_aliased_function_without_as(): - p = sqlparse.parse('foo() bar')[0].tokens[0] - assert p.get_parent_name() is None - assert p.get_real_name() == 'foo' - assert p.get_alias() == 'bar' - - p = sqlparse.parse('foo.bar() baz')[0].tokens[0] - assert p.get_parent_name() == 'foo' - assert p.get_real_name() == 'bar' - assert p.get_alias() == 'baz' - - -def test_aliased_literal_without_as(): - p = sqlparse.parse('1 foo')[0].tokens - assert len(p) == 1 - assert p[0].get_alias() == 'foo' - - -def test_grouping_as_cte(): - p = sqlparse.parse('foo AS WITH apple AS 1, banana AS 2')[0].tokens - assert len(p) > 4 - assert p[0].get_alias() is None - assert p[2].value == 'AS' - assert p[4].value == 'WITH' diff --git a/shell/ext-py/sqlparse-0.3.1/tests/test_keywords.py b/shell/ext-py/sqlparse-0.3.1/tests/test_keywords.py deleted file mode 100644 index c197f367c..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/test_keywords.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -import pytest - -from sqlparse import tokens -from sqlparse.keywords import SQL_REGEX - - -class TestSQLREGEX: - @pytest.mark.parametrize('number', ['1.0', '-1.0', - '1.', '-1.', - '.1', '-.1']) - def test_float_numbers(self, number): - ttype = next(tt for action, tt in SQL_REGEX if action(number)) - assert tokens.Number.Float == ttype diff --git a/shell/ext-py/sqlparse-0.3.1/tests/test_parse.py b/shell/ext-py/sqlparse-0.3.1/tests/test_parse.py deleted file mode 100644 index c28cb06eb..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/test_parse.py +++ /dev/null @@ -1,474 +0,0 @@ -# -*- coding: utf-8 -*- - -"""Tests sqlparse.parse().""" - -import pytest - -import sqlparse -from sqlparse import sql, tokens as T 
-from sqlparse.compat import StringIO, text_type - - -def test_parse_tokenize(): - s = 'select * from foo;' - stmts = sqlparse.parse(s) - assert len(stmts) == 1 - assert str(stmts[0]) == s - - -def test_parse_multistatement(): - sql1 = 'select * from foo;' - sql2 = 'select * from bar;' - stmts = sqlparse.parse(sql1 + sql2) - assert len(stmts) == 2 - assert str(stmts[0]) == sql1 - assert str(stmts[1]) == sql2 - - -@pytest.mark.parametrize('s', ['select\n*from foo;', - 'select\r\n*from foo', - 'select\r*from foo', - 'select\r\n*from foo\n']) -def test_parse_newlines(s): - p = sqlparse.parse(s)[0] - assert str(p) == s - - -def test_parse_within(): - s = 'foo(col1, col2)' - p = sqlparse.parse(s)[0] - col1 = p.tokens[0].tokens[1].tokens[1].tokens[0] - assert col1.within(sql.Function) - - -def test_parse_child_of(): - s = '(col1, col2)' - p = sqlparse.parse(s)[0] - assert p.tokens[0].tokens[1].is_child_of(p.tokens[0]) - s = 'select foo' - p = sqlparse.parse(s)[0] - assert not p.tokens[2].is_child_of(p.tokens[0]) - assert p.tokens[2].is_child_of(p) - - -def test_parse_has_ancestor(): - s = 'foo or (bar, baz)' - p = sqlparse.parse(s)[0] - baz = p.tokens[-1].tokens[1].tokens[-1] - assert baz.has_ancestor(p.tokens[-1].tokens[1]) - assert baz.has_ancestor(p.tokens[-1]) - assert baz.has_ancestor(p) - - -@pytest.mark.parametrize('s', ['.5', '.51', '1.5', '12.5']) -def test_parse_float(s): - t = sqlparse.parse(s)[0].tokens - assert len(t) == 1 - assert t[0].ttype is sqlparse.tokens.Number.Float - - -@pytest.mark.parametrize('s, holder', [ - ('select * from foo where user = ?', '?'), - ('select * from foo where user = :1', ':1'), - ('select * from foo where user = :name', ':name'), - ('select * from foo where user = %s', '%s'), - ('select * from foo where user = $a', '$a')]) -def test_parse_placeholder(s, holder): - t = sqlparse.parse(s)[0].tokens[-1].tokens - assert t[-1].ttype is sqlparse.tokens.Name.Placeholder - assert t[-1].value == holder - - -def 
test_parse_modulo_not_placeholder(): - tokens = list(sqlparse.lexer.tokenize('x %3')) - assert tokens[2][0] == sqlparse.tokens.Operator - - -def test_parse_access_symbol(): - # see issue27 - t = sqlparse.parse('select a.[foo bar] as foo')[0].tokens - assert isinstance(t[-1], sql.Identifier) - assert t[-1].get_name() == 'foo' - assert t[-1].get_real_name() == '[foo bar]' - assert t[-1].get_parent_name() == 'a' - - -def test_parse_square_brackets_notation_isnt_too_greedy(): - # see issue153 - t = sqlparse.parse('[foo], [bar]')[0].tokens - assert isinstance(t[0], sql.IdentifierList) - assert len(t[0].tokens) == 4 - assert t[0].tokens[0].get_real_name() == '[foo]' - assert t[0].tokens[-1].get_real_name() == '[bar]' - - -def test_parse_keyword_like_identifier(): - # see issue47 - t = sqlparse.parse('foo.key')[0].tokens - assert len(t) == 1 - assert isinstance(t[0], sql.Identifier) - - -def test_parse_function_parameter(): - # see issue94 - t = sqlparse.parse('abs(some_col)')[0].tokens[0].get_parameters() - assert len(t) == 1 - assert isinstance(t[0], sql.Identifier) - - -def test_parse_function_param_single_literal(): - t = sqlparse.parse('foo(5)')[0].tokens[0].get_parameters() - assert len(t) == 1 - assert t[0].ttype is T.Number.Integer - - -def test_parse_nested_function(): - t = sqlparse.parse('foo(bar(5))')[0].tokens[0].get_parameters() - assert len(t) == 1 - assert type(t[0]) is sql.Function - - -def test_quoted_identifier(): - t = sqlparse.parse('select x.y as "z" from foo')[0].tokens - assert isinstance(t[2], sql.Identifier) - assert t[2].get_name() == 'z' - assert t[2].get_real_name() == 'y' - - -@pytest.mark.parametrize('name', [ - 'foo', '_foo', # issue175 - '1_data', # valid MySQL table name, see issue337 -]) -def test_valid_identifier_names(name): - t = sqlparse.parse(name)[0].tokens - assert isinstance(t[0], sql.Identifier) - assert t[0].get_name() == name - - -def test_psql_quotation_marks(): - # issue83 - - # regression: make sure plain $$ work - t = 
sqlparse.split(""" - CREATE OR REPLACE FUNCTION testfunc1(integer) RETURNS integer AS $$ - .... - $$ LANGUAGE plpgsql; - CREATE OR REPLACE FUNCTION testfunc2(integer) RETURNS integer AS $$ - .... - $$ LANGUAGE plpgsql;""") - assert len(t) == 2 - - # make sure $SOMETHING$ works too - t = sqlparse.split(""" - CREATE OR REPLACE FUNCTION testfunc1(integer) RETURNS integer AS $PROC_1$ - .... - $PROC_1$ LANGUAGE plpgsql; - CREATE OR REPLACE FUNCTION testfunc2(integer) RETURNS integer AS $PROC_2$ - .... - $PROC_2$ LANGUAGE plpgsql;""") - assert len(t) == 2 - - -def test_double_precision_is_builtin(): - s = 'DOUBLE PRECISION' - t = sqlparse.parse(s)[0].tokens - assert len(t) == 1 - assert t[0].ttype == sqlparse.tokens.Name.Builtin - assert t[0].value == 'DOUBLE PRECISION' - - -@pytest.mark.parametrize('ph', ['?', ':1', ':foo', '%s', '%(foo)s']) -def test_placeholder(ph): - p = sqlparse.parse(ph)[0].tokens - assert len(p) == 1 - assert p[0].ttype is T.Name.Placeholder - - -@pytest.mark.parametrize('num', ['6.67428E-8', '1.988e33', '1e-12']) -def test_scientific_numbers(num): - p = sqlparse.parse(num)[0].tokens - assert len(p) == 1 - assert p[0].ttype is T.Number.Float - - -def test_single_quotes_are_strings(): - p = sqlparse.parse("'foo'")[0].tokens - assert len(p) == 1 - assert p[0].ttype is T.String.Single - - -def test_double_quotes_are_identifiers(): - p = sqlparse.parse('"foo"')[0].tokens - assert len(p) == 1 - assert isinstance(p[0], sql.Identifier) - - -def test_single_quotes_with_linebreaks(): - # issue118 - p = sqlparse.parse("'f\nf'")[0].tokens - assert len(p) == 1 - assert p[0].ttype is T.String.Single - - -def test_sqlite_identifiers(): - # Make sure we still parse sqlite style escapes - p = sqlparse.parse('[col1],[col2]')[0].tokens - id_names = [id_.get_name() for id_ in p[0].get_identifiers()] - assert len(p) == 1 - assert isinstance(p[0], sql.IdentifierList) - assert id_names == ['[col1]', '[col2]'] - - p = sqlparse.parse('[col1]+[col2]')[0] - types = 
[tok.ttype for tok in p.flatten()] - assert types == [T.Name, T.Operator, T.Name] - - -def test_simple_1d_array_index(): - p = sqlparse.parse('col[1]')[0].tokens - assert len(p) == 1 - assert p[0].get_name() == 'col' - indices = list(p[0].get_array_indices()) - assert len(indices) == 1 # 1-dimensional index - assert len(indices[0]) == 1 # index is single token - assert indices[0][0].value == '1' - - -def test_2d_array_index(): - p = sqlparse.parse('col[x][(y+1)*2]')[0].tokens - assert len(p) == 1 - assert p[0].get_name() == 'col' - assert len(list(p[0].get_array_indices())) == 2 # 2-dimensional index - - -def test_array_index_function_result(): - p = sqlparse.parse('somefunc()[1]')[0].tokens - assert len(p) == 1 - assert len(list(p[0].get_array_indices())) == 1 - - -def test_schema_qualified_array_index(): - p = sqlparse.parse('schem.col[1]')[0].tokens - assert len(p) == 1 - assert p[0].get_parent_name() == 'schem' - assert p[0].get_name() == 'col' - assert list(p[0].get_array_indices())[0][0].value == '1' - - -def test_aliased_array_index(): - p = sqlparse.parse('col[1] x')[0].tokens - assert len(p) == 1 - assert p[0].get_alias() == 'x' - assert p[0].get_real_name() == 'col' - assert list(p[0].get_array_indices())[0][0].value == '1' - - -def test_array_literal(): - # See issue #176 - p = sqlparse.parse('ARRAY[%s, %s]')[0] - assert len(p.tokens) == 2 - assert len(list(p.flatten())) == 7 - - -def test_typed_array_definition(): - # array indices aren't grouped with built-ins, but make sure we can extract - # identifier names - p = sqlparse.parse('x int, y int[], z int')[0] - names = [x.get_name() for x in p.get_sublists() - if isinstance(x, sql.Identifier)] - assert names == ['x', 'y', 'z'] - - -@pytest.mark.parametrize('s', ['select 1 -- foo', 'select 1 # foo']) -def test_single_line_comments(s): - # see issue178 - p = sqlparse.parse(s)[0] - assert len(p.tokens) == 5 - assert p.tokens[-1].ttype == T.Comment.Single - - -@pytest.mark.parametrize('s', ['foo', '@foo', 
'#foo', '##foo']) -def test_names_and_special_names(s): - # see issue192 - p = sqlparse.parse(s)[0] - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Identifier) - - -def test_get_token_at_offset(): - p = sqlparse.parse('select * from dual')[0] - # 0123456789 - assert p.get_token_at_offset(0) == p.tokens[0] - assert p.get_token_at_offset(1) == p.tokens[0] - assert p.get_token_at_offset(6) == p.tokens[1] - assert p.get_token_at_offset(7) == p.tokens[2] - assert p.get_token_at_offset(8) == p.tokens[3] - assert p.get_token_at_offset(9) == p.tokens[4] - assert p.get_token_at_offset(10) == p.tokens[4] - - -def test_pprint(): - p = sqlparse.parse('select a0, b0, c0, d0, e0 from ' - '(select * from dual) q0 where 1=1 and 2=2')[0] - output = StringIO() - - p._pprint_tree(f=output) - pprint = '\n'.join([ - "|- 0 DML 'select'", - "|- 1 Whitespace ' '", - "|- 2 IdentifierList 'a0, b0...'", - "| |- 0 Identifier 'a0'", - "| | `- 0 Name 'a0'", - "| |- 1 Punctuation ','", - "| |- 2 Whitespace ' '", - "| |- 3 Identifier 'b0'", - "| | `- 0 Name 'b0'", - "| |- 4 Punctuation ','", - "| |- 5 Whitespace ' '", - "| |- 6 Identifier 'c0'", - "| | `- 0 Name 'c0'", - "| |- 7 Punctuation ','", - "| |- 8 Whitespace ' '", - "| |- 9 Identifier 'd0'", - "| | `- 0 Name 'd0'", - "| |- 10 Punctuation ','", - "| |- 11 Whitespace ' '", - "| `- 12 Float 'e0'", - "|- 3 Whitespace ' '", - "|- 4 Keyword 'from'", - "|- 5 Whitespace ' '", - "|- 6 Identifier '(selec...'", - "| |- 0 Parenthesis '(selec...'", - "| | |- 0 Punctuation '('", - "| | |- 1 DML 'select'", - "| | |- 2 Whitespace ' '", - "| | |- 3 Wildcard '*'", - "| | |- 4 Whitespace ' '", - "| | |- 5 Keyword 'from'", - "| | |- 6 Whitespace ' '", - "| | |- 7 Identifier 'dual'", - "| | | `- 0 Name 'dual'", - "| | `- 8 Punctuation ')'", - "| |- 1 Whitespace ' '", - "| `- 2 Identifier 'q0'", - "| `- 0 Name 'q0'", - "|- 7 Whitespace ' '", - "`- 8 Where 'where ...'", - " |- 0 Keyword 'where'", - " |- 1 Whitespace ' '", - " |- 2 Comparison 
'1=1'", - " | |- 0 Integer '1'", - " | |- 1 Comparison '='", - " | `- 2 Integer '1'", - " |- 3 Whitespace ' '", - " |- 4 Keyword 'and'", - " |- 5 Whitespace ' '", - " `- 6 Comparison '2=2'", - " |- 0 Integer '2'", - " |- 1 Comparison '='", - " `- 2 Integer '2'", - ""]) - assert output.getvalue() == pprint - - -def test_wildcard_multiplication(): - p = sqlparse.parse('select * from dual')[0] - assert p.tokens[2].ttype == T.Wildcard - - p = sqlparse.parse('select a0.* from dual a0')[0] - assert p.tokens[2][2].ttype == T.Wildcard - - p = sqlparse.parse('select 1 * 2 from dual')[0] - assert p.tokens[2][2].ttype == T.Operator - - -def test_stmt_tokens_parents(): - # see issue 226 - s = "CREATE TABLE test();" - stmt = sqlparse.parse(s)[0] - for token in stmt.tokens: - assert token.has_ancestor(stmt) - - -@pytest.mark.parametrize('sql, is_literal', [ - ('$$foo$$', True), - ('$_$foo$_$', True), - ('$token$ foo $token$', True), - # don't parse inner tokens - ('$_$ foo $token$bar$token$ baz$_$', True), - ('$A$ foo $B$', False) # tokens don't match -]) -def test_dbldollar_as_literal(sql, is_literal): - # see issue 277 - p = sqlparse.parse(sql)[0] - if is_literal: - assert len(p.tokens) == 1 - assert p.tokens[0].ttype == T.Literal - else: - for token in p.tokens: - assert token.ttype != T.Literal - - -def test_non_ascii(): - _test_non_ascii = u"insert into test (id, name) values (1, 'теÑÑ‚');" - - s = _test_non_ascii - stmts = sqlparse.parse(s) - assert len(stmts) == 1 - statement = stmts[0] - assert text_type(statement) == s - assert statement._pprint_tree() is None - - s = _test_non_ascii.encode('utf-8') - stmts = sqlparse.parse(s, 'utf-8') - assert len(stmts) == 1 - statement = stmts[0] - assert text_type(statement) == _test_non_ascii - assert statement._pprint_tree() is None - - -def test_get_real_name(): - # issue 369 - s = u"update a t set t.b=1" - stmts = sqlparse.parse(s) - assert len(stmts) == 1 - assert 'a' == stmts[0].tokens[2].get_real_name() - assert 't' == 
stmts[0].tokens[2].get_alias() - - -def test_from_subquery(): - # issue 446 - s = u'from(select 1)' - stmts = sqlparse.parse(s) - assert len(stmts) == 1 - assert len(stmts[0].tokens) == 2 - assert stmts[0].tokens[0].value == 'from' - assert stmts[0].tokens[0].ttype == T.Keyword - - s = u'from (select 1)' - stmts = sqlparse.parse(s) - assert len(stmts) == 1 - assert len(stmts[0].tokens) == 3 - assert stmts[0].tokens[0].value == 'from' - assert stmts[0].tokens[0].ttype == T.Keyword - assert stmts[0].tokens[1].ttype == T.Whitespace - - -def test_parenthesis(): - tokens = sqlparse.parse("(\n\n1\n\n)")[0].tokens[0].tokens - assert list(map(lambda t: t.ttype, tokens)) == [T.Punctuation, - T.Newline, - T.Newline, - T.Number.Integer, - T.Newline, - T.Newline, - T.Punctuation] - tokens = sqlparse.parse("(\n\n 1 \n\n)")[0].tokens[0].tokens - assert list(map(lambda t: t.ttype, tokens)) == [T.Punctuation, - T.Newline, - T.Newline, - T.Whitespace, - T.Number.Integer, - T.Whitespace, - T.Newline, - T.Newline, - T.Punctuation] diff --git a/shell/ext-py/sqlparse-0.3.1/tests/test_regressions.py b/shell/ext-py/sqlparse-0.3.1/tests/test_regressions.py deleted file mode 100644 index 2ed0ff326..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/test_regressions.py +++ /dev/null @@ -1,408 +0,0 @@ -# -*- coding: utf-8 -*- - -import pytest - -import sqlparse -from sqlparse import sql, tokens as T -from sqlparse.compat import PY2 - - -def test_issue9(): - # make sure where doesn't consume parenthesis - p = sqlparse.parse('(where 1)')[0] - assert isinstance(p, sql.Statement) - assert len(p.tokens) == 1 - assert isinstance(p.tokens[0], sql.Parenthesis) - prt = p.tokens[0] - assert len(prt.tokens) == 3 - assert prt.tokens[0].ttype == T.Punctuation - assert prt.tokens[-1].ttype == T.Punctuation - - -def test_issue13(): - parsed = sqlparse.parse(("select 'one';\n" - "select 'two\\'';\n" - "select 'three';")) - assert len(parsed) == 3 - assert str(parsed[1]).strip() == "select 'two\\'';" - - 
-@pytest.mark.parametrize('s', ['--hello', '-- hello', '--hello\n', - '--', '--\n']) -def test_issue26(s): - # parse stand-alone comments - p = sqlparse.parse(s)[0] - assert len(p.tokens) == 1 - assert p.tokens[0].ttype is T.Comment.Single - - -@pytest.mark.parametrize('value', ['create', 'CREATE']) -def test_issue34(value): - t = sqlparse.parse("create")[0].token_first() - assert t.match(T.Keyword.DDL, value) is True - - -def test_issue35(): - # missing space before LIMIT. Updated for #321 - sql = sqlparse.format("select * from foo where bar = 1 limit 1", - reindent=True) - assert sql == "\n".join([ - "select *", - "from foo", - "where bar = 1", - "limit 1"]) - - -def test_issue38(): - sql = sqlparse.format("SELECT foo; -- comment", strip_comments=True) - assert sql == "SELECT foo;" - sql = sqlparse.format("/* foo */", strip_comments=True) - assert sql == "" - - -def test_issue39(): - p = sqlparse.parse('select user.id from user')[0] - assert len(p.tokens) == 7 - idt = p.tokens[2] - assert idt.__class__ == sql.Identifier - assert len(idt.tokens) == 3 - assert idt.tokens[0].match(T.Name, 'user') is True - assert idt.tokens[1].match(T.Punctuation, '.') is True - assert idt.tokens[2].match(T.Name, 'id') is True - - -def test_issue40(): - # make sure identifier lists in subselects are grouped - p = sqlparse.parse(('SELECT id, name FROM ' - '(SELECT id, name FROM bar) as foo'))[0] - assert len(p.tokens) == 7 - assert p.tokens[2].__class__ == sql.IdentifierList - assert p.tokens[-1].__class__ == sql.Identifier - assert p.tokens[-1].get_name() == 'foo' - sp = p.tokens[-1].tokens[0] - assert sp.tokens[3].__class__ == sql.IdentifierList - # make sure that formatting works as expected - s = sqlparse.format('SELECT id == name FROM ' - '(SELECT id, name FROM bar)', reindent=True) - assert s == '\n'.join([ - 'SELECT id == name', - 'FROM', - ' (SELECT id,', - ' name', - ' FROM bar)']) - - s = sqlparse.format('SELECT id == name FROM ' - '(SELECT id, name FROM bar) as foo', 
reindent=True) - assert s == '\n'.join([ - 'SELECT id == name', - 'FROM', - ' (SELECT id,', - ' name', - ' FROM bar) as foo']) - - -@pytest.mark.parametrize('s', ['select x.y::text as z from foo', - 'select x.y::text as "z" from foo', - 'select x."y"::text as z from foo', - 'select x."y"::text as "z" from foo', - 'select "x".y::text as z from foo', - 'select "x".y::text as "z" from foo', - 'select "x"."y"::text as z from foo', - 'select "x"."y"::text as "z" from foo']) -@pytest.mark.parametrize('func_name, result', [('get_name', 'z'), - ('get_real_name', 'y'), - ('get_parent_name', 'x'), - ('get_alias', 'z'), - ('get_typecast', 'text')]) -def test_issue78(s, func_name, result): - # the bug author provided this nice examples, let's use them! - p = sqlparse.parse(s)[0] - i = p.tokens[2] - assert isinstance(i, sql.Identifier) - - func = getattr(i, func_name) - assert func() == result - - -def test_issue83(): - sql = """ CREATE OR REPLACE FUNCTION func_a(text) - RETURNS boolean LANGUAGE plpgsql STRICT IMMUTABLE AS - $_$ - BEGIN - ... - END; - $_$; - - CREATE OR REPLACE FUNCTION func_b(text) - RETURNS boolean LANGUAGE plpgsql STRICT IMMUTABLE AS - $_$ - BEGIN - ... - END; - $_$; - - ALTER TABLE..... ;""" - t = sqlparse.split(sql) - assert len(t) == 3 - - -def test_comment_encoding_when_reindent(): - # There was an UnicodeEncodeError in the reindent filter that - # casted every comment followed by a keyword to str. 
- sql = u'select foo -- Comment containing Ümläuts\nfrom bar' - formatted = sqlparse.format(sql, reindent=True) - assert formatted == sql - - -def test_parse_sql_with_binary(): - # See https://github.com/andialbrecht/sqlparse/pull/88 - # digest = '‚|ËêŠplL4¡h‘øN{' - digest = '\x82|\xcb\x0e\xea\x8aplL4\xa1h\x91\xf8N{' - sql = "select * from foo where bar = '{0}'".format(digest) - formatted = sqlparse.format(sql, reindent=True) - tformatted = "select *\nfrom foo\nwhere bar = '{0}'".format(digest) - if PY2: - tformatted = tformatted.decode('unicode-escape') - assert formatted == tformatted - - -def test_dont_alias_keywords(): - # The _group_left_right function had a bug where the check for the - # left side wasn't handled correctly. In one case this resulted in - # a keyword turning into an identifier. - p = sqlparse.parse('FROM AS foo')[0] - assert len(p.tokens) == 5 - assert p.tokens[0].ttype is T.Keyword - assert p.tokens[2].ttype is T.Keyword - - -def test_format_accepts_encoding(load_file): - # issue20 - sql = load_file('test_cp1251.sql', 'cp1251') - formatted = sqlparse.format(sql, reindent=True, encoding='cp1251') - tformatted = u'insert into foo\nvalues (1); -- ПеÑÐ½Ñ Ð¿Ñ€Ð¾ надежду' - - assert formatted == tformatted - - -def test_stream(get_stream): - with get_stream("stream.sql") as stream: - p = sqlparse.parse(stream)[0] - assert p.get_type() == 'INSERT' - - -def test_issue90(): - sql = ('UPDATE "gallery_photo" SET "owner_id" = 4018, "deleted_at" = NULL,' - ' "width" = NULL, "height" = NULL, "rating_votes" = 0,' - ' "rating_score" = 0, "thumbnail_width" = NULL,' - ' "thumbnail_height" = NULL, "price" = 1, "description" = NULL') - formatted = sqlparse.format(sql, reindent=True) - tformatted = '\n'.join([ - 'UPDATE "gallery_photo"', - 'SET "owner_id" = 4018,', - ' "deleted_at" = NULL,', - ' "width" = NULL,', - ' "height" = NULL,', - ' "rating_votes" = 0,', - ' "rating_score" = 0,', - ' "thumbnail_width" = NULL,', - ' "thumbnail_height" = NULL,', - ' "price" 
= 1,', - ' "description" = NULL']) - assert formatted == tformatted - - -def test_except_formatting(): - sql = 'SELECT 1 FROM foo WHERE 2 = 3 EXCEPT SELECT 2 FROM bar WHERE 1 = 2' - formatted = sqlparse.format(sql, reindent=True) - tformatted = '\n'.join([ - 'SELECT 1', - 'FROM foo', - 'WHERE 2 = 3', - 'EXCEPT', - 'SELECT 2', - 'FROM bar', - 'WHERE 1 = 2']) - assert formatted == tformatted - - -def test_null_with_as(): - sql = 'SELECT NULL AS c1, NULL AS c2 FROM t1' - formatted = sqlparse.format(sql, reindent=True) - tformatted = '\n'.join([ - 'SELECT NULL AS c1,', - ' NULL AS c2', - 'FROM t1']) - assert formatted == tformatted - - -def test_issue190_open_file(filepath): - path = filepath('stream.sql') - with open(path) as stream: - p = sqlparse.parse(stream)[0] - assert p.get_type() == 'INSERT' - - -def test_issue193_splitting_function(): - sql = """ CREATE FUNCTION a(x VARCHAR(20)) RETURNS VARCHAR(20) - BEGIN - DECLARE y VARCHAR(20); - RETURN x; - END; - SELECT * FROM a.b;""" - statements = sqlparse.split(sql) - assert len(statements) == 2 - - -def test_issue194_splitting_function(): - sql = """ CREATE FUNCTION a(x VARCHAR(20)) RETURNS VARCHAR(20) - BEGIN - DECLARE y VARCHAR(20); - IF (1 = 1) THEN - SET x = y; - END IF; - RETURN x; - END; - SELECT * FROM a.b;""" - statements = sqlparse.split(sql) - assert len(statements) == 2 - - -def test_issue186_get_type(): - sql = "-- comment\ninsert into foo" - p = sqlparse.parse(sql)[0] - assert p.get_type() == 'INSERT' - - -def test_issue212_py2unicode(): - t1 = sql.Token(T.String, u'schöner ') - t2 = sql.Token(T.String, 'bug') - token_list = sql.TokenList([t1, t2]) - assert str(token_list) == 'schöner bug' - - -def test_issue213_leadingws(): - sql = " select * from foo" - assert sqlparse.format(sql, strip_whitespace=True) == "select * from foo" - - -def test_issue227_gettype_cte(): - select_stmt = sqlparse.parse('SELECT 1, 2, 3 FROM foo;') - assert select_stmt[0].get_type() == 'SELECT' - with_stmt = sqlparse.parse('WITH 
foo AS (SELECT 1, 2, 3)' - 'SELECT * FROM foo;') - assert with_stmt[0].get_type() == 'SELECT' - with2_stmt = sqlparse.parse(""" - WITH foo AS (SELECT 1 AS abc, 2 AS def), - bar AS (SELECT * FROM something WHERE x > 1) - INSERT INTO elsewhere SELECT * FROM foo JOIN bar;""") - assert with2_stmt[0].get_type() == 'INSERT' - - -def test_issue207_runaway_format(): - sql = 'select 1 from (select 1 as one, 2 as two, 3 from dual) t0' - p = sqlparse.format(sql, reindent=True) - assert p == '\n'.join([ - "select 1", - "from", - " (select 1 as one,", - " 2 as two,", - " 3", - " from dual) t0"]) - - -def test_token_next_doesnt_ignore_skip_cm(): - sql = '--comment\nselect 1' - tok = sqlparse.parse(sql)[0].token_next(-1, skip_cm=True)[1] - assert tok.value == 'select' - - -@pytest.mark.parametrize('s', [ - 'SELECT x AS', - 'AS' -]) -def test_issue284_as_grouping(s): - p = sqlparse.parse(s)[0] - assert s == str(p) - - -def test_issue315_utf8_by_default(): - # Make sure the lexer can handle utf-8 string by default correctly - # digest = 'é½å¤©å¤§åœ£.カラフルãªé›².사랑해요' - # The digest contains Chinese, Japanese and Korean characters - # All in 'utf-8' encoding. - digest = ( - '\xe9\xbd\x90\xe5\xa4\xa9\xe5\xa4\xa7\xe5\x9c\xa3.' - '\xe3\x82\xab\xe3\x83\xa9\xe3\x83\x95\xe3\x83\xab\xe3\x81\xaa\xe9' - '\x9b\xb2.' 
- '\xec\x82\xac\xeb\x9e\x91\xed\x95\xb4\xec\x9a\x94' - ) - sql = "select * from foo where bar = '{0}'".format(digest) - formatted = sqlparse.format(sql, reindent=True) - tformatted = "select *\nfrom foo\nwhere bar = '{0}'".format(digest) - if PY2: - tformatted = tformatted.decode('utf-8') - assert formatted == tformatted - - -def test_issue322_concurrently_is_keyword(): - s = 'CREATE INDEX CONCURRENTLY myindex ON mytable(col1);' - p = sqlparse.parse(s)[0] - - assert len(p.tokens) == 12 - assert p.tokens[0].ttype is T.Keyword.DDL # CREATE - assert p.tokens[2].ttype is T.Keyword # INDEX - assert p.tokens[4].ttype is T.Keyword # CONCURRENTLY - assert p.tokens[4].value == 'CONCURRENTLY' - assert isinstance(p.tokens[6], sql.Identifier) - assert p.tokens[6].value == 'myindex' - - -@pytest.mark.parametrize('s', [ - 'SELECT @min_price:=MIN(price), @max_price:=MAX(price) FROM shop;', - 'SELECT @min_price:=MIN(price), @max_price:=MAX(price) FROM shop', - -]) -def test_issue359_index_error_assignments(s): - sqlparse.parse(s) - sqlparse.format(s, strip_comments=True) - - -def test_issue469_copy_as_psql_command(): - formatted = sqlparse.format( - '\\copy select * from foo', - keyword_case='upper', identifier_case='capitalize') - assert formatted == '\\copy SELECT * FROM Foo' - - -@pytest.mark.xfail(reason='Needs to be fixed') -def test_issue484_comments_and_newlines(): - formatted = sqlparse.format('\n'.join([ - 'Create table myTable', - '(', - ' myId TINYINT NOT NULL, --my special comment', - ' myName VARCHAR2(100) NOT NULL', - ')']), - strip_comments=True) - assert formatted == ('\n'.join([ - 'Create table myTable', - '(', - ' myId TINYINT NOT NULL,', - ' myName VARCHAR2(100) NOT NULL', - ')'])) - - -def test_issue485_split_multi(): - p_sql = '''CREATE OR REPLACE RULE ruled_tab_2rules AS ON INSERT -TO public.ruled_tab -DO instead ( -select 1; -select 2; -);''' - assert len(sqlparse.split(p_sql)) == 1 - - -def test_issue489_tzcasts(): - p = sqlparse.parse('select bar at time 
zone \'UTC\' as foo')[0] - assert p.tokens[-1].has_alias() is True - assert p.tokens[-1].get_alias() == 'foo' diff --git a/shell/ext-py/sqlparse-0.3.1/tests/test_split.py b/shell/ext-py/sqlparse-0.3.1/tests/test_split.py deleted file mode 100644 index a93e3d401..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/test_split.py +++ /dev/null @@ -1,151 +0,0 @@ -# -*- coding: utf-8 -*- - -# Tests splitting functions. - -import types - -import pytest - -import sqlparse -from sqlparse.compat import StringIO, text_type - - -def test_split_semicolon(): - sql1 = 'select * from foo;' - sql2 = "select * from foo where bar = 'foo;bar';" - stmts = sqlparse.parse(''.join([sql1, sql2])) - assert len(stmts) == 2 - assert str(stmts[0]) == sql1 - assert str(stmts[1]) == sql2 - - -def test_split_backslash(): - stmts = sqlparse.parse(r"select '\\'; select '\''; select '\\\'';") - assert len(stmts) == 3 - - -@pytest.mark.parametrize('fn', ['function.sql', - 'function_psql.sql', - 'function_psql2.sql', - 'function_psql3.sql', - 'function_psql4.sql']) -def test_split_create_function(load_file, fn): - sql = load_file(fn) - stmts = sqlparse.parse(sql) - assert len(stmts) == 1 - assert text_type(stmts[0]) == sql - - -def test_split_dashcomments(load_file): - sql = load_file('dashcomment.sql') - stmts = sqlparse.parse(sql) - assert len(stmts) == 3 - assert ''.join(str(q) for q in stmts) == sql - - -@pytest.mark.parametrize('s', ['select foo; -- comment\n', - 'select foo; -- comment\r', - 'select foo; -- comment\r\n', - 'select foo; -- comment']) -def test_split_dashcomments_eol(s): - stmts = sqlparse.parse(s) - assert len(stmts) == 1 - - -def test_split_begintag(load_file): - sql = load_file('begintag.sql') - stmts = sqlparse.parse(sql) - assert len(stmts) == 3 - assert ''.join(str(q) for q in stmts) == sql - - -def test_split_begintag_2(load_file): - sql = load_file('begintag_2.sql') - stmts = sqlparse.parse(sql) - assert len(stmts) == 1 - assert ''.join(str(q) for q in stmts) == sql - - 
-def test_split_dropif(): - sql = 'DROP TABLE IF EXISTS FOO;\n\nSELECT * FROM BAR;' - stmts = sqlparse.parse(sql) - assert len(stmts) == 2 - assert ''.join(str(q) for q in stmts) == sql - - -def test_split_comment_with_umlaut(): - sql = (u'select * from foo;\n' - u'-- Testing an umlaut: ä\n' - u'select * from bar;') - stmts = sqlparse.parse(sql) - assert len(stmts) == 2 - assert ''.join(text_type(q) for q in stmts) == sql - - -def test_split_comment_end_of_line(): - sql = ('select * from foo; -- foo\n' - 'select * from bar;') - stmts = sqlparse.parse(sql) - assert len(stmts) == 2 - assert ''.join(str(q) for q in stmts) == sql - # make sure the comment belongs to first query - assert str(stmts[0]) == 'select * from foo; -- foo\n' - - -def test_split_casewhen(): - sql = ("SELECT case when val = 1 then 2 else null end as foo;\n" - "comment on table actor is 'The actor table.';") - stmts = sqlparse.split(sql) - assert len(stmts) == 2 - - -def test_split_cursor_declare(): - sql = ('DECLARE CURSOR "foo" AS SELECT 1;\n' - 'SELECT 2;') - stmts = sqlparse.split(sql) - assert len(stmts) == 2 - - -def test_split_if_function(): # see issue 33 - # don't let IF as a function confuse the splitter - sql = ('CREATE TEMPORARY TABLE tmp ' - 'SELECT IF(a=1, a, b) AS o FROM one; ' - 'SELECT t FROM two') - stmts = sqlparse.split(sql) - assert len(stmts) == 2 - - -def test_split_stream(): - stream = StringIO("SELECT 1; SELECT 2;") - stmts = sqlparse.parsestream(stream) - assert isinstance(stmts, types.GeneratorType) - assert len(list(stmts)) == 2 - - -def test_split_encoding_parsestream(): - stream = StringIO("SELECT 1; SELECT 2;") - stmts = list(sqlparse.parsestream(stream)) - assert isinstance(stmts[0].tokens[0].value, text_type) - - -def test_split_unicode_parsestream(): - stream = StringIO(u'SELECT ö') - stmts = list(sqlparse.parsestream(stream)) - assert str(stmts[0]) == 'SELECT ö' - - -def test_split_simple(): - stmts = sqlparse.split('select * from foo; select * from bar;') - 
assert len(stmts) == 2 - assert stmts[0] == 'select * from foo;' - assert stmts[1] == 'select * from bar;' - - -def test_split_quotes_with_new_line(): - stmts = sqlparse.split('select "foo\nbar"') - assert len(stmts) == 1 - assert stmts[0] == 'select "foo\nbar"' - - stmts = sqlparse.split("select 'foo\n\bar'") - assert len(stmts) == 1 - assert stmts[0] == "select 'foo\n\bar'" diff --git a/shell/ext-py/sqlparse-0.3.1/tests/test_tokenize.py b/shell/ext-py/sqlparse-0.3.1/tests/test_tokenize.py deleted file mode 100644 index 3e8831b55..000000000 --- a/shell/ext-py/sqlparse-0.3.1/tests/test_tokenize.py +++ /dev/null @@ -1,238 +0,0 @@ -# -*- coding: utf-8 -*- - -import types - -import pytest - -import sqlparse -from sqlparse import lexer -from sqlparse import sql, tokens as T -from sqlparse.compat import StringIO - - -def test_tokenize_simple(): - s = 'select * from foo;' - stream = lexer.tokenize(s) - assert isinstance(stream, types.GeneratorType) - tokens = list(stream) - assert len(tokens) == 8 - assert len(tokens[0]) == 2 - assert tokens[0] == (T.Keyword.DML, 'select') - assert tokens[-1] == (T.Punctuation, ';') - - -def test_tokenize_backticks(): - s = '`foo`.`bar`' - tokens = list(lexer.tokenize(s)) - assert len(tokens) == 3 - assert tokens[0] == (T.Name, '`foo`') - - -@pytest.mark.parametrize('s', ['foo\nbar\n', 'foo\rbar\r', - 'foo\r\nbar\r\n', 'foo\r\nbar\n']) -def test_tokenize_linebreaks(s): - # issue1 - tokens = lexer.tokenize(s) - assert ''.join(str(x[1]) for x in tokens) == s - - -def test_tokenize_inline_keywords(): - # issue 7 - s = "create created_foo" - tokens = list(lexer.tokenize(s)) - assert len(tokens) == 3 - assert tokens[0][0] == T.Keyword.DDL - assert tokens[2][0] == T.Name - assert tokens[2][1] == 'created_foo' - s = "enddate" - tokens = list(lexer.tokenize(s)) - assert len(tokens) == 1 - assert tokens[0][0] == T.Name - s = "join_col" - tokens = list(lexer.tokenize(s)) - assert len(tokens) == 1 - assert tokens[0][0] == T.Name - s = "left 
join_col" - tokens = list(lexer.tokenize(s)) - assert len(tokens) == 3 - assert tokens[2][0] == T.Name - assert tokens[2][1] == 'join_col' - - -def test_tokenize_negative_numbers(): - s = "values(-1)" - tokens = list(lexer.tokenize(s)) - assert len(tokens) == 4 - assert tokens[2][0] == T.Number.Integer - assert tokens[2][1] == '-1' - - -def test_token_str(): - token = sql.Token(None, 'FoO') - assert str(token) == 'FoO' - - -def test_token_repr(): - token = sql.Token(T.Keyword, 'foo') - tst = "=3.5') - tornado_deps = ['tornado>=4.0'] - twisted_deps = ['twisted'] - - setup(name='thrift', - version='0.16.0', - description='Python bindings for the Apache Thrift RPC system', - long_description=read_file("README.md"), - long_description_content_type="text/markdown", - author='Apache Thrift Developers', - author_email='dev@thrift.apache.org', - url='http://thrift.apache.org', - license='Apache License 2.0', - install_requires=['six>=1.7.2'], - extras_require={ - 'ssl': ssl_deps, - 'tornado': tornado_deps, - 'twisted': twisted_deps, - 'all': ssl_deps + tornado_deps + twisted_deps, - }, - packages=[ - 'thrift', - 'thrift.protocol', - 'thrift.transport', - 'thrift.server', - ], - package_dir={'thrift': 'src'}, - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Environment :: Console', - 'Intended Audience :: Developers', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 3', - 'Topic :: Software Development :: Libraries', - 'Topic :: System :: Networking' - ], - zip_safe=False, - **extensions - ) - - -try: - with_binary = True - run_setup(with_binary) -except BuildFailed: - print() - print('*' * 80) - print("An error occurred while trying to compile with the C extension enabled") - print("Attempting to build without the extension now") - print('*' * 80) - print() - - run_setup(False) diff --git a/shell/ext-py/thrift-0.16.0/src/TMultiplexedProcessor.py 
b/shell/ext-py/thrift-0.16.0/src/TMultiplexedProcessor.py deleted file mode 100644 index ff88430bd..000000000 --- a/shell/ext-py/thrift-0.16.0/src/TMultiplexedProcessor.py +++ /dev/null @@ -1,82 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - -from thrift.Thrift import TProcessor, TMessageType -from thrift.protocol import TProtocolDecorator, TMultiplexedProtocol -from thrift.protocol.TProtocol import TProtocolException - - -class TMultiplexedProcessor(TProcessor): - def __init__(self): - self.defaultProcessor = None - self.services = {} - - def registerDefault(self, processor): - """ - If a non-multiplexed processor connects to the server and wants to - communicate, use the given processor to handle it. This mechanism - allows servers to upgrade from non-multiplexed to multiplexed in a - backwards-compatible way and still handle old clients. 
- """ - self.defaultProcessor = processor - - def registerProcessor(self, serviceName, processor): - self.services[serviceName] = processor - - def on_message_begin(self, func): - for key in self.services.keys(): - self.services[key].on_message_begin(func) - - def process(self, iprot, oprot): - (name, type, seqid) = iprot.readMessageBegin() - if type != TMessageType.CALL and type != TMessageType.ONEWAY: - raise TProtocolException( - TProtocolException.NOT_IMPLEMENTED, - "TMultiplexedProtocol only supports CALL & ONEWAY") - - index = name.find(TMultiplexedProtocol.SEPARATOR) - if index < 0: - if self.defaultProcessor: - return self.defaultProcessor.process( - StoredMessageProtocol(iprot, (name, type, seqid)), oprot) - else: - raise TProtocolException( - TProtocolException.NOT_IMPLEMENTED, - "Service name not found in message name: " + name + ". " + - "Did you forget to use TMultiplexedProtocol in your client?") - - serviceName = name[0:index] - call = name[index + len(TMultiplexedProtocol.SEPARATOR):] - if serviceName not in self.services: - raise TProtocolException( - TProtocolException.NOT_IMPLEMENTED, - "Service name not found: " + serviceName + ". " + - "Did you forget to call registerProcessor()?") - - standardMessage = (call, type, seqid) - return self.services[serviceName].process( - StoredMessageProtocol(iprot, standardMessage), oprot) - - -class StoredMessageProtocol(TProtocolDecorator.TProtocolDecorator): - def __init__(self, protocol, messageBegin): - self.messageBegin = messageBegin - - def readMessageBegin(self): - return self.messageBegin diff --git a/shell/ext-py/thrift-0.16.0/src/TRecursive.py b/shell/ext-py/thrift-0.16.0/src/TRecursive.py deleted file mode 100644 index abf202cb1..000000000 --- a/shell/ext-py/thrift-0.16.0/src/TRecursive.py +++ /dev/null @@ -1,83 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -from thrift.Thrift import TType - -TYPE_IDX = 1 -SPEC_ARGS_IDX = 3 -SPEC_ARGS_CLASS_REF_IDX = 0 -SPEC_ARGS_THRIFT_SPEC_IDX = 1 - - -def fix_spec(all_structs): - """Wire up recursive references for all TStruct definitions inside of each thrift_spec.""" - for struc in all_structs: - spec = struc.thrift_spec - for thrift_spec in spec: - if thrift_spec is None: - continue - elif thrift_spec[TYPE_IDX] == TType.STRUCT: - other = thrift_spec[SPEC_ARGS_IDX][SPEC_ARGS_CLASS_REF_IDX].thrift_spec - thrift_spec[SPEC_ARGS_IDX][SPEC_ARGS_THRIFT_SPEC_IDX] = other - elif thrift_spec[TYPE_IDX] in (TType.LIST, TType.SET): - _fix_list_or_set(thrift_spec[SPEC_ARGS_IDX]) - elif thrift_spec[TYPE_IDX] == TType.MAP: - _fix_map(thrift_spec[SPEC_ARGS_IDX]) - - -def _fix_list_or_set(element_type): - # For a list or set, the thrift_spec entry looks like, - # (1, TType.LIST, 'lister', (TType.STRUCT, [RecList, None], False), None, ), # 1 - # so ``element_type`` will be, - # (TType.STRUCT, [RecList, None], False) - if element_type[0] == TType.STRUCT: - element_type[1][1] = element_type[1][0].thrift_spec - elif element_type[0] in (TType.LIST, TType.SET): - _fix_list_or_set(element_type[1]) - elif element_type[0] == TType.MAP: - _fix_map(element_type[1]) - - -def _fix_map(element_type): - # For a map of key -> value type, ``element_type`` will be, - # (TType.I16, None, TType.STRUCT, [RecMapBasic, None], False), None, ) - # which is 
just a normal struct definition. - # - # For a map of key -> list / set, ``element_type`` will be, - # (TType.I16, None, TType.LIST, (TType.STRUCT, [RecMapList, None], False), False) - # and we need to process the 3rd element as a list. - # - # For a map of key -> map, ``element_type`` will be, - # (TType.I16, None, TType.MAP, (TType.I16, None, TType.STRUCT, - # [RecMapMap, None], False), False) - # and need to process 3rd element as a map. - - # Is the map key a struct? - if element_type[0] == TType.STRUCT: - element_type[1][1] = element_type[1][0].thrift_spec - elif element_type[0] in (TType.LIST, TType.SET): - _fix_list_or_set(element_type[1]) - elif element_type[0] == TType.MAP: - _fix_map(element_type[1]) - - # Is the map value a struct? - if element_type[2] == TType.STRUCT: - element_type[3][1] = element_type[3][0].thrift_spec - elif element_type[2] in (TType.LIST, TType.SET): - _fix_list_or_set(element_type[3]) - elif element_type[2] == TType.MAP: - _fix_map(element_type[3]) diff --git a/shell/ext-py/thrift-0.16.0/src/TSCons.py b/shell/ext-py/thrift-0.16.0/src/TSCons.py deleted file mode 100644 index bc67d7069..000000000 --- a/shell/ext-py/thrift-0.16.0/src/TSCons.py +++ /dev/null @@ -1,36 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. -# - -from os import path -from SCons.Builder import Builder -from six.moves import map - - -def scons_env(env, add=''): - opath = path.dirname(path.abspath('$TARGET')) - lstr = 'thrift --gen cpp -o ' + opath + ' ' + add + ' $SOURCE' - cppbuild = Builder(action=lstr) - env.Append(BUILDERS={'ThriftCpp': cppbuild}) - - -def gen_cpp(env, dir, file): - scons_env(env) - suffixes = ['_types.h', '_types.cpp'] - targets = map(lambda s: 'gen-cpp/' + file + s, suffixes) - return env.ThriftCpp(targets, dir + file + '.thrift') diff --git a/shell/ext-py/thrift-0.16.0/src/TSerialization.py b/shell/ext-py/thrift-0.16.0/src/TSerialization.py deleted file mode 100644 index fbbe76807..000000000 --- a/shell/ext-py/thrift-0.16.0/src/TSerialization.py +++ /dev/null @@ -1,38 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -from .protocol import TBinaryProtocol -from .transport import TTransport - - -def serialize(thrift_object, - protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()): - transport = TTransport.TMemoryBuffer() - protocol = protocol_factory.getProtocol(transport) - thrift_object.write(protocol) - return transport.getvalue() - - -def deserialize(base, - buf, - protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()): - transport = TTransport.TMemoryBuffer(buf) - protocol = protocol_factory.getProtocol(transport) - base.read(protocol) - return base diff --git a/shell/ext-py/thrift-0.16.0/src/TTornado.py b/shell/ext-py/thrift-0.16.0/src/TTornado.py deleted file mode 100644 index 5eff11d2d..000000000 --- a/shell/ext-py/thrift-0.16.0/src/TTornado.py +++ /dev/null @@ -1,188 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -from __future__ import absolute_import -import logging -import socket -import struct - -from .transport.TTransport import TTransportException, TTransportBase, TMemoryBuffer - -from io import BytesIO -from collections import deque -from contextlib import contextmanager -from tornado import gen, iostream, ioloop, tcpserver, concurrent - -__all__ = ['TTornadoServer', 'TTornadoStreamTransport'] - -logger = logging.getLogger(__name__) - - -class _Lock(object): - def __init__(self): - self._waiters = deque() - - def acquired(self): - return len(self._waiters) > 0 - - @gen.coroutine - def acquire(self): - blocker = self._waiters[-1] if self.acquired() else None - future = concurrent.Future() - self._waiters.append(future) - if blocker: - yield blocker - - raise gen.Return(self._lock_context()) - - def release(self): - assert self.acquired(), 'Lock not aquired' - future = self._waiters.popleft() - future.set_result(None) - - @contextmanager - def _lock_context(self): - try: - yield - finally: - self.release() - - -class TTornadoStreamTransport(TTransportBase): - """a framed, buffered transport over a Tornado stream""" - def __init__(self, host, port, stream=None, io_loop=None): - self.host = host - self.port = port - self.io_loop = io_loop or ioloop.IOLoop.current() - self.__wbuf = BytesIO() - self._read_lock = _Lock() - - # servers provide a ready-to-go stream - self.stream = stream - - def with_timeout(self, timeout, future): - return gen.with_timeout(timeout, future, self.io_loop) - - @gen.coroutine - def open(self, timeout=None): - logger.debug('socket connecting') - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) - self.stream = iostream.IOStream(sock) - - try: - connect = self.stream.connect((self.host, self.port)) - if timeout is not None: - yield self.with_timeout(timeout, connect) - else: - yield connect - except (socket.error, IOError, ioloop.TimeoutError) as e: - message = 'could not connect to {}:{} ({})'.format(self.host, self.port, e) - 
raise TTransportException( - type=TTransportException.NOT_OPEN, - message=message) - - raise gen.Return(self) - - def set_close_callback(self, callback): - """ - Should be called only after open() returns - """ - self.stream.set_close_callback(callback) - - def close(self): - # don't raise if we intend to close - self.stream.set_close_callback(None) - self.stream.close() - - def read(self, _): - # The generated code for Tornado shouldn't do individual reads -- only - # frames at a time - assert False, "you're doing it wrong" - - @contextmanager - def io_exception_context(self): - try: - yield - except (socket.error, IOError) as e: - raise TTransportException( - type=TTransportException.END_OF_FILE, - message=str(e)) - except iostream.StreamBufferFullError as e: - raise TTransportException( - type=TTransportException.UNKNOWN, - message=str(e)) - - @gen.coroutine - def readFrame(self): - # IOStream processes reads one at a time - with (yield self._read_lock.acquire()): - with self.io_exception_context(): - frame_header = yield self.stream.read_bytes(4) - if len(frame_header) == 0: - raise iostream.StreamClosedError('Read zero bytes from stream') - frame_length, = struct.unpack('!i', frame_header) - frame = yield self.stream.read_bytes(frame_length) - raise gen.Return(frame) - - def write(self, buf): - self.__wbuf.write(buf) - - def flush(self): - frame = self.__wbuf.getvalue() - # reset wbuf before write/flush to preserve state on underlying failure - frame_length = struct.pack('!i', len(frame)) - self.__wbuf = BytesIO() - with self.io_exception_context(): - return self.stream.write(frame_length + frame) - - -class TTornadoServer(tcpserver.TCPServer): - def __init__(self, processor, iprot_factory, oprot_factory=None, - *args, **kwargs): - super(TTornadoServer, self).__init__(*args, **kwargs) - - self._processor = processor - self._iprot_factory = iprot_factory - self._oprot_factory = (oprot_factory if oprot_factory is not None - else iprot_factory) - - @gen.coroutine 
- def handle_stream(self, stream, address): - host, port = address[:2] - trans = TTornadoStreamTransport(host=host, port=port, stream=stream, - io_loop=self.io_loop) - oprot = self._oprot_factory.getProtocol(trans) - - try: - while not trans.stream.closed(): - try: - frame = yield trans.readFrame() - except TTransportException as e: - if e.type == TTransportException.END_OF_FILE: - break - else: - raise - tr = TMemoryBuffer(frame) - iprot = self._iprot_factory.getProtocol(tr) - yield self._processor.process(iprot, oprot) - except Exception: - logger.exception('thrift exception in handle_stream') - trans.close() - - logger.info('client disconnected %s:%d', host, port) diff --git a/shell/ext-py/thrift-0.16.0/src/Thrift.py b/shell/ext-py/thrift-0.16.0/src/Thrift.py deleted file mode 100644 index 81fe8cf33..000000000 --- a/shell/ext-py/thrift-0.16.0/src/Thrift.py +++ /dev/null @@ -1,193 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - - -class TType(object): - STOP = 0 - VOID = 1 - BOOL = 2 - BYTE = 3 - I08 = 3 - DOUBLE = 4 - I16 = 6 - I32 = 8 - I64 = 10 - STRING = 11 - UTF7 = 11 - STRUCT = 12 - MAP = 13 - SET = 14 - LIST = 15 - UTF8 = 16 - UTF16 = 17 - - _VALUES_TO_NAMES = ( - 'STOP', - 'VOID', - 'BOOL', - 'BYTE', - 'DOUBLE', - None, - 'I16', - None, - 'I32', - None, - 'I64', - 'STRING', - 'STRUCT', - 'MAP', - 'SET', - 'LIST', - 'UTF8', - 'UTF16', - ) - - -class TMessageType(object): - CALL = 1 - REPLY = 2 - EXCEPTION = 3 - ONEWAY = 4 - - -class TProcessor(object): - """Base class for processor, which works on two streams.""" - - def process(self, iprot, oprot): - """ - Process a request. The normal behvaior is to have the - processor invoke the correct handler and then it is the - server's responsibility to write the response to oprot. - """ - pass - - def on_message_begin(self, func): - """ - Install a callback that receives (name, type, seqid) - after the message header is read. - """ - pass - - -class TException(Exception): - """Base class for all thrift exceptions.""" - - def __init__(self, message=None): - Exception.__init__(self, message) - super(TException, self).__setattr__("message", message) - - -class TApplicationException(TException): - """Application level thrift exceptions.""" - - UNKNOWN = 0 - UNKNOWN_METHOD = 1 - INVALID_MESSAGE_TYPE = 2 - WRONG_METHOD_NAME = 3 - BAD_SEQUENCE_ID = 4 - MISSING_RESULT = 5 - INTERNAL_ERROR = 6 - PROTOCOL_ERROR = 7 - INVALID_TRANSFORM = 8 - INVALID_PROTOCOL = 9 - UNSUPPORTED_CLIENT_TYPE = 10 - - def __init__(self, type=UNKNOWN, message=None): - TException.__init__(self, message) - self.type = type - - def __str__(self): - if self.message: - return self.message - elif self.type == self.UNKNOWN_METHOD: - return 'Unknown method' - elif self.type == self.INVALID_MESSAGE_TYPE: - return 'Invalid message type' - elif self.type == self.WRONG_METHOD_NAME: - return 'Wrong method name' - elif self.type == self.BAD_SEQUENCE_ID: - return 'Bad sequence ID' 
- elif self.type == self.MISSING_RESULT: - return 'Missing result' - elif self.type == self.INTERNAL_ERROR: - return 'Internal error' - elif self.type == self.PROTOCOL_ERROR: - return 'Protocol error' - elif self.type == self.INVALID_TRANSFORM: - return 'Invalid transform' - elif self.type == self.INVALID_PROTOCOL: - return 'Invalid protocol' - elif self.type == self.UNSUPPORTED_CLIENT_TYPE: - return 'Unsupported client type' - else: - return 'Default (unknown) TApplicationException' - - def read(self, iprot): - iprot.readStructBegin() - while True: - (fname, ftype, fid) = iprot.readFieldBegin() - if ftype == TType.STOP: - break - if fid == 1: - if ftype == TType.STRING: - self.message = iprot.readString() - else: - iprot.skip(ftype) - elif fid == 2: - if ftype == TType.I32: - self.type = iprot.readI32() - else: - iprot.skip(ftype) - else: - iprot.skip(ftype) - iprot.readFieldEnd() - iprot.readStructEnd() - - def write(self, oprot): - oprot.writeStructBegin('TApplicationException') - if self.message is not None: - oprot.writeFieldBegin('message', TType.STRING, 1) - oprot.writeString(self.message) - oprot.writeFieldEnd() - if self.type is not None: - oprot.writeFieldBegin('type', TType.I32, 2) - oprot.writeI32(self.type) - oprot.writeFieldEnd() - oprot.writeFieldStop() - oprot.writeStructEnd() - - -class TFrozenDict(dict): - """A dictionary that is "frozen" like a frozenset""" - - def __init__(self, *args, **kwargs): - super(TFrozenDict, self).__init__(*args, **kwargs) - # Sort the items so they will be in a consistent order. - # XOR in the hash of the class so we don't collide with - # the hash of a list of tuples. 
- self.__hashval = hash(TFrozenDict) ^ hash(tuple(sorted(self.items()))) - - def __setitem__(self, *args): - raise TypeError("Can't modify frozen TFreezableDict") - - def __delitem__(self, *args): - raise TypeError("Can't modify frozen TFreezableDict") - - def __hash__(self): - return self.__hashval diff --git a/shell/ext-py/thrift-0.16.0/src/__init__.py b/shell/ext-py/thrift-0.16.0/src/__init__.py deleted file mode 100644 index 48d659c40..000000000 --- a/shell/ext-py/thrift-0.16.0/src/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - -__all__ = ['Thrift', 'TSCons'] diff --git a/shell/ext-py/thrift-0.16.0/src/compat.py b/shell/ext-py/thrift-0.16.0/src/compat.py deleted file mode 100644 index 0e8271dc1..000000000 --- a/shell/ext-py/thrift-0.16.0/src/compat.py +++ /dev/null @@ -1,46 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - -import sys - -if sys.version_info[0] == 2: - - from cStringIO import StringIO as BufferIO - - def binary_to_str(bin_val): - return bin_val - - def str_to_binary(str_val): - return str_val - - def byte_index(bytes_val, i): - return ord(bytes_val[i]) - -else: - - from io import BytesIO as BufferIO # noqa - - def binary_to_str(bin_val): - return bin_val.decode('utf8') - - def str_to_binary(str_val): - return bytes(str_val, 'utf8') - - def byte_index(bytes_val, i): - return bytes_val[i] diff --git a/shell/ext-py/thrift-0.16.0/src/ext/binary.cpp b/shell/ext-py/thrift-0.16.0/src/ext/binary.cpp deleted file mode 100644 index 423fa8fee..000000000 --- a/shell/ext-py/thrift-0.16.0/src/ext/binary.cpp +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -#define PY_SSIZE_T_CLEAN -#include "ext/binary.h" -namespace apache { -namespace thrift { -namespace py { - -bool BinaryProtocol::readFieldBegin(TType& type, int16_t& tag) { - uint8_t b = 0; - if (!readByte(b)) { - return false; - } - type = static_cast(b); - if (type == T_STOP) { - return true; - } - return readI16(tag); -} -} -} -} diff --git a/shell/ext-py/thrift-0.16.0/src/ext/binary.h b/shell/ext-py/thrift-0.16.0/src/ext/binary.h deleted file mode 100644 index 960b0d003..000000000 --- a/shell/ext-py/thrift-0.16.0/src/ext/binary.h +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -#ifndef THRIFT_PY_BINARY_H -#define THRIFT_PY_BINARY_H - -#include -#include "ext/protocol.h" -#include "ext/endian.h" -#include - -namespace apache { -namespace thrift { -namespace py { - -class BinaryProtocol : public ProtocolBase { -public: - virtual ~BinaryProtocol() {} - - void writeI8(int8_t val) { writeBuffer(reinterpret_cast(&val), sizeof(int8_t)); } - - void writeI16(int16_t val) { - int16_t net = static_cast(htons(val)); - writeBuffer(reinterpret_cast(&net), sizeof(int16_t)); - } - - void writeI32(int32_t val) { - int32_t net = static_cast(htonl(val)); - writeBuffer(reinterpret_cast(&net), sizeof(int32_t)); - } - - void writeI64(int64_t val) { - int64_t net = static_cast(htonll(val)); - writeBuffer(reinterpret_cast(&net), sizeof(int64_t)); - } - - void writeDouble(double dub) { - // Unfortunately, bitwise_cast doesn't work in C. Bad C! - union { - double f; - int64_t t; - } transfer; - transfer.f = dub; - writeI64(transfer.t); - } - - void writeBool(int v) { writeByte(static_cast(v)); } - - void writeString(PyObject* value, int32_t len) { - writeI32(len); - writeBuffer(PyBytes_AS_STRING(value), len); - } - - bool writeListBegin(PyObject* value, const SetListTypeArgs& parsedargs, int32_t len) { - writeByte(parsedargs.element_type); - writeI32(len); - return true; - } - - bool writeMapBegin(PyObject* value, const MapTypeArgs& parsedargs, int32_t len) { - writeByte(parsedargs.ktag); - writeByte(parsedargs.vtag); - writeI32(len); - return true; - } - - bool writeStructBegin() { return true; } - bool writeStructEnd() { return true; } - bool writeField(PyObject* value, const StructItemSpec& parsedspec) { - writeByte(static_cast(parsedspec.type)); - writeI16(parsedspec.tag); - return encodeValue(value, parsedspec.type, parsedspec.typeargs); - } - - void writeFieldStop() { writeByte(static_cast(T_STOP)); } - - bool readBool(bool& val) { - char* buf; - if (!readBytes(&buf, 1)) { - return false; - } - val = buf[0] == 1; - return true; - } - - bool 
readI8(int8_t& val) { - char* buf; - if (!readBytes(&buf, 1)) { - return false; - } - val = buf[0]; - return true; - } - - bool readI16(int16_t& val) { - char* buf; - if (!readBytes(&buf, sizeof(int16_t))) { - return false; - } - memcpy(&val, buf, sizeof(int16_t)); - val = ntohs(val); - return true; - } - - bool readI32(int32_t& val) { - char* buf; - if (!readBytes(&buf, sizeof(int32_t))) { - return false; - } - memcpy(&val, buf, sizeof(int32_t)); - val = ntohl(val); - return true; - } - - bool readI64(int64_t& val) { - char* buf; - if (!readBytes(&buf, sizeof(int64_t))) { - return false; - } - memcpy(&val, buf, sizeof(int64_t)); - val = ntohll(val); - return true; - } - - bool readDouble(double& val) { - union { - int64_t f; - double t; - } transfer; - - if (!readI64(transfer.f)) { - return false; - } - val = transfer.t; - return true; - } - - int32_t readString(char** buf) { - int32_t len = 0; - if (!readI32(len) || !checkLengthLimit(len, stringLimit()) || !readBytes(buf, len)) { - return -1; - } - return len; - } - - int32_t readListBegin(TType& etype) { - int32_t len; - uint8_t b = 0; - if (!readByte(b) || !readI32(len) || !checkLengthLimit(len, containerLimit())) { - return -1; - } - etype = static_cast(b); - return len; - } - - int32_t readMapBegin(TType& ktype, TType& vtype) { - int32_t len; - uint8_t k, v; - if (!readByte(k) || !readByte(v) || !readI32(len) || !checkLengthLimit(len, containerLimit())) { - return -1; - } - ktype = static_cast(k); - vtype = static_cast(v); - return len; - } - - bool readStructBegin() { return true; } - bool readStructEnd() { return true; } - - bool readFieldBegin(TType& type, int16_t& tag); - -#define SKIPBYTES(n) \ - do { \ - if (!readBytes(&dummy_buf_, (n))) { \ - return false; \ - } \ - return true; \ - } while (0) - - bool skipBool() { SKIPBYTES(1); } - bool skipByte() { SKIPBYTES(1); } - bool skipI16() { SKIPBYTES(2); } - bool skipI32() { SKIPBYTES(4); } - bool skipI64() { SKIPBYTES(8); } - bool skipDouble() { 
SKIPBYTES(8); } - bool skipString() { - int32_t len; - if (!readI32(len)) { - return false; - } - SKIPBYTES(len); - } -#undef SKIPBYTES - -private: - char* dummy_buf_; -}; -} -} -} -#endif // THRIFT_PY_BINARY_H diff --git a/shell/ext-py/thrift-0.16.0/src/ext/compact.cpp b/shell/ext-py/thrift-0.16.0/src/ext/compact.cpp deleted file mode 100644 index ae89f2a65..000000000 --- a/shell/ext-py/thrift-0.16.0/src/ext/compact.cpp +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -#define PY_SSIZE_T_CLEAN -#include "ext/compact.h" - -namespace apache { -namespace thrift { -namespace py { - -const uint8_t CompactProtocol::TTypeToCType[] = { - CT_STOP, // T_STOP - 0, // unused - CT_BOOLEAN_TRUE, // T_BOOL - CT_BYTE, // T_BYTE - CT_DOUBLE, // T_DOUBLE - 0, // unused - CT_I16, // T_I16 - 0, // unused - CT_I32, // T_I32 - 0, // unused - CT_I64, // T_I64 - CT_BINARY, // T_STRING - CT_STRUCT, // T_STRUCT - CT_MAP, // T_MAP - CT_SET, // T_SET - CT_LIST, // T_LIST -}; - -bool CompactProtocol::readFieldBegin(TType& type, int16_t& tag) { - uint8_t b; - if (!readByte(b)) { - return false; - } - uint8_t ctype = b & 0xf; - type = getTType(ctype); - if (type == -1) { - return false; - } else if (type == T_STOP) { - tag = 0; - return true; - } - uint8_t diff = (b & 0xf0) >> 4; - if (diff) { - tag = readTags_.top() + diff; - } else if (!readI16(tag)) { - readTags_.top() = -1; - return false; - } - if (ctype == CT_BOOLEAN_FALSE || ctype == CT_BOOLEAN_TRUE) { - readBool_.exists = true; - readBool_.value = ctype == CT_BOOLEAN_TRUE; - } - readTags_.top() = tag; - return true; -} - -TType CompactProtocol::getTType(uint8_t type) { - switch (type) { - case T_STOP: - return T_STOP; - case CT_BOOLEAN_FALSE: - case CT_BOOLEAN_TRUE: - return T_BOOL; - case CT_BYTE: - return T_BYTE; - case CT_I16: - return T_I16; - case CT_I32: - return T_I32; - case CT_I64: - return T_I64; - case CT_DOUBLE: - return T_DOUBLE; - case CT_BINARY: - return T_STRING; - case CT_LIST: - return T_LIST; - case CT_SET: - return T_SET; - case CT_MAP: - return T_MAP; - case CT_STRUCT: - return T_STRUCT; - default: - PyErr_Format(PyExc_TypeError, "don't know what type: %d", type); - return static_cast(-1); - } -} -} -} -} diff --git a/shell/ext-py/thrift-0.16.0/src/ext/compact.h b/shell/ext-py/thrift-0.16.0/src/ext/compact.h deleted file mode 100644 index a78d7a703..000000000 --- a/shell/ext-py/thrift-0.16.0/src/ext/compact.h +++ /dev/null @@ -1,368 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -#ifndef THRIFT_PY_COMPACT_H -#define THRIFT_PY_COMPACT_H - -#include -#include "ext/protocol.h" -#include "ext/endian.h" -#include -#include - -namespace apache { -namespace thrift { -namespace py { - -class CompactProtocol : public ProtocolBase { -public: - CompactProtocol() { readBool_.exists = false; } - - virtual ~CompactProtocol() {} - - void writeI8(int8_t val) { writeBuffer(reinterpret_cast(&val), 1); } - - void writeI16(int16_t val) { writeVarint(toZigZag(val)); } - - int writeI32(int32_t val) { return writeVarint(toZigZag(val)); } - - void writeI64(int64_t val) { writeVarint64(toZigZag64(val)); } - - void writeDouble(double dub) { - union { - double f; - int64_t t; - } transfer; - transfer.f = htolell(dub); - writeBuffer(reinterpret_cast(&transfer.t), sizeof(int64_t)); - } - - void writeBool(int v) { writeByte(static_cast(v ? 
CT_BOOLEAN_TRUE : CT_BOOLEAN_FALSE)); } - - void writeString(PyObject* value, int32_t len) { - writeVarint(len); - writeBuffer(PyBytes_AS_STRING(value), len); - } - - bool writeListBegin(PyObject* value, const SetListTypeArgs& args, int32_t len) { - int ctype = toCompactType(args.element_type); - if (len <= 14) { - writeByte(static_cast(len << 4 | ctype)); - } else { - writeByte(0xf0 | ctype); - writeVarint(len); - } - return true; - } - - bool writeMapBegin(PyObject* value, const MapTypeArgs& args, int32_t len) { - if (len == 0) { - writeByte(0); - return true; - } - int ctype = toCompactType(args.ktag) << 4 | toCompactType(args.vtag); - writeVarint(len); - writeByte(ctype); - return true; - } - - bool writeStructBegin() { - writeTags_.push(0); - return true; - } - bool writeStructEnd() { - writeTags_.pop(); - return true; - } - - bool writeField(PyObject* value, const StructItemSpec& spec) { - if (spec.type == T_BOOL) { - doWriteFieldBegin(spec, PyObject_IsTrue(value) ? CT_BOOLEAN_TRUE : CT_BOOLEAN_FALSE); - return true; - } else { - doWriteFieldBegin(spec, toCompactType(spec.type)); - return encodeValue(value, spec.type, spec.typeargs); - } - } - - void writeFieldStop() { writeByte(0); } - - bool readBool(bool& val) { - if (readBool_.exists) { - readBool_.exists = false; - val = readBool_.value; - return true; - } - char* buf; - if (!readBytes(&buf, 1)) { - return false; - } - val = buf[0] == CT_BOOLEAN_TRUE; - return true; - } - bool readI8(int8_t& val) { - char* buf; - if (!readBytes(&buf, 1)) { - return false; - } - val = buf[0]; - return true; - } - - bool readI16(int16_t& val) { - uint16_t uval; - if (readVarint(uval)) { - val = fromZigZag(uval); - return true; - } - return false; - } - - bool readI32(int32_t& val) { - uint32_t uval; - if (readVarint(uval)) { - val = fromZigZag(uval); - return true; - } - return false; - } - - bool readI64(int64_t& val) { - uint64_t uval; - if (readVarint(uval)) { - val = fromZigZag(uval); - return true; - } - return false; 
- } - - bool readDouble(double& val) { - union { - int64_t f; - double t; - } transfer; - - char* buf; - if (!readBytes(&buf, 8)) { - return false; - } - memcpy(&transfer.f, buf, sizeof(int64_t)); - transfer.f = letohll(transfer.f); - val = transfer.t; - return true; - } - - int32_t readString(char** buf) { - uint32_t len; - if (!readVarint(len) || !checkLengthLimit(len, stringLimit())) { - return -1; - } - if (len == 0) { - return 0; - } - if (!readBytes(buf, len)) { - return -1; - } - return len; - } - - int32_t readListBegin(TType& etype) { - uint8_t b; - if (!readByte(b)) { - return -1; - } - etype = getTType(b & 0xf); - if (etype == -1) { - return -1; - } - uint32_t len = (b >> 4) & 0xf; - if (len == 15 && !readVarint(len)) { - return -1; - } - if (!checkLengthLimit(len, containerLimit())) { - return -1; - } - return len; - } - - int32_t readMapBegin(TType& ktype, TType& vtype) { - uint32_t len; - if (!readVarint(len) || !checkLengthLimit(len, containerLimit())) { - return -1; - } - if (len != 0) { - uint8_t kvType; - if (!readByte(kvType)) { - return -1; - } - ktype = getTType(kvType >> 4); - vtype = getTType(kvType & 0xf); - if (ktype == -1 || vtype == -1) { - return -1; - } - } - return len; - } - - bool readStructBegin() { - readTags_.push(0); - return true; - } - bool readStructEnd() { - readTags_.pop(); - return true; - } - bool readFieldBegin(TType& type, int16_t& tag); - - bool skipBool() { - bool val; - return readBool(val); - } -#define SKIPBYTES(n) \ - do { \ - if (!readBytes(&dummy_buf_, (n))) { \ - return false; \ - } \ - return true; \ - } while (0) - bool skipByte() { SKIPBYTES(1); } - bool skipDouble() { SKIPBYTES(8); } - bool skipI16() { - int16_t val; - return readI16(val); - } - bool skipI32() { - int32_t val; - return readI32(val); - } - bool skipI64() { - int64_t val; - return readI64(val); - } - bool skipString() { - uint32_t len; - if (!readVarint(len)) { - return false; - } - SKIPBYTES(len); - } -#undef SKIPBYTES - -private: - enum 
Types { - CT_STOP = 0x00, - CT_BOOLEAN_TRUE = 0x01, - CT_BOOLEAN_FALSE = 0x02, - CT_BYTE = 0x03, - CT_I16 = 0x04, - CT_I32 = 0x05, - CT_I64 = 0x06, - CT_DOUBLE = 0x07, - CT_BINARY = 0x08, - CT_LIST = 0x09, - CT_SET = 0x0A, - CT_MAP = 0x0B, - CT_STRUCT = 0x0C - }; - - static const uint8_t TTypeToCType[]; - - TType getTType(uint8_t type); - - int toCompactType(TType type) { - int i = static_cast(type); - return i < 16 ? TTypeToCType[i] : -1; - } - - uint32_t toZigZag(int32_t val) { return (val >> 31) ^ (val << 1); } - - uint64_t toZigZag64(int64_t val) { return (val >> 63) ^ (val << 1); } - - int writeVarint(uint32_t val) { - int cnt = 1; - while (val & ~0x7fU) { - writeByte(static_cast((val & 0x7fU) | 0x80U)); - val >>= 7; - ++cnt; - } - writeByte(static_cast(val)); - return cnt; - } - - int writeVarint64(uint64_t val) { - int cnt = 1; - while (val & ~0x7fULL) { - writeByte(static_cast((val & 0x7fULL) | 0x80ULL)); - val >>= 7; - ++cnt; - } - writeByte(static_cast(val)); - return cnt; - } - - template - bool readVarint(T& result) { - uint8_t b; - T val = 0; - int shift = 0; - for (int i = 0; i < Max; ++i) { - if (!readByte(b)) { - return false; - } - if (b & 0x80) { - val |= static_cast(b & 0x7f) << shift; - } else { - val |= static_cast(b) << shift; - result = val; - return true; - } - shift += 7; - } - PyErr_Format(PyExc_OverflowError, "varint exceeded %d bytes", Max); - return false; - } - - template - S fromZigZag(U val) { - return (val >> 1) ^ static_cast(-static_cast(val & 1)); - } - - void doWriteFieldBegin(const StructItemSpec& spec, int ctype) { - int diff = spec.tag - writeTags_.top(); - if (diff > 0 && diff <= 15) { - writeByte(static_cast(diff << 4 | ctype)); - } else { - writeByte(static_cast(ctype)); - writeI16(spec.tag); - } - writeTags_.top() = spec.tag; - } - - std::stack writeTags_; - std::stack readTags_; - struct { - bool exists; - bool value; - } readBool_; - char* dummy_buf_; -}; -} -} -} -#endif // THRIFT_PY_COMPACT_H diff --git 
a/shell/ext-py/thrift-0.16.0/src/ext/endian.h b/shell/ext-py/thrift-0.16.0/src/ext/endian.h deleted file mode 100644 index 1660cbd98..000000000 --- a/shell/ext-py/thrift-0.16.0/src/ext/endian.h +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -#ifndef THRIFT_PY_ENDIAN_H -#define THRIFT_PY_ENDIAN_H - -#include - -#ifndef _WIN32 -#include -#else -#include -#pragma comment(lib, "ws2_32.lib") -#define BIG_ENDIAN (4321) -#define LITTLE_ENDIAN (1234) -#define BYTE_ORDER LITTLE_ENDIAN -#define inline __inline -#endif - -/* Fix endianness issues on Solaris */ -#if defined(__SVR4) && defined(__sun) -#if defined(__i386) && !defined(__i386__) -#define __i386__ -#endif - -#ifndef BIG_ENDIAN -#define BIG_ENDIAN (4321) -#endif -#ifndef LITTLE_ENDIAN -#define LITTLE_ENDIAN (1234) -#endif - -/* I386 is LE, even on Solaris */ -#if !defined(BYTE_ORDER) && defined(__i386__) -#define BYTE_ORDER LITTLE_ENDIAN -#endif -#endif - -#ifndef __BYTE_ORDER -#if defined(BYTE_ORDER) && defined(LITTLE_ENDIAN) && defined(BIG_ENDIAN) -#define __BYTE_ORDER BYTE_ORDER -#define __LITTLE_ENDIAN LITTLE_ENDIAN -#define __BIG_ENDIAN BIG_ENDIAN -#else -#error "Cannot determine endianness" -#endif -#endif - -// Same comment as the enum. Sorry. -#if __BYTE_ORDER == __BIG_ENDIAN -#define ntohll(n) (n) -#define htonll(n) (n) -#if defined(__GNUC__) && defined(__GLIBC__) -#include -#define letohll(n) bswap_64(n) -#define htolell(n) bswap_64(n) -#else /* GNUC & GLIBC */ -#define letohll(n) ((((unsigned long long)ntohl(n)) << 32) + ntohl(n >> 32)) -#define htolell(n) ((((unsigned long long)htonl(n)) << 32) + htonl(n >> 32)) -#endif -#elif __BYTE_ORDER == __LITTLE_ENDIAN -#if defined(__GNUC__) && defined(__GLIBC__) -#include -#define ntohll(n) bswap_64(n) -#define htonll(n) bswap_64(n) -#elif defined(_MSC_VER) -#include -#define ntohll(n) _byteswap_uint64(n) -#define htonll(n) _byteswap_uint64(n) -#else /* GNUC & GLIBC */ -#define ntohll(n) ((((unsigned long long)ntohl(n)) << 32) + ntohl(n >> 32)) -#define htonll(n) ((((unsigned long long)htonl(n)) << 32) + htonl(n >> 32)) -#endif /* GNUC & GLIBC */ -#define letohll(n) (n) -#define htolell(n) (n) -#else /* __BYTE_ORDER */ -#error "Can't define htonll or ntohll!" 
-#endif - -#endif // THRIFT_PY_ENDIAN_H diff --git a/shell/ext-py/thrift-0.16.0/src/ext/module.cpp b/shell/ext-py/thrift-0.16.0/src/ext/module.cpp deleted file mode 100644 index f14ddaeb6..000000000 --- a/shell/ext-py/thrift-0.16.0/src/ext/module.cpp +++ /dev/null @@ -1,203 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -#include -#include "types.h" -#include "binary.h" -#include "compact.h" -#include -#include - -// TODO(dreiss): defval appears to be unused. Look into removing it. -// TODO(dreiss): Make parse_spec_args recursive, and cache the output -// permanently in the object. (Malloc and orphan.) -// TODO(dreiss): Why do we need cStringIO for reading, why not just char*? -// Can cStringIO let us work with a BufferedTransport? -// TODO(dreiss): Don't ignore the rv from cwrite (maybe). - -// Doing a benchmark shows that interning actually makes a difference, amazingly. - -/** Pointer to interned string to speed up attribute lookup. 
*/ -PyObject* INTERN_STRING(TFrozenDict); -PyObject* INTERN_STRING(cstringio_buf); -PyObject* INTERN_STRING(cstringio_refill); -static PyObject* INTERN_STRING(string_length_limit); -static PyObject* INTERN_STRING(container_length_limit); -static PyObject* INTERN_STRING(trans); - -namespace apache { -namespace thrift { -namespace py { - -template -static PyObject* encode_impl(PyObject* args) { - if (!args) - return nullptr; - - PyObject* enc_obj = nullptr; - PyObject* type_args = nullptr; - if (!PyArg_ParseTuple(args, "OO", &enc_obj, &type_args)) { - return nullptr; - } - if (!enc_obj || !type_args) { - return nullptr; - } - - T protocol; - if (!protocol.prepareEncodeBuffer() || !protocol.encodeValue(enc_obj, T_STRUCT, type_args)) { - return nullptr; - } - - return protocol.getEncodedValue(); -} - -static inline long as_long_then_delete(PyObject* value, long default_value) { - ScopedPyObject scope(value); - long v = PyInt_AsLong(value); - if (INT_CONV_ERROR_OCCURRED(v)) { - PyErr_Clear(); - return default_value; - } - return v; -} - -template -static PyObject* decode_impl(PyObject* args) { - PyObject* output_obj = nullptr; - PyObject* oprot = nullptr; - PyObject* typeargs = nullptr; - if (!PyArg_ParseTuple(args, "OOO", &output_obj, &oprot, &typeargs)) { - return nullptr; - } - - T protocol; - int32_t default_limit = (std::numeric_limits::max)(); - protocol.setStringLengthLimit( - as_long_then_delete(PyObject_GetAttr(oprot, INTERN_STRING(string_length_limit)), - default_limit)); - protocol.setContainerLengthLimit( - as_long_then_delete(PyObject_GetAttr(oprot, INTERN_STRING(container_length_limit)), - default_limit)); - ScopedPyObject transport(PyObject_GetAttr(oprot, INTERN_STRING(trans))); - if (!transport) { - return nullptr; - } - - StructTypeArgs parsedargs; - if (!parse_struct_args(&parsedargs, typeargs)) { - return nullptr; - } - - if (!protocol.prepareDecodeBufferFromTransport(transport.get())) { - return nullptr; - } - - return protocol.readStruct(output_obj, 
parsedargs.klass, parsedargs.spec); -} -} -} -} - -using namespace apache::thrift::py; - -/* -- PYTHON MODULE SETUP STUFF --- */ - -extern "C" { - -static PyObject* encode_binary(PyObject*, PyObject* args) { - return encode_impl(args); -} - -static PyObject* decode_binary(PyObject*, PyObject* args) { - return decode_impl(args); -} - -static PyObject* encode_compact(PyObject*, PyObject* args) { - return encode_impl(args); -} - -static PyObject* decode_compact(PyObject*, PyObject* args) { - return decode_impl(args); -} - -static PyMethodDef ThriftFastBinaryMethods[] = { - {"encode_binary", encode_binary, METH_VARARGS, ""}, - {"decode_binary", decode_binary, METH_VARARGS, ""}, - {"encode_compact", encode_compact, METH_VARARGS, ""}, - {"decode_compact", decode_compact, METH_VARARGS, ""}, - {nullptr, nullptr, 0, nullptr} /* Sentinel */ -}; - -#if PY_MAJOR_VERSION >= 3 - -static struct PyModuleDef ThriftFastBinaryDef = {PyModuleDef_HEAD_INIT, - "thrift.protocol.fastbinary", - nullptr, - 0, - ThriftFastBinaryMethods, - nullptr, - nullptr, - nullptr, - nullptr}; - -#define INITERROR return nullptr; - -PyObject* PyInit_fastbinary() { - -#else - -#define INITERROR return; - -void initfastbinary() { - - PycString_IMPORT; - if (PycStringIO == nullptr) - INITERROR - -#endif - -#define INIT_INTERN_STRING(value) \ - do { \ - INTERN_STRING(value) = PyString_InternFromString(#value); \ - if (!INTERN_STRING(value)) \ - INITERROR \ - } while (0) - - INIT_INTERN_STRING(TFrozenDict); - INIT_INTERN_STRING(cstringio_buf); - INIT_INTERN_STRING(cstringio_refill); - INIT_INTERN_STRING(string_length_limit); - INIT_INTERN_STRING(container_length_limit); - INIT_INTERN_STRING(trans); -#undef INIT_INTERN_STRING - - PyObject* module = -#if PY_MAJOR_VERSION >= 3 - PyModule_Create(&ThriftFastBinaryDef); -#else - Py_InitModule("thrift.protocol.fastbinary", ThriftFastBinaryMethods); -#endif - if (module == nullptr) - INITERROR; - -#if PY_MAJOR_VERSION >= 3 - return module; -#endif -} -} diff --git 
a/shell/ext-py/thrift-0.16.0/src/ext/protocol.h b/shell/ext-py/thrift-0.16.0/src/ext/protocol.h deleted file mode 100644 index c0cd43724..000000000 --- a/shell/ext-py/thrift-0.16.0/src/ext/protocol.h +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -#ifndef THRIFT_PY_PROTOCOL_H -#define THRIFT_PY_PROTOCOL_H - -#include "ext/types.h" -#include -#include - -namespace apache { -namespace thrift { -namespace py { - -template -class ProtocolBase { - -public: - ProtocolBase() - : stringLimit_((std::numeric_limits::max)()), - containerLimit_((std::numeric_limits::max)()), - output_(nullptr) {} - inline virtual ~ProtocolBase(); - - bool prepareDecodeBufferFromTransport(PyObject* trans); - - PyObject* readStruct(PyObject* output, PyObject* klass, PyObject* spec_seq); - - bool prepareEncodeBuffer(); - - bool encodeValue(PyObject* value, TType type, PyObject* typeargs); - - PyObject* getEncodedValue(); - - long stringLimit() const { return stringLimit_; } - void setStringLengthLimit(long limit) { stringLimit_ = limit; } - - long containerLimit() const { return containerLimit_; } - void setContainerLengthLimit(long limit) { containerLimit_ = limit; } - -protected: - bool readBytes(char** output, int len); - - bool readByte(uint8_t& val) { - char* buf; - if (!readBytes(&buf, 1)) { - return false; - } - val = static_cast(buf[0]); - return true; - } - - bool writeBuffer(char* data, size_t len); - - void writeByte(uint8_t val) { writeBuffer(reinterpret_cast(&val), 1); } - - PyObject* decodeValue(TType type, PyObject* typeargs); - - bool skip(TType type); - - inline bool checkType(TType got, TType expected); - inline bool checkLengthLimit(int32_t len, long limit); - - inline bool isUtf8(PyObject* typeargs); - -private: - Impl* impl() { return static_cast(this); } - - long stringLimit_; - long containerLimit_; - EncodeBuffer* output_; - DecodeBuffer input_; -}; -} -} -} - -#include "ext/protocol.tcc" - -#endif // THRIFT_PY_PROTOCOL_H diff --git a/shell/ext-py/thrift-0.16.0/src/ext/protocol.tcc b/shell/ext-py/thrift-0.16.0/src/ext/protocol.tcc deleted file mode 100644 index aad5a3c88..000000000 --- a/shell/ext-py/thrift-0.16.0/src/ext/protocol.tcc +++ /dev/null @@ -1,913 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -#ifndef THRIFT_PY_PROTOCOL_TCC -#define THRIFT_PY_PROTOCOL_TCC - -#include - -#define CHECK_RANGE(v, min, max) (((v) <= (max)) && ((v) >= (min))) -#define INIT_OUTBUF_SIZE 128 - -#if PY_MAJOR_VERSION < 3 -#include -#else -#include -#endif - -namespace apache { -namespace thrift { -namespace py { - -#if PY_MAJOR_VERSION < 3 - -namespace detail { - -inline bool input_check(PyObject* input) { - return PycStringIO_InputCheck(input); -} - -inline EncodeBuffer* new_encode_buffer(size_t size) { - if (!PycStringIO) { - PycString_IMPORT; - } - if (!PycStringIO) { - return nullptr; - } - return PycStringIO->NewOutput(size); -} - -inline int read_buffer(PyObject* buf, char** output, int len) { - if (!PycStringIO) { - PycString_IMPORT; - } - if (!PycStringIO) { - PyErr_SetString(PyExc_ImportError, "failed to import native cStringIO"); - return -1; - } - return PycStringIO->cread(buf, output, len); -} -} - -template -inline ProtocolBase::~ProtocolBase() { - if (output_) { - Py_CLEAR(output_); - } -} - -template -inline bool ProtocolBase::isUtf8(PyObject* typeargs) { - return PyString_Check(typeargs) && !strncmp(PyString_AS_STRING(typeargs), "UTF8", 4); -} - -template -PyObject* ProtocolBase::getEncodedValue() 
{ - if (!PycStringIO) { - PycString_IMPORT; - } - if (!PycStringIO) { - return nullptr; - } - return PycStringIO->cgetvalue(output_); -} - -template -inline bool ProtocolBase::writeBuffer(char* data, size_t size) { - if (!PycStringIO) { - PycString_IMPORT; - } - if (!PycStringIO) { - PyErr_SetString(PyExc_ImportError, "failed to import native cStringIO"); - return false; - } - int len = PycStringIO->cwrite(output_, data, size); - if (len < 0) { - PyErr_SetString(PyExc_IOError, "failed to write to cStringIO object"); - return false; - } - if (static_cast(len) != size) { - PyErr_Format(PyExc_EOFError, "write length mismatch: expected %lu got %d", size, len); - return false; - } - return true; -} - -#else - -namespace detail { - -inline bool input_check(PyObject* input) { - // TODO: Check for BytesIO type - return true; -} - -inline EncodeBuffer* new_encode_buffer(size_t size) { - EncodeBuffer* buffer = new EncodeBuffer; - buffer->buf.reserve(size); - buffer->pos = 0; - return buffer; -} - -struct bytesio { - PyObject_HEAD -#if PY_MINOR_VERSION < 5 - char* buf; -#else - PyObject* buf; -#endif - Py_ssize_t pos; - Py_ssize_t string_size; -}; - -inline int read_buffer(PyObject* buf, char** output, int len) { - bytesio* buf2 = reinterpret_cast(buf); -#if PY_MINOR_VERSION < 5 - *output = buf2->buf + buf2->pos; -#else - *output = PyBytes_AS_STRING(buf2->buf) + buf2->pos; -#endif - Py_ssize_t pos0 = buf2->pos; - buf2->pos = (std::min)(buf2->pos + static_cast(len), buf2->string_size); - return static_cast(buf2->pos - pos0); -} -} - -template -inline ProtocolBase::~ProtocolBase() { - if (output_) { - delete output_; - } -} - -template -inline bool ProtocolBase::isUtf8(PyObject* typeargs) { - // while condition for py2 is "arg == 'UTF8'", it should be "arg != 'BINARY'" for py3. 
- // HACK: check the length and don't bother reading the value - return !PyUnicode_Check(typeargs) || PyUnicode_GET_LENGTH(typeargs) != 6; -} - -template -PyObject* ProtocolBase::getEncodedValue() { - return PyBytes_FromStringAndSize(output_->buf.data(), output_->buf.size()); -} - -template -inline bool ProtocolBase::writeBuffer(char* data, size_t size) { - size_t need = size + output_->pos; - if (output_->buf.capacity() < need) { - try { - output_->buf.reserve(need); - } catch (std::bad_alloc&) { - PyErr_SetString(PyExc_MemoryError, "Failed to allocate write buffer"); - return false; - } - } - std::copy(data, data + size, std::back_inserter(output_->buf)); - return true; -} - -#endif - -namespace detail { - -#define DECLARE_OP_SCOPE(name, op) \ - template \ - struct name##Scope { \ - Impl* impl; \ - bool valid; \ - name##Scope(Impl* thiz) : impl(thiz), valid(impl->op##Begin()) {} \ - ~name##Scope() { \ - if (valid) \ - impl->op##End(); \ - } \ - operator bool() { return valid; } \ - }; \ - template class T> \ - name##Scope op##Scope(T* thiz) { \ - return name##Scope(static_cast(thiz)); \ - } -DECLARE_OP_SCOPE(WriteStruct, writeStruct) -DECLARE_OP_SCOPE(ReadStruct, readStruct) -#undef DECLARE_OP_SCOPE - -inline bool check_ssize_t_32(Py_ssize_t len) { - // error from getting the int - if (INT_CONV_ERROR_OCCURRED(len)) { - return false; - } - if (!CHECK_RANGE(len, 0, (std::numeric_limits::max)())) { - PyErr_SetString(PyExc_OverflowError, "size out of range: exceeded INT32_MAX"); - return false; - } - return true; -} -} - -template -bool parse_pyint(PyObject* o, T* ret, int32_t min, int32_t max) { - long val = PyInt_AsLong(o); - - if (INT_CONV_ERROR_OCCURRED(val)) { - return false; - } - if (!CHECK_RANGE(val, min, max)) { - PyErr_SetString(PyExc_OverflowError, "int out of range"); - return false; - } - - *ret = static_cast(val); - return true; -} - -template -inline bool ProtocolBase::checkType(TType got, TType expected) { - if (expected != got) { - 
PyErr_SetString(PyExc_TypeError, "got wrong ttype while reading field"); - return false; - } - return true; -} - -template -bool ProtocolBase::checkLengthLimit(int32_t len, long limit) { - if (len < 0) { - PyErr_Format(PyExc_OverflowError, "negative length: %ld", limit); - return false; - } - if (len > limit) { - PyErr_Format(PyExc_OverflowError, "size exceeded specified limit: %ld", limit); - return false; - } - return true; -} - -template -bool ProtocolBase::readBytes(char** output, int len) { - if (len < 0) { - PyErr_Format(PyExc_ValueError, "attempted to read negative length: %d", len); - return false; - } - // TODO(dreiss): Don't fear the malloc. Think about taking a copy of - // the partial read instead of forcing the transport - // to prepend it to its buffer. - - int rlen = detail::read_buffer(input_.stringiobuf.get(), output, len); - - if (rlen == len) { - return true; - } else if (rlen == -1) { - return false; - } else { - // using building functions as this is a rare codepath - ScopedPyObject newiobuf(PyObject_CallFunction(input_.refill_callable.get(), refill_signature, - *output, rlen, len, nullptr)); - if (!newiobuf) { - return false; - } - - // must do this *AFTER* the call so that we don't deref the io buffer - input_.stringiobuf.reset(newiobuf.release()); - - rlen = detail::read_buffer(input_.stringiobuf.get(), output, len); - - if (rlen == len) { - return true; - } else if (rlen == -1) { - return false; - } else { - // TODO(dreiss): This could be a valid code path for big binary blobs. 
- PyErr_SetString(PyExc_TypeError, "refill claimed to have refilled the buffer, but didn't!!"); - return false; - } - } -} - -template -bool ProtocolBase::prepareDecodeBufferFromTransport(PyObject* trans) { - if (input_.stringiobuf) { - PyErr_SetString(PyExc_ValueError, "decode buffer is already initialized"); - return false; - } - - ScopedPyObject stringiobuf(PyObject_GetAttr(trans, INTERN_STRING(cstringio_buf))); - if (!stringiobuf) { - return false; - } - if (!detail::input_check(stringiobuf.get())) { - PyErr_SetString(PyExc_TypeError, "expecting stringio input_"); - return false; - } - - ScopedPyObject refill_callable(PyObject_GetAttr(trans, INTERN_STRING(cstringio_refill))); - if (!refill_callable) { - return false; - } - if (!PyCallable_Check(refill_callable.get())) { - PyErr_SetString(PyExc_TypeError, "expecting callable"); - return false; - } - - input_.stringiobuf.swap(stringiobuf); - input_.refill_callable.swap(refill_callable); - return true; -} - -template -bool ProtocolBase::prepareEncodeBuffer() { - output_ = detail::new_encode_buffer(INIT_OUTBUF_SIZE); - return output_ != nullptr; -} - -template -bool ProtocolBase::encodeValue(PyObject* value, TType type, PyObject* typeargs) { - /* - * Refcounting Strategy: - * - * We assume that elements of the thrift_spec tuple are not going to be - * mutated, so we don't ref count those at all. Other than that, we try to - * keep a reference to all the user-created objects while we work with them. - * encodeValue assumes that a reference is already held. 
The *caller* is - * responsible for handling references - */ - - switch (type) { - - case T_BOOL: { - int v = PyObject_IsTrue(value); - if (v == -1) { - return false; - } - impl()->writeBool(v); - return true; - } - case T_I08: { - int8_t val; - - if (!parse_pyint(value, &val, (std::numeric_limits::min)(), - (std::numeric_limits::max)())) { - return false; - } - - impl()->writeI8(val); - return true; - } - case T_I16: { - int16_t val; - - if (!parse_pyint(value, &val, (std::numeric_limits::min)(), - (std::numeric_limits::max)())) { - return false; - } - - impl()->writeI16(val); - return true; - } - case T_I32: { - int32_t val; - - if (!parse_pyint(value, &val, (std::numeric_limits::min)(), - (std::numeric_limits::max)())) { - return false; - } - - impl()->writeI32(val); - return true; - } - case T_I64: { - int64_t nval = PyLong_AsLongLong(value); - - if (INT_CONV_ERROR_OCCURRED(nval)) { - return false; - } - - if (!CHECK_RANGE(nval, (std::numeric_limits::min)(), - (std::numeric_limits::max)())) { - PyErr_SetString(PyExc_OverflowError, "int out of range"); - return false; - } - - impl()->writeI64(nval); - return true; - } - - case T_DOUBLE: { - double nval = PyFloat_AsDouble(value); - if (nval == -1.0 && PyErr_Occurred()) { - return false; - } - - impl()->writeDouble(nval); - return true; - } - - case T_STRING: { - ScopedPyObject nval; - - if (PyUnicode_Check(value)) { - nval.reset(PyUnicode_AsUTF8String(value)); - if (!nval) { - return false; - } - } else { - Py_INCREF(value); - nval.reset(value); - } - - Py_ssize_t len = PyBytes_Size(nval.get()); - if (!detail::check_ssize_t_32(len)) { - return false; - } - - impl()->writeString(nval.get(), static_cast(len)); - return true; - } - - case T_LIST: - case T_SET: { - SetListTypeArgs parsedargs; - if (!parse_set_list_args(&parsedargs, typeargs)) { - return false; - } - - Py_ssize_t len = PyObject_Length(value); - if (!detail::check_ssize_t_32(len)) { - return false; - } - - if (!impl()->writeListBegin(value, parsedargs, 
static_cast(len)) || PyErr_Occurred()) { - return false; - } - ScopedPyObject iterator(PyObject_GetIter(value)); - if (!iterator) { - return false; - } - - while (PyObject* rawItem = PyIter_Next(iterator.get())) { - ScopedPyObject item(rawItem); - if (!encodeValue(item.get(), parsedargs.element_type, parsedargs.typeargs)) { - return false; - } - } - - return true; - } - - case T_MAP: { - Py_ssize_t len = PyDict_Size(value); - if (!detail::check_ssize_t_32(len)) { - return false; - } - - MapTypeArgs parsedargs; - if (!parse_map_args(&parsedargs, typeargs)) { - return false; - } - - if (!impl()->writeMapBegin(value, parsedargs, static_cast(len)) || PyErr_Occurred()) { - return false; - } - Py_ssize_t pos = 0; - PyObject* k = nullptr; - PyObject* v = nullptr; - // TODO(bmaurer): should support any mapping, not just dicts - while (PyDict_Next(value, &pos, &k, &v)) { - if (!encodeValue(k, parsedargs.ktag, parsedargs.ktypeargs) - || !encodeValue(v, parsedargs.vtag, parsedargs.vtypeargs)) { - return false; - } - } - return true; - } - - case T_STRUCT: { - StructTypeArgs parsedargs; - if (!parse_struct_args(&parsedargs, typeargs)) { - return false; - } - - Py_ssize_t nspec = PyTuple_Size(parsedargs.spec); - if (nspec == -1) { - PyErr_SetString(PyExc_TypeError, "spec is not a tuple"); - return false; - } - - detail::WriteStructScope scope = detail::writeStructScope(this); - if (!scope) { - return false; - } - for (Py_ssize_t i = 0; i < nspec; i++) { - PyObject* spec_tuple = PyTuple_GET_ITEM(parsedargs.spec, i); - if (spec_tuple == Py_None) { - continue; - } - - StructItemSpec parsedspec; - if (!parse_struct_item_spec(&parsedspec, spec_tuple)) { - return false; - } - - ScopedPyObject instval(PyObject_GetAttr(value, parsedspec.attrname)); - - if (!instval) { - return false; - } - - if (instval.get() == Py_None) { - continue; - } - - bool res = impl()->writeField(instval.get(), parsedspec); - if (!res) { - return false; - } - } - impl()->writeFieldStop(); - return true; - } - 
- case T_STOP: - case T_VOID: - case T_UTF16: - case T_UTF8: - case T_U64: - default: - PyErr_Format(PyExc_TypeError, "Unexpected TType for encodeValue: %d", type); - return false; - } - - return true; -} - -template -bool ProtocolBase::skip(TType type) { - switch (type) { - case T_BOOL: - return impl()->skipBool(); - case T_I08: - return impl()->skipByte(); - case T_I16: - return impl()->skipI16(); - case T_I32: - return impl()->skipI32(); - case T_I64: - return impl()->skipI64(); - case T_DOUBLE: - return impl()->skipDouble(); - - case T_STRING: { - return impl()->skipString(); - } - - case T_LIST: - case T_SET: { - TType etype = T_STOP; - int32_t len = impl()->readListBegin(etype); - if (len < 0) { - return false; - } - for (int32_t i = 0; i < len; i++) { - if (!skip(etype)) { - return false; - } - } - return true; - } - - case T_MAP: { - TType ktype = T_STOP; - TType vtype = T_STOP; - int32_t len = impl()->readMapBegin(ktype, vtype); - if (len < 0) { - return false; - } - for (int32_t i = 0; i < len; i++) { - if (!skip(ktype) || !skip(vtype)) { - return false; - } - } - return true; - } - - case T_STRUCT: { - detail::ReadStructScope scope = detail::readStructScope(this); - if (!scope) { - return false; - } - while (true) { - TType type = T_STOP; - int16_t tag; - if (!impl()->readFieldBegin(type, tag)) { - return false; - } - if (type == T_STOP) { - return true; - } - if (!skip(type)) { - return false; - } - } - return true; - } - - case T_STOP: - case T_VOID: - case T_UTF16: - case T_UTF8: - case T_U64: - default: - PyErr_Format(PyExc_TypeError, "Unexpected TType for skip: %d", type); - return false; - } - - return true; -} - -// Returns a new reference. 
-template -PyObject* ProtocolBase::decodeValue(TType type, PyObject* typeargs) { - switch (type) { - - case T_BOOL: { - bool v = 0; - if (!impl()->readBool(v)) { - return nullptr; - } - if (v) { - Py_RETURN_TRUE; - } else { - Py_RETURN_FALSE; - } - } - case T_I08: { - int8_t v = 0; - if (!impl()->readI8(v)) { - return nullptr; - } - return PyInt_FromLong(v); - } - case T_I16: { - int16_t v = 0; - if (!impl()->readI16(v)) { - return nullptr; - } - return PyInt_FromLong(v); - } - case T_I32: { - int32_t v = 0; - if (!impl()->readI32(v)) { - return nullptr; - } - return PyInt_FromLong(v); - } - - case T_I64: { - int64_t v = 0; - if (!impl()->readI64(v)) { - return nullptr; - } - // TODO(dreiss): Find out if we can take this fastpath always when - // sizeof(long) == sizeof(long long). - if (CHECK_RANGE(v, LONG_MIN, LONG_MAX)) { - return PyInt_FromLong((long)v); - } - return PyLong_FromLongLong(v); - } - - case T_DOUBLE: { - double v = 0.0; - if (!impl()->readDouble(v)) { - return nullptr; - } - return PyFloat_FromDouble(v); - } - - case T_STRING: { - char* buf = nullptr; - int len = impl()->readString(&buf); - if (len < 0) { - return nullptr; - } - if (isUtf8(typeargs)) { - return PyUnicode_DecodeUTF8(buf, len, "replace"); - } else { - return PyBytes_FromStringAndSize(buf, len); - } - } - - case T_LIST: - case T_SET: { - SetListTypeArgs parsedargs; - if (!parse_set_list_args(&parsedargs, typeargs)) { - return nullptr; - } - - TType etype = T_STOP; - int32_t len = impl()->readListBegin(etype); - if (len < 0) { - return nullptr; - } - if (len > 0 && !checkType(etype, parsedargs.element_type)) { - return nullptr; - } - - bool use_tuple = type == T_LIST && parsedargs.immutable; - ScopedPyObject ret(use_tuple ? 
PyTuple_New(len) : PyList_New(len)); - if (!ret) { - return nullptr; - } - - for (int i = 0; i < len; i++) { - PyObject* item = decodeValue(etype, parsedargs.typeargs); - if (!item) { - return nullptr; - } - if (use_tuple) { - PyTuple_SET_ITEM(ret.get(), i, item); - } else { - PyList_SET_ITEM(ret.get(), i, item); - } - } - - // TODO(dreiss): Consider biting the bullet and making two separate cases - // for list and set, avoiding this post facto conversion. - if (type == T_SET) { - PyObject* setret; - setret = parsedargs.immutable ? PyFrozenSet_New(ret.get()) : PySet_New(ret.get()); - return setret; - } - return ret.release(); - } - - case T_MAP: { - MapTypeArgs parsedargs; - if (!parse_map_args(&parsedargs, typeargs)) { - return nullptr; - } - - TType ktype = T_STOP; - TType vtype = T_STOP; - uint32_t len = impl()->readMapBegin(ktype, vtype); - if (len > 0 && (!checkType(ktype, parsedargs.ktag) || !checkType(vtype, parsedargs.vtag))) { - return nullptr; - } - - ScopedPyObject ret(PyDict_New()); - if (!ret) { - return nullptr; - } - - for (uint32_t i = 0; i < len; i++) { - ScopedPyObject k(decodeValue(ktype, parsedargs.ktypeargs)); - if (!k) { - return nullptr; - } - ScopedPyObject v(decodeValue(vtype, parsedargs.vtypeargs)); - if (!v) { - return nullptr; - } - if (PyDict_SetItem(ret.get(), k.get(), v.get()) == -1) { - return nullptr; - } - } - - if (parsedargs.immutable) { - if (!ThriftModule) { - ThriftModule = PyImport_ImportModule("thrift.Thrift"); - } - if (!ThriftModule) { - return nullptr; - } - - ScopedPyObject cls(PyObject_GetAttr(ThriftModule, INTERN_STRING(TFrozenDict))); - if (!cls) { - return nullptr; - } - - ScopedPyObject arg(PyTuple_New(1)); - PyTuple_SET_ITEM(arg.get(), 0, ret.release()); - ret.reset(PyObject_CallObject(cls.get(), arg.get())); - } - - return ret.release(); - } - - case T_STRUCT: { - StructTypeArgs parsedargs; - if (!parse_struct_args(&parsedargs, typeargs)) { - return nullptr; - } - return readStruct(Py_None, parsedargs.klass, 
parsedargs.spec); - } - - case T_STOP: - case T_VOID: - case T_UTF16: - case T_UTF8: - case T_U64: - default: - PyErr_Format(PyExc_TypeError, "Unexpected TType for decodeValue: %d", type); - return nullptr; - } -} - -template -PyObject* ProtocolBase::readStruct(PyObject* output, PyObject* klass, PyObject* spec_seq) { - int spec_seq_len = PyTuple_Size(spec_seq); - bool immutable = output == Py_None; - ScopedPyObject kwargs; - if (spec_seq_len == -1) { - return nullptr; - } - - if (immutable) { - kwargs.reset(PyDict_New()); - if (!kwargs) { - PyErr_SetString(PyExc_TypeError, "failed to prepare kwargument storage"); - return nullptr; - } - } - - detail::ReadStructScope scope = detail::readStructScope(this); - if (!scope) { - return nullptr; - } - while (true) { - TType type = T_STOP; - int16_t tag; - if (!impl()->readFieldBegin(type, tag)) { - return nullptr; - } - if (type == T_STOP) { - break; - } - if (tag < 0 || tag >= spec_seq_len) { - if (!skip(type)) { - PyErr_SetString(PyExc_TypeError, "Error while skipping unknown field"); - return nullptr; - } - continue; - } - - PyObject* item_spec = PyTuple_GET_ITEM(spec_seq, tag); - if (item_spec == Py_None) { - if (!skip(type)) { - PyErr_SetString(PyExc_TypeError, "Error while skipping unknown field"); - return nullptr; - } - continue; - } - StructItemSpec parsedspec; - if (!parse_struct_item_spec(&parsedspec, item_spec)) { - return nullptr; - } - if (parsedspec.type != type) { - if (!skip(type)) { - PyErr_Format(PyExc_TypeError, "struct field had wrong type: expected %d but got %d", - parsedspec.type, type); - return nullptr; - } - continue; - } - - ScopedPyObject fieldval(decodeValue(parsedspec.type, parsedspec.typeargs)); - if (!fieldval) { - return nullptr; - } - - if ((immutable && PyDict_SetItem(kwargs.get(), parsedspec.attrname, fieldval.get()) == -1) - || (!immutable && PyObject_SetAttr(output, parsedspec.attrname, fieldval.get()) == -1)) { - return nullptr; - } - } - if (immutable) { - ScopedPyObject 
args(PyTuple_New(0)); - if (!args) { - PyErr_SetString(PyExc_TypeError, "failed to prepare argument storage"); - return nullptr; - } - return PyObject_Call(klass, args.get(), kwargs.get()); - } - Py_INCREF(output); - return output; -} -} -} -} -#endif // THRIFT_PY_PROTOCOL_H diff --git a/shell/ext-py/thrift-0.16.0/src/ext/types.cpp b/shell/ext-py/thrift-0.16.0/src/ext/types.cpp deleted file mode 100644 index e8d6939b1..000000000 --- a/shell/ext-py/thrift-0.16.0/src/ext/types.cpp +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -#include "ext/types.h" -#include "ext/protocol.h" - -namespace apache { -namespace thrift { -namespace py { - -PyObject* ThriftModule = nullptr; - -#if PY_MAJOR_VERSION < 3 -char refill_signature[] = {'s', '#', 'i'}; -#else -const char* refill_signature = "y#i"; -#endif - -bool parse_struct_item_spec(StructItemSpec* dest, PyObject* spec_tuple) { - // i'd like to use ParseArgs here, but it seems to be a bottleneck. 
- if (PyTuple_Size(spec_tuple) != 5) { - PyErr_Format(PyExc_TypeError, "expecting 5 arguments for spec tuple but got %d", - static_cast(PyTuple_Size(spec_tuple))); - return false; - } - - dest->tag = static_cast(PyInt_AsLong(PyTuple_GET_ITEM(spec_tuple, 0))); - if (INT_CONV_ERROR_OCCURRED(dest->tag)) { - return false; - } - - dest->type = static_cast(PyInt_AsLong(PyTuple_GET_ITEM(spec_tuple, 1))); - if (INT_CONV_ERROR_OCCURRED(dest->type)) { - return false; - } - - dest->attrname = PyTuple_GET_ITEM(spec_tuple, 2); - dest->typeargs = PyTuple_GET_ITEM(spec_tuple, 3); - dest->defval = PyTuple_GET_ITEM(spec_tuple, 4); - return true; -} - -bool parse_set_list_args(SetListTypeArgs* dest, PyObject* typeargs) { - if (PyTuple_Size(typeargs) != 3) { - PyErr_SetString(PyExc_TypeError, "expecting tuple of size 3 for list/set type args"); - return false; - } - - dest->element_type = static_cast(PyInt_AsLong(PyTuple_GET_ITEM(typeargs, 0))); - if (INT_CONV_ERROR_OCCURRED(dest->element_type)) { - return false; - } - - dest->typeargs = PyTuple_GET_ITEM(typeargs, 1); - - dest->immutable = Py_True == PyTuple_GET_ITEM(typeargs, 2); - - return true; -} - -bool parse_map_args(MapTypeArgs* dest, PyObject* typeargs) { - if (PyTuple_Size(typeargs) != 5) { - PyErr_SetString(PyExc_TypeError, "expecting 5 arguments for typeargs to map"); - return false; - } - - dest->ktag = static_cast(PyInt_AsLong(PyTuple_GET_ITEM(typeargs, 0))); - if (INT_CONV_ERROR_OCCURRED(dest->ktag)) { - return false; - } - - dest->vtag = static_cast(PyInt_AsLong(PyTuple_GET_ITEM(typeargs, 2))); - if (INT_CONV_ERROR_OCCURRED(dest->vtag)) { - return false; - } - - dest->ktypeargs = PyTuple_GET_ITEM(typeargs, 1); - dest->vtypeargs = PyTuple_GET_ITEM(typeargs, 3); - dest->immutable = Py_True == PyTuple_GET_ITEM(typeargs, 4); - - return true; -} - -bool parse_struct_args(StructTypeArgs* dest, PyObject* typeargs) { - if (PyList_Size(typeargs) != 2) { - PyErr_SetString(PyExc_TypeError, "expecting list of size 2 for struct 
args"); - return false; - } - - dest->klass = PyList_GET_ITEM(typeargs, 0); - dest->spec = PyList_GET_ITEM(typeargs, 1); - - return true; -} -} -} -} diff --git a/shell/ext-py/thrift-0.16.0/src/ext/types.h b/shell/ext-py/thrift-0.16.0/src/ext/types.h deleted file mode 100644 index 9b45dd065..000000000 --- a/shell/ext-py/thrift-0.16.0/src/ext/types.h +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -#ifndef THRIFT_PY_TYPES_H -#define THRIFT_PY_TYPES_H - -#include - -#ifdef _MSC_VER -#define __STDC_FORMAT_MACROS -#define __STDC_LIMIT_MACROS -#endif -#include - -#if PY_MAJOR_VERSION >= 3 - -#include - -// TODO: better macros -#define PyInt_AsLong(v) PyLong_AsLong(v) -#define PyInt_FromLong(v) PyLong_FromLong(v) - -#define PyString_InternFromString(v) PyUnicode_InternFromString(v) - -#endif - -#define INTERN_STRING(value) _intern_##value - -#define INT_CONV_ERROR_OCCURRED(v) (((v) == -1) && PyErr_Occurred()) - -extern "C" { -extern PyObject* INTERN_STRING(TFrozenDict); -extern PyObject* INTERN_STRING(cstringio_buf); -extern PyObject* INTERN_STRING(cstringio_refill); -} - -namespace apache { -namespace thrift { -namespace py { - -extern PyObject* ThriftModule; - -// Stolen out of TProtocol.h. -// It would be a huge pain to have both get this from one place. -enum TType { - T_INVALID = -1, - T_STOP = 0, - T_VOID = 1, - T_BOOL = 2, - T_BYTE = 3, - T_I08 = 3, - T_I16 = 6, - T_I32 = 8, - T_U64 = 9, - T_I64 = 10, - T_DOUBLE = 4, - T_STRING = 11, - T_UTF7 = 11, - T_STRUCT = 12, - T_MAP = 13, - T_SET = 14, - T_LIST = 15, - T_UTF8 = 16, - T_UTF16 = 17 -}; - -// replace with unique_ptr when we're OK with C++11 -class ScopedPyObject { -public: - ScopedPyObject() : obj_(nullptr) {} - explicit ScopedPyObject(PyObject* py_object) : obj_(py_object) {} - ~ScopedPyObject() { - if (obj_) - Py_DECREF(obj_); - } - PyObject* get() throw() { return obj_; } - operator bool() { return obj_; } - void reset(PyObject* py_object) throw() { - if (obj_) - Py_DECREF(obj_); - obj_ = py_object; - } - PyObject* release() throw() { - PyObject* tmp = obj_; - obj_ = nullptr; - return tmp; - } - void swap(ScopedPyObject& other) throw() { - ScopedPyObject tmp(other.release()); - other.reset(release()); - reset(tmp.release()); - } - -private: - ScopedPyObject(const ScopedPyObject&) {} - ScopedPyObject& operator=(const ScopedPyObject&) { return *this; } - - PyObject* obj_; -}; - -/** - * A cache 
of the two key attributes of a CReadableTransport, - * so we don't have to keep calling PyObject_GetAttr. - */ -struct DecodeBuffer { - ScopedPyObject stringiobuf; - ScopedPyObject refill_callable; -}; - -#if PY_MAJOR_VERSION < 3 -extern char refill_signature[3]; -typedef PyObject EncodeBuffer; -#else -extern const char* refill_signature; -struct EncodeBuffer { - std::vector buf; - size_t pos; -}; -#endif - -/** - * A cache of the spec_args for a set or list, - * so we don't have to keep calling PyTuple_GET_ITEM. - */ -struct SetListTypeArgs { - TType element_type; - PyObject* typeargs; - bool immutable; -}; - -/** - * A cache of the spec_args for a map, - * so we don't have to keep calling PyTuple_GET_ITEM. - */ -struct MapTypeArgs { - TType ktag; - TType vtag; - PyObject* ktypeargs; - PyObject* vtypeargs; - bool immutable; -}; - -/** - * A cache of the spec_args for a struct, - * so we don't have to keep calling PyTuple_GET_ITEM. - */ -struct StructTypeArgs { - PyObject* klass; - PyObject* spec; - bool immutable; -}; - -/** - * A cache of the item spec from a struct specification, - * so we don't have to keep calling PyTuple_GET_ITEM. - */ -struct StructItemSpec { - int tag; - TType type; - PyObject* attrname; - PyObject* typeargs; - PyObject* defval; -}; - -bool parse_set_list_args(SetListTypeArgs* dest, PyObject* typeargs); - -bool parse_map_args(MapTypeArgs* dest, PyObject* typeargs); - -bool parse_struct_args(StructTypeArgs* dest, PyObject* typeargs); - -bool parse_struct_item_spec(StructItemSpec* dest, PyObject* spec_tuple); -} -} -} - -#endif // THRIFT_PY_TYPES_H diff --git a/shell/ext-py/thrift-0.16.0/src/protocol/TBase.py b/shell/ext-py/thrift-0.16.0/src/protocol/TBase.py deleted file mode 100644 index 6c6ef18e8..000000000 --- a/shell/ext-py/thrift-0.16.0/src/protocol/TBase.py +++ /dev/null @@ -1,86 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. 
See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - -from thrift.transport import TTransport - - -class TBase(object): - __slots__ = () - - def __repr__(self): - L = ['%s=%r' % (key, getattr(self, key)) for key in self.__slots__] - return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return False - for attr in self.__slots__: - my_val = getattr(self, attr) - other_val = getattr(other, attr) - if my_val != other_val: - return False - return True - - def __ne__(self, other): - return not (self == other) - - def read(self, iprot): - if (iprot._fast_decode is not None and - isinstance(iprot.trans, TTransport.CReadableTransport) and - self.thrift_spec is not None): - iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) - else: - iprot.readStruct(self, self.thrift_spec) - - def write(self, oprot): - if (oprot._fast_encode is not None and self.thrift_spec is not None): - oprot.trans.write( - oprot._fast_encode(self, [self.__class__, self.thrift_spec])) - else: - oprot.writeStruct(self, self.thrift_spec) - - -class TExceptionBase(TBase, Exception): - pass - - -class TFrozenBase(TBase): - def __setitem__(self, *args): - raise TypeError("Can't modify frozen struct") - - def __delitem__(self, *args): - raise TypeError("Can't modify frozen struct") - - 
def __hash__(self, *args): - return hash(self.__class__) ^ hash(self.__slots__) - - @classmethod - def read(cls, iprot): - if (iprot._fast_decode is not None and - isinstance(iprot.trans, TTransport.CReadableTransport) and - cls.thrift_spec is not None): - self = cls() - return iprot._fast_decode(None, iprot, - [self.__class__, self.thrift_spec]) - else: - return iprot.readStruct(cls, cls.thrift_spec, True) - - -class TFrozenExceptionBase(TFrozenBase, TExceptionBase): - pass diff --git a/shell/ext-py/thrift-0.16.0/src/protocol/TBinaryProtocol.py b/shell/ext-py/thrift-0.16.0/src/protocol/TBinaryProtocol.py deleted file mode 100644 index 6b2facc4f..000000000 --- a/shell/ext-py/thrift-0.16.0/src/protocol/TBinaryProtocol.py +++ /dev/null @@ -1,301 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - -from .TProtocol import TType, TProtocolBase, TProtocolException, TProtocolFactory -from struct import pack, unpack - - -class TBinaryProtocol(TProtocolBase): - """Binary implementation of the Thrift protocol driver.""" - - # NastyHaxx. Python 2.4+ on 32-bit machines forces hex constants to be - # positive, converting this into a long. If we hardcode the int value - # instead it'll stay in 32 bit-land. 
- - # VERSION_MASK = 0xffff0000 - VERSION_MASK = -65536 - - # VERSION_1 = 0x80010000 - VERSION_1 = -2147418112 - - TYPE_MASK = 0x000000ff - - def __init__(self, trans, strictRead=False, strictWrite=True, **kwargs): - TProtocolBase.__init__(self, trans) - self.strictRead = strictRead - self.strictWrite = strictWrite - self.string_length_limit = kwargs.get('string_length_limit', None) - self.container_length_limit = kwargs.get('container_length_limit', None) - - def _check_string_length(self, length): - self._check_length(self.string_length_limit, length) - - def _check_container_length(self, length): - self._check_length(self.container_length_limit, length) - - def writeMessageBegin(self, name, type, seqid): - if self.strictWrite: - self.writeI32(TBinaryProtocol.VERSION_1 | type) - self.writeString(name) - self.writeI32(seqid) - else: - self.writeString(name) - self.writeByte(type) - self.writeI32(seqid) - - def writeMessageEnd(self): - pass - - def writeStructBegin(self, name): - pass - - def writeStructEnd(self): - pass - - def writeFieldBegin(self, name, type, id): - self.writeByte(type) - self.writeI16(id) - - def writeFieldEnd(self): - pass - - def writeFieldStop(self): - self.writeByte(TType.STOP) - - def writeMapBegin(self, ktype, vtype, size): - self.writeByte(ktype) - self.writeByte(vtype) - self.writeI32(size) - - def writeMapEnd(self): - pass - - def writeListBegin(self, etype, size): - self.writeByte(etype) - self.writeI32(size) - - def writeListEnd(self): - pass - - def writeSetBegin(self, etype, size): - self.writeByte(etype) - self.writeI32(size) - - def writeSetEnd(self): - pass - - def writeBool(self, bool): - if bool: - self.writeByte(1) - else: - self.writeByte(0) - - def writeByte(self, byte): - buff = pack("!b", byte) - self.trans.write(buff) - - def writeI16(self, i16): - buff = pack("!h", i16) - self.trans.write(buff) - - def writeI32(self, i32): - buff = pack("!i", i32) - self.trans.write(buff) - - def writeI64(self, i64): - buff = pack("!q", 
i64) - self.trans.write(buff) - - def writeDouble(self, dub): - buff = pack("!d", dub) - self.trans.write(buff) - - def writeBinary(self, str): - self.writeI32(len(str)) - self.trans.write(str) - - def readMessageBegin(self): - sz = self.readI32() - if sz < 0: - version = sz & TBinaryProtocol.VERSION_MASK - if version != TBinaryProtocol.VERSION_1: - raise TProtocolException( - type=TProtocolException.BAD_VERSION, - message='Bad version in readMessageBegin: %d' % (sz)) - type = sz & TBinaryProtocol.TYPE_MASK - name = self.readString() - seqid = self.readI32() - else: - if self.strictRead: - raise TProtocolException(type=TProtocolException.BAD_VERSION, - message='No protocol version header') - name = self.trans.readAll(sz) - type = self.readByte() - seqid = self.readI32() - return (name, type, seqid) - - def readMessageEnd(self): - pass - - def readStructBegin(self): - pass - - def readStructEnd(self): - pass - - def readFieldBegin(self): - type = self.readByte() - if type == TType.STOP: - return (None, type, 0) - id = self.readI16() - return (None, type, id) - - def readFieldEnd(self): - pass - - def readMapBegin(self): - ktype = self.readByte() - vtype = self.readByte() - size = self.readI32() - self._check_container_length(size) - return (ktype, vtype, size) - - def readMapEnd(self): - pass - - def readListBegin(self): - etype = self.readByte() - size = self.readI32() - self._check_container_length(size) - return (etype, size) - - def readListEnd(self): - pass - - def readSetBegin(self): - etype = self.readByte() - size = self.readI32() - self._check_container_length(size) - return (etype, size) - - def readSetEnd(self): - pass - - def readBool(self): - byte = self.readByte() - if byte == 0: - return False - return True - - def readByte(self): - buff = self.trans.readAll(1) - val, = unpack('!b', buff) - return val - - def readI16(self): - buff = self.trans.readAll(2) - val, = unpack('!h', buff) - return val - - def readI32(self): - buff = self.trans.readAll(4) - 
val, = unpack('!i', buff) - return val - - def readI64(self): - buff = self.trans.readAll(8) - val, = unpack('!q', buff) - return val - - def readDouble(self): - buff = self.trans.readAll(8) - val, = unpack('!d', buff) - return val - - def readBinary(self): - size = self.readI32() - self._check_string_length(size) - s = self.trans.readAll(size) - return s - - -class TBinaryProtocolFactory(TProtocolFactory): - def __init__(self, strictRead=False, strictWrite=True, **kwargs): - self.strictRead = strictRead - self.strictWrite = strictWrite - self.string_length_limit = kwargs.get('string_length_limit', None) - self.container_length_limit = kwargs.get('container_length_limit', None) - - def getProtocol(self, trans): - prot = TBinaryProtocol(trans, self.strictRead, self.strictWrite, - string_length_limit=self.string_length_limit, - container_length_limit=self.container_length_limit) - return prot - - -class TBinaryProtocolAccelerated(TBinaryProtocol): - """C-Accelerated version of TBinaryProtocol. - - This class does not override any of TBinaryProtocol's methods, - but the generated code recognizes it directly and will call into - our C module to do the encoding, bypassing this object entirely. - We inherit from TBinaryProtocol so that the normal TBinaryProtocol - encoding can happen if the fastbinary module doesn't work for some - reason. (TODO(dreiss): Make this happen sanely in more cases.) - To disable this behavior, pass fallback=False constructor argument. - - In order to take advantage of the C module, just use - TBinaryProtocolAccelerated instead of TBinaryProtocol. - - NOTE: This code was contributed by an external developer. - The internal Thrift team has reviewed and tested it, - but we cannot guarantee that it is production-ready. - Please feel free to report bugs and/or success stories - to the public mailing list. 
- """ - pass - - def __init__(self, *args, **kwargs): - fallback = kwargs.pop('fallback', True) - super(TBinaryProtocolAccelerated, self).__init__(*args, **kwargs) - try: - from thrift.protocol import fastbinary - except ImportError: - if not fallback: - raise - else: - self._fast_decode = fastbinary.decode_binary - self._fast_encode = fastbinary.encode_binary - - -class TBinaryProtocolAcceleratedFactory(TProtocolFactory): - def __init__(self, - string_length_limit=None, - container_length_limit=None, - fallback=True): - self.string_length_limit = string_length_limit - self.container_length_limit = container_length_limit - self._fallback = fallback - - def getProtocol(self, trans): - return TBinaryProtocolAccelerated( - trans, - string_length_limit=self.string_length_limit, - container_length_limit=self.container_length_limit, - fallback=self._fallback) diff --git a/shell/ext-py/thrift-0.16.0/src/protocol/TCompactProtocol.py b/shell/ext-py/thrift-0.16.0/src/protocol/TCompactProtocol.py deleted file mode 100644 index 700e792f7..000000000 --- a/shell/ext-py/thrift-0.16.0/src/protocol/TCompactProtocol.py +++ /dev/null @@ -1,487 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -from .TProtocol import TType, TProtocolBase, TProtocolException, TProtocolFactory, checkIntegerLimits -from struct import pack, unpack - -from ..compat import binary_to_str, str_to_binary - -__all__ = ['TCompactProtocol', 'TCompactProtocolFactory'] - -CLEAR = 0 -FIELD_WRITE = 1 -VALUE_WRITE = 2 -CONTAINER_WRITE = 3 -BOOL_WRITE = 4 -FIELD_READ = 5 -CONTAINER_READ = 6 -VALUE_READ = 7 -BOOL_READ = 8 - - -def make_helper(v_from, container): - def helper(func): - def nested(self, *args, **kwargs): - assert self.state in (v_from, container), (self.state, v_from, container) - return func(self, *args, **kwargs) - return nested - return helper - - -writer = make_helper(VALUE_WRITE, CONTAINER_WRITE) -reader = make_helper(VALUE_READ, CONTAINER_READ) - - -def makeZigZag(n, bits): - checkIntegerLimits(n, bits) - return (n << 1) ^ (n >> (bits - 1)) - - -def fromZigZag(n): - return (n >> 1) ^ -(n & 1) - - -def writeVarint(trans, n): - assert n >= 0, "Input to TCompactProtocol writeVarint cannot be negative!" 
- out = bytearray() - while True: - if n & ~0x7f == 0: - out.append(n) - break - else: - out.append((n & 0xff) | 0x80) - n = n >> 7 - trans.write(bytes(out)) - - -def readVarint(trans): - result = 0 - shift = 0 - while True: - x = trans.readAll(1) - byte = ord(x) - result |= (byte & 0x7f) << shift - if byte >> 7 == 0: - return result - shift += 7 - - -class CompactType(object): - STOP = 0x00 - TRUE = 0x01 - FALSE = 0x02 - BYTE = 0x03 - I16 = 0x04 - I32 = 0x05 - I64 = 0x06 - DOUBLE = 0x07 - BINARY = 0x08 - LIST = 0x09 - SET = 0x0A - MAP = 0x0B - STRUCT = 0x0C - - -CTYPES = { - TType.STOP: CompactType.STOP, - TType.BOOL: CompactType.TRUE, # used for collection - TType.BYTE: CompactType.BYTE, - TType.I16: CompactType.I16, - TType.I32: CompactType.I32, - TType.I64: CompactType.I64, - TType.DOUBLE: CompactType.DOUBLE, - TType.STRING: CompactType.BINARY, - TType.STRUCT: CompactType.STRUCT, - TType.LIST: CompactType.LIST, - TType.SET: CompactType.SET, - TType.MAP: CompactType.MAP, -} - -TTYPES = {} -for k, v in CTYPES.items(): - TTYPES[v] = k -TTYPES[CompactType.FALSE] = TType.BOOL -del k -del v - - -class TCompactProtocol(TProtocolBase): - """Compact implementation of the Thrift protocol driver.""" - - PROTOCOL_ID = 0x82 - VERSION = 1 - VERSION_MASK = 0x1f - TYPE_MASK = 0xe0 - TYPE_BITS = 0x07 - TYPE_SHIFT_AMOUNT = 5 - - def __init__(self, trans, - string_length_limit=None, - container_length_limit=None): - TProtocolBase.__init__(self, trans) - self.state = CLEAR - self.__last_fid = 0 - self.__bool_fid = None - self.__bool_value = None - self.__structs = [] - self.__containers = [] - self.string_length_limit = string_length_limit - self.container_length_limit = container_length_limit - - def _check_string_length(self, length): - self._check_length(self.string_length_limit, length) - - def _check_container_length(self, length): - self._check_length(self.container_length_limit, length) - - def __writeVarint(self, n): - writeVarint(self.trans, n) - - def 
writeMessageBegin(self, name, type, seqid): - assert self.state == CLEAR - self.__writeUByte(self.PROTOCOL_ID) - self.__writeUByte(self.VERSION | (type << self.TYPE_SHIFT_AMOUNT)) - # The sequence id is a signed 32-bit integer but the compact protocol - # writes this out as a "var int" which is always positive, and attempting - # to write a negative number results in an infinite loop, so we may - # need to do some conversion here... - tseqid = seqid - if tseqid < 0: - tseqid = 2147483648 + (2147483648 + tseqid) - self.__writeVarint(tseqid) - self.__writeBinary(str_to_binary(name)) - self.state = VALUE_WRITE - - def writeMessageEnd(self): - assert self.state == VALUE_WRITE - self.state = CLEAR - - def writeStructBegin(self, name): - assert self.state in (CLEAR, CONTAINER_WRITE, VALUE_WRITE), self.state - self.__structs.append((self.state, self.__last_fid)) - self.state = FIELD_WRITE - self.__last_fid = 0 - - def writeStructEnd(self): - assert self.state == FIELD_WRITE - self.state, self.__last_fid = self.__structs.pop() - - def writeFieldStop(self): - self.__writeByte(0) - - def __writeFieldHeader(self, type, fid): - delta = fid - self.__last_fid - if 0 < delta <= 15: - self.__writeUByte(delta << 4 | type) - else: - self.__writeByte(type) - self.__writeI16(fid) - self.__last_fid = fid - - def writeFieldBegin(self, name, type, fid): - assert self.state == FIELD_WRITE, self.state - if type == TType.BOOL: - self.state = BOOL_WRITE - self.__bool_fid = fid - else: - self.state = VALUE_WRITE - self.__writeFieldHeader(CTYPES[type], fid) - - def writeFieldEnd(self): - assert self.state in (VALUE_WRITE, BOOL_WRITE), self.state - self.state = FIELD_WRITE - - def __writeUByte(self, byte): - self.trans.write(pack('!B', byte)) - - def __writeByte(self, byte): - self.trans.write(pack('!b', byte)) - - def __writeI16(self, i16): - self.__writeVarint(makeZigZag(i16, 16)) - - def __writeSize(self, i32): - self.__writeVarint(i32) - - def writeCollectionBegin(self, etype, size): - 
assert self.state in (VALUE_WRITE, CONTAINER_WRITE), self.state - if size <= 14: - self.__writeUByte(size << 4 | CTYPES[etype]) - else: - self.__writeUByte(0xf0 | CTYPES[etype]) - self.__writeSize(size) - self.__containers.append(self.state) - self.state = CONTAINER_WRITE - writeSetBegin = writeCollectionBegin - writeListBegin = writeCollectionBegin - - def writeMapBegin(self, ktype, vtype, size): - assert self.state in (VALUE_WRITE, CONTAINER_WRITE), self.state - if size == 0: - self.__writeByte(0) - else: - self.__writeSize(size) - self.__writeUByte(CTYPES[ktype] << 4 | CTYPES[vtype]) - self.__containers.append(self.state) - self.state = CONTAINER_WRITE - - def writeCollectionEnd(self): - assert self.state == CONTAINER_WRITE, self.state - self.state = self.__containers.pop() - writeMapEnd = writeCollectionEnd - writeSetEnd = writeCollectionEnd - writeListEnd = writeCollectionEnd - - def writeBool(self, bool): - if self.state == BOOL_WRITE: - if bool: - ctype = CompactType.TRUE - else: - ctype = CompactType.FALSE - self.__writeFieldHeader(ctype, self.__bool_fid) - elif self.state == CONTAINER_WRITE: - if bool: - self.__writeByte(CompactType.TRUE) - else: - self.__writeByte(CompactType.FALSE) - else: - raise AssertionError("Invalid state in compact protocol") - - writeByte = writer(__writeByte) - writeI16 = writer(__writeI16) - - @writer - def writeI32(self, i32): - self.__writeVarint(makeZigZag(i32, 32)) - - @writer - def writeI64(self, i64): - self.__writeVarint(makeZigZag(i64, 64)) - - @writer - def writeDouble(self, dub): - self.trans.write(pack('> 4 - if delta == 0: - fid = self.__readI16() - else: - fid = self.__last_fid + delta - self.__last_fid = fid - type = type & 0x0f - if type == CompactType.TRUE: - self.state = BOOL_READ - self.__bool_value = True - elif type == CompactType.FALSE: - self.state = BOOL_READ - self.__bool_value = False - else: - self.state = VALUE_READ - return (None, self.__getTType(type), fid) - - def readFieldEnd(self): - assert 
self.state in (VALUE_READ, BOOL_READ), self.state - self.state = FIELD_READ - - def __readUByte(self): - result, = unpack('!B', self.trans.readAll(1)) - return result - - def __readByte(self): - result, = unpack('!b', self.trans.readAll(1)) - return result - - def __readVarint(self): - return readVarint(self.trans) - - def __readZigZag(self): - return fromZigZag(self.__readVarint()) - - def __readSize(self): - result = self.__readVarint() - if result < 0: - raise TProtocolException("Length < 0") - return result - - def readMessageBegin(self): - assert self.state == CLEAR - proto_id = self.__readUByte() - if proto_id != self.PROTOCOL_ID: - raise TProtocolException(TProtocolException.BAD_VERSION, - 'Bad protocol id in the message: %d' % proto_id) - ver_type = self.__readUByte() - type = (ver_type >> self.TYPE_SHIFT_AMOUNT) & self.TYPE_BITS - version = ver_type & self.VERSION_MASK - if version != self.VERSION: - raise TProtocolException(TProtocolException.BAD_VERSION, - 'Bad version: %d (expect %d)' % (version, self.VERSION)) - seqid = self.__readVarint() - # the sequence is a compact "var int" which is treaded as unsigned, - # however the sequence is actually signed... 
- if seqid > 2147483647: - seqid = -2147483648 - (2147483648 - seqid) - name = binary_to_str(self.__readBinary()) - return (name, type, seqid) - - def readMessageEnd(self): - assert self.state == CLEAR - assert len(self.__structs) == 0 - - def readStructBegin(self): - assert self.state in (CLEAR, CONTAINER_READ, VALUE_READ), self.state - self.__structs.append((self.state, self.__last_fid)) - self.state = FIELD_READ - self.__last_fid = 0 - - def readStructEnd(self): - assert self.state == FIELD_READ - self.state, self.__last_fid = self.__structs.pop() - - def readCollectionBegin(self): - assert self.state in (VALUE_READ, CONTAINER_READ), self.state - size_type = self.__readUByte() - size = size_type >> 4 - type = self.__getTType(size_type) - if size == 15: - size = self.__readSize() - self._check_container_length(size) - self.__containers.append(self.state) - self.state = CONTAINER_READ - return type, size - readSetBegin = readCollectionBegin - readListBegin = readCollectionBegin - - def readMapBegin(self): - assert self.state in (VALUE_READ, CONTAINER_READ), self.state - size = self.__readSize() - self._check_container_length(size) - types = 0 - if size > 0: - types = self.__readUByte() - vtype = self.__getTType(types) - ktype = self.__getTType(types >> 4) - self.__containers.append(self.state) - self.state = CONTAINER_READ - return (ktype, vtype, size) - - def readCollectionEnd(self): - assert self.state == CONTAINER_READ, self.state - self.state = self.__containers.pop() - readSetEnd = readCollectionEnd - readListEnd = readCollectionEnd - readMapEnd = readCollectionEnd - - def readBool(self): - if self.state == BOOL_READ: - return self.__bool_value == CompactType.TRUE - elif self.state == CONTAINER_READ: - return self.__readByte() == CompactType.TRUE - else: - raise AssertionError("Invalid state in compact protocol: %d" % - self.state) - - readByte = reader(__readByte) - __readI16 = __readZigZag - readI16 = reader(__readZigZag) - readI32 = reader(__readZigZag) - 
readI64 = reader(__readZigZag) - - @reader - def readDouble(self): - buff = self.trans.readAll(8) - val, = unpack('= 0xd800 and codeunit <= 0xdbff - - def _isLowSurrogate(self, codeunit): - return codeunit >= 0xdc00 and codeunit <= 0xdfff - - def _toChar(self, high, low=None): - if not low: - if sys.version_info[0] == 2: - return ("\\u%04x" % high).decode('unicode-escape') \ - .encode('utf-8') - else: - return chr(high) - else: - codepoint = (1 << 16) + ((high & 0x3ff) << 10) - codepoint += low & 0x3ff - if sys.version_info[0] == 2: - s = "\\U%08x" % codepoint - return s.decode('unicode-escape').encode('utf-8') - else: - return chr(codepoint) - - def readJSONString(self, skipContext): - highSurrogate = None - string = [] - if skipContext is False: - self.context.read() - self.readJSONSyntaxChar(QUOTE) - while True: - character = self.reader.read() - if character == QUOTE: - break - if ord(character) == ESCSEQ0: - character = self.reader.read() - if ord(character) == ESCSEQ1: - character = self.trans.read(4).decode('ascii') - codeunit = int(character, 16) - if self._isHighSurrogate(codeunit): - if highSurrogate: - raise TProtocolException( - TProtocolException.INVALID_DATA, - "Expected low surrogate char") - highSurrogate = codeunit - continue - elif self._isLowSurrogate(codeunit): - if not highSurrogate: - raise TProtocolException( - TProtocolException.INVALID_DATA, - "Expected high surrogate char") - character = self._toChar(highSurrogate, codeunit) - highSurrogate = None - else: - character = self._toChar(codeunit) - else: - if character not in ESCAPE_CHARS: - raise TProtocolException( - TProtocolException.INVALID_DATA, - "Expected control char") - character = ESCAPE_CHARS[character] - elif character in ESCAPE_CHAR_VALS: - raise TProtocolException(TProtocolException.INVALID_DATA, - "Unescaped control char") - elif sys.version_info[0] > 2: - utf8_bytes = bytearray([ord(character)]) - while ord(self.reader.peek()) >= 0x80: - 
utf8_bytes.append(ord(self.reader.read())) - character = utf8_bytes.decode('utf8') - string.append(character) - - if highSurrogate: - raise TProtocolException(TProtocolException.INVALID_DATA, - "Expected low surrogate char") - return ''.join(string) - - def isJSONNumeric(self, character): - return (True if NUMERIC_CHAR.find(character) != - 1 else False) - - def readJSONQuotes(self): - if (self.context.escapeNum()): - self.readJSONSyntaxChar(QUOTE) - - def readJSONNumericChars(self): - numeric = [] - while True: - character = self.reader.peek() - if self.isJSONNumeric(character) is False: - break - numeric.append(self.reader.read()) - return b''.join(numeric).decode('ascii') - - def readJSONInteger(self): - self.context.read() - self.readJSONQuotes() - numeric = self.readJSONNumericChars() - self.readJSONQuotes() - try: - return int(numeric) - except ValueError: - raise TProtocolException(TProtocolException.INVALID_DATA, - "Bad data encounted in numeric data") - - def readJSONDouble(self): - self.context.read() - if self.reader.peek() == QUOTE: - string = self.readJSONString(True) - try: - double = float(string) - if (self.context.escapeNum is False and - not math.isinf(double) and - not math.isnan(double)): - raise TProtocolException( - TProtocolException.INVALID_DATA, - "Numeric data unexpectedly quoted") - return double - except ValueError: - raise TProtocolException(TProtocolException.INVALID_DATA, - "Bad data encounted in numeric data") - else: - if self.context.escapeNum() is True: - self.readJSONSyntaxChar(QUOTE) - try: - return float(self.readJSONNumericChars()) - except ValueError: - raise TProtocolException(TProtocolException.INVALID_DATA, - "Bad data encounted in numeric data") - - def readJSONBase64(self): - string = self.readJSONString(False) - size = len(string) - m = size % 4 - # Force padding since b64encode method does not allow it - if m != 0: - for i in range(4 - m): - string += '=' - return base64.b64decode(string) - - def 
readJSONObjectStart(self): - self.context.read() - self.readJSONSyntaxChar(LBRACE) - self.pushContext(JSONPairContext(self)) - - def readJSONObjectEnd(self): - self.readJSONSyntaxChar(RBRACE) - self.popContext() - - def readJSONArrayStart(self): - self.context.read() - self.readJSONSyntaxChar(LBRACKET) - self.pushContext(JSONListContext(self)) - - def readJSONArrayEnd(self): - self.readJSONSyntaxChar(RBRACKET) - self.popContext() - - -class TJSONProtocol(TJSONProtocolBase): - - def readMessageBegin(self): - self.resetReadContext() - self.readJSONArrayStart() - if self.readJSONInteger() != VERSION: - raise TProtocolException(TProtocolException.BAD_VERSION, - "Message contained bad version.") - name = self.readJSONString(False) - typen = self.readJSONInteger() - seqid = self.readJSONInteger() - return (name, typen, seqid) - - def readMessageEnd(self): - self.readJSONArrayEnd() - - def readStructBegin(self): - self.readJSONObjectStart() - - def readStructEnd(self): - self.readJSONObjectEnd() - - def readFieldBegin(self): - character = self.reader.peek() - ttype = 0 - id = 0 - if character == RBRACE: - ttype = TType.STOP - else: - id = self.readJSONInteger() - self.readJSONObjectStart() - ttype = JTYPES[self.readJSONString(False)] - return (None, ttype, id) - - def readFieldEnd(self): - self.readJSONObjectEnd() - - def readMapBegin(self): - self.readJSONArrayStart() - keyType = JTYPES[self.readJSONString(False)] - valueType = JTYPES[self.readJSONString(False)] - size = self.readJSONInteger() - self.readJSONObjectStart() - return (keyType, valueType, size) - - def readMapEnd(self): - self.readJSONObjectEnd() - self.readJSONArrayEnd() - - def readCollectionBegin(self): - self.readJSONArrayStart() - elemType = JTYPES[self.readJSONString(False)] - size = self.readJSONInteger() - return (elemType, size) - readListBegin = readCollectionBegin - readSetBegin = readCollectionBegin - - def readCollectionEnd(self): - self.readJSONArrayEnd() - readSetEnd = readCollectionEnd - 
readListEnd = readCollectionEnd - - def readBool(self): - return (False if self.readJSONInteger() == 0 else True) - - def readNumber(self): - return self.readJSONInteger() - readByte = readNumber - readI16 = readNumber - readI32 = readNumber - readI64 = readNumber - - def readDouble(self): - return self.readJSONDouble() - - def readString(self): - return self.readJSONString(False) - - def readBinary(self): - return self.readJSONBase64() - - def writeMessageBegin(self, name, request_type, seqid): - self.resetWriteContext() - self.writeJSONArrayStart() - self.writeJSONNumber(VERSION) - self.writeJSONString(name) - self.writeJSONNumber(request_type) - self.writeJSONNumber(seqid) - - def writeMessageEnd(self): - self.writeJSONArrayEnd() - - def writeStructBegin(self, name): - self.writeJSONObjectStart() - - def writeStructEnd(self): - self.writeJSONObjectEnd() - - def writeFieldBegin(self, name, ttype, id): - self.writeJSONNumber(id) - self.writeJSONObjectStart() - self.writeJSONString(CTYPES[ttype]) - - def writeFieldEnd(self): - self.writeJSONObjectEnd() - - def writeFieldStop(self): - pass - - def writeMapBegin(self, ktype, vtype, size): - self.writeJSONArrayStart() - self.writeJSONString(CTYPES[ktype]) - self.writeJSONString(CTYPES[vtype]) - self.writeJSONNumber(size) - self.writeJSONObjectStart() - - def writeMapEnd(self): - self.writeJSONObjectEnd() - self.writeJSONArrayEnd() - - def writeListBegin(self, etype, size): - self.writeJSONArrayStart() - self.writeJSONString(CTYPES[etype]) - self.writeJSONNumber(size) - - def writeListEnd(self): - self.writeJSONArrayEnd() - - def writeSetBegin(self, etype, size): - self.writeJSONArrayStart() - self.writeJSONString(CTYPES[etype]) - self.writeJSONNumber(size) - - def writeSetEnd(self): - self.writeJSONArrayEnd() - - def writeBool(self, boolean): - self.writeJSONNumber(1 if boolean is True else 0) - - def writeByte(self, byte): - checkIntegerLimits(byte, 8) - self.writeJSONNumber(byte) - - def writeI16(self, i16): - 
checkIntegerLimits(i16, 16) - self.writeJSONNumber(i16) - - def writeI32(self, i32): - checkIntegerLimits(i32, 32) - self.writeJSONNumber(i32) - - def writeI64(self, i64): - checkIntegerLimits(i64, 64) - self.writeJSONNumber(i64) - - def writeDouble(self, dbl): - # 17 significant digits should be just enough for any double precision - # value. - self.writeJSONNumber(dbl, '{0:.17g}') - - def writeString(self, string): - self.writeJSONString(string) - - def writeBinary(self, binary): - self.writeJSONBase64(binary) - - -class TJSONProtocolFactory(TProtocolFactory): - def getProtocol(self, trans): - return TJSONProtocol(trans) - - @property - def string_length_limit(senf): - return None - - @property - def container_length_limit(senf): - return None - - -class TSimpleJSONProtocol(TJSONProtocolBase): - """Simple, readable, write-only JSON protocol. - - Useful for interacting with scripting languages. - """ - - def readMessageBegin(self): - raise NotImplementedError() - - def readMessageEnd(self): - raise NotImplementedError() - - def readStructBegin(self): - raise NotImplementedError() - - def readStructEnd(self): - raise NotImplementedError() - - def writeMessageBegin(self, name, request_type, seqid): - self.resetWriteContext() - - def writeMessageEnd(self): - pass - - def writeStructBegin(self, name): - self.writeJSONObjectStart() - - def writeStructEnd(self): - self.writeJSONObjectEnd() - - def writeFieldBegin(self, name, ttype, fid): - self.writeJSONString(name) - - def writeFieldEnd(self): - pass - - def writeMapBegin(self, ktype, vtype, size): - self.writeJSONObjectStart() - - def writeMapEnd(self): - self.writeJSONObjectEnd() - - def _writeCollectionBegin(self, etype, size): - self.writeJSONArrayStart() - - def _writeCollectionEnd(self): - self.writeJSONArrayEnd() - writeListBegin = _writeCollectionBegin - writeListEnd = _writeCollectionEnd - writeSetBegin = _writeCollectionBegin - writeSetEnd = _writeCollectionEnd - - def writeByte(self, byte): - 
checkIntegerLimits(byte, 8) - self.writeJSONNumber(byte) - - def writeI16(self, i16): - checkIntegerLimits(i16, 16) - self.writeJSONNumber(i16) - - def writeI32(self, i32): - checkIntegerLimits(i32, 32) - self.writeJSONNumber(i32) - - def writeI64(self, i64): - checkIntegerLimits(i64, 64) - self.writeJSONNumber(i64) - - def writeBool(self, boolean): - self.writeJSONNumber(1 if boolean is True else 0) - - def writeDouble(self, dbl): - self.writeJSONNumber(dbl) - - def writeString(self, string): - self.writeJSONString(string) - - def writeBinary(self, binary): - self.writeJSONBase64(binary) - - -class TSimpleJSONProtocolFactory(TProtocolFactory): - - def getProtocol(self, trans): - return TSimpleJSONProtocol(trans) diff --git a/shell/ext-py/thrift-0.16.0/src/protocol/TMultiplexedProtocol.py b/shell/ext-py/thrift-0.16.0/src/protocol/TMultiplexedProtocol.py deleted file mode 100644 index 0f8390fdb..000000000 --- a/shell/ext-py/thrift-0.16.0/src/protocol/TMultiplexedProtocol.py +++ /dev/null @@ -1,39 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -from thrift.Thrift import TMessageType -from thrift.protocol import TProtocolDecorator - -SEPARATOR = ":" - - -class TMultiplexedProtocol(TProtocolDecorator.TProtocolDecorator): - def __init__(self, protocol, serviceName): - self.serviceName = serviceName - - def writeMessageBegin(self, name, type, seqid): - if (type == TMessageType.CALL or - type == TMessageType.ONEWAY): - super(TMultiplexedProtocol, self).writeMessageBegin( - self.serviceName + SEPARATOR + name, - type, - seqid - ) - else: - super(TMultiplexedProtocol, self).writeMessageBegin(name, type, seqid) diff --git a/shell/ext-py/thrift-0.16.0/src/protocol/TProtocol.py b/shell/ext-py/thrift-0.16.0/src/protocol/TProtocol.py deleted file mode 100644 index 339a2839d..000000000 --- a/shell/ext-py/thrift-0.16.0/src/protocol/TProtocol.py +++ /dev/null @@ -1,428 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -from thrift.Thrift import TException, TType, TFrozenDict -from thrift.transport.TTransport import TTransportException -from ..compat import binary_to_str, str_to_binary - -import six -import sys -from itertools import islice -from six.moves import zip - - -class TProtocolException(TException): - """Custom Protocol Exception class""" - - UNKNOWN = 0 - INVALID_DATA = 1 - NEGATIVE_SIZE = 2 - SIZE_LIMIT = 3 - BAD_VERSION = 4 - NOT_IMPLEMENTED = 5 - DEPTH_LIMIT = 6 - INVALID_PROTOCOL = 7 - - def __init__(self, type=UNKNOWN, message=None): - TException.__init__(self, message) - self.type = type - - -class TProtocolBase(object): - """Base class for Thrift protocol driver.""" - - def __init__(self, trans): - self.trans = trans - self._fast_decode = None - self._fast_encode = None - - @staticmethod - def _check_length(limit, length): - if length < 0: - raise TTransportException(TTransportException.NEGATIVE_SIZE, - 'Negative length: %d' % length) - if limit is not None and length > limit: - raise TTransportException(TTransportException.SIZE_LIMIT, - 'Length exceeded max allowed: %d' % limit) - - def writeMessageBegin(self, name, ttype, seqid): - pass - - def writeMessageEnd(self): - pass - - def writeStructBegin(self, name): - pass - - def writeStructEnd(self): - pass - - def writeFieldBegin(self, name, ttype, fid): - pass - - def writeFieldEnd(self): - pass - - def writeFieldStop(self): - pass - - def writeMapBegin(self, ktype, vtype, size): - pass - - def writeMapEnd(self): - pass - - def writeListBegin(self, etype, size): - pass - - def writeListEnd(self): - pass - - def writeSetBegin(self, etype, size): - pass - - def writeSetEnd(self): - pass - - def writeBool(self, bool_val): - pass - - def writeByte(self, byte): - pass - - def writeI16(self, i16): - pass - - def writeI32(self, i32): - pass - - def writeI64(self, i64): - pass - - def writeDouble(self, dub): - pass - - def writeString(self, str_val): - self.writeBinary(str_to_binary(str_val)) - - def 
writeBinary(self, str_val): - pass - - def writeUtf8(self, str_val): - self.writeString(str_val.encode('utf8')) - - def readMessageBegin(self): - pass - - def readMessageEnd(self): - pass - - def readStructBegin(self): - pass - - def readStructEnd(self): - pass - - def readFieldBegin(self): - pass - - def readFieldEnd(self): - pass - - def readMapBegin(self): - pass - - def readMapEnd(self): - pass - - def readListBegin(self): - pass - - def readListEnd(self): - pass - - def readSetBegin(self): - pass - - def readSetEnd(self): - pass - - def readBool(self): - pass - - def readByte(self): - pass - - def readI16(self): - pass - - def readI32(self): - pass - - def readI64(self): - pass - - def readDouble(self): - pass - - def readString(self): - return binary_to_str(self.readBinary()) - - def readBinary(self): - pass - - def readUtf8(self): - return self.readString().decode('utf8') - - def skip(self, ttype): - if ttype == TType.BOOL: - self.readBool() - elif ttype == TType.BYTE: - self.readByte() - elif ttype == TType.I16: - self.readI16() - elif ttype == TType.I32: - self.readI32() - elif ttype == TType.I64: - self.readI64() - elif ttype == TType.DOUBLE: - self.readDouble() - elif ttype == TType.STRING: - self.readString() - elif ttype == TType.STRUCT: - name = self.readStructBegin() - while True: - (name, ttype, id) = self.readFieldBegin() - if ttype == TType.STOP: - break - self.skip(ttype) - self.readFieldEnd() - self.readStructEnd() - elif ttype == TType.MAP: - (ktype, vtype, size) = self.readMapBegin() - for i in range(size): - self.skip(ktype) - self.skip(vtype) - self.readMapEnd() - elif ttype == TType.SET: - (etype, size) = self.readSetBegin() - for i in range(size): - self.skip(etype) - self.readSetEnd() - elif ttype == TType.LIST: - (etype, size) = self.readListBegin() - for i in range(size): - self.skip(etype) - self.readListEnd() - else: - raise TProtocolException( - TProtocolException.INVALID_DATA, - "invalid TType") - - # tuple of: ( 'reader method' 
name, is_container bool, 'writer_method' name ) - _TTYPE_HANDLERS = ( - (None, None, False), # 0 TType.STOP - (None, None, False), # 1 TType.VOID # TODO: handle void? - ('readBool', 'writeBool', False), # 2 TType.BOOL - ('readByte', 'writeByte', False), # 3 TType.BYTE and I08 - ('readDouble', 'writeDouble', False), # 4 TType.DOUBLE - (None, None, False), # 5 undefined - ('readI16', 'writeI16', False), # 6 TType.I16 - (None, None, False), # 7 undefined - ('readI32', 'writeI32', False), # 8 TType.I32 - (None, None, False), # 9 undefined - ('readI64', 'writeI64', False), # 10 TType.I64 - ('readString', 'writeString', False), # 11 TType.STRING and UTF7 - ('readContainerStruct', 'writeContainerStruct', True), # 12 *.STRUCT - ('readContainerMap', 'writeContainerMap', True), # 13 TType.MAP - ('readContainerSet', 'writeContainerSet', True), # 14 TType.SET - ('readContainerList', 'writeContainerList', True), # 15 TType.LIST - (None, None, False), # 16 TType.UTF8 # TODO: handle utf8 types? - (None, None, False) # 17 TType.UTF16 # TODO: handle utf16 types? 
- ) - - def _ttype_handlers(self, ttype, spec): - if spec == 'BINARY': - if ttype != TType.STRING: - raise TProtocolException(type=TProtocolException.INVALID_DATA, - message='Invalid binary field type %d' % ttype) - return ('readBinary', 'writeBinary', False) - if sys.version_info[0] == 2 and spec == 'UTF8': - if ttype != TType.STRING: - raise TProtocolException(type=TProtocolException.INVALID_DATA, - message='Invalid string field type %d' % ttype) - return ('readUtf8', 'writeUtf8', False) - return self._TTYPE_HANDLERS[ttype] if ttype < len(self._TTYPE_HANDLERS) else (None, None, False) - - def _read_by_ttype(self, ttype, spec, espec): - reader_name, _, is_container = self._ttype_handlers(ttype, espec) - if reader_name is None: - raise TProtocolException(type=TProtocolException.INVALID_DATA, - message='Invalid type %d' % (ttype)) - reader_func = getattr(self, reader_name) - read = (lambda: reader_func(espec)) if is_container else reader_func - while True: - yield read() - - def readFieldByTType(self, ttype, spec): - return next(self._read_by_ttype(ttype, spec, spec)) - - def readContainerList(self, spec): - ttype, tspec, is_immutable = spec - (list_type, list_len) = self.readListBegin() - # TODO: compare types we just decoded with thrift_spec - elems = islice(self._read_by_ttype(ttype, spec, tspec), list_len) - results = (tuple if is_immutable else list)(elems) - self.readListEnd() - return results - - def readContainerSet(self, spec): - ttype, tspec, is_immutable = spec - (set_type, set_len) = self.readSetBegin() - # TODO: compare types we just decoded with thrift_spec - elems = islice(self._read_by_ttype(ttype, spec, tspec), set_len) - results = (frozenset if is_immutable else set)(elems) - self.readSetEnd() - return results - - def readContainerStruct(self, spec): - (obj_class, obj_spec) = spec - - # If obj_class.read is a classmethod (e.g. in frozen structs), - # call it as such. 
- if getattr(obj_class.read, '__self__', None) is obj_class: - obj = obj_class.read(self) - else: - obj = obj_class() - obj.read(self) - return obj - - def readContainerMap(self, spec): - ktype, kspec, vtype, vspec, is_immutable = spec - (map_ktype, map_vtype, map_len) = self.readMapBegin() - # TODO: compare types we just decoded with thrift_spec and - # abort/skip if types disagree - keys = self._read_by_ttype(ktype, spec, kspec) - vals = self._read_by_ttype(vtype, spec, vspec) - keyvals = islice(zip(keys, vals), map_len) - results = (TFrozenDict if is_immutable else dict)(keyvals) - self.readMapEnd() - return results - - def readStruct(self, obj, thrift_spec, is_immutable=False): - if is_immutable: - fields = {} - self.readStructBegin() - while True: - (fname, ftype, fid) = self.readFieldBegin() - if ftype == TType.STOP: - break - try: - field = thrift_spec[fid] - except IndexError: - self.skip(ftype) - else: - if field is not None and ftype == field[1]: - fname = field[2] - fspec = field[3] - val = self.readFieldByTType(ftype, fspec) - if is_immutable: - fields[fname] = val - else: - setattr(obj, fname, val) - else: - self.skip(ftype) - self.readFieldEnd() - self.readStructEnd() - if is_immutable: - return obj(**fields) - - def writeContainerStruct(self, val, spec): - val.write(self) - - def writeContainerList(self, val, spec): - ttype, tspec, _ = spec - self.writeListBegin(ttype, len(val)) - for _ in self._write_by_ttype(ttype, val, spec, tspec): - pass - self.writeListEnd() - - def writeContainerSet(self, val, spec): - ttype, tspec, _ = spec - self.writeSetBegin(ttype, len(val)) - for _ in self._write_by_ttype(ttype, val, spec, tspec): - pass - self.writeSetEnd() - - def writeContainerMap(self, val, spec): - ktype, kspec, vtype, vspec, _ = spec - self.writeMapBegin(ktype, vtype, len(val)) - for _ in zip(self._write_by_ttype(ktype, six.iterkeys(val), spec, kspec), - self._write_by_ttype(vtype, six.itervalues(val), spec, vspec)): - pass - self.writeMapEnd() - - 
def writeStruct(self, obj, thrift_spec): - self.writeStructBegin(obj.__class__.__name__) - for field in thrift_spec: - if field is None: - continue - fname = field[2] - val = getattr(obj, fname) - if val is None: - # skip writing out unset fields - continue - fid = field[0] - ftype = field[1] - fspec = field[3] - self.writeFieldBegin(fname, ftype, fid) - self.writeFieldByTType(ftype, val, fspec) - self.writeFieldEnd() - self.writeFieldStop() - self.writeStructEnd() - - def _write_by_ttype(self, ttype, vals, spec, espec): - _, writer_name, is_container = self._ttype_handlers(ttype, espec) - writer_func = getattr(self, writer_name) - write = (lambda v: writer_func(v, espec)) if is_container else writer_func - for v in vals: - yield write(v) - - def writeFieldByTType(self, ttype, val, spec): - next(self._write_by_ttype(ttype, [val], spec, spec)) - - -def checkIntegerLimits(i, bits): - if bits == 8 and (i < -128 or i > 127): - raise TProtocolException(TProtocolException.INVALID_DATA, - "i8 requires -128 <= number <= 127") - elif bits == 16 and (i < -32768 or i > 32767): - raise TProtocolException(TProtocolException.INVALID_DATA, - "i16 requires -32768 <= number <= 32767") - elif bits == 32 and (i < -2147483648 or i > 2147483647): - raise TProtocolException(TProtocolException.INVALID_DATA, - "i32 requires -2147483648 <= number <= 2147483647") - elif bits == 64 and (i < -9223372036854775808 or i > 9223372036854775807): - raise TProtocolException(TProtocolException.INVALID_DATA, - "i64 requires -9223372036854775808 <= number <= 9223372036854775807") - - -class TProtocolFactory(object): - def getProtocol(self, trans): - pass diff --git a/shell/ext-py/thrift-0.16.0/src/protocol/TProtocolDecorator.py b/shell/ext-py/thrift-0.16.0/src/protocol/TProtocolDecorator.py deleted file mode 100644 index f5546c736..000000000 --- a/shell/ext-py/thrift-0.16.0/src/protocol/TProtocolDecorator.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one 
-# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - - -class TProtocolDecorator(object): - def __new__(cls, protocol, *args, **kwargs): - decorated_cls = type(''.join(['Decorated', protocol.__class__.__name__]), - (cls, protocol.__class__), - protocol.__dict__) - return object.__new__(decorated_cls) diff --git a/shell/ext-py/thrift-0.16.0/src/protocol/__init__.py b/shell/ext-py/thrift-0.16.0/src/protocol/__init__.py deleted file mode 100644 index 06647a24b..000000000 --- a/shell/ext-py/thrift-0.16.0/src/protocol/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. -# - -__all__ = ['fastbinary', 'TBase', 'TBinaryProtocol', 'TCompactProtocol', - 'TJSONProtocol', 'TProtocol', 'TProtocolDecorator'] diff --git a/shell/ext-py/thrift-0.16.0/src/server/THttpServer.py b/shell/ext-py/thrift-0.16.0/src/server/THttpServer.py deleted file mode 100644 index 47e817df7..000000000 --- a/shell/ext-py/thrift-0.16.0/src/server/THttpServer.py +++ /dev/null @@ -1,131 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - -import ssl - -from six.moves import BaseHTTPServer - -from thrift.Thrift import TMessageType -from thrift.server import TServer -from thrift.transport import TTransport - - -class ResponseException(Exception): - """Allows handlers to override the HTTP response - - Normally, THttpServer always sends a 200 response. If a handler wants - to override this behavior (e.g., to simulate a misconfigured or - overloaded web server during testing), it can raise a ResponseException. - The function passed to the constructor will be called with the - RequestHandler as its only argument. 
Note that this is irrelevant - for ONEWAY requests, as the HTTP response must be sent before the - RPC is processed. - """ - def __init__(self, handler): - self.handler = handler - - -class THttpServer(TServer.TServer): - """A simple HTTP-based Thrift server - - This class is not very performant, but it is useful (for example) for - acting as a mock version of an Apache-based PHP Thrift endpoint. - Also important to note the HTTP implementation pretty much violates the - transport/protocol/processor/server layering, by performing the transport - functions here. This means things like oneway handling are oddly exposed. - """ - def __init__(self, - processor, - server_address, - inputProtocolFactory, - outputProtocolFactory=None, - server_class=BaseHTTPServer.HTTPServer, - **kwargs): - """Set up protocol factories and HTTP (or HTTPS) server. - - See BaseHTTPServer for server_address. - See TServer for protocol factories. - - To make a secure server, provide the named arguments: - * cafile - to validate clients [optional] - * cert_file - the server cert - * key_file - the server's key - """ - if outputProtocolFactory is None: - outputProtocolFactory = inputProtocolFactory - - TServer.TServer.__init__(self, processor, None, None, None, - inputProtocolFactory, outputProtocolFactory) - - thttpserver = self - self._replied = None - - class RequestHander(BaseHTTPServer.BaseHTTPRequestHandler): - def do_POST(self): - # Don't care about the request path. 
- thttpserver._replied = False - iftrans = TTransport.TFileObjectTransport(self.rfile) - itrans = TTransport.TBufferedTransport( - iftrans, int(self.headers['Content-Length'])) - otrans = TTransport.TMemoryBuffer() - iprot = thttpserver.inputProtocolFactory.getProtocol(itrans) - oprot = thttpserver.outputProtocolFactory.getProtocol(otrans) - try: - thttpserver.processor.on_message_begin(self.on_begin) - thttpserver.processor.process(iprot, oprot) - except ResponseException as exn: - exn.handler(self) - else: - if not thttpserver._replied: - # If the request was ONEWAY we would have replied already - data = otrans.getvalue() - self.send_response(200) - self.send_header("Content-Length", len(data)) - self.send_header("Content-Type", "application/x-thrift") - self.end_headers() - self.wfile.write(data) - - def on_begin(self, name, type, seqid): - """ - Inspect the message header. - - This allows us to post an immediate transport response - if the request is a ONEWAY message type. - """ - if type == TMessageType.ONEWAY: - self.send_response(200) - self.send_header("Content-Type", "application/x-thrift") - self.end_headers() - thttpserver._replied = True - - self.httpd = server_class(server_address, RequestHander) - - if (kwargs.get('cafile') or kwargs.get('cert_file') or kwargs.get('key_file')): - context = ssl.create_default_context(cafile=kwargs.get('cafile')) - context.check_hostname = False - context.load_cert_chain(kwargs.get('cert_file'), kwargs.get('key_file')) - context.verify_mode = ssl.CERT_REQUIRED if kwargs.get('cafile') else ssl.CERT_NONE - self.httpd.socket = context.wrap_socket(self.httpd.socket, server_side=True) - - def serve(self): - self.httpd.serve_forever() - - def shutdown(self): - self.httpd.socket.close() - # self.httpd.shutdown() # hangs forever, python doesn't handle POLLNVAL properly! 
diff --git a/shell/ext-py/thrift-0.16.0/src/server/TNonblockingServer.py b/shell/ext-py/thrift-0.16.0/src/server/TNonblockingServer.py deleted file mode 100644 index f62d486eb..000000000 --- a/shell/ext-py/thrift-0.16.0/src/server/TNonblockingServer.py +++ /dev/null @@ -1,370 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# -"""Implementation of non-blocking server. - -The main idea of the server is to receive and send requests -only from the main thread. 
- -The thread poool should be sized for concurrent tasks, not -maximum connections -""" - -import logging -import select -import socket -import struct -import threading - -from collections import deque -from six.moves import queue - -from thrift.transport import TTransport -from thrift.protocol.TBinaryProtocol import TBinaryProtocolFactory - -__all__ = ['TNonblockingServer'] - -logger = logging.getLogger(__name__) - - -class Worker(threading.Thread): - """Worker is a small helper to process incoming connection.""" - - def __init__(self, queue): - threading.Thread.__init__(self) - self.queue = queue - - def run(self): - """Process queries from task queue, stop if processor is None.""" - while True: - try: - processor, iprot, oprot, otrans, callback = self.queue.get() - if processor is None: - break - processor.process(iprot, oprot) - callback(True, otrans.getvalue()) - except Exception: - logger.exception("Exception while processing request", exc_info=True) - callback(False, b'') - - -WAIT_LEN = 0 -WAIT_MESSAGE = 1 -WAIT_PROCESS = 2 -SEND_ANSWER = 3 -CLOSED = 4 - - -def locked(func): - """Decorator which locks self.lock.""" - def nested(self, *args, **kwargs): - self.lock.acquire() - try: - return func(self, *args, **kwargs) - finally: - self.lock.release() - return nested - - -def socket_exception(func): - """Decorator close object on socket.error.""" - def read(self, *args, **kwargs): - try: - return func(self, *args, **kwargs) - except socket.error: - logger.debug('ignoring socket exception', exc_info=True) - self.close() - return read - - -class Message(object): - def __init__(self, offset, len_, header): - self.offset = offset - self.len = len_ - self.buffer = None - self.is_header = header - - @property - def end(self): - return self.offset + self.len - - -class Connection(object): - """Basic class is represented connection. - - It can be in state: - WAIT_LEN --- connection is reading request len. - WAIT_MESSAGE --- connection is reading request. 
- WAIT_PROCESS --- connection has just read whole request and - waits for call ready routine. - SEND_ANSWER --- connection is sending answer string (including length - of answer). - CLOSED --- socket was closed and connection should be deleted. - """ - def __init__(self, new_socket, wake_up): - self.socket = new_socket - self.socket.setblocking(False) - self.status = WAIT_LEN - self.len = 0 - self.received = deque() - self._reading = Message(0, 4, True) - self._rbuf = b'' - self._wbuf = b'' - self.lock = threading.Lock() - self.wake_up = wake_up - self.remaining = False - - @socket_exception - def read(self): - """Reads data from stream and switch state.""" - assert self.status in (WAIT_LEN, WAIT_MESSAGE) - assert not self.received - buf_size = 8192 - first = True - done = False - while not done: - read = self.socket.recv(buf_size) - rlen = len(read) - done = rlen < buf_size - self._rbuf += read - if first and rlen == 0: - if self.status != WAIT_LEN or self._rbuf: - logger.error('could not read frame from socket') - else: - logger.debug('read zero length. 
client might have disconnected') - self.close() - while len(self._rbuf) >= self._reading.end: - if self._reading.is_header: - mlen, = struct.unpack('!i', self._rbuf[:4]) - self._reading = Message(self._reading.end, mlen, False) - self.status = WAIT_MESSAGE - else: - self._reading.buffer = self._rbuf - self.received.append(self._reading) - self._rbuf = self._rbuf[self._reading.end:] - self._reading = Message(0, 4, True) - first = False - if self.received: - self.status = WAIT_PROCESS - break - self.remaining = not done - - @socket_exception - def write(self): - """Writes data from socket and switch state.""" - assert self.status == SEND_ANSWER - sent = self.socket.send(self._wbuf) - if sent == len(self._wbuf): - self.status = WAIT_LEN - self._wbuf = b'' - self.len = 0 - else: - self._wbuf = self._wbuf[sent:] - - @locked - def ready(self, all_ok, message): - """Callback function for switching state and waking up main thread. - - This function is the only function witch can be called asynchronous. - - The ready can switch Connection to three states: - WAIT_LEN if request was oneway. - SEND_ANSWER if request was processed in normal way. - CLOSED if request throws unexpected exception. - - The one wakes up main thread. - """ - assert self.status == WAIT_PROCESS - if not all_ok: - self.close() - self.wake_up() - return - self.len = 0 - if len(message) == 0: - # it was a oneway request, do not write answer - self._wbuf = b'' - self.status = WAIT_LEN - else: - self._wbuf = struct.pack('!i', len(message)) + message - self.status = SEND_ANSWER - self.wake_up() - - @locked - def is_writeable(self): - """Return True if connection should be added to write list of select""" - return self.status == SEND_ANSWER - - # it's not necessary, but... 
- @locked - def is_readable(self): - """Return True if connection should be added to read list of select""" - return self.status in (WAIT_LEN, WAIT_MESSAGE) - - @locked - def is_closed(self): - """Returns True if connection is closed.""" - return self.status == CLOSED - - def fileno(self): - """Returns the file descriptor of the associated socket.""" - return self.socket.fileno() - - def close(self): - """Closes connection""" - self.status = CLOSED - self.socket.close() - - -class TNonblockingServer(object): - """Non-blocking server.""" - - def __init__(self, - processor, - lsocket, - inputProtocolFactory=None, - outputProtocolFactory=None, - threads=10): - self.processor = processor - self.socket = lsocket - self.in_protocol = inputProtocolFactory or TBinaryProtocolFactory() - self.out_protocol = outputProtocolFactory or self.in_protocol - self.threads = int(threads) - self.clients = {} - self.tasks = queue.Queue() - self._read, self._write = socket.socketpair() - self.prepared = False - self._stop = False - - def setNumThreads(self, num): - """Set the number of worker threads that should be created.""" - # implement ThreadPool interface - assert not self.prepared, "Can't change number of threads after start" - self.threads = num - - def prepare(self): - """Prepares server for serve requests.""" - if self.prepared: - return - self.socket.listen() - for _ in range(self.threads): - thread = Worker(self.tasks) - thread.setDaemon(True) - thread.start() - self.prepared = True - - def wake_up(self): - """Wake up main thread. - - The server usually waits in select call in we should terminate one. - The simplest way is using socketpair. - - Select always wait to read from the first socket of socketpair. - - In this case, we can just write anything to the second socket from - socketpair. - """ - self._write.send(b'1') - - def stop(self): - """Stop the server. - - This method causes the serve() method to return. 
stop() may be invoked - from within your handler, or from another thread. - - After stop() is called, serve() will return but the server will still - be listening on the socket. serve() may then be called again to resume - processing requests. Alternatively, close() may be called after - serve() returns to close the server socket and shutdown all worker - threads. - """ - self._stop = True - self.wake_up() - - def _select(self): - """Does select on open connections.""" - readable = [self.socket.handle.fileno(), self._read.fileno()] - writable = [] - remaining = [] - for i, connection in list(self.clients.items()): - if connection.is_readable(): - readable.append(connection.fileno()) - if connection.remaining or connection.received: - remaining.append(connection.fileno()) - if connection.is_writeable(): - writable.append(connection.fileno()) - if connection.is_closed(): - del self.clients[i] - if remaining: - return remaining, [], [], False - else: - return select.select(readable, writable, readable) + (True,) - - def handle(self): - """Handle requests. - - WARNING! 
You must call prepare() BEFORE calling handle() - """ - assert self.prepared, "You have to call prepare before handle" - rset, wset, xset, selected = self._select() - for readable in rset: - if readable == self._read.fileno(): - # don't care i just need to clean readable flag - self._read.recv(1024) - elif readable == self.socket.handle.fileno(): - try: - client = self.socket.accept() - if client: - self.clients[client.handle.fileno()] = Connection(client.handle, - self.wake_up) - except socket.error: - logger.debug('error while accepting', exc_info=True) - else: - connection = self.clients[readable] - if selected: - connection.read() - if connection.received: - connection.status = WAIT_PROCESS - msg = connection.received.popleft() - itransport = TTransport.TMemoryBuffer(msg.buffer, msg.offset) - otransport = TTransport.TMemoryBuffer() - iprot = self.in_protocol.getProtocol(itransport) - oprot = self.out_protocol.getProtocol(otransport) - self.tasks.put([self.processor, iprot, oprot, - otransport, connection.ready]) - for writeable in wset: - self.clients[writeable].write() - for oob in xset: - self.clients[oob].close() - del self.clients[oob] - - def close(self): - """Closes the server.""" - for _ in range(self.threads): - self.tasks.put([None, None, None, None, None]) - self.socket.close() - self.prepared = False - - def serve(self): - """Serve requests. - - Serve requests forever, or until stop() is called. - """ - self._stop = False - self.prepare() - while not self._stop: - self.handle() diff --git a/shell/ext-py/thrift-0.16.0/src/server/TProcessPoolServer.py b/shell/ext-py/thrift-0.16.0/src/server/TProcessPoolServer.py deleted file mode 100644 index c9cfa1104..000000000 --- a/shell/ext-py/thrift-0.16.0/src/server/TProcessPoolServer.py +++ /dev/null @@ -1,128 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. 
See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - - -import logging - -from multiprocessing import Process, Value, Condition - -from .TServer import TServer -from thrift.transport.TTransport import TTransportException - -logger = logging.getLogger(__name__) - - -class TProcessPoolServer(TServer): - """Server with a fixed size pool of worker subprocesses to service requests - - Note that if you need shared state between the handlers - it's up to you! 
- Written by Dvir Volk, doat.com - """ - def __init__(self, *args): - TServer.__init__(self, *args) - self.numWorkers = 10 - self.workers = [] - self.isRunning = Value('b', False) - self.stopCondition = Condition() - self.postForkCallback = None - - def __getstate__(self): - state = self.__dict__.copy() - state['workers'] = None - return state - - def setPostForkCallback(self, callback): - if not callable(callback): - raise TypeError("This is not a callback!") - self.postForkCallback = callback - - def setNumWorkers(self, num): - """Set the number of worker threads that should be created""" - self.numWorkers = num - - def workerProcess(self): - """Loop getting clients from the shared queue and process them""" - if self.postForkCallback: - self.postForkCallback() - - while self.isRunning.value: - try: - client = self.serverTransport.accept() - if not client: - continue - self.serveClient(client) - except (KeyboardInterrupt, SystemExit): - return 0 - except Exception as x: - logger.exception(x) - - def serveClient(self, client): - """Process input/output from a client for as long as possible""" - itrans = self.inputTransportFactory.getTransport(client) - otrans = self.outputTransportFactory.getTransport(client) - iprot = self.inputProtocolFactory.getProtocol(itrans) - oprot = self.outputProtocolFactory.getProtocol(otrans) - - try: - while True: - self.processor.process(iprot, oprot) - except TTransportException: - pass - except Exception as x: - logger.exception(x) - - itrans.close() - otrans.close() - - def serve(self): - """Start workers and put into queue""" - # this is a shared state that can tell the workers to exit when False - self.isRunning.value = True - - # first bind and listen to the port - self.serverTransport.listen() - - # fork the children - for i in range(self.numWorkers): - try: - w = Process(target=self.workerProcess) - w.daemon = True - w.start() - self.workers.append(w) - except Exception as x: - logger.exception(x) - - # wait until the condition 
is set by stop() - while True: - self.stopCondition.acquire() - try: - self.stopCondition.wait() - break - except (SystemExit, KeyboardInterrupt): - break - except Exception as x: - logger.exception(x) - - self.isRunning.value = False - - def stop(self): - self.isRunning.value = False - self.stopCondition.acquire() - self.stopCondition.notify() - self.stopCondition.release() diff --git a/shell/ext-py/thrift-0.16.0/src/server/TServer.py b/shell/ext-py/thrift-0.16.0/src/server/TServer.py deleted file mode 100644 index df2a7bb93..000000000 --- a/shell/ext-py/thrift-0.16.0/src/server/TServer.py +++ /dev/null @@ -1,323 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - -from six.moves import queue -import logging -import os -import threading - -from thrift.protocol import TBinaryProtocol -from thrift.protocol.THeaderProtocol import THeaderProtocolFactory -from thrift.transport import TTransport - -logger = logging.getLogger(__name__) - - -class TServer(object): - """Base interface for a server, which must have a serve() method. 
- - Three constructors for all servers: - 1) (processor, serverTransport) - 2) (processor, serverTransport, transportFactory, protocolFactory) - 3) (processor, serverTransport, - inputTransportFactory, outputTransportFactory, - inputProtocolFactory, outputProtocolFactory) - """ - def __init__(self, *args): - if (len(args) == 2): - self.__initArgs__(args[0], args[1], - TTransport.TTransportFactoryBase(), - TTransport.TTransportFactoryBase(), - TBinaryProtocol.TBinaryProtocolFactory(), - TBinaryProtocol.TBinaryProtocolFactory()) - elif (len(args) == 4): - self.__initArgs__(args[0], args[1], args[2], args[2], args[3], args[3]) - elif (len(args) == 6): - self.__initArgs__(args[0], args[1], args[2], args[3], args[4], args[5]) - - def __initArgs__(self, processor, serverTransport, - inputTransportFactory, outputTransportFactory, - inputProtocolFactory, outputProtocolFactory): - self.processor = processor - self.serverTransport = serverTransport - self.inputTransportFactory = inputTransportFactory - self.outputTransportFactory = outputTransportFactory - self.inputProtocolFactory = inputProtocolFactory - self.outputProtocolFactory = outputProtocolFactory - - input_is_header = isinstance(self.inputProtocolFactory, THeaderProtocolFactory) - output_is_header = isinstance(self.outputProtocolFactory, THeaderProtocolFactory) - if any((input_is_header, output_is_header)) and input_is_header != output_is_header: - raise ValueError("THeaderProtocol servers require that both the input and " - "output protocols are THeaderProtocol.") - - def serve(self): - pass - - -class TSimpleServer(TServer): - """Simple single-threaded server that just pumps around one transport.""" - - def __init__(self, *args): - TServer.__init__(self, *args) - - def serve(self): - self.serverTransport.listen() - while True: - client = self.serverTransport.accept() - if not client: - continue - - itrans = self.inputTransportFactory.getTransport(client) - iprot = self.inputProtocolFactory.getProtocol(itrans) - - 
# for THeaderProtocol, we must use the same protocol instance for - # input and output so that the response is in the same dialect that - # the server detected the request was in. - if isinstance(self.inputProtocolFactory, THeaderProtocolFactory): - otrans = None - oprot = iprot - else: - otrans = self.outputTransportFactory.getTransport(client) - oprot = self.outputProtocolFactory.getProtocol(otrans) - - try: - while True: - self.processor.process(iprot, oprot) - except TTransport.TTransportException: - pass - except Exception as x: - logger.exception(x) - - itrans.close() - if otrans: - otrans.close() - - -class TThreadedServer(TServer): - """Threaded server that spawns a new thread per each connection.""" - - def __init__(self, *args, **kwargs): - TServer.__init__(self, *args) - self.daemon = kwargs.get("daemon", False) - - def serve(self): - self.serverTransport.listen() - while True: - try: - client = self.serverTransport.accept() - if not client: - continue - t = threading.Thread(target=self.handle, args=(client,)) - t.setDaemon(self.daemon) - t.start() - except KeyboardInterrupt: - raise - except Exception as x: - logger.exception(x) - - def handle(self, client): - itrans = self.inputTransportFactory.getTransport(client) - iprot = self.inputProtocolFactory.getProtocol(itrans) - - # for THeaderProtocol, we must use the same protocol instance for input - # and output so that the response is in the same dialect that the - # server detected the request was in. 
- if isinstance(self.inputProtocolFactory, THeaderProtocolFactory): - otrans = None - oprot = iprot - else: - otrans = self.outputTransportFactory.getTransport(client) - oprot = self.outputProtocolFactory.getProtocol(otrans) - - try: - while True: - self.processor.process(iprot, oprot) - except TTransport.TTransportException: - pass - except Exception as x: - logger.exception(x) - - itrans.close() - if otrans: - otrans.close() - - -class TThreadPoolServer(TServer): - """Server with a fixed size pool of threads which service requests.""" - - def __init__(self, *args, **kwargs): - TServer.__init__(self, *args) - self.clients = queue.Queue() - self.threads = 10 - self.daemon = kwargs.get("daemon", False) - - def setNumThreads(self, num): - """Set the number of worker threads that should be created""" - self.threads = num - - def serveThread(self): - """Loop around getting clients from the shared queue and process them.""" - while True: - try: - client = self.clients.get() - self.serveClient(client) - except Exception as x: - logger.exception(x) - - def serveClient(self, client): - """Process input/output from a client for as long as possible""" - itrans = self.inputTransportFactory.getTransport(client) - iprot = self.inputProtocolFactory.getProtocol(itrans) - - # for THeaderProtocol, we must use the same protocol instance for input - # and output so that the response is in the same dialect that the - # server detected the request was in. 
- if isinstance(self.inputProtocolFactory, THeaderProtocolFactory): - otrans = None - oprot = iprot - else: - otrans = self.outputTransportFactory.getTransport(client) - oprot = self.outputProtocolFactory.getProtocol(otrans) - - try: - while True: - self.processor.process(iprot, oprot) - except TTransport.TTransportException: - pass - except Exception as x: - logger.exception(x) - - itrans.close() - if otrans: - otrans.close() - - def serve(self): - """Start a fixed number of worker threads and put client into a queue""" - for i in range(self.threads): - try: - t = threading.Thread(target=self.serveThread) - t.setDaemon(self.daemon) - t.start() - except Exception as x: - logger.exception(x) - - # Pump the socket for clients - self.serverTransport.listen() - while True: - try: - client = self.serverTransport.accept() - if not client: - continue - self.clients.put(client) - except Exception as x: - logger.exception(x) - - -class TForkingServer(TServer): - """A Thrift server that forks a new process for each request - - This is more scalable than the threaded server as it does not cause - GIL contention. - - Note that this has different semantics from the threading server. - Specifically, updates to shared variables will no longer be shared. - It will also not work on windows. - - This code is heavily inspired by SocketServer.ForkingMixIn in the - Python stdlib. 
- """ - def __init__(self, *args): - TServer.__init__(self, *args) - self.children = [] - - def serve(self): - def try_close(file): - try: - file.close() - except IOError as e: - logger.warning(e, exc_info=True) - - self.serverTransport.listen() - while True: - client = self.serverTransport.accept() - if not client: - continue - try: - pid = os.fork() - - if pid: # parent - # add before collect, otherwise you race w/ waitpid - self.children.append(pid) - self.collect_children() - - # Parent must close socket or the connection may not get - # closed promptly - itrans = self.inputTransportFactory.getTransport(client) - otrans = self.outputTransportFactory.getTransport(client) - try_close(itrans) - try_close(otrans) - else: - itrans = self.inputTransportFactory.getTransport(client) - iprot = self.inputProtocolFactory.getProtocol(itrans) - - # for THeaderProtocol, we must use the same protocol - # instance for input and output so that the response is in - # the same dialect that the server detected the request was - # in. 
- if isinstance(self.inputProtocolFactory, THeaderProtocolFactory): - otrans = None - oprot = iprot - else: - otrans = self.outputTransportFactory.getTransport(client) - oprot = self.outputProtocolFactory.getProtocol(otrans) - - ecode = 0 - try: - try: - while True: - self.processor.process(iprot, oprot) - except TTransport.TTransportException: - pass - except Exception as e: - logger.exception(e) - ecode = 1 - finally: - try_close(itrans) - if otrans: - try_close(otrans) - - os._exit(ecode) - - except TTransport.TTransportException: - pass - except Exception as x: - logger.exception(x) - - def collect_children(self): - while self.children: - try: - pid, status = os.waitpid(0, os.WNOHANG) - except os.error: - pid = None - - if pid: - self.children.remove(pid) - else: - break diff --git a/shell/ext-py/thrift-0.16.0/src/server/__init__.py b/shell/ext-py/thrift-0.16.0/src/server/__init__.py deleted file mode 100644 index 1bf6e254e..000000000 --- a/shell/ext-py/thrift-0.16.0/src/server/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -__all__ = ['TServer', 'TNonblockingServer'] diff --git a/shell/ext-py/thrift-0.16.0/src/transport/THeaderTransport.py b/shell/ext-py/thrift-0.16.0/src/transport/THeaderTransport.py deleted file mode 100644 index 7c9827ba3..000000000 --- a/shell/ext-py/thrift-0.16.0/src/transport/THeaderTransport.py +++ /dev/null @@ -1,352 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -import struct -import zlib - -from thrift.compat import BufferIO, byte_index -from thrift.protocol.TBinaryProtocol import TBinaryProtocol -from thrift.protocol.TCompactProtocol import TCompactProtocol, readVarint, writeVarint -from thrift.Thrift import TApplicationException -from thrift.transport.TTransport import ( - CReadableTransport, - TMemoryBuffer, - TTransportBase, - TTransportException, -) - - -U16 = struct.Struct("!H") -I32 = struct.Struct("!i") -HEADER_MAGIC = 0x0FFF -HARD_MAX_FRAME_SIZE = 0x3FFFFFFF - - -class THeaderClientType(object): - HEADERS = 0x00 - - FRAMED_BINARY = 0x01 - UNFRAMED_BINARY = 0x02 - - FRAMED_COMPACT = 0x03 - UNFRAMED_COMPACT = 0x04 - - -class THeaderSubprotocolID(object): - BINARY = 0x00 - COMPACT = 0x02 - - -class TInfoHeaderType(object): - KEY_VALUE = 0x01 - - -class THeaderTransformID(object): - ZLIB = 0x01 - - -READ_TRANSFORMS_BY_ID = { - THeaderTransformID.ZLIB: zlib.decompress, -} - - -WRITE_TRANSFORMS_BY_ID = { - THeaderTransformID.ZLIB: zlib.compress, -} - - -def _readString(trans): - size = readVarint(trans) - if size < 0: - raise TTransportException( - TTransportException.NEGATIVE_SIZE, - "Negative length" - ) - return trans.read(size) - - -def _writeString(trans, value): - writeVarint(trans, len(value)) - trans.write(value) - - -class THeaderTransport(TTransportBase, CReadableTransport): - def __init__(self, transport, allowed_client_types, default_protocol=THeaderSubprotocolID.BINARY): - self._transport = transport - self._client_type = THeaderClientType.HEADERS - self._allowed_client_types = allowed_client_types - - self._read_buffer = BufferIO(b"") - self._read_headers = {} - - self._write_buffer = BufferIO() - self._write_headers = {} - self._write_transforms = [] - - self.flags = 0 - self.sequence_id = 0 - self._protocol_id = default_protocol - self._max_frame_size = HARD_MAX_FRAME_SIZE - - def isOpen(self): - return self._transport.isOpen() - - def open(self): - return self._transport.open() - - def 
close(self): - return self._transport.close() - - def get_headers(self): - return self._read_headers - - def set_header(self, key, value): - if not isinstance(key, bytes): - raise ValueError("header names must be bytes") - if not isinstance(value, bytes): - raise ValueError("header values must be bytes") - self._write_headers[key] = value - - def clear_headers(self): - self._write_headers.clear() - - def add_transform(self, transform_id): - if transform_id not in WRITE_TRANSFORMS_BY_ID: - raise ValueError("unknown transform") - self._write_transforms.append(transform_id) - - def set_max_frame_size(self, size): - if not 0 < size < HARD_MAX_FRAME_SIZE: - raise ValueError("maximum frame size should be < %d and > 0" % HARD_MAX_FRAME_SIZE) - self._max_frame_size = size - - @property - def protocol_id(self): - if self._client_type == THeaderClientType.HEADERS: - return self._protocol_id - elif self._client_type in (THeaderClientType.FRAMED_BINARY, THeaderClientType.UNFRAMED_BINARY): - return THeaderSubprotocolID.BINARY - elif self._client_type in (THeaderClientType.FRAMED_COMPACT, THeaderClientType.UNFRAMED_COMPACT): - return THeaderSubprotocolID.COMPACT - else: - raise TTransportException( - TTransportException.INVALID_CLIENT_TYPE, - "Protocol ID not know for client type %d" % self._client_type, - ) - - def read(self, sz): - # if there are bytes left in the buffer, produce those first. - bytes_read = self._read_buffer.read(sz) - bytes_left_to_read = sz - len(bytes_read) - if bytes_left_to_read == 0: - return bytes_read - - # if we've determined this is an unframed client, just pass the read - # through to the underlying transport until we're reset again at the - # beginning of the next message. - if self._client_type in (THeaderClientType.UNFRAMED_BINARY, THeaderClientType.UNFRAMED_COMPACT): - return bytes_read + self._transport.read(bytes_left_to_read) - - # we're empty and (maybe) framed. fill the buffers with the next frame. 
- self.readFrame(bytes_left_to_read) - return bytes_read + self._read_buffer.read(bytes_left_to_read) - - def _set_client_type(self, client_type): - if client_type not in self._allowed_client_types: - raise TTransportException( - TTransportException.INVALID_CLIENT_TYPE, - "Client type %d not allowed by server." % client_type, - ) - self._client_type = client_type - - def readFrame(self, req_sz): - # the first word could either be the length field of a framed message - # or the first bytes of an unframed message. - first_word = self._transport.readAll(I32.size) - frame_size, = I32.unpack(first_word) - is_unframed = False - if frame_size & TBinaryProtocol.VERSION_MASK == TBinaryProtocol.VERSION_1: - self._set_client_type(THeaderClientType.UNFRAMED_BINARY) - is_unframed = True - elif (byte_index(first_word, 0) == TCompactProtocol.PROTOCOL_ID and - byte_index(first_word, 1) & TCompactProtocol.VERSION_MASK == TCompactProtocol.VERSION): - self._set_client_type(THeaderClientType.UNFRAMED_COMPACT) - is_unframed = True - - if is_unframed: - bytes_left_to_read = req_sz - I32.size - if bytes_left_to_read > 0: - rest = self._transport.read(bytes_left_to_read) - else: - rest = b"" - self._read_buffer = BufferIO(first_word + rest) - return - - # ok, we're still here so we're framed. - if frame_size > self._max_frame_size: - raise TTransportException( - TTransportException.SIZE_LIMIT, - "Frame was too large.", - ) - read_buffer = BufferIO(self._transport.readAll(frame_size)) - - # the next word is either going to be the version field of a - # binary/compact protocol message or the magic value + flags of a - # header protocol message. 
- second_word = read_buffer.read(I32.size) - version, = I32.unpack(second_word) - read_buffer.seek(0) - if version >> 16 == HEADER_MAGIC: - self._set_client_type(THeaderClientType.HEADERS) - self._read_buffer = self._parse_header_format(read_buffer) - elif version & TBinaryProtocol.VERSION_MASK == TBinaryProtocol.VERSION_1: - self._set_client_type(THeaderClientType.FRAMED_BINARY) - self._read_buffer = read_buffer - elif (byte_index(second_word, 0) == TCompactProtocol.PROTOCOL_ID and - byte_index(second_word, 1) & TCompactProtocol.VERSION_MASK == TCompactProtocol.VERSION): - self._set_client_type(THeaderClientType.FRAMED_COMPACT) - self._read_buffer = read_buffer - else: - raise TTransportException( - TTransportException.INVALID_CLIENT_TYPE, - "Could not detect client transport type.", - ) - - def _parse_header_format(self, buffer): - # make BufferIO look like TTransport for varint helpers - buffer_transport = TMemoryBuffer() - buffer_transport._buffer = buffer - - buffer.read(2) # discard the magic bytes - self.flags, = U16.unpack(buffer.read(U16.size)) - self.sequence_id, = I32.unpack(buffer.read(I32.size)) - - header_length = U16.unpack(buffer.read(U16.size))[0] * 4 - end_of_headers = buffer.tell() + header_length - if end_of_headers > len(buffer.getvalue()): - raise TTransportException( - TTransportException.SIZE_LIMIT, - "Header size is larger than whole frame.", - ) - - self._protocol_id = readVarint(buffer_transport) - - transforms = [] - transform_count = readVarint(buffer_transport) - for _ in range(transform_count): - transform_id = readVarint(buffer_transport) - if transform_id not in READ_TRANSFORMS_BY_ID: - raise TApplicationException( - TApplicationException.INVALID_TRANSFORM, - "Unknown transform: %d" % transform_id, - ) - transforms.append(transform_id) - transforms.reverse() - - headers = {} - while buffer.tell() < end_of_headers: - header_type = readVarint(buffer_transport) - if header_type == TInfoHeaderType.KEY_VALUE: - count = 
readVarint(buffer_transport) - for _ in range(count): - key = _readString(buffer_transport) - value = _readString(buffer_transport) - headers[key] = value - else: - break # ignore unknown headers - self._read_headers = headers - - # skip padding / anything we didn't understand - buffer.seek(end_of_headers) - - payload = buffer.read() - for transform_id in transforms: - transform_fn = READ_TRANSFORMS_BY_ID[transform_id] - payload = transform_fn(payload) - return BufferIO(payload) - - def write(self, buf): - self._write_buffer.write(buf) - - def flush(self): - payload = self._write_buffer.getvalue() - self._write_buffer = BufferIO() - - buffer = BufferIO() - if self._client_type == THeaderClientType.HEADERS: - for transform_id in self._write_transforms: - transform_fn = WRITE_TRANSFORMS_BY_ID[transform_id] - payload = transform_fn(payload) - - headers = BufferIO() - writeVarint(headers, self._protocol_id) - writeVarint(headers, len(self._write_transforms)) - for transform_id in self._write_transforms: - writeVarint(headers, transform_id) - if self._write_headers: - writeVarint(headers, TInfoHeaderType.KEY_VALUE) - writeVarint(headers, len(self._write_headers)) - for key, value in self._write_headers.items(): - _writeString(headers, key) - _writeString(headers, value) - self._write_headers = {} - padding_needed = (4 - (len(headers.getvalue()) % 4)) % 4 - headers.write(b"\x00" * padding_needed) - header_bytes = headers.getvalue() - - buffer.write(I32.pack(10 + len(header_bytes) + len(payload))) - buffer.write(U16.pack(HEADER_MAGIC)) - buffer.write(U16.pack(self.flags)) - buffer.write(I32.pack(self.sequence_id)) - buffer.write(U16.pack(len(header_bytes) // 4)) - buffer.write(header_bytes) - buffer.write(payload) - elif self._client_type in (THeaderClientType.FRAMED_BINARY, THeaderClientType.FRAMED_COMPACT): - buffer.write(I32.pack(len(payload))) - buffer.write(payload) - elif self._client_type in (THeaderClientType.UNFRAMED_BINARY, THeaderClientType.UNFRAMED_COMPACT): - 
buffer.write(payload) - else: - raise TTransportException( - TTransportException.INVALID_CLIENT_TYPE, - "Unknown client type.", - ) - - # the frame length field doesn't count towards the frame payload size - frame_bytes = buffer.getvalue() - frame_payload_size = len(frame_bytes) - 4 - if frame_payload_size > self._max_frame_size: - raise TTransportException( - TTransportException.SIZE_LIMIT, - "Attempting to send frame that is too large.", - ) - - self._transport.write(frame_bytes) - self._transport.flush() - - @property - def cstringio_buf(self): - return self._read_buffer - - def cstringio_refill(self, partialread, reqlen): - result = bytearray(partialread) - while len(result) < reqlen: - result += self.read(reqlen - len(result)) - self._read_buffer = BufferIO(result) - return self._read_buffer diff --git a/shell/ext-py/thrift-0.16.0/src/transport/THttpClient.py b/shell/ext-py/thrift-0.16.0/src/transport/THttpClient.py deleted file mode 100644 index 212da3aa5..000000000 --- a/shell/ext-py/thrift-0.16.0/src/transport/THttpClient.py +++ /dev/null @@ -1,191 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -from io import BytesIO -import os -import ssl -import sys -import warnings -import base64 - -from six.moves import urllib -from six.moves import http_client - -from .TTransport import TTransportBase -import six - - -class THttpClient(TTransportBase): - """Http implementation of TTransport base.""" - - def __init__(self, uri_or_host, port=None, path=None, cafile=None, cert_file=None, key_file=None, ssl_context=None): - """THttpClient supports two different types of construction: - - THttpClient(host, port, path) - deprecated - THttpClient(uri, [port=, path=, cafile=, cert_file=, key_file=, ssl_context=]) - - Only the second supports https. To properly authenticate against the server, - provide the client's identity by specifying cert_file and key_file. To properly - authenticate the server, specify either cafile or ssl_context with a CA defined. - NOTE: if both cafile and ssl_context are defined, ssl_context will override cafile. - """ - if port is not None: - warnings.warn( - "Please use the THttpClient('http{s}://host:port/path') constructor", - DeprecationWarning, - stacklevel=2) - self.host = uri_or_host - self.port = port - assert path - self.path = path - self.scheme = 'http' - else: - parsed = urllib.parse.urlparse(uri_or_host) - self.scheme = parsed.scheme - assert self.scheme in ('http', 'https') - if self.scheme == 'http': - self.port = parsed.port or http_client.HTTP_PORT - elif self.scheme == 'https': - self.port = parsed.port or http_client.HTTPS_PORT - self.certfile = cert_file - self.keyfile = key_file - self.context = ssl.create_default_context(cafile=cafile) if (cafile and not ssl_context) else ssl_context - self.host = parsed.hostname - self.path = parsed.path - if parsed.query: - self.path += '?%s' % parsed.query - try: - proxy = urllib.request.getproxies()[self.scheme] - except KeyError: - proxy = None - else: - if urllib.request.proxy_bypass(self.host): - proxy = None - if proxy: - parsed = urllib.parse.urlparse(proxy) - self.realhost = 
self.host - self.realport = self.port - self.host = parsed.hostname - self.port = parsed.port - self.proxy_auth = self.basic_proxy_auth_header(parsed) - else: - self.realhost = self.realport = self.proxy_auth = None - self.__wbuf = BytesIO() - self.__http = None - self.__http_response = None - self.__timeout = None - self.__custom_headers = None - - @staticmethod - def basic_proxy_auth_header(proxy): - if proxy is None or not proxy.username: - return None - ap = "%s:%s" % (urllib.parse.unquote(proxy.username), - urllib.parse.unquote(proxy.password)) - cr = base64.b64encode(ap).strip() - return "Basic " + cr - - def using_proxy(self): - return self.realhost is not None - - def open(self): - if self.scheme == 'http': - self.__http = http_client.HTTPConnection(self.host, self.port, - timeout=self.__timeout) - elif self.scheme == 'https': - self.__http = http_client.HTTPSConnection(self.host, self.port, - key_file=self.keyfile, - cert_file=self.certfile, - timeout=self.__timeout, - context=self.context) - if self.using_proxy(): - self.__http.set_tunnel(self.realhost, self.realport, - {"Proxy-Authorization": self.proxy_auth}) - - def close(self): - self.__http.close() - self.__http = None - self.__http_response = None - - def isOpen(self): - return self.__http is not None - - def setTimeout(self, ms): - if ms is None: - self.__timeout = None - else: - self.__timeout = ms / 1000.0 - - def setCustomHeaders(self, headers): - self.__custom_headers = headers - - def read(self, sz): - return self.__http_response.read(sz) - - def write(self, buf): - self.__wbuf.write(buf) - - def flush(self): - if self.isOpen(): - self.close() - self.open() - - # Pull data out of buffer - data = self.__wbuf.getvalue() - self.__wbuf = BytesIO() - - # HTTP request - if self.using_proxy() and self.scheme == "http": - # need full URL of real host for HTTP proxy here (HTTPS uses CONNECT tunnel) - self.__http.putrequest('POST', "http://%s:%s%s" % - (self.realhost, self.realport, self.path)) - else: 
- self.__http.putrequest('POST', self.path) - - # Write headers - self.__http.putheader('Content-Type', 'application/x-thrift') - self.__http.putheader('Content-Length', str(len(data))) - if self.using_proxy() and self.scheme == "http" and self.proxy_auth is not None: - self.__http.putheader("Proxy-Authorization", self.proxy_auth) - - if not self.__custom_headers or 'User-Agent' not in self.__custom_headers: - user_agent = 'Python/THttpClient' - script = os.path.basename(sys.argv[0]) - if script: - user_agent = '%s (%s)' % (user_agent, urllib.parse.quote(script)) - self.__http.putheader('User-Agent', user_agent) - - if self.__custom_headers: - for key, val in six.iteritems(self.__custom_headers): - self.__http.putheader(key, val) - - self.__http.endheaders() - - # Write payload - self.__http.send(data) - - # Get reply to flush the request - self.__http_response = self.__http.getresponse() - self.code = self.__http_response.status - self.message = self.__http_response.reason - self.headers = self.__http_response.msg - - # Saves the cookie sent by the server response - if 'Set-Cookie' in self.headers: - self.__http.putheader('Cookie', self.headers['Set-Cookie']) diff --git a/shell/ext-py/thrift-0.16.0/src/transport/TSSLSocket.py b/shell/ext-py/thrift-0.16.0/src/transport/TSSLSocket.py deleted file mode 100644 index 5b3ae5991..000000000 --- a/shell/ext-py/thrift-0.16.0/src/transport/TSSLSocket.py +++ /dev/null @@ -1,408 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - -import logging -import os -import socket -import ssl -import sys -import warnings - -from .sslcompat import _match_hostname, _match_has_ipaddress -from thrift.transport import TSocket -from thrift.transport.TTransport import TTransportException - -logger = logging.getLogger(__name__) -warnings.filterwarnings( - 'default', category=DeprecationWarning, module=__name__) - - -class TSSLBase(object): - # SSLContext is not available for Python < 2.7.9 - _has_ssl_context = sys.hexversion >= 0x020709F0 - - # ciphers argument is not available for Python < 2.7.0 - _has_ciphers = sys.hexversion >= 0x020700F0 - - # For python >= 2.7.9, use latest TLS that both client and server - # supports. - # SSL 2.0 and 3.0 are disabled via ssl.OP_NO_SSLv2 and ssl.OP_NO_SSLv3. - # For python < 2.7.9, use TLS 1.0 since TLSv1_X nor OP_NO_SSLvX is - # unavailable. 
- _default_protocol = ssl.PROTOCOL_SSLv23 if _has_ssl_context else \ - ssl.PROTOCOL_TLSv1 - - def _init_context(self, ssl_version): - if self._has_ssl_context: - self._context = ssl.SSLContext(ssl_version) - if self._context.protocol == ssl.PROTOCOL_SSLv23: - self._context.options |= ssl.OP_NO_SSLv2 - self._context.options |= ssl.OP_NO_SSLv3 - else: - self._context = None - self._ssl_version = ssl_version - - @property - def _should_verify(self): - if self._has_ssl_context: - return self._context.verify_mode != ssl.CERT_NONE - else: - return self.cert_reqs != ssl.CERT_NONE - - @property - def ssl_version(self): - if self._has_ssl_context: - return self.ssl_context.protocol - else: - return self._ssl_version - - @property - def ssl_context(self): - return self._context - - SSL_VERSION = _default_protocol - """ - Default SSL version. - For backwards compatibility, it can be modified. - Use __init__ keyword argument "ssl_version" instead. - """ - - def _deprecated_arg(self, args, kwargs, pos, key): - if len(args) <= pos: - return - real_pos = pos + 3 - warnings.warn( - '%dth positional argument is deprecated.' - 'please use keyword argument instead.' - % real_pos, DeprecationWarning, stacklevel=3) - - if key in kwargs: - raise TypeError( - 'Duplicate argument: %dth argument and %s keyword argument.' - % (real_pos, key)) - kwargs[key] = args[pos] - - def _unix_socket_arg(self, host, port, args, kwargs): - key = 'unix_socket' - if host is None and port is None and len(args) == 1 and key not in kwargs: - kwargs[key] = args[0] - return True - return False - - def __getattr__(self, key): - if key == 'SSL_VERSION': - warnings.warn( - 'SSL_VERSION is deprecated.' - 'please use ssl_version attribute instead.', - DeprecationWarning, stacklevel=2) - return self.ssl_version - - def __init__(self, server_side, host, ssl_opts): - self._server_side = server_side - if TSSLBase.SSL_VERSION != self._default_protocol: - warnings.warn( - 'SSL_VERSION is deprecated.' 
- 'please use ssl_version keyword argument instead.', - DeprecationWarning, stacklevel=2) - self._context = ssl_opts.pop('ssl_context', None) - self._server_hostname = None - if not self._server_side: - self._server_hostname = ssl_opts.pop('server_hostname', host) - if self._context: - self._custom_context = True - if ssl_opts: - raise ValueError( - 'Incompatible arguments: ssl_context and %s' - % ' '.join(ssl_opts.keys())) - if not self._has_ssl_context: - raise ValueError( - 'ssl_context is not available for this version of Python') - else: - self._custom_context = False - ssl_version = ssl_opts.pop('ssl_version', TSSLBase.SSL_VERSION) - self._init_context(ssl_version) - self.cert_reqs = ssl_opts.pop('cert_reqs', ssl.CERT_REQUIRED) - self.ca_certs = ssl_opts.pop('ca_certs', None) - self.keyfile = ssl_opts.pop('keyfile', None) - self.certfile = ssl_opts.pop('certfile', None) - self.ciphers = ssl_opts.pop('ciphers', None) - - if ssl_opts: - raise ValueError( - 'Unknown keyword arguments: ', ' '.join(ssl_opts.keys())) - - if self._should_verify: - if not self.ca_certs: - raise ValueError( - 'ca_certs is needed when cert_reqs is not ssl.CERT_NONE') - if not os.access(self.ca_certs, os.R_OK): - raise IOError('Certificate Authority ca_certs file "%s" ' - 'is not readable, cannot validate SSL ' - 'certificates.' 
% (self.ca_certs)) - - @property - def certfile(self): - return self._certfile - - @certfile.setter - def certfile(self, certfile): - if self._server_side and not certfile: - raise ValueError('certfile is needed for server-side') - if certfile and not os.access(certfile, os.R_OK): - raise IOError('No such certfile found: %s' % (certfile)) - self._certfile = certfile - - def _wrap_socket(self, sock): - if self._has_ssl_context: - if not self._custom_context: - self.ssl_context.verify_mode = self.cert_reqs - if self.certfile: - self.ssl_context.load_cert_chain(self.certfile, - self.keyfile) - if self.ciphers: - self.ssl_context.set_ciphers(self.ciphers) - if self.ca_certs: - self.ssl_context.load_verify_locations(self.ca_certs) - return self.ssl_context.wrap_socket( - sock, server_side=self._server_side, - server_hostname=self._server_hostname) - else: - ssl_opts = { - 'ssl_version': self._ssl_version, - 'server_side': self._server_side, - 'ca_certs': self.ca_certs, - 'keyfile': self.keyfile, - 'certfile': self.certfile, - 'cert_reqs': self.cert_reqs, - } - if self.ciphers: - if self._has_ciphers: - ssl_opts['ciphers'] = self.ciphers - else: - logger.warning( - 'ciphers is specified but ignored due to old Python version') - return ssl.wrap_socket(sock, **ssl_opts) - - -class TSSLSocket(TSocket.TSocket, TSSLBase): - """ - SSL implementation of TSocket - - This class creates outbound sockets wrapped using the - python standard ssl module for encrypted connections. 
- """ - - # New signature - # def __init__(self, host='localhost', port=9090, unix_socket=None, - # **ssl_args): - # Deprecated signature - # def __init__(self, host='localhost', port=9090, validate=True, - # ca_certs=None, keyfile=None, certfile=None, - # unix_socket=None, ciphers=None): - def __init__(self, host='localhost', port=9090, *args, **kwargs): - """Positional arguments: ``host``, ``port``, ``unix_socket`` - - Keyword arguments: ``keyfile``, ``certfile``, ``cert_reqs``, - ``ssl_version``, ``ca_certs``, - ``ciphers`` (Python 2.7.0 or later), - ``server_hostname`` (Python 2.7.9 or later) - Passed to ssl.wrap_socket. See ssl.wrap_socket documentation. - - Alternative keyword arguments: (Python 2.7.9 or later) - ``ssl_context``: ssl.SSLContext to be used for SSLContext.wrap_socket - ``server_hostname``: Passed to SSLContext.wrap_socket - - Common keyword argument: - ``validate_callback`` (cert, hostname) -> None: - Called after SSL handshake. Can raise when hostname does not - match the cert. - ``socket_keepalive`` enable TCP keepalive, default off. - """ - self.is_valid = False - self.peercert = None - - if args: - if len(args) > 6: - raise TypeError('Too many positional argument') - if not self._unix_socket_arg(host, port, args, kwargs): - self._deprecated_arg(args, kwargs, 0, 'validate') - self._deprecated_arg(args, kwargs, 1, 'ca_certs') - self._deprecated_arg(args, kwargs, 2, 'keyfile') - self._deprecated_arg(args, kwargs, 3, 'certfile') - self._deprecated_arg(args, kwargs, 4, 'unix_socket') - self._deprecated_arg(args, kwargs, 5, 'ciphers') - - validate = kwargs.pop('validate', None) - if validate is not None: - cert_reqs_name = 'CERT_REQUIRED' if validate else 'CERT_NONE' - warnings.warn( - 'validate is deprecated. 
please use cert_reqs=ssl.%s instead' - % cert_reqs_name, - DeprecationWarning, stacklevel=2) - if 'cert_reqs' in kwargs: - raise TypeError('Cannot specify both validate and cert_reqs') - kwargs['cert_reqs'] = ssl.CERT_REQUIRED if validate else ssl.CERT_NONE - - unix_socket = kwargs.pop('unix_socket', None) - socket_keepalive = kwargs.pop('socket_keepalive', False) - self._validate_callback = kwargs.pop('validate_callback', _match_hostname) - TSSLBase.__init__(self, False, host, kwargs) - TSocket.TSocket.__init__(self, host, port, unix_socket, - socket_keepalive=socket_keepalive) - - def close(self): - try: - self.handle.settimeout(0.001) - self.handle = self.handle.unwrap() - except (ssl.SSLError, socket.error, OSError): - # could not complete shutdown in a reasonable amount of time. bail. - pass - TSocket.TSocket.close(self) - - @property - def validate(self): - warnings.warn('validate is deprecated. please use cert_reqs instead', - DeprecationWarning, stacklevel=2) - return self.cert_reqs != ssl.CERT_NONE - - @validate.setter - def validate(self, value): - warnings.warn('validate is deprecated. 
please use cert_reqs instead', - DeprecationWarning, stacklevel=2) - self.cert_reqs = ssl.CERT_REQUIRED if value else ssl.CERT_NONE - - def _do_open(self, family, socktype): - plain_sock = socket.socket(family, socktype) - try: - return self._wrap_socket(plain_sock) - except Exception as ex: - plain_sock.close() - msg = 'failed to initialize SSL' - logger.exception(msg) - raise TTransportException(type=TTransportException.NOT_OPEN, message=msg, inner=ex) - - def open(self): - super(TSSLSocket, self).open() - if self._should_verify: - self.peercert = self.handle.getpeercert() - try: - self._validate_callback(self.peercert, self._server_hostname) - self.is_valid = True - except TTransportException: - raise - except Exception as ex: - raise TTransportException(message=str(ex), inner=ex) - - -class TSSLServerSocket(TSocket.TServerSocket, TSSLBase): - """SSL implementation of TServerSocket - - This uses the ssl module's wrap_socket() method to provide SSL - negotiated encryption. - """ - - # New signature - # def __init__(self, host='localhost', port=9090, unix_socket=None, **ssl_args): - # Deprecated signature - # def __init__(self, host=None, port=9090, certfile='cert.pem', unix_socket=None, ciphers=None): - def __init__(self, host=None, port=9090, *args, **kwargs): - """Positional arguments: ``host``, ``port``, ``unix_socket`` - - Keyword arguments: ``keyfile``, ``certfile``, ``cert_reqs``, ``ssl_version``, - ``ca_certs``, ``ciphers`` (Python 2.7.0 or later) - See ssl.wrap_socket documentation. - - Alternative keyword arguments: (Python 2.7.9 or later) - ``ssl_context``: ssl.SSLContext to be used for SSLContext.wrap_socket - ``server_hostname``: Passed to SSLContext.wrap_socket - - Common keyword argument: - ``validate_callback`` (cert, hostname) -> None: - Called after SSL handshake. Can raise when hostname does not - match the cert. 
- """ - if args: - if len(args) > 3: - raise TypeError('Too many positional argument') - if not self._unix_socket_arg(host, port, args, kwargs): - self._deprecated_arg(args, kwargs, 0, 'certfile') - self._deprecated_arg(args, kwargs, 1, 'unix_socket') - self._deprecated_arg(args, kwargs, 2, 'ciphers') - - if 'ssl_context' not in kwargs: - # Preserve existing behaviors for default values - if 'cert_reqs' not in kwargs: - kwargs['cert_reqs'] = ssl.CERT_NONE - if'certfile' not in kwargs: - kwargs['certfile'] = 'cert.pem' - - unix_socket = kwargs.pop('unix_socket', None) - self._validate_callback = \ - kwargs.pop('validate_callback', _match_hostname) - TSSLBase.__init__(self, True, None, kwargs) - TSocket.TServerSocket.__init__(self, host, port, unix_socket) - if self._should_verify and not _match_has_ipaddress: - raise ValueError('Need ipaddress and backports.ssl_match_hostname ' - 'module to verify client certificate') - - def setCertfile(self, certfile): - """Set or change the server certificate file used to wrap new - connections. - - @param certfile: The filename of the server certificate, - i.e. '/etc/certs/server.pem' - @type certfile: str - - Raises an IOError exception if the certfile is not present or unreadable. - """ - warnings.warn( - 'setCertfile is deprecated. please use certfile property instead.', - DeprecationWarning, stacklevel=2) - self.certfile = certfile - - def accept(self): - plain_client, addr = self.handle.accept() - try: - client = self._wrap_socket(plain_client) - except (ssl.SSLError, socket.error, OSError): - logger.exception('Error while accepting from %s', addr) - # failed handshake/ssl wrap, close socket to client - plain_client.close() - # raise - # We can't raise the exception, because it kills most TServer derived - # serve() methods. - # Instead, return None, and let the TServer instance deal with it in - # other exception handling. 
(but TSimpleServer dies anyway) - return None - - if self._should_verify: - client.peercert = client.getpeercert() - try: - self._validate_callback(client.peercert, addr[0]) - client.is_valid = True - except Exception: - logger.warn('Failed to validate client certificate address: %s', - addr[0], exc_info=True) - client.close() - plain_client.close() - return None - - result = TSocket.TSocket() - result.handle = client - return result diff --git a/shell/ext-py/thrift-0.16.0/src/transport/TSocket.py b/shell/ext-py/thrift-0.16.0/src/transport/TSocket.py deleted file mode 100644 index 3c7a3ca7d..000000000 --- a/shell/ext-py/thrift-0.16.0/src/transport/TSocket.py +++ /dev/null @@ -1,239 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -import errno -import logging -import os -import socket -import sys - -from .TTransport import TTransportBase, TTransportException, TServerTransportBase - -logger = logging.getLogger(__name__) - - -class TSocketBase(TTransportBase): - def _resolveAddr(self): - if self._unix_socket is not None: - return [(socket.AF_UNIX, socket.SOCK_STREAM, None, None, - self._unix_socket)] - else: - return socket.getaddrinfo(self.host, - self.port, - self._socket_family, - socket.SOCK_STREAM, - 0, - socket.AI_PASSIVE) - - def close(self): - if self.handle: - self.handle.close() - self.handle = None - - -class TSocket(TSocketBase): - """Socket implementation of TTransport base.""" - - def __init__(self, host='localhost', port=9090, unix_socket=None, - socket_family=socket.AF_UNSPEC, - socket_keepalive=False): - """Initialize a TSocket - - @param host(str) The host to connect to. - @param port(int) The (TCP) port to connect to. - @param unix_socket(str) The filename of a unix socket to connect to. - (host and port will be ignored.) - @param socket_family(int) The socket family to use with this socket. - @param socket_keepalive(bool) enable TCP keepalive, default off. - """ - self.host = host - self.port = port - self.handle = None - self._unix_socket = unix_socket - self._timeout = None - self._socket_family = socket_family - self._socket_keepalive = socket_keepalive - - def setHandle(self, h): - self.handle = h - - def isOpen(self): - if self.handle is None: - return False - - # this lets us cheaply see if the other end of the socket is still - # connected. if disconnected, we'll get EOF back (expressed as zero - # bytes of data) otherwise we'll get one byte or an error indicating - # we'd have to block for data. - # - # note that we're not doing this with socket.MSG_DONTWAIT because 1) - # it's linux-specific and 2) gevent-patched sockets hide EAGAIN from us - # when timeout is non-zero. 
- original_timeout = self.handle.gettimeout() - try: - self.handle.settimeout(0) - try: - peeked_bytes = self.handle.recv(1, socket.MSG_PEEK) - except (socket.error, OSError) as exc: # on modern python this is just BlockingIOError - if exc.errno in (errno.EWOULDBLOCK, errno.EAGAIN): - return True - return False - finally: - self.handle.settimeout(original_timeout) - - # the length will be zero if we got EOF (indicating connection closed) - return len(peeked_bytes) == 1 - - def setTimeout(self, ms): - if ms is None: - self._timeout = None - else: - self._timeout = ms / 1000.0 - - if self.handle is not None: - self.handle.settimeout(self._timeout) - - def _do_open(self, family, socktype): - return socket.socket(family, socktype) - - @property - def _address(self): - return self._unix_socket if self._unix_socket else '%s:%d' % (self.host, self.port) - - def open(self): - if self.handle: - raise TTransportException(type=TTransportException.ALREADY_OPEN, message="already open") - try: - addrs = self._resolveAddr() - except socket.gaierror as gai: - msg = 'failed to resolve sockaddr for ' + str(self._address) - logger.exception(msg) - raise TTransportException(type=TTransportException.NOT_OPEN, message=msg, inner=gai) - for family, socktype, _, _, sockaddr in addrs: - handle = self._do_open(family, socktype) - - # TCP_KEEPALIVE - if self._socket_keepalive: - handle.setsockopt(socket.IPPROTO_TCP, socket.SO_KEEPALIVE, 1) - - handle.settimeout(self._timeout) - try: - handle.connect(sockaddr) - self.handle = handle - return - except socket.error: - handle.close() - logger.info('Could not connect to %s', sockaddr, exc_info=True) - msg = 'Could not connect to any of %s' % list(map(lambda a: a[4], - addrs)) - logger.error(msg) - raise TTransportException(type=TTransportException.NOT_OPEN, message=msg) - - def read(self, sz): - try: - buff = self.handle.recv(sz) - except socket.error as e: - if (e.args[0] == errno.ECONNRESET and - (sys.platform == 'darwin' or 
sys.platform.startswith('freebsd'))): - # freebsd and Mach don't follow POSIX semantic of recv - # and fail with ECONNRESET if peer performed shutdown. - # See corresponding comment and code in TSocket::read() - # in lib/cpp/src/transport/TSocket.cpp. - self.close() - # Trigger the check to raise the END_OF_FILE exception below. - buff = '' - elif e.args[0] == errno.ETIMEDOUT: - raise TTransportException(type=TTransportException.TIMED_OUT, message="read timeout", inner=e) - else: - raise TTransportException(message="unexpected exception", inner=e) - if len(buff) == 0: - raise TTransportException(type=TTransportException.END_OF_FILE, - message='TSocket read 0 bytes') - return buff - - def write(self, buff): - if not self.handle: - raise TTransportException(type=TTransportException.NOT_OPEN, - message='Transport not open') - sent = 0 - have = len(buff) - while sent < have: - try: - plus = self.handle.send(buff) - if plus == 0: - raise TTransportException(type=TTransportException.END_OF_FILE, - message='TSocket sent 0 bytes') - sent += plus - buff = buff[plus:] - except socket.error as e: - raise TTransportException(message="unexpected exception", inner=e) - - def flush(self): - pass - - -class TServerSocket(TSocketBase, TServerTransportBase): - """Socket implementation of TServerTransport base.""" - - def __init__(self, host=None, port=9090, unix_socket=None, socket_family=socket.AF_UNSPEC): - self.host = host - self.port = port - self._unix_socket = unix_socket - self._socket_family = socket_family - self.handle = None - self._backlog = 128 - - def setBacklog(self, backlog=None): - if not self.handle: - self._backlog = backlog - else: - # We cann't update backlog when it is already listening, since the - # handle has been created. 
- logger.warn('You have to set backlog before listen.') - - def listen(self): - res0 = self._resolveAddr() - socket_family = self._socket_family == socket.AF_UNSPEC and socket.AF_INET6 or self._socket_family - for res in res0: - if res[0] is socket_family or res is res0[-1]: - break - - # We need remove the old unix socket if the file exists and - # nobody is listening on it. - if self._unix_socket: - tmp = socket.socket(res[0], res[1]) - try: - tmp.connect(res[4]) - except socket.error as err: - eno, message = err.args - if eno == errno.ECONNREFUSED: - os.unlink(res[4]) - - self.handle = socket.socket(res[0], res[1]) - self.handle.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - if hasattr(self.handle, 'settimeout'): - self.handle.settimeout(None) - self.handle.bind(res[4]) - self.handle.listen(self._backlog) - - def accept(self): - client, addr = self.handle.accept() - result = TSocket() - result.setHandle(client) - return result diff --git a/shell/ext-py/thrift-0.16.0/src/transport/TTransport.py b/shell/ext-py/thrift-0.16.0/src/transport/TTransport.py deleted file mode 100644 index ff20d7ec9..000000000 --- a/shell/ext-py/thrift-0.16.0/src/transport/TTransport.py +++ /dev/null @@ -1,459 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. -# - -from struct import pack, unpack -from thrift.Thrift import TException -from ..compat import BufferIO - - -class TTransportException(TException): - """Custom Transport Exception class""" - - UNKNOWN = 0 - NOT_OPEN = 1 - ALREADY_OPEN = 2 - TIMED_OUT = 3 - END_OF_FILE = 4 - NEGATIVE_SIZE = 5 - SIZE_LIMIT = 6 - INVALID_CLIENT_TYPE = 7 - - def __init__(self, type=UNKNOWN, message=None, inner=None): - TException.__init__(self, message) - self.type = type - self.inner = inner - - -class TTransportBase(object): - """Base class for Thrift transport layer.""" - - def isOpen(self): - pass - - def open(self): - pass - - def close(self): - pass - - def read(self, sz): - pass - - def readAll(self, sz): - buff = b'' - have = 0 - while (have < sz): - chunk = self.read(sz - have) - chunkLen = len(chunk) - have += chunkLen - buff += chunk - - if chunkLen == 0: - raise EOFError() - - return buff - - def write(self, buf): - pass - - def flush(self): - pass - - -# This class should be thought of as an interface. -class CReadableTransport(object): - """base class for transports that are readable from C""" - - # TODO(dreiss): Think about changing this interface to allow us to use - # a (Python, not c) StringIO instead, because it allows - # you to write after reading. - - # NOTE: This is a classic class, so properties will NOT work - # correctly for setting. - @property - def cstringio_buf(self): - """A cStringIO buffer that contains the current chunk we are reading.""" - pass - - def cstringio_refill(self, partialread, reqlen): - """Refills cstringio_buf. - - Returns the currently used buffer (which can but need not be the same as - the old cstringio_buf). partialread is what the C code has read from the - buffer, and should be inserted into the buffer before any more reads. The - return value must be a new, not borrowed reference. 
Something along the - lines of self._buf should be fine. - - If reqlen bytes can't be read, throw EOFError. - """ - pass - - -class TServerTransportBase(object): - """Base class for Thrift server transports.""" - - def listen(self): - pass - - def accept(self): - pass - - def close(self): - pass - - -class TTransportFactoryBase(object): - """Base class for a Transport Factory""" - - def getTransport(self, trans): - return trans - - -class TBufferedTransportFactory(object): - """Factory transport that builds buffered transports""" - - def getTransport(self, trans): - buffered = TBufferedTransport(trans) - return buffered - - -class TBufferedTransport(TTransportBase, CReadableTransport): - """Class that wraps another transport and buffers its I/O. - - The implementation uses a (configurable) fixed-size read buffer - but buffers all writes until a flush is performed. - """ - DEFAULT_BUFFER = 4096 - - def __init__(self, trans, rbuf_size=DEFAULT_BUFFER): - self.__trans = trans - self.__wbuf = BufferIO() - # Pass string argument to initialize read buffer as cStringIO.InputType - self.__rbuf = BufferIO(b'') - self.__rbuf_size = rbuf_size - - def isOpen(self): - return self.__trans.isOpen() - - def open(self): - return self.__trans.open() - - def close(self): - return self.__trans.close() - - def read(self, sz): - ret = self.__rbuf.read(sz) - if len(ret) != 0: - return ret - self.__rbuf = BufferIO(self.__trans.read(max(sz, self.__rbuf_size))) - return self.__rbuf.read(sz) - - def write(self, buf): - try: - self.__wbuf.write(buf) - except Exception as e: - # on exception reset wbuf so it doesn't contain a partial function call - self.__wbuf = BufferIO() - raise e - - def flush(self): - out = self.__wbuf.getvalue() - # reset wbuf before write/flush to preserve state on underlying failure - self.__wbuf = BufferIO() - self.__trans.write(out) - self.__trans.flush() - - # Implement the CReadableTransport interface. 
- @property - def cstringio_buf(self): - return self.__rbuf - - def cstringio_refill(self, partialread, reqlen): - retstring = partialread - if reqlen < self.__rbuf_size: - # try to make a read of as much as we can. - retstring += self.__trans.read(self.__rbuf_size) - - # but make sure we do read reqlen bytes. - if len(retstring) < reqlen: - retstring += self.__trans.readAll(reqlen - len(retstring)) - - self.__rbuf = BufferIO(retstring) - return self.__rbuf - - -class TMemoryBuffer(TTransportBase, CReadableTransport): - """Wraps a cBytesIO object as a TTransport. - - NOTE: Unlike the C++ version of this class, you cannot write to it - then immediately read from it. If you want to read from a - TMemoryBuffer, you must either pass a string to the constructor. - TODO(dreiss): Make this work like the C++ version. - """ - - def __init__(self, value=None, offset=0): - """value -- a value to read from for stringio - - If value is set, this will be a transport for reading, - otherwise, it is for writing""" - if value is not None: - self._buffer = BufferIO(value) - else: - self._buffer = BufferIO() - if offset: - self._buffer.seek(offset) - - def isOpen(self): - return not self._buffer.closed - - def open(self): - pass - - def close(self): - self._buffer.close() - - def read(self, sz): - return self._buffer.read(sz) - - def write(self, buf): - self._buffer.write(buf) - - def flush(self): - pass - - def getvalue(self): - return self._buffer.getvalue() - - # Implement the CReadableTransport interface. - @property - def cstringio_buf(self): - return self._buffer - - def cstringio_refill(self, partialread, reqlen): - # only one shot at reading... 
- raise EOFError() - - -class TFramedTransportFactory(object): - """Factory transport that builds framed transports""" - - def getTransport(self, trans): - framed = TFramedTransport(trans) - return framed - - -class TFramedTransport(TTransportBase, CReadableTransport): - """Class that wraps another transport and frames its I/O when writing.""" - - def __init__(self, trans,): - self.__trans = trans - self.__rbuf = BufferIO(b'') - self.__wbuf = BufferIO() - - def isOpen(self): - return self.__trans.isOpen() - - def open(self): - return self.__trans.open() - - def close(self): - return self.__trans.close() - - def read(self, sz): - ret = self.__rbuf.read(sz) - if len(ret) != 0: - return ret - - self.readFrame() - return self.__rbuf.read(sz) - - def readFrame(self): - buff = self.__trans.readAll(4) - sz, = unpack('!i', buff) - self.__rbuf = BufferIO(self.__trans.readAll(sz)) - - def write(self, buf): - self.__wbuf.write(buf) - - def flush(self): - wout = self.__wbuf.getvalue() - wsz = len(wout) - # reset wbuf before write/flush to preserve state on underlying failure - self.__wbuf = BufferIO() - # N.B.: Doing this string concatenation is WAY cheaper than making - # two separate calls to the underlying socket object. Socket writes in - # Python turn out to be REALLY expensive, but it seems to do a pretty - # good job of managing string buffer operations without excessive copies - buf = pack("!i", wsz) + wout - self.__trans.write(buf) - self.__trans.flush() - - # Implement the CReadableTransport interface. - @property - def cstringio_buf(self): - return self.__rbuf - - def cstringio_refill(self, prefix, reqlen): - # self.__rbuf will already be empty here because fastbinary doesn't - # ask for a refill until the previous buffer is empty. Therefore, - # we can start reading new frames immediately. 
- while len(prefix) < reqlen: - self.readFrame() - prefix += self.__rbuf.getvalue() - self.__rbuf = BufferIO(prefix) - return self.__rbuf - - -class TFileObjectTransport(TTransportBase): - """Wraps a file-like object to make it work as a Thrift transport.""" - - def __init__(self, fileobj): - self.fileobj = fileobj - - def isOpen(self): - return True - - def close(self): - self.fileobj.close() - - def read(self, sz): - return self.fileobj.read(sz) - - def write(self, buf): - self.fileobj.write(buf) - - def flush(self): - self.fileobj.flush() - - -class TSaslClientTransport(TTransportBase, CReadableTransport): - """ - SASL transport - """ - - START = 1 - OK = 2 - BAD = 3 - ERROR = 4 - COMPLETE = 5 - - def __init__(self, transport, host, service, mechanism='GSSAPI', - **sasl_kwargs): - """ - transport: an underlying transport to use, typically just a TSocket - host: the name of the server, from a SASL perspective - service: the name of the server's service, from a SASL perspective - mechanism: the name of the preferred mechanism to use - - All other kwargs will be passed to the puresasl.client.SASLClient - constructor. 
- """ - - from puresasl.client import SASLClient - - self.transport = transport - self.sasl = SASLClient(host, service, mechanism, **sasl_kwargs) - - self.__wbuf = BufferIO() - self.__rbuf = BufferIO(b'') - - def open(self): - if not self.transport.isOpen(): - self.transport.open() - - self.send_sasl_msg(self.START, bytes(self.sasl.mechanism, 'ascii')) - self.send_sasl_msg(self.OK, self.sasl.process()) - - while True: - status, challenge = self.recv_sasl_msg() - if status == self.OK: - self.send_sasl_msg(self.OK, self.sasl.process(challenge)) - elif status == self.COMPLETE: - if not self.sasl.complete: - raise TTransportException( - TTransportException.NOT_OPEN, - "The server erroneously indicated " - "that SASL negotiation was complete") - else: - break - else: - raise TTransportException( - TTransportException.NOT_OPEN, - "Bad SASL negotiation status: %d (%s)" - % (status, challenge)) - - def isOpen(self): - return self.transport.isOpen() - - def send_sasl_msg(self, status, body): - header = pack(">BI", status, len(body)) - self.transport.write(header + body) - self.transport.flush() - - def recv_sasl_msg(self): - header = self.transport.readAll(5) - status, length = unpack(">BI", header) - if length > 0: - payload = self.transport.readAll(length) - else: - payload = "" - return status, payload - - def write(self, data): - self.__wbuf.write(data) - - def flush(self): - data = self.__wbuf.getvalue() - encoded = self.sasl.wrap(data) - self.transport.write(pack("!i", len(encoded)) + encoded) - self.transport.flush() - self.__wbuf = BufferIO() - - def read(self, sz): - ret = self.__rbuf.read(sz) - if len(ret) != 0: - return ret - - self._read_frame() - return self.__rbuf.read(sz) - - def _read_frame(self): - header = self.transport.readAll(4) - length, = unpack('!i', header) - encoded = self.transport.readAll(length) - self.__rbuf = BufferIO(self.sasl.unwrap(encoded)) - - def close(self): - self.sasl.dispose() - self.transport.close() - - # based on TFramedTransport 
- @property - def cstringio_buf(self): - return self.__rbuf - - def cstringio_refill(self, prefix, reqlen): - # self.__rbuf will already be empty here because fastbinary doesn't - # ask for a refill until the previous buffer is empty. Therefore, - # we can start reading new frames immediately. - while len(prefix) < reqlen: - self._read_frame() - prefix += self.__rbuf.getvalue() - self.__rbuf = BufferIO(prefix) - return self.__rbuf diff --git a/shell/ext-py/thrift-0.16.0/src/transport/TTwisted.py b/shell/ext-py/thrift-0.16.0/src/transport/TTwisted.py deleted file mode 100644 index a27f0adad..000000000 --- a/shell/ext-py/thrift-0.16.0/src/transport/TTwisted.py +++ /dev/null @@ -1,329 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -from io import BytesIO -import struct - -from zope.interface import implementer, Interface, Attribute -from twisted.internet.protocol import ServerFactory, ClientFactory, \ - connectionDone -from twisted.internet import defer -from twisted.internet.threads import deferToThread -from twisted.protocols import basic -from twisted.web import server, resource, http - -from thrift.transport import TTransport - - -class TMessageSenderTransport(TTransport.TTransportBase): - - def __init__(self): - self.__wbuf = BytesIO() - - def write(self, buf): - self.__wbuf.write(buf) - - def flush(self): - msg = self.__wbuf.getvalue() - self.__wbuf = BytesIO() - return self.sendMessage(msg) - - def sendMessage(self, message): - raise NotImplementedError - - -class TCallbackTransport(TMessageSenderTransport): - - def __init__(self, func): - TMessageSenderTransport.__init__(self) - self.func = func - - def sendMessage(self, message): - return self.func(message) - - -class ThriftClientProtocol(basic.Int32StringReceiver): - - MAX_LENGTH = 2 ** 31 - 1 - - def __init__(self, client_class, iprot_factory, oprot_factory=None): - self._client_class = client_class - self._iprot_factory = iprot_factory - if oprot_factory is None: - self._oprot_factory = iprot_factory - else: - self._oprot_factory = oprot_factory - - self.recv_map = {} - self.started = defer.Deferred() - - def dispatch(self, msg): - self.sendString(msg) - - def connectionMade(self): - tmo = TCallbackTransport(self.dispatch) - self.client = self._client_class(tmo, self._oprot_factory) - self.started.callback(self.client) - - def connectionLost(self, reason=connectionDone): - # the called errbacks can add items to our client's _reqs, - # so we need to use a tmp, and iterate until no more requests - # are added during errbacks - if self.client: - tex = TTransport.TTransportException( - type=TTransport.TTransportException.END_OF_FILE, - message='Connection closed (%s)' % reason) - while self.client._reqs: - _, v = 
self.client._reqs.popitem() - v.errback(tex) - del self.client._reqs - self.client = None - - def stringReceived(self, frame): - tr = TTransport.TMemoryBuffer(frame) - iprot = self._iprot_factory.getProtocol(tr) - (fname, mtype, rseqid) = iprot.readMessageBegin() - - try: - method = self.recv_map[fname] - except KeyError: - method = getattr(self.client, 'recv_' + fname) - self.recv_map[fname] = method - - method(iprot, mtype, rseqid) - - -class ThriftSASLClientProtocol(ThriftClientProtocol): - - START = 1 - OK = 2 - BAD = 3 - ERROR = 4 - COMPLETE = 5 - - MAX_LENGTH = 2 ** 31 - 1 - - def __init__(self, client_class, iprot_factory, oprot_factory=None, - host=None, service=None, mechanism='GSSAPI', **sasl_kwargs): - """ - host: the name of the server, from a SASL perspective - service: the name of the server's service, from a SASL perspective - mechanism: the name of the preferred mechanism to use - - All other kwargs will be passed to the puresasl.client.SASLClient - constructor. - """ - - from puresasl.client import SASLClient - self.SASLCLient = SASLClient - - ThriftClientProtocol.__init__(self, client_class, iprot_factory, oprot_factory) - - self._sasl_negotiation_deferred = None - self._sasl_negotiation_status = None - self.client = None - - if host is not None: - self.createSASLClient(host, service, mechanism, **sasl_kwargs) - - def createSASLClient(self, host, service, mechanism, **kwargs): - self.sasl = self.SASLClient(host, service, mechanism, **kwargs) - - def dispatch(self, msg): - encoded = self.sasl.wrap(msg) - len_and_encoded = ''.join((struct.pack('!i', len(encoded)), encoded)) - ThriftClientProtocol.dispatch(self, len_and_encoded) - - @defer.inlineCallbacks - def connectionMade(self): - self._sendSASLMessage(self.START, self.sasl.mechanism) - initial_message = yield deferToThread(self.sasl.process) - self._sendSASLMessage(self.OK, initial_message) - - while True: - status, challenge = yield self._receiveSASLMessage() - if status == self.OK: - response 
= yield deferToThread(self.sasl.process, challenge) - self._sendSASLMessage(self.OK, response) - elif status == self.COMPLETE: - if not self.sasl.complete: - msg = "The server erroneously indicated that SASL " \ - "negotiation was complete" - raise TTransport.TTransportException(msg, message=msg) - else: - break - else: - msg = "Bad SASL negotiation status: %d (%s)" % (status, challenge) - raise TTransport.TTransportException(msg, message=msg) - - self._sasl_negotiation_deferred = None - ThriftClientProtocol.connectionMade(self) - - def _sendSASLMessage(self, status, body): - if body is None: - body = "" - header = struct.pack(">BI", status, len(body)) - self.transport.write(header + body) - - def _receiveSASLMessage(self): - self._sasl_negotiation_deferred = defer.Deferred() - self._sasl_negotiation_status = None - return self._sasl_negotiation_deferred - - def connectionLost(self, reason=connectionDone): - if self.client: - ThriftClientProtocol.connectionLost(self, reason) - - def dataReceived(self, data): - if self._sasl_negotiation_deferred: - # we got a sasl challenge in the format (status, length, challenge) - # save the status, let IntNStringReceiver piece the challenge data together - self._sasl_negotiation_status, = struct.unpack("B", data[0]) - ThriftClientProtocol.dataReceived(self, data[1:]) - else: - # normal frame, let IntNStringReceiver piece it together - ThriftClientProtocol.dataReceived(self, data) - - def stringReceived(self, frame): - if self._sasl_negotiation_deferred: - # the frame is just a SASL challenge - response = (self._sasl_negotiation_status, frame) - self._sasl_negotiation_deferred.callback(response) - else: - # there's a second 4 byte length prefix inside the frame - decoded_frame = self.sasl.unwrap(frame[4:]) - ThriftClientProtocol.stringReceived(self, decoded_frame) - - -class ThriftServerProtocol(basic.Int32StringReceiver): - - MAX_LENGTH = 2 ** 31 - 1 - - def dispatch(self, msg): - self.sendString(msg) - - def processError(self, 
error): - self.transport.loseConnection() - - def processOk(self, _, tmo): - msg = tmo.getvalue() - - if len(msg) > 0: - self.dispatch(msg) - - def stringReceived(self, frame): - tmi = TTransport.TMemoryBuffer(frame) - tmo = TTransport.TMemoryBuffer() - - iprot = self.factory.iprot_factory.getProtocol(tmi) - oprot = self.factory.oprot_factory.getProtocol(tmo) - - d = self.factory.processor.process(iprot, oprot) - d.addCallbacks(self.processOk, self.processError, - callbackArgs=(tmo,)) - - -class IThriftServerFactory(Interface): - - processor = Attribute("Thrift processor") - - iprot_factory = Attribute("Input protocol factory") - - oprot_factory = Attribute("Output protocol factory") - - -class IThriftClientFactory(Interface): - - client_class = Attribute("Thrift client class") - - iprot_factory = Attribute("Input protocol factory") - - oprot_factory = Attribute("Output protocol factory") - - -@implementer(IThriftServerFactory) -class ThriftServerFactory(ServerFactory): - - protocol = ThriftServerProtocol - - def __init__(self, processor, iprot_factory, oprot_factory=None): - self.processor = processor - self.iprot_factory = iprot_factory - if oprot_factory is None: - self.oprot_factory = iprot_factory - else: - self.oprot_factory = oprot_factory - - -@implementer(IThriftClientFactory) -class ThriftClientFactory(ClientFactory): - - protocol = ThriftClientProtocol - - def __init__(self, client_class, iprot_factory, oprot_factory=None): - self.client_class = client_class - self.iprot_factory = iprot_factory - if oprot_factory is None: - self.oprot_factory = iprot_factory - else: - self.oprot_factory = oprot_factory - - def buildProtocol(self, addr): - p = self.protocol(self.client_class, self.iprot_factory, - self.oprot_factory) - p.factory = self - return p - - -class ThriftResource(resource.Resource): - - allowedMethods = ('POST',) - - def __init__(self, processor, inputProtocolFactory, - outputProtocolFactory=None): - resource.Resource.__init__(self) - 
self.inputProtocolFactory = inputProtocolFactory - if outputProtocolFactory is None: - self.outputProtocolFactory = inputProtocolFactory - else: - self.outputProtocolFactory = outputProtocolFactory - self.processor = processor - - def getChild(self, path, request): - return self - - def _cbProcess(self, _, request, tmo): - msg = tmo.getvalue() - request.setResponseCode(http.OK) - request.setHeader("content-type", "application/x-thrift") - request.write(msg) - request.finish() - - def render_POST(self, request): - request.content.seek(0, 0) - data = request.content.read() - tmi = TTransport.TMemoryBuffer(data) - tmo = TTransport.TMemoryBuffer() - - iprot = self.inputProtocolFactory.getProtocol(tmi) - oprot = self.outputProtocolFactory.getProtocol(tmo) - - d = self.processor.process(iprot, oprot) - d.addCallback(self._cbProcess, request, tmo) - return server.NOT_DONE_YET diff --git a/shell/ext-py/thrift-0.16.0/src/transport/TZlibTransport.py b/shell/ext-py/thrift-0.16.0/src/transport/TZlibTransport.py deleted file mode 100644 index e84857924..000000000 --- a/shell/ext-py/thrift-0.16.0/src/transport/TZlibTransport.py +++ /dev/null @@ -1,248 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -"""TZlibTransport provides a compressed transport and transport factory -class, using the python standard library zlib module to implement -data compression. -""" - -from __future__ import division -import zlib -from .TTransport import TTransportBase, CReadableTransport -from ..compat import BufferIO - - -class TZlibTransportFactory(object): - """Factory transport that builds zlib compressed transports. - - This factory caches the last single client/transport that it was passed - and returns the same TZlibTransport object that was created. - - This caching means the TServer class will get the _same_ transport - object for both input and output transports from this factory. - (For non-threaded scenarios only, since the cache only holds one object) - - The purpose of this caching is to allocate only one TZlibTransport where - only one is really needed (since it must have separate read/write buffers), - and makes the statistics from getCompSavings() and getCompRatio() - easier to understand. - """ - # class scoped cache of last transport given and zlibtransport returned - _last_trans = None - _last_z = None - - def getTransport(self, trans, compresslevel=9): - """Wrap a transport, trans, with the TZlibTransport - compressed transport class, returning a new - transport to the caller. - - @param compresslevel: The zlib compression level, ranging - from 0 (no compression) to 9 (best compression). Defaults to 9. - @type compresslevel: int - - This method returns a TZlibTransport which wraps the - passed C{trans} TTransport derived instance. - """ - if trans == self._last_trans: - return self._last_z - ztrans = TZlibTransport(trans, compresslevel) - self._last_trans = trans - self._last_z = ztrans - return ztrans - - -class TZlibTransport(TTransportBase, CReadableTransport): - """Class that wraps a transport with zlib, compressing writes - and decompresses reads, using the python standard - library zlib module. 
- """ - # Read buffer size for the python fastbinary C extension, - # the TBinaryProtocolAccelerated class. - DEFAULT_BUFFSIZE = 4096 - - def __init__(self, trans, compresslevel=9): - """Create a new TZlibTransport, wrapping C{trans}, another - TTransport derived object. - - @param trans: A thrift transport object, i.e. a TSocket() object. - @type trans: TTransport - @param compresslevel: The zlib compression level, ranging - from 0 (no compression) to 9 (best compression). Default is 9. - @type compresslevel: int - """ - self.__trans = trans - self.compresslevel = compresslevel - self.__rbuf = BufferIO() - self.__wbuf = BufferIO() - self._init_zlib() - self._init_stats() - - def _reinit_buffers(self): - """Internal method to initialize/reset the internal StringIO objects - for read and write buffers. - """ - self.__rbuf = BufferIO() - self.__wbuf = BufferIO() - - def _init_stats(self): - """Internal method to reset the internal statistics counters - for compression ratios and bandwidth savings. - """ - self.bytes_in = 0 - self.bytes_out = 0 - self.bytes_in_comp = 0 - self.bytes_out_comp = 0 - - def _init_zlib(self): - """Internal method for setting up the zlib compression and - decompression objects. - """ - self._zcomp_read = zlib.decompressobj() - self._zcomp_write = zlib.compressobj(self.compresslevel) - - def getCompRatio(self): - """Get the current measured compression ratios (in,out) from - this transport. - - Returns a tuple of: - (inbound_compression_ratio, outbound_compression_ratio) - - The compression ratios are computed as: - compressed / uncompressed - - E.g., data that compresses by 10x will have a ratio of: 0.10 - and data that compresses to half of ts original size will - have a ratio of 0.5 - - None is returned if no bytes have yet been processed in - a particular direction. 
- """ - r_percent, w_percent = (None, None) - if self.bytes_in > 0: - r_percent = self.bytes_in_comp / self.bytes_in - if self.bytes_out > 0: - w_percent = self.bytes_out_comp / self.bytes_out - return (r_percent, w_percent) - - def getCompSavings(self): - """Get the current count of saved bytes due to data - compression. - - Returns a tuple of: - (inbound_saved_bytes, outbound_saved_bytes) - - Note: if compression is actually expanding your - data (only likely with very tiny thrift objects), then - the values returned will be negative. - """ - r_saved = self.bytes_in - self.bytes_in_comp - w_saved = self.bytes_out - self.bytes_out_comp - return (r_saved, w_saved) - - def isOpen(self): - """Return the underlying transport's open status""" - return self.__trans.isOpen() - - def open(self): - """Open the underlying transport""" - self._init_stats() - return self.__trans.open() - - def listen(self): - """Invoke the underlying transport's listen() method""" - self.__trans.listen() - - def accept(self): - """Accept connections on the underlying transport""" - return self.__trans.accept() - - def close(self): - """Close the underlying transport,""" - self._reinit_buffers() - self._init_zlib() - return self.__trans.close() - - def read(self, sz): - """Read up to sz bytes from the decompressed bytes buffer, and - read from the underlying transport if the decompression - buffer is empty. 
- """ - ret = self.__rbuf.read(sz) - if len(ret) > 0: - return ret - # keep reading from transport until something comes back - while True: - if self.readComp(sz): - break - ret = self.__rbuf.read(sz) - return ret - - def readComp(self, sz): - """Read compressed data from the underlying transport, then - decompress it and append it to the internal StringIO read buffer - """ - zbuf = self.__trans.read(sz) - zbuf = self._zcomp_read.unconsumed_tail + zbuf - buf = self._zcomp_read.decompress(zbuf) - self.bytes_in += len(zbuf) - self.bytes_in_comp += len(buf) - old = self.__rbuf.read() - self.__rbuf = BufferIO(old + buf) - if len(old) + len(buf) == 0: - return False - return True - - def write(self, buf): - """Write some bytes, putting them into the internal write - buffer for eventual compression. - """ - self.__wbuf.write(buf) - - def flush(self): - """Flush any queued up data in the write buffer and ensure the - compression buffer is flushed out to the underlying transport - """ - wout = self.__wbuf.getvalue() - if len(wout) > 0: - zbuf = self._zcomp_write.compress(wout) - self.bytes_out += len(wout) - self.bytes_out_comp += len(zbuf) - else: - zbuf = '' - ztail = self._zcomp_write.flush(zlib.Z_SYNC_FLUSH) - self.bytes_out_comp += len(ztail) - if (len(zbuf) + len(ztail)) > 0: - self.__wbuf = BufferIO() - self.__trans.write(zbuf + ztail) - self.__trans.flush() - - @property - def cstringio_buf(self): - """Implement the CReadableTransport interface""" - return self.__rbuf - - def cstringio_refill(self, partialread, reqlen): - """Implement the CReadableTransport interface for refill""" - retstring = partialread - if reqlen < self.DEFAULT_BUFFSIZE: - retstring += self.read(self.DEFAULT_BUFFSIZE) - while len(retstring) < reqlen: - retstring += self.read(reqlen - len(retstring)) - self.__rbuf = BufferIO(retstring) - return self.__rbuf diff --git a/shell/ext-py/thrift-0.16.0/src/transport/__init__.py b/shell/ext-py/thrift-0.16.0/src/transport/__init__.py deleted file mode 
100644 index c9596d9a6..000000000 --- a/shell/ext-py/thrift-0.16.0/src/transport/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - -__all__ = ['TTransport', 'TSocket', 'THttpClient', 'TZlibTransport'] diff --git a/shell/ext-py/thrift-0.16.0/src/transport/sslcompat.py b/shell/ext-py/thrift-0.16.0/src/transport/sslcompat.py deleted file mode 100644 index ab00cb2a8..000000000 --- a/shell/ext-py/thrift-0.16.0/src/transport/sslcompat.py +++ /dev/null @@ -1,100 +0,0 @@ -# -# licensed to the apache software foundation (asf) under one -# or more contributor license agreements. see the notice file -# distributed with this work for additional information -# regarding copyright ownership. the asf licenses this file -# to you under the apache license, version 2.0 (the -# "license"); you may not use this file except in compliance -# with the license. you may obtain a copy of the license at -# -# http://www.apache.org/licenses/license-2.0 -# -# unless required by applicable law or agreed to in writing, -# software distributed under the license is distributed on an -# "as is" basis, without warranties or conditions of any -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. -# - -import logging -import sys - -from thrift.transport.TTransport import TTransportException - -logger = logging.getLogger(__name__) - - -def legacy_validate_callback(cert, hostname): - """legacy method to validate the peer's SSL certificate, and to check - the commonName of the certificate to ensure it matches the hostname we - used to make this connection. Does not support subjectAltName records - in certificates. - - raises TTransportException if the certificate fails validation. - """ - if 'subject' not in cert: - raise TTransportException( - TTransportException.NOT_OPEN, - 'No SSL certificate found from %s' % hostname) - fields = cert['subject'] - for field in fields: - # ensure structure we get back is what we expect - if not isinstance(field, tuple): - continue - cert_pair = field[0] - if len(cert_pair) < 2: - continue - cert_key, cert_value = cert_pair[0:2] - if cert_key != 'commonName': - continue - certhost = cert_value - # this check should be performed by some sort of Access Manager - if certhost == hostname: - # success, cert commonName matches desired hostname - return - else: - raise TTransportException( - TTransportException.UNKNOWN, - 'Hostname we connected to "%s" doesn\'t match certificate ' - 'provided commonName "%s"' % (hostname, certhost)) - raise TTransportException( - TTransportException.UNKNOWN, - 'Could not validate SSL certificate from host "%s". 
Cert=%s' - % (hostname, cert)) - - -def _optional_dependencies(): - try: - import ipaddress # noqa - logger.debug('ipaddress module is available') - ipaddr = True - except ImportError: - logger.warn('ipaddress module is unavailable') - ipaddr = False - - if sys.hexversion < 0x030500F0: - try: - from backports.ssl_match_hostname import match_hostname, __version__ as ver - ver = list(map(int, ver.split('.'))) - logger.debug('backports.ssl_match_hostname module is available') - match = match_hostname - if ver[0] * 10 + ver[1] >= 35: - return ipaddr, match - else: - logger.warn('backports.ssl_match_hostname module is too old') - ipaddr = False - except ImportError: - logger.warn('backports.ssl_match_hostname is unavailable') - ipaddr = False - try: - from ssl import match_hostname - logger.debug('ssl.match_hostname is available') - match = match_hostname - except ImportError: - logger.warn('using legacy validation callback') - match = legacy_validate_callback - return ipaddr, match - - -_match_has_ipaddress, _match_hostname = _optional_dependencies() diff --git a/shell/ext-py/thrift-0.16.0/test/test_socket.py b/shell/ext-py/thrift-0.16.0/test/test_socket.py deleted file mode 100644 index 95124dcbe..000000000 --- a/shell/ext-py/thrift-0.16.0/test/test_socket.py +++ /dev/null @@ -1,57 +0,0 @@ -import errno -import unittest - -from test_sslsocket import ServerAcceptor - -import _import_local_thrift # noqa - -from thrift.transport.TSocket import TServerSocket -from thrift.transport.TSocket import TSocket -from thrift.transport.TTransport import TTransportException - - -class TSocketTest(unittest.TestCase): - def test_isOpen_checks_for_readability(self): - # https://docs.python.org/3/library/socket.html#notes-on-socket-timeouts - # https://docs.python.org/3/library/socket.html#socket.socket.settimeout - timeouts = [ - None, # blocking mode - 0, # non-blocking mode - 1.0, # timeout mode - ] - - for timeout in timeouts: - acc = ServerAcceptor(TServerSocket(port=0)) - 
acc.start() - - sock = TSocket(host="localhost", port=acc.port) - sock.open() - sock.setTimeout(timeout) - - # the socket shows as open immediately after connecting - self.assertTrue(sock.isOpen()) - - # and remains open during usage - sock.write(b"hello") - self.assertTrue(sock.isOpen()) - while True: - try: - sock.read(5) - except TTransportException as exc: - if exc.inner.errno == errno.EAGAIN: - # try again when we're in non-blocking mode - continue - raise - break - self.assertTrue(sock.isOpen()) - - # once the server side closes, it no longer shows open - acc.client.close() # this also blocks until the other thread is done - acc.close() - self.assertFalse(sock.isOpen()) - - sock.close() - - -if __name__ == "__main__": - unittest.main() diff --git a/shell/ext-py/thrift-0.16.0/test/test_sslsocket.py b/shell/ext-py/thrift-0.16.0/test/test_sslsocket.py deleted file mode 100644 index f4c87f195..000000000 --- a/shell/ext-py/thrift-0.16.0/test/test_sslsocket.py +++ /dev/null @@ -1,353 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -import inspect -import logging -import os -import platform -import ssl -import sys -import tempfile -import threading -import unittest -import warnings -from contextlib import contextmanager - -import _import_local_thrift # noqa - -SCRIPT_DIR = os.path.realpath(os.path.dirname(__file__)) -ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname(SCRIPT_DIR))) -SERVER_PEM = os.path.join(ROOT_DIR, 'test', 'keys', 'server.pem') -SERVER_CERT = os.path.join(ROOT_DIR, 'test', 'keys', 'server.crt') -SERVER_KEY = os.path.join(ROOT_DIR, 'test', 'keys', 'server.key') -CLIENT_CERT_NO_IP = os.path.join(ROOT_DIR, 'test', 'keys', 'client.crt') -CLIENT_KEY_NO_IP = os.path.join(ROOT_DIR, 'test', 'keys', 'client.key') -CLIENT_CERT = os.path.join(ROOT_DIR, 'test', 'keys', 'client_v3.crt') -CLIENT_KEY = os.path.join(ROOT_DIR, 'test', 'keys', 'client_v3.key') -CLIENT_CA = os.path.join(ROOT_DIR, 'test', 'keys', 'CA.pem') - -TEST_CIPHERS = 'DES-CBC3-SHA:ECDHE-RSA-AES128-GCM-SHA256' - - -class ServerAcceptor(threading.Thread): - def __init__(self, server, expect_failure=False): - super(ServerAcceptor, self).__init__() - self.daemon = True - self._server = server - self._listening = threading.Event() - self._port = None - self._port_bound = threading.Event() - self._client = None - self._client_accepted = threading.Event() - self._expect_failure = expect_failure - frame = inspect.stack(3)[2] - self.name = frame[3] - del frame - - def run(self): - self._server.listen() - self._listening.set() - - try: - address = self._server.handle.getsockname() - if len(address) > 1: - # AF_INET addresses are 2-tuples (host, port) and AF_INET6 are - # 4-tuples (host, port, ...), but in each case port is in the second slot. 
- self._port = address[1] - finally: - self._port_bound.set() - - try: - self._client = self._server.accept() - if self._client: - self._client.read(5) # hello - self._client.write(b"there") - except Exception: - logging.exception('error on server side (%s):' % self.name) - if not self._expect_failure: - raise - finally: - self._client_accepted.set() - - def await_listening(self): - self._listening.wait() - - @property - def port(self): - self._port_bound.wait() - return self._port - - @property - def client(self): - self._client_accepted.wait() - return self._client - - def close(self): - if self._client: - self._client.close() - self._server.close() - - -# Python 2.6 compat -class AssertRaises(object): - def __init__(self, expected): - self._expected = expected - - def __enter__(self): - pass - - def __exit__(self, exc_type, exc_value, traceback): - if not exc_type or not issubclass(exc_type, self._expected): - raise Exception('fail') - return True - - -class TSSLSocketTest(unittest.TestCase): - def _server_socket(self, **kwargs): - return TSSLServerSocket(port=0, **kwargs) - - @contextmanager - def _connectable_client(self, server, expect_failure=False, path=None, **client_kwargs): - acc = ServerAcceptor(server, expect_failure) - try: - acc.start() - acc.await_listening() - - host, port = ('localhost', acc.port) if path is None else (None, None) - client = TSSLSocket(host, port, unix_socket=path, **client_kwargs) - yield acc, client - finally: - acc.close() - - def _assert_connection_failure(self, server, path=None, **client_args): - logging.disable(logging.CRITICAL) - try: - with self._connectable_client(server, True, path=path, **client_args) as (acc, client): - # We need to wait for a connection failure, but not too long. 
20ms is a tunable - # compromise between test speed and stability - client.setTimeout(20) - with self._assert_raises(TTransportException): - client.open() - client.write(b"hello") - client.read(5) # b"there" - finally: - logging.disable(logging.NOTSET) - - def _assert_raises(self, exc): - if sys.hexversion >= 0x020700F0: - return self.assertRaises(exc) - else: - return AssertRaises(exc) - - def _assert_connection_success(self, server, path=None, **client_args): - with self._connectable_client(server, path=path, **client_args) as (acc, client): - try: - client.open() - client.write(b"hello") - self.assertEqual(client.read(5), b"there") - self.assertTrue(acc.client is not None) - finally: - client.close() - - # deprecated feature - def test_deprecation(self): - with warnings.catch_warnings(record=True) as w: - warnings.filterwarnings('always', category=DeprecationWarning, module=self.__module__) - TSSLSocket('localhost', 0, validate=True, ca_certs=SERVER_CERT) - self.assertEqual(len(w), 1) - - with warnings.catch_warnings(record=True) as w: - warnings.filterwarnings('always', category=DeprecationWarning, module=self.__module__) - # Deprecated signature - # def __init__(self, host='localhost', port=9090, validate=True, ca_certs=None, keyfile=None, certfile=None, unix_socket=None, ciphers=None): - TSSLSocket('localhost', 0, True, SERVER_CERT, CLIENT_KEY, CLIENT_CERT, None, TEST_CIPHERS) - self.assertEqual(len(w), 7) - - with warnings.catch_warnings(record=True) as w: - warnings.filterwarnings('always', category=DeprecationWarning, module=self.__module__) - # Deprecated signature - # def __init__(self, host=None, port=9090, certfile='cert.pem', unix_socket=None, ciphers=None): - TSSLServerSocket(None, 0, SERVER_PEM, None, TEST_CIPHERS) - self.assertEqual(len(w), 3) - - # deprecated feature - def test_set_cert_reqs_by_validate(self): - with warnings.catch_warnings(record=True) as w: - warnings.filterwarnings('always', category=DeprecationWarning, module=self.__module__) 
- c1 = TSSLSocket('localhost', 0, validate=True, ca_certs=SERVER_CERT) - self.assertEqual(c1.cert_reqs, ssl.CERT_REQUIRED) - - c1 = TSSLSocket('localhost', 0, validate=False) - self.assertEqual(c1.cert_reqs, ssl.CERT_NONE) - - self.assertEqual(len(w), 2) - - # deprecated feature - def test_set_validate_by_cert_reqs(self): - with warnings.catch_warnings(record=True) as w: - warnings.filterwarnings('always', category=DeprecationWarning, module=self.__module__) - c1 = TSSLSocket('localhost', 0, cert_reqs=ssl.CERT_NONE) - self.assertFalse(c1.validate) - - c2 = TSSLSocket('localhost', 0, cert_reqs=ssl.CERT_REQUIRED, ca_certs=SERVER_CERT) - self.assertTrue(c2.validate) - - c3 = TSSLSocket('localhost', 0, cert_reqs=ssl.CERT_OPTIONAL, ca_certs=SERVER_CERT) - self.assertTrue(c3.validate) - - self.assertEqual(len(w), 3) - - def test_unix_domain_socket(self): - if platform.system() == 'Windows': - print('skipping test_unix_domain_socket') - return - fd, path = tempfile.mkstemp() - os.close(fd) - os.unlink(path) - try: - server = self._server_socket(unix_socket=path, keyfile=SERVER_KEY, certfile=SERVER_CERT) - self._assert_connection_success(server, path=path, cert_reqs=ssl.CERT_NONE) - finally: - os.unlink(path) - - def test_server_cert(self): - server = self._server_socket(keyfile=SERVER_KEY, certfile=SERVER_CERT) - self._assert_connection_success(server, cert_reqs=ssl.CERT_REQUIRED, ca_certs=SERVER_CERT) - - server = self._server_socket(keyfile=SERVER_KEY, certfile=SERVER_CERT) - # server cert not in ca_certs - self._assert_connection_failure(server, cert_reqs=ssl.CERT_REQUIRED, ca_certs=CLIENT_CERT) - - server = self._server_socket(keyfile=SERVER_KEY, certfile=SERVER_CERT) - self._assert_connection_success(server, cert_reqs=ssl.CERT_NONE) - - def test_set_server_cert(self): - server = self._server_socket(keyfile=SERVER_KEY, certfile=CLIENT_CERT) - with self._assert_raises(Exception): - server.certfile = 'foo' - with self._assert_raises(Exception): - server.certfile = None 
- server.certfile = SERVER_CERT - self._assert_connection_success(server, cert_reqs=ssl.CERT_REQUIRED, ca_certs=SERVER_CERT) - - def test_client_cert(self): - if not _match_has_ipaddress: - print('skipping test_client_cert') - return - server = self._server_socket( - cert_reqs=ssl.CERT_REQUIRED, keyfile=SERVER_KEY, - certfile=SERVER_CERT, ca_certs=CLIENT_CERT) - self._assert_connection_failure(server, cert_reqs=ssl.CERT_NONE, certfile=SERVER_CERT, keyfile=SERVER_KEY) - - server = self._server_socket( - cert_reqs=ssl.CERT_REQUIRED, keyfile=SERVER_KEY, - certfile=SERVER_CERT, ca_certs=CLIENT_CA) - self._assert_connection_failure(server, cert_reqs=ssl.CERT_NONE, certfile=CLIENT_CERT_NO_IP, keyfile=CLIENT_KEY_NO_IP) - - server = self._server_socket( - cert_reqs=ssl.CERT_REQUIRED, keyfile=SERVER_KEY, - certfile=SERVER_CERT, ca_certs=CLIENT_CA) - self._assert_connection_success(server, cert_reqs=ssl.CERT_NONE, certfile=CLIENT_CERT, keyfile=CLIENT_KEY) - - server = self._server_socket( - cert_reqs=ssl.CERT_OPTIONAL, keyfile=SERVER_KEY, - certfile=SERVER_CERT, ca_certs=CLIENT_CA) - self._assert_connection_success(server, cert_reqs=ssl.CERT_NONE, certfile=CLIENT_CERT, keyfile=CLIENT_KEY) - - def test_ciphers(self): - server = self._server_socket(keyfile=SERVER_KEY, certfile=SERVER_CERT, ciphers=TEST_CIPHERS) - self._assert_connection_success(server, ca_certs=SERVER_CERT, ciphers=TEST_CIPHERS) - - if not TSSLSocket._has_ciphers: - # unittest.skip is not available for Python 2.6 - print('skipping test_ciphers') - return - server = self._server_socket(keyfile=SERVER_KEY, certfile=SERVER_CERT) - self._assert_connection_failure(server, ca_certs=SERVER_CERT, ciphers='NULL') - - server = self._server_socket(keyfile=SERVER_KEY, certfile=SERVER_CERT, ciphers=TEST_CIPHERS) - self._assert_connection_failure(server, ca_certs=SERVER_CERT, ciphers='NULL') - - def test_ssl2_and_ssl3_disabled(self): - if not hasattr(ssl, 'PROTOCOL_SSLv3'): - print('PROTOCOL_SSLv3 is not available') - else: 
- server = self._server_socket(keyfile=SERVER_KEY, certfile=SERVER_CERT) - self._assert_connection_failure(server, ca_certs=SERVER_CERT, ssl_version=ssl.PROTOCOL_SSLv3) - - server = self._server_socket(keyfile=SERVER_KEY, certfile=SERVER_CERT, ssl_version=ssl.PROTOCOL_SSLv3) - self._assert_connection_failure(server, ca_certs=SERVER_CERT) - - if not hasattr(ssl, 'PROTOCOL_SSLv2'): - print('PROTOCOL_SSLv2 is not available') - else: - server = self._server_socket(keyfile=SERVER_KEY, certfile=SERVER_CERT) - self._assert_connection_failure(server, ca_certs=SERVER_CERT, ssl_version=ssl.PROTOCOL_SSLv2) - - server = self._server_socket(keyfile=SERVER_KEY, certfile=SERVER_CERT, ssl_version=ssl.PROTOCOL_SSLv2) - self._assert_connection_failure(server, ca_certs=SERVER_CERT) - - def test_newer_tls(self): - if not TSSLSocket._has_ssl_context: - # unittest.skip is not available for Python 2.6 - print('skipping test_newer_tls') - return - if not hasattr(ssl, 'PROTOCOL_TLSv1_2'): - print('PROTOCOL_TLSv1_2 is not available') - else: - server = self._server_socket(keyfile=SERVER_KEY, certfile=SERVER_CERT, ssl_version=ssl.PROTOCOL_TLSv1_2) - self._assert_connection_success(server, ca_certs=SERVER_CERT, ssl_version=ssl.PROTOCOL_TLSv1_2) - - if not hasattr(ssl, 'PROTOCOL_TLSv1_1'): - print('PROTOCOL_TLSv1_1 is not available') - else: - server = self._server_socket(keyfile=SERVER_KEY, certfile=SERVER_CERT, ssl_version=ssl.PROTOCOL_TLSv1_1) - self._assert_connection_success(server, ca_certs=SERVER_CERT, ssl_version=ssl.PROTOCOL_TLSv1_1) - - if not hasattr(ssl, 'PROTOCOL_TLSv1_1') or not hasattr(ssl, 'PROTOCOL_TLSv1_2'): - print('PROTOCOL_TLSv1_1 and/or PROTOCOL_TLSv1_2 is not available') - else: - server = self._server_socket(keyfile=SERVER_KEY, certfile=SERVER_CERT, ssl_version=ssl.PROTOCOL_TLSv1_2) - self._assert_connection_failure(server, ca_certs=SERVER_CERT, ssl_version=ssl.PROTOCOL_TLSv1_1) - - def test_ssl_context(self): - if not TSSLSocket._has_ssl_context: - # unittest.skip is 
not available for Python 2.6 - print('skipping test_ssl_context') - return - server_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) - server_context.load_cert_chain(SERVER_CERT, SERVER_KEY) - server_context.load_verify_locations(CLIENT_CA) - server_context.verify_mode = ssl.CERT_REQUIRED - server = self._server_socket(ssl_context=server_context) - - client_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) - client_context.load_cert_chain(CLIENT_CERT, CLIENT_KEY) - client_context.load_verify_locations(SERVER_CERT) - client_context.verify_mode = ssl.CERT_REQUIRED - - self._assert_connection_success(server, ssl_context=client_context) - - -if __name__ == '__main__': - logging.basicConfig(level=logging.WARN) - from thrift.transport.TSSLSocket import TSSLSocket, TSSLServerSocket, _match_has_ipaddress - from thrift.transport.TTransport import TTransportException - - unittest.main() diff --git a/shell/ext-py/thrift_sasl-0.4.3/CHANGELOG.md b/shell/ext-py/thrift_sasl-0.4.3/CHANGELOG.md deleted file mode 100644 index fbed8c5ab..000000000 --- a/shell/ext-py/thrift_sasl-0.4.3/CHANGELOG.md +++ /dev/null @@ -1,34 +0,0 @@ -Changelog -========= - -0.4.3 ------- - - Identical to 0.4.3a2 - -0.4.3a2 ------- -* **Improvements** - - Use pure-sasl instead of sasl to avoid dependency on g++ - -0.4.3a1 ------- -* **Bug Fixes** - - Replaced environment conditionals with environment markers in `setup.py` (#29) - - Make sure frames are fully buffered when used together with thriftpy / thriftpy2 (#31) - -* **Improvements** - - Added build script to build sdist archive and universal wheel - - Unpin thrift version with Python 2 - -0.4.2 ------- -* **Bug Fixes** - - Fixes a bug where Thrift transport was not reading all data (#22) - -0.4.1 ------- -* **Bug Fixes** - - Fix compatibility with Python 2 and 3 (#19) - -* **Improvements** - - Add CHANGELOG.md and remove old contacts from setup.py (#20) diff --git a/shell/ext-py/thrift_sasl-0.4.3/LICENSE 
b/shell/ext-py/thrift_sasl-0.4.3/LICENSE deleted file mode 100644 index 8f71f43fe..000000000 --- a/shell/ext-py/thrift_sasl-0.4.3/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. 
Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative 
Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - diff --git a/shell/ext-py/thrift_sasl-0.4.3/README.md b/shell/ext-py/thrift_sasl-0.4.3/README.md deleted file mode 100644 index 8110069df..000000000 --- a/shell/ext-py/thrift_sasl-0.4.3/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# thrift_sasl.py - -Thrift SASL Python module that implements SASL transports for Thrift -(`TSaslClientTransport`). diff --git a/shell/ext-py/thrift_sasl-0.4.3/setup.py b/shell/ext-py/thrift_sasl-0.4.3/setup.py deleted file mode 100644 index a55add023..000000000 --- a/shell/ext-py/thrift_sasl-0.4.3/setup.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2015 Cloudera Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -import sys -from setuptools import setup - -PY3 = sys.version_info[0] == 3 - -description = ("Thrift SASL Python module that implements SASL transports for " - "Thrift (`TSaslClientTransport`).") - -setup( - name='thrift_sasl', - version='0.4.3', - description=description, - long_description=description, - url='https://github.com/cloudera/thrift_sasl', - install_requires=[ - # Python 3 support was added to thrift in version 0.10.0. - "thrift>=0.10.0" if PY3 else "thrift>=0.9.3", - "pure-sasl>=0.6.2", - "six>=1.13.0" - ], - packages=['thrift_sasl'], - keywords='thrift sasl transport', - license='Apache License, Version 2.0', - classifiers=[ - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.6', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9'] -) diff --git a/shell/ext-py/thrift_sasl-0.4.3/thrift_sasl/__init__.py b/shell/ext-py/thrift_sasl-0.4.3/thrift_sasl/__init__.py deleted file mode 100644 index 1032b16e8..000000000 --- a/shell/ext-py/thrift_sasl-0.4.3/thrift_sasl/__init__.py +++ /dev/null @@ -1,230 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. 
See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# -""" SASL transports for Thrift. """ - -# Initially copied from the Impala repo - -from __future__ import absolute_import - -import six -import sys -import struct - -from thrift.transport.TTransport import (TTransportException, TTransportBase, CReadableTransport) - -# TODO: Check whether the following distinction is necessary. Does not appear to -# break anything when `io.BytesIO` is used everywhere, but there may be some edge -# cases where things break down. -if sys.version_info[0] == 3: - from io import BytesIO as BufferIO -else: - from cStringIO import StringIO as BufferIO - - -class TSaslClientTransport(TTransportBase, CReadableTransport): - START = 1 - OK = 2 - BAD = 3 - ERROR = 4 - COMPLETE = 5 - - def __init__(self, sasl_client_factory, mechanism, trans): - """ - @param sasl_client_factory: a callable that returns a new sasl.Client object - @param mechanism: the SASL mechanism (e.g. "GSSAPI") - @param trans: the underlying transport over which to communicate. 
- """ - self._trans = trans - self.sasl_client_factory = sasl_client_factory - self.sasl = None - self.mechanism = mechanism - self.__wbuf = BufferIO() - self.__rbuf = BufferIO() - self.opened = False - self.encode = None - - def isOpen(self): - try: - is_open = self._trans.isOpen # Thrift - except AttributeError: - is_open = self._trans.is_open # thriftpy - - return is_open() - - def is_open(self): - return self.isOpen() - - def open(self): - if not self.isOpen(): - self._trans.open() - - if self.sasl is not None: - raise TTransportException( - type=TTransportException.NOT_OPEN, - message="Already open!") - self.sasl = self.sasl_client_factory() - - ret, chosen_mech, initial_response = self.sasl.start(self.mechanism) - if not ret: - raise TTransportException(type=TTransportException.NOT_OPEN, - message=("Could not start SASL: %s" % self.sasl.getError())) - - # Send initial response - self._send_message(self.START, chosen_mech) - self._send_message(self.OK, initial_response) - - # SASL negotiation loop - while True: - status, payload = self._recv_sasl_message() - if status not in (self.OK, self.COMPLETE): - raise TTransportException(type=TTransportException.NOT_OPEN, - message=("Bad status: %d (%s)" % (status, payload))) - if status == self.COMPLETE: - break - ret, response = self.sasl.step(payload) - if not ret: - raise TTransportException(type=TTransportException.NOT_OPEN, - message=("Bad SASL result: %s" % (self.sasl.getError()))) - self._send_message(self.OK, response) - - def _send_message(self, status, body): - header = struct.pack(">BI", status, len(body)) - body = six.ensure_binary(body) - self._trans.write(header + body) - self._trans.flush() - - def _recv_sasl_message(self): - header = self._trans_read_all(5) - status, length = struct.unpack(">BI", header) - if length > 0: - payload = self._trans_read_all(length) - else: - payload = "" - return status, payload - - def write(self, data): - self.__wbuf.write(data) - - def flush(self): - buffer = 
self.__wbuf.getvalue() - # The first time we flush data, we send it to sasl.encode() - # If the length doesn't change, then we must be using a QOP - # of auth and we should no longer call sasl.encode(), otherwise - # we encode every time. - if self.encode == None: - success, encoded = self.sasl.encode(buffer) - if not success: - raise TTransportException(type=TTransportException.UNKNOWN, - message=self.sasl.getError()) - if (len(encoded)==len(buffer)): - self.encode = False - self._flushPlain(buffer) - else: - self.encode = True - self._trans.write(encoded) - elif self.encode: - self._flushEncoded(buffer) - else: - self._flushPlain(buffer) - - self._trans.flush() - self.__wbuf = BufferIO() - - def _flushEncoded(self, buffer): - # sasl.ecnode() does the encoding and adds the length header, so nothing - # to do but call it and write the result. - success, encoded = self.sasl.encode(buffer) - if not success: - raise TTransportException(type=TTransportException.UNKNOWN, - message=self.sasl.getError()) - self._trans.write(encoded) - - def _flushPlain(self, buffer): - # When we have QOP of auth, sasl.encode() will pass the input to the output - # but won't put a length header, so we have to do that. - - # Note stolen from TFramedTransport: - # N.B.: Doing this string concatenation is WAY cheaper than making - # two separate calls to the underlying socket object. Socket writes in - # Python turn out to be REALLY expensive, but it seems to do a pretty - # good job of managing string buffer operations without excessive copies - self._trans.write(struct.pack(">I", len(buffer)) + buffer) - - def read(self, sz): - ret = self.__rbuf.read(sz) - if len(ret) == sz: - return ret - - self._read_frame() - return ret + self.__rbuf.read(sz - len(ret)) - - def _read_frame(self): - header = self._trans_read_all(4) - (length,) = struct.unpack(">I", header) - if self.encode: - # If the frames are encoded (i.e. 
you're using a QOP of auth-int or - # auth-conf), then make sure to include the header in the bytes you send to - # sasl.decode() - encoded = header + self._trans_read_all(length) - success, decoded = self.sasl.decode(encoded) - if not success: - raise TTransportException(type=TTransportException.UNKNOWN, - message=self.sasl.getError()) - else: - # If the frames are not encoded, just pass it through - decoded = self._trans_read_all(length) - self.__rbuf = BufferIO(decoded) - - def _trans_read_all(self, sz): - try: - read_all = self._trans.readAll # Thrift - except AttributeError: - def read_all(sz): # thriftpy - buff = b'' - have = 0 - while have < sz: - chunk = self._trans.read(sz - have) - have += len(chunk) - buff += chunk - - if len(chunk) == 0: - raise TTransportException(type=TTransportException.END_OF_FILE, - message="End of file reading from transport") - - return buff - return read_all(sz) - - def close(self): - self._trans.close() - self.sasl = None - - # Implement the CReadableTransport interface. - # Stolen shamelessly from TFramedTransport - @property - def cstringio_buf(self): - return self.__rbuf - - def cstringio_refill(self, prefix, reqlen): - # self.__rbuf will already be empty here because fastbinary doesn't - # ask for a refill until the previous buffer is empty. Therefore, - # we can start reading new frames immediately. - while len(prefix) < reqlen: - self._read_frame() - prefix += self.__rbuf.getvalue() - self.__rbuf = BufferIO(prefix) - return self.__rbuf diff --git a/shell/gen_impala_build_version.sh b/shell/gen_impala_build_version.sh new file mode 100755 index 000000000..f7ec3deea --- /dev/null +++ b/shell/gen_impala_build_version.sh @@ -0,0 +1,75 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +set -euo pipefail +. $IMPALA_HOME/bin/report_build_error.sh +setup_report_build_error + +if [ "x${IMPALA_HOME}" == "x" ]; then + echo "\$IMPALA_HOME must be set" + exit 1 +fi + +IMPALA_VERSION_INFO_FILE=${IMPALA_HOME}/bin/version.info + +if [ ! -f ${IMPALA_VERSION_INFO_FILE} ]; then + echo "No version.info file found. Generating new version info" + ${IMPALA_HOME}/bin/save-version.sh +else + echo "Using existing version.info file." +fi + +VERSION=$(grep "VERSION: " ${IMPALA_VERSION_INFO_FILE} | awk '{print $2}') +GIT_HASH=$(grep "GIT_HASH: " ${IMPALA_VERSION_INFO_FILE} | awk '{print $2}') +BUILD_DATE=$(grep "BUILD_TIME: " ${IMPALA_VERSION_INFO_FILE} | cut -f 2- -d ' ') +cat ${IMPALA_VERSION_INFO_FILE} + +SHELL_HOME=${IMPALA_HOME}/shell +THRIFT_GEN_PY_DIR="${SHELL_HOME}/gen-py" + +rm -f ${THRIFT_GEN_PY_DIR}/impala_build_version.py +cat > ${THRIFT_GEN_PY_DIR}/impala_build_version.py < /dev/null; then PYTHON_EXE=python3 fi -PYTHONPATH="${SHELL_HOME}/gen-py:${SHELL_HOME}/lib:${PYTHONPATH}" - -# External dependencies are installed in /ext-py${PYTHON_VERSION} +# impala-shell is installed in /install_py${PYTHON_VERSION} PYTHON_VERSION=$("${PYTHON_EXE}" -c 'import sys; \ print("{}.{}".format(sys.version_info.major, sys.version_info.minor))') -if [ ! -d "${SHELL_HOME}/ext-py${PYTHON_VERSION}" ]; then - # List all ext-py* dirs, remove ext-py prefix, and join into a comma-separated string. 
- dirs=( $(cd ${SHELL_HOME} && echo ext-py*) ) - vers="${dirs[@]#ext-py}" +if [ ! -d "${SHELL_HOME}/install_py${PYTHON_VERSION}" ]; then + # List all install_py* dirs, remove install_py prefix, and join into a comma-separated + # string. + dirs=( $(cd ${SHELL_HOME} && echo install_py*) ) + vers="${dirs[@]#install_py}" pretty="$(printf "%s, " ${vers[@]})" echo "This impala-shell package was not built to support Python ${PYTHON_VERSION}." \ "Supported Python versions are: ${pretty%, }." exit 1 fi -PYTHONPATH="${SHELL_HOME}/ext-py${PYTHON_VERSION}:${PYTHONPATH}" - -if [ "${PYTHON_VERSION}" = "2.7" ]; then - # Python 2 requires the legacy pkg_resources.py code - PYTHONPATH="${PYTHONPATH}:${SHELL_HOME}/legacy" -fi -PYTHONPATH="${PYTHONPATH}" PYTHONIOENCODING='utf-8' exec ${PYTHON_EXE} \ - ${SHELL_HOME}/impala_shell.py "$@" +PYTHONIOENCODING='utf-8' PYTHONPATH="${SHELL_HOME}/install_py${PYTHON_VERSION}" \ + exec ${PYTHON_EXE} -m "impala_shell.impala_shell" "$@" diff --git a/shell/legacy/pkg_resources.py b/shell/legacy/pkg_resources.py deleted file mode 100644 index 977de194b..000000000 --- a/shell/legacy/pkg_resources.py +++ /dev/null @@ -1,2698 +0,0 @@ -""" - This file is redistributed under the Python Software Foundation License: - http://docs.python.org/2/license.html -""" - -"""Package resource API --------------------- - -A resource is a logical file contained within a package, or a logical -subdirectory thereof. The package resource API expects resource names -to have their path parts separated with ``/``, *not* whatever the local -path separator is. Do not use os.path operations to manipulate resource -names being passed into the API. - -The package resource API is designed to work with normal filesystem packages, -.egg files, and unpacked .egg files. It can also work in a limited way with -.zip files and with custom PEP 302 loaders that support the ``get_data()`` -method. 
-""" - -import sys, os, zipimport, time, re, imp, types -from urlparse import urlparse, urlunparse - -try: - frozenset -except NameError: - from sets import ImmutableSet as frozenset - -# capture these to bypass sandboxing -from os import utime -try: - from os import mkdir, rename, unlink - WRITE_SUPPORT = True -except ImportError: - # no write support, probably under GAE - WRITE_SUPPORT = False - -from os import open as os_open -from os.path import isdir, split - -# This marker is used to simplify the process that checks is the -# setuptools package was installed by the Setuptools project -# or by the Distribute project, in case Setuptools creates -# a distribution with the same version. -# -# The bootstrapping script for instance, will check if this -# attribute is present to decide wether to reinstall the package -_distribute = True - -def _bypass_ensure_directory(name, mode=0777): - # Sandbox-bypassing version of ensure_directory() - if not WRITE_SUPPORT: - raise IOError('"os.mkdir" not supported on this platform.') - dirname, filename = split(name) - if dirname and filename and not isdir(dirname): - _bypass_ensure_directory(dirname) - mkdir(dirname, mode) - - - - - - - - -def get_supported_platform(): - """Return this platform's maximum compatible version. - - distutils.util.get_platform() normally reports the minimum version - of Mac OS X that would be required to *use* extensions produced by - distutils. But what we want when checking compatibility is to know the - version of Mac OS X that we are *running*. To allow usage of packages that - explicitly require a newer version of Mac OS X, we must also know the - current version of the OS. - - If this condition occurs for any other platform with a version in its - platform strings, this function should be extended accordingly. 
- """ - plat = get_build_platform(); m = macosVersionString.match(plat) - if m is not None and sys.platform == "darwin": - try: - plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) - except ValueError: - pass # not Mac OS X - return plat - - - - - - - - - - - - - - - - - - - - - -__all__ = [ - # Basic resource access and distribution/entry point discovery - 'require', 'run_script', 'get_provider', 'get_distribution', - 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', - 'resource_string', 'resource_stream', 'resource_filename', - 'resource_listdir', 'resource_exists', 'resource_isdir', - - # Environmental control - 'declare_namespace', 'working_set', 'add_activation_listener', - 'find_distributions', 'set_extraction_path', 'cleanup_resources', - 'get_default_cache', - - # Primary implementation classes - 'Environment', 'WorkingSet', 'ResourceManager', - 'Distribution', 'Requirement', 'EntryPoint', - - # Exceptions - 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra', - 'ExtractionError', - - # Parsing functions and string utilities - 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', - 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', - 'safe_extra', 'to_filename', - - # filesystem utilities - 'ensure_directory', 'normalize_path', - - # Distribution "precedence" constants - 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', - - # "Provider" interfaces, implementations, and registration/lookup APIs - 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', - 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', - 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', - 'register_finder', 'register_namespace_handler', 'register_loader_type', - 'fixup_namespace_packages', 'get_importer', - - # Deprecated/backward compatibility only - 'run_main', 'AvailableDistributions', -] -class ResolutionError(Exception): - 
"""Abstract base for dependency resolution errors""" - def __repr__(self): - return self.__class__.__name__+repr(self.args) - -class VersionConflict(ResolutionError): - """An already-installed version conflicts with the requested version""" - -class DistributionNotFound(ResolutionError): - """A requested distribution was not found""" - -class UnknownExtra(ResolutionError): - """Distribution doesn't have an "extra feature" of the given name""" -_provider_factories = {} - -PY_MAJOR = sys.version[:3] -EGG_DIST = 3 -BINARY_DIST = 2 -SOURCE_DIST = 1 -CHECKOUT_DIST = 0 -DEVELOP_DIST = -1 - -def register_loader_type(loader_type, provider_factory): - """Register `provider_factory` to make providers for `loader_type` - - `loader_type` is the type or class of a PEP 302 ``module.__loader__``, - and `provider_factory` is a function that, passed a *module* object, - returns an ``IResourceProvider`` for that module. - """ - _provider_factories[loader_type] = provider_factory - -def get_provider(moduleOrReq): - """Return an IResourceProvider for the named module or requirement""" - if isinstance(moduleOrReq,Requirement): - return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] - try: - module = sys.modules[moduleOrReq] - except KeyError: - __import__(moduleOrReq) - module = sys.modules[moduleOrReq] - loader = getattr(module, '__loader__', None) - return _find_adapter(_provider_factories, loader)(module) - -def _macosx_vers(_cache=[]): - if not _cache: - import platform - version = platform.mac_ver()[0] - # fallback for MacPorts - if version == '': - import plistlib - plist = '/System/Library/CoreServices/SystemVersion.plist' - if os.path.exists(plist): - if hasattr(plistlib, 'readPlist'): - plist_content = plistlib.readPlist(plist) - if 'ProductVersion' in plist_content: - version = plist_content['ProductVersion'] - - _cache.append(version.split('.')) - return _cache[0] - -def _macosx_arch(machine): - return {'PowerPC':'ppc', 
'Power_Macintosh':'ppc'}.get(machine,machine) - -def get_build_platform(): - """Return this platform's string for platform-specific distributions - - XXX Currently this is the same as ``distutils.util.get_platform()``, but it - needs some hacks for Linux and Mac OS X. - """ - try: - from distutils.util import get_platform - except ImportError: - from sysconfig import get_platform - - plat = get_platform() - if sys.platform == "darwin" and not plat.startswith('macosx-'): - try: - version = _macosx_vers() - machine = os.uname()[4].replace(" ", "_") - return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), - _macosx_arch(machine)) - except ValueError: - # if someone is running a non-Mac darwin system, this will fall - # through to the default implementation - pass - return plat - -macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") -darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") -get_platform = get_build_platform # XXX backward compat - -def compatible_platforms(provided,required): - """Can code for the `provided` platform run on the `required` platform? - - Returns true if either platform is ``None``, or the platforms are equal. - - XXX Needs compatibility checks for Linux and other unixy OSes. - """ - if provided is None or required is None or provided==required: - return True # easy case - - # Mac OS X special cases - reqMac = macosVersionString.match(required) - if reqMac: - provMac = macosVersionString.match(provided) - - # is this a Mac package? - if not provMac: - # this is backwards compatibility for packages built before - # setuptools 0.6. All packages built after this point will - # use the new macosx designation. 
- provDarwin = darwinVersionString.match(provided) - if provDarwin: - dversion = int(provDarwin.group(1)) - macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) - if dversion == 7 and macosversion >= "10.3" or \ - dversion == 8 and macosversion >= "10.4": - - #import warnings - #warnings.warn("Mac eggs should be rebuilt to " - # "use the macosx designation instead of darwin.", - # category=DeprecationWarning) - return True - return False # egg isn't macosx or legacy darwin - - # are they the same major version and machine type? - if provMac.group(1) != reqMac.group(1) or \ - provMac.group(3) != reqMac.group(3): - return False - - - - # is the required OS major update >= the provided one? - if int(provMac.group(2)) > int(reqMac.group(2)): - return False - - return True - - # XXX Linux and other platforms' special cases should go here - return False - - -def run_script(dist_spec, script_name): - """Locate distribution `dist_spec` and run its `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - require(dist_spec)[0].run_script(script_name, ns) - -run_main = run_script # backward compatibility - -def get_distribution(dist): - """Return a current distribution object for a Requirement or string""" - if isinstance(dist,basestring): dist = Requirement.parse(dist) - if isinstance(dist,Requirement): dist = get_provider(dist) - if not isinstance(dist,Distribution): - raise TypeError("Expected string, Requirement, or Distribution", dist) - return dist - -def load_entry_point(dist, group, name): - """Return `name` entry point of `group` for `dist` or raise ImportError""" - return get_distribution(dist).load_entry_point(group, name) - -def get_entry_map(dist, group=None): - """Return the entry point map for `group`, or the full entry map""" - return get_distribution(dist).get_entry_map(group) - -def get_entry_info(dist, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - 
return get_distribution(dist).get_entry_info(group, name) - - -class IMetadataProvider: - - def has_metadata(name): - """Does the package's distribution contain the named metadata?""" - - def get_metadata(name): - """The named metadata resource as a string""" - - def get_metadata_lines(name): - """Yield named metadata resource as list of non-blank non-comment lines - - Leading and trailing whitespace is stripped from each line, and lines - with ``#`` as the first non-blank character are omitted.""" - - def metadata_isdir(name): - """Is the named metadata a directory? (like ``os.path.isdir()``)""" - - def metadata_listdir(name): - """List of metadata names in the directory (like ``os.listdir()``)""" - - def run_script(script_name, namespace): - """Execute the named script in the supplied namespace dictionary""" - - - - - - - - - - -class IResourceProvider(IMetadataProvider): - """An object that provides access to package resources""" - - def get_resource_filename(manager, resource_name): - """Return a true filesystem path for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_stream(manager, resource_name): - """Return a readable file-like object for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_string(manager, resource_name): - """Return a string containing the contents of `resource_name` - - `manager` must be an ``IResourceManager``""" - - def has_resource(resource_name): - """Does the package contain the named resource?""" - - def resource_isdir(resource_name): - """Is the named resource a directory? 
(like ``os.path.isdir()``)""" - - def resource_listdir(resource_name): - """List of resource names in the directory (like ``os.listdir()``)""" - - - - - - - - - - - - - - - -class WorkingSet(object): - """A collection of active distributions on sys.path (or a similar list)""" - - def __init__(self, entries=None): - """Create working set from list of path entries (default=sys.path)""" - self.entries = [] - self.entry_keys = {} - self.by_key = {} - self.callbacks = [] - - if entries is None: - entries = sys.path - - for entry in entries: - self.add_entry(entry) - - - def add_entry(self, entry): - """Add a path item to ``.entries``, finding any distributions on it - - ``find_distributions(entry,True)`` is used to find distributions - corresponding to the path entry, and they are added. `entry` is - always appended to ``.entries``, even if it is already present. - (This is because ``sys.path`` can contain the same value more than - once, and the ``.entries`` of the ``sys.path`` WorkingSet should always - equal ``sys.path``.) - """ - self.entry_keys.setdefault(entry, []) - self.entries.append(entry) - for dist in find_distributions(entry, True): - self.add(dist, entry, False) - - - def __contains__(self,dist): - """True if `dist` is the active distribution for its project""" - return self.by_key.get(dist.key) == dist - - - - - - def find(self, req): - """Find a distribution matching requirement `req` - - If there is an active distribution for the requested project, this - returns it as long as it meets the version requirement specified by - `req`. But, if there is an active distribution for the project and it - does *not* meet the `req` requirement, ``VersionConflict`` is raised. - If there is no active distribution for the requested project, ``None`` - is returned. 
- """ - dist = self.by_key.get(req.key) - if dist is not None and dist not in req: - raise VersionConflict(dist,req) # XXX add more info - else: - return dist - - def iter_entry_points(self, group, name=None): - """Yield entry point objects from `group` matching `name` - - If `name` is None, yields all entry points in `group` from all - distributions in the working set, otherwise only ones matching - both `group` and `name` are yielded (in distribution order). - """ - for dist in self: - entries = dist.get_entry_map(group) - if name is None: - for ep in entries.values(): - yield ep - elif name in entries: - yield entries[name] - - def run_script(self, requires, script_name): - """Locate distribution for `requires` and run `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - self.require(requires)[0].run_script(script_name, ns) - - - - def __iter__(self): - """Yield distributions for non-duplicate projects in the working set - - The yield order is the order in which the items' path entries were - added to the working set. - """ - seen = {} - for item in self.entries: - for key in self.entry_keys[item]: - if key not in seen: - seen[key]=1 - yield self.by_key[key] - - def add(self, dist, entry=None, insert=True): - """Add `dist` to working set, associated with `entry` - - If `entry` is unspecified, it defaults to the ``.location`` of `dist`. - On exit from this routine, `entry` is added to the end of the working - set's ``.entries`` (if it wasn't already present). - - `dist` is only added to the working set if it's for a project that - doesn't already have a distribution in the set. If it's added, any - callbacks registered with the ``subscribe()`` method will be called. 
- """ - if insert: - dist.insert_on(self.entries, entry) - - if entry is None: - entry = dist.location - keys = self.entry_keys.setdefault(entry,[]) - keys2 = self.entry_keys.setdefault(dist.location,[]) - if dist.key in self.by_key: - return # ignore hidden distros - - self.by_key[dist.key] = dist - if dist.key not in keys: - keys.append(dist.key) - if dist.key not in keys2: - keys2.append(dist.key) - self._added_new(dist) - - def resolve(self, requirements, env=None, installer=None, replacement=True): - """List all distributions needed to (recursively) meet `requirements` - - `requirements` must be a sequence of ``Requirement`` objects. `env`, - if supplied, should be an ``Environment`` instance. If - not supplied, it defaults to all distributions available within any - entry or distribution in the working set. `installer`, if supplied, - will be invoked with each requirement that cannot be met by an - already-installed distribution; it should return a ``Distribution`` or - ``None``. - """ - - requirements = list(requirements)[::-1] # set up the stack - processed = {} # set of processed requirements - best = {} # key -> dist - to_activate = [] - - while requirements: - req = requirements.pop(0) # process dependencies breadth-first - if _override_setuptools(req) and replacement: - req = Requirement.parse('distribute') - - if req in processed: - # Ignore cyclic or redundant dependencies - continue - dist = best.get(req.key) - if dist is None: - # Find the best distribution and add it to the map - dist = self.by_key.get(req.key) - if dist is None: - if env is None: - env = Environment(self.entries) - dist = best[req.key] = env.best_match(req, self, installer) - if dist is None: - #msg = ("The '%s' distribution was not found on this " - # "system, and is required by this application.") - #raise DistributionNotFound(msg % req) - - # unfortunately, zc.buildout uses a str(err) - # to get the name of the distribution here.. 
- raise DistributionNotFound(req) - to_activate.append(dist) - if dist not in req: - # Oops, the "best" so far conflicts with a dependency - raise VersionConflict(dist,req) # XXX put more info here - requirements.extend(dist.requires(req.extras)[::-1]) - processed[req] = True - - return to_activate # return list of distros to activate - - def find_plugins(self, - plugin_env, full_env=None, installer=None, fallback=True - ): - """Find all activatable distributions in `plugin_env` - - Example usage:: - - distributions, errors = working_set.find_plugins( - Environment(plugin_dirlist) - ) - map(working_set.add, distributions) # add plugins+libs to sys.path - print 'Could not load', errors # display errors - - The `plugin_env` should be an ``Environment`` instance that contains - only distributions that are in the project's "plugin directory" or - directories. The `full_env`, if supplied, should be an ``Environment`` - contains all currently-available distributions. If `full_env` is not - supplied, one is created automatically from the ``WorkingSet`` this - method is called on, which will typically mean that every directory on - ``sys.path`` will be scanned for distributions. - - `installer` is a standard installer callback as used by the - ``resolve()`` method. The `fallback` flag indicates whether we should - attempt to resolve older versions of a plugin if the newest version - cannot be resolved. - - This method returns a 2-tuple: (`distributions`, `error_info`), where - `distributions` is a list of the distributions found in `plugin_env` - that were loadable, along with any other distributions that are needed - to resolve their dependencies. `error_info` is a dictionary mapping - unloadable plugin distributions to an exception instance describing the - error that occurred. Usually this will be a ``DistributionNotFound`` or - ``VersionConflict`` instance. 
- """ - - plugin_projects = list(plugin_env) - plugin_projects.sort() # scan project names in alphabetic order - - error_info = {} - distributions = {} - - if full_env is None: - env = Environment(self.entries) - env += plugin_env - else: - env = full_env + plugin_env - - shadow_set = self.__class__([]) - map(shadow_set.add, self) # put all our entries in shadow_set - - for project_name in plugin_projects: - - for dist in plugin_env[project_name]: - - req = [dist.as_requirement()] - - try: - resolvees = shadow_set.resolve(req, env, installer) - - except ResolutionError as v: - error_info[dist] = v # save error info - if fallback: - continue # try the next older version of project - else: - break # give up on this project, keep going - - else: - map(shadow_set.add, resolvees) - distributions.update(dict.fromkeys(resolvees)) - - # success, no need to try any more versions of this project - break - - distributions = list(distributions) - distributions.sort() - - return distributions, error_info - - - - - - def require(self, *requirements): - """Ensure that distributions matching `requirements` are activated - - `requirements` must be a string or a (possibly-nested) sequence - thereof, specifying the distributions and versions required. The - return value is a sequence of the distributions that needed to be - activated to fulfill the requirements; all relevant distributions are - included, even if they were already activated in this working set. 
- """ - - needed = self.resolve(parse_requirements(requirements)) - - for dist in needed: - self.add(dist) - - return needed - - - def subscribe(self, callback): - """Invoke `callback` for all distributions (including existing ones)""" - if callback in self.callbacks: - return - self.callbacks.append(callback) - for dist in self: - callback(dist) - - - def _added_new(self, dist): - for callback in self.callbacks: - callback(dist) - - - - - - - - - - - -class Environment(object): - """Searchable snapshot of distributions on a search path""" - - def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR): - """Snapshot distributions available on a search path - - Any distributions found on `search_path` are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. - - `platform` is an optional string specifying the name of the platform - that platform-specific distributions must be compatible with. If - unspecified, it defaults to the current platform. `python` is an - optional string naming the desired version of Python (e.g. ``'2.4'``); - it defaults to the current version. - - You may explicitly set `platform` (and/or `python`) to ``None`` if you - wish to map *all* distributions, not just those compatible with the - running platform or Python version. - """ - self._distmap = {} - self._cache = {} - self.platform = platform - self.python = python - self.scan(search_path) - - def can_add(self, dist): - """Is distribution `dist` acceptable for this environment? - - The distribution must match the platform and python version - requirements specified when this environment was created, or False - is returned. 
- """ - return (self.python is None or dist.py_version is None - or dist.py_version==self.python) \ - and compatible_platforms(dist.platform,self.platform) - - def remove(self, dist): - """Remove `dist` from the environment""" - self._distmap[dist.key].remove(dist) - - def scan(self, search_path=None): - """Scan `search_path` for distributions usable in this environment - - Any distributions found are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. Only distributions conforming to - the platform/python version defined at initialization are added. - """ - if search_path is None: - search_path = sys.path - - for item in search_path: - for dist in find_distributions(item): - self.add(dist) - - def __getitem__(self,project_name): - """Return a newest-to-oldest list of distributions for `project_name` - """ - try: - return self._cache[project_name] - except KeyError: - project_name = project_name.lower() - if project_name not in self._distmap: - return [] - - if project_name not in self._cache: - dists = self._cache[project_name] = self._distmap[project_name] - _sort_dists(dists) - - return self._cache[project_name] - - def add(self,dist): - """Add `dist` if we ``can_add()`` it and it isn't already added""" - if self.can_add(dist) and dist.has_version(): - dists = self._distmap.setdefault(dist.key,[]) - if dist not in dists: - dists.append(dist) - if dist.key in self._cache: - _sort_dists(self._cache[dist.key]) - - - def best_match(self, req, working_set, installer=None): - """Find distribution best matching `req` and usable on `working_set` - - This calls the ``find(req)`` method of the `working_set` to see if a - suitable distribution is already active. (This may raise - ``VersionConflict`` if an unsuitable version of the project is already - active in the specified `working_set`.) 
If a suitable distribution - isn't active, this method returns the newest distribution in the - environment that meets the ``Requirement`` in `req`. If no suitable - distribution is found, and `installer` is supplied, then the result of - calling the environment's ``obtain(req, installer)`` method will be - returned. - """ - dist = working_set.find(req) - if dist is not None: - return dist - for dist in self[req.key]: - if dist in req: - return dist - return self.obtain(req, installer) # try and download/install - - def obtain(self, requirement, installer=None): - """Obtain a distribution matching `requirement` (e.g. via download) - - Obtain a distro that matches requirement (e.g. via download). In the - base ``Environment`` class, this routine just returns - ``installer(requirement)``, unless `installer` is None, in which case - None is returned instead. This method is a hook that allows subclasses - to attempt other ways of obtaining a distribution before falling back - to the `installer` argument.""" - if installer is not None: - return installer(requirement) - - def __iter__(self): - """Yield the unique project names of the available distributions""" - for key in self._distmap.keys(): - if self[key]: yield key - - - - - def __iadd__(self, other): - """In-place addition of a distribution or environment""" - if isinstance(other,Distribution): - self.add(other) - elif isinstance(other,Environment): - for project in other: - for dist in other[project]: - self.add(dist) - else: - raise TypeError("Can't add %r to environment" % (other,)) - return self - - def __add__(self, other): - """Add an environment or distribution to an environment""" - new = self.__class__([], platform=None, python=None) - for env in self, other: - new += env - return new - - -AvailableDistributions = Environment # XXX backward compatibility - - -class ExtractionError(RuntimeError): - """An error occurred extracting a resource - - The following attributes are available from instances of this 
exception: - - manager - The resource manager that raised this exception - - cache_path - The base directory for resource extraction - - original_error - The exception instance that caused extraction to fail - """ - - - - -class ResourceManager: - """Manage resource extraction and packages""" - extraction_path = None - - def __init__(self): - self.cached_files = {} - - def resource_exists(self, package_or_requirement, resource_name): - """Does the named resource exist?""" - return get_provider(package_or_requirement).has_resource(resource_name) - - def resource_isdir(self, package_or_requirement, resource_name): - """Is the named resource an existing directory?""" - return get_provider(package_or_requirement).resource_isdir( - resource_name - ) - - def resource_filename(self, package_or_requirement, resource_name): - """Return a true filesystem path for specified resource""" - return get_provider(package_or_requirement).get_resource_filename( - self, resource_name - ) - - def resource_stream(self, package_or_requirement, resource_name): - """Return a readable file-like object for specified resource""" - return get_provider(package_or_requirement).get_resource_stream( - self, resource_name - ) - - def resource_string(self, package_or_requirement, resource_name): - """Return specified resource as a string""" - return get_provider(package_or_requirement).get_resource_string( - self, resource_name - ) - - def resource_listdir(self, package_or_requirement, resource_name): - """List the contents of the named resource directory""" - return get_provider(package_or_requirement).resource_listdir( - resource_name - ) - - def extraction_error(self): - """Give an error message for problems extracting file(s)""" - - old_exc = sys.exc_info()[1] - cache_path = self.extraction_path or get_default_cache() - - err = ExtractionError("""Can't extract file(s) to egg cache - -The following error occurred while trying to extract file(s) to the Python egg -cache: - - %s - -The Python egg 
cache directory is currently set to: - - %s - -Perhaps your account does not have write access to this directory? You can -change the cache directory by setting the PYTHON_EGG_CACHE environment -variable to point to an accessible directory. -""" % (old_exc, cache_path) - ) - err.manager = self - err.cache_path = cache_path - err.original_error = old_exc - raise err - - - - - - - - - - - - - - - - def get_cache_path(self, archive_name, names=()): - """Return absolute location in cache for `archive_name` and `names` - - The parent directory of the resulting path will be created if it does - not already exist. `archive_name` should be the base filename of the - enclosing egg (which may not be the name of the enclosing zipfile!), - including its ".egg" extension. `names`, if provided, should be a - sequence of path name parts "under" the egg's extraction location. - - This method should only be called by resource providers that need to - obtain an extraction location, and only for names they intend to - extract, as it tracks the generated names for possible cleanup later. - """ - extract_path = self.extraction_path or get_default_cache() - target_path = os.path.join(extract_path, archive_name+'-tmp', *names) - try: - _bypass_ensure_directory(target_path) - except: - self.extraction_error() - - self.cached_files[target_path] = 1 - return target_path - - - - - - - - - - - - - - - - - - - - def postprocess(self, tempname, filename): - """Perform any platform-specific postprocessing of `tempname` - - This is where Mac header rewrites should be done; other platforms don't - have anything special they should do. - - Resource providers should call this method ONLY after successfully - extracting a compressed resource. They must NOT call it on resources - that are already in the filesystem. - - `tempname` is the current (temporary) name of the file, and `filename` - is the name it will be renamed to by the caller after this routine - returns. 
- """ - - if os.name == 'posix': - # Make the resource executable - mode = ((os.stat(tempname).st_mode) | 0555) & 07777 - os.chmod(tempname, mode) - - - - - - - - - - - - - - - - - - - - - - - def set_extraction_path(self, path): - """Set the base path where resources will be extracted to, if needed. - - If you do not call this routine before any extractions take place, the - path defaults to the return value of ``get_default_cache()``. (Which - is based on the ``PYTHON_EGG_CACHE`` environment variable, with various - platform-specific fallbacks. See that routine's documentation for more - details.) - - Resources are extracted to subdirectories of this path based upon - information given by the ``IResourceProvider``. You may set this to a - temporary directory, but then you must call ``cleanup_resources()`` to - delete the extracted files when done. There is no guarantee that - ``cleanup_resources()`` will be able to remove all extracted files. - - (Note: you may not change the extraction path for a given resource - manager once resources have been extracted, unless you first call - ``cleanup_resources()``.) - """ - if self.cached_files: - raise ValueError( - "Can't change extraction path, files already extracted" - ) - - self.extraction_path = path - - def cleanup_resources(self, force=False): - """ - Delete all extracted resource files and directories, returning a list - of the file and directory names that could not be successfully removed. - This function does not have any concurrency protection, so it should - generally only be called when the extraction path is a temporary - directory exclusive to a single process. This method is not - automatically called; you must call it explicitly or register it as an - ``atexit`` function if you wish to ensure cleanup of a temporary - directory used for extractions. 
- """ - # XXX - - - -def get_default_cache(): - """Determine the default cache location - - This returns the ``PYTHON_EGG_CACHE`` environment variable, if set. - Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the - "Application Data" directory. On all other systems, it's "~/.python-eggs". - """ - try: - return os.environ['PYTHON_EGG_CACHE'] - except KeyError: - pass - - if os.name!='nt': - return os.path.expanduser('~/.python-eggs') - - app_data = 'Application Data' # XXX this may be locale-specific! - app_homes = [ - (('APPDATA',), None), # best option, should be locale-safe - (('USERPROFILE',), app_data), - (('HOMEDRIVE','HOMEPATH'), app_data), - (('HOMEPATH',), app_data), - (('HOME',), None), - (('WINDIR',), app_data), # 95/98/ME - ] - - for keys, subdir in app_homes: - dirname = '' - for key in keys: - if key in os.environ: - dirname = os.path.join(dirname, os.environ[key]) - else: - break - else: - if subdir: - dirname = os.path.join(dirname,subdir) - return os.path.join(dirname, 'Python-Eggs') - else: - raise RuntimeError( - "Please set the PYTHON_EGG_CACHE enviroment variable" - ) - -def safe_name(name): - """Convert an arbitrary string to a standard distribution name - - Any runs of non-alphanumeric/. characters are replaced with a single '-'. - """ - return re.sub('[^A-Za-z0-9.]+', '-', name) - - -def safe_version(version): - """Convert an arbitrary string to a standard version string - - Spaces become dots, and all other non-alphanumeric characters become - dashes, with runs of multiple dashes condensed to a single dash. - """ - version = version.replace(' ','.') - return re.sub('[^A-Za-z0-9.]+', '-', version) - - -def safe_extra(extra): - """Convert an arbitrary string to a standard 'extra' name - - Any runs of non-alphanumeric characters are replaced with a single '_', - and the result is always lowercased. 
- """ - return re.sub('[^A-Za-z0-9.]+', '_', extra).lower() - - -def to_filename(name): - """Convert a project or version name to its filename-escaped form - - Any '-' characters are currently replaced with '_'. - """ - return name.replace('-','_') - - - - - - - - -class NullProvider: - """Try to implement resources and metadata for arbitrary PEP 302 loaders""" - - egg_name = None - egg_info = None - loader = None - - def __init__(self, module): - self.loader = getattr(module, '__loader__', None) - self.module_path = os.path.dirname(getattr(module, '__file__', '')) - - def get_resource_filename(self, manager, resource_name): - return self._fn(self.module_path, resource_name) - - def get_resource_stream(self, manager, resource_name): - return StringIO(self.get_resource_string(manager, resource_name)) - - def get_resource_string(self, manager, resource_name): - return self._get(self._fn(self.module_path, resource_name)) - - def has_resource(self, resource_name): - return self._has(self._fn(self.module_path, resource_name)) - - def has_metadata(self, name): - return self.egg_info and self._has(self._fn(self.egg_info,name)) - - if sys.version_info <= (3,): - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info,name)) - else: - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info,name)).decode("utf-8") - - def get_metadata_lines(self, name): - return yield_lines(self.get_metadata(name)) - - def resource_isdir(self,resource_name): - return self._isdir(self._fn(self.module_path, resource_name)) - - def metadata_isdir(self,name): - return self.egg_info and self._isdir(self._fn(self.egg_info,name)) - - - def resource_listdir(self,resource_name): - return self._listdir(self._fn(self.module_path,resource_name)) - - def metadata_listdir(self,name): - if self.egg_info: - return self._listdir(self._fn(self.egg_info,name)) - return [] - - def 
run_script(self,script_name,namespace): - script = 'scripts/'+script_name - if not self.has_metadata(script): - raise ResolutionError("No script named %r" % script_name) - script_text = self.get_metadata(script).replace('\r\n','\n') - script_text = script_text.replace('\r','\n') - script_filename = self._fn(self.egg_info,script) - namespace['__file__'] = script_filename - if os.path.exists(script_filename): - execfile(script_filename, namespace, namespace) - else: - from linecache import cache - cache[script_filename] = ( - len(script_text), 0, script_text.split('\n'), script_filename - ) - script_code = compile(script_text,script_filename,'exec') - exec script_code in namespace, namespace - - def _has(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _isdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _listdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _fn(self, base, resource_name): - if resource_name: - return os.path.join(base, *resource_name.split('/')) - return base - - def _get(self, path): - if hasattr(self.loader, 'get_data'): - return self.loader.get_data(path) - raise NotImplementedError( - "Can't perform this operation for loaders without 'get_data()'" - ) - -register_loader_type(object, NullProvider) - - -class EggProvider(NullProvider): - """Provider based on a virtual filesystem""" - - def __init__(self,module): - NullProvider.__init__(self,module) - self._setup_prefix() - - def _setup_prefix(self): - # we assume here that our metadata may be nested inside a "basket" - # of multiple eggs; that's why we use module_path instead of .archive - path = self.module_path - old = None - while path!=old: - if path.lower().endswith('.egg'): - self.egg_name = os.path.basename(path) - self.egg_info = os.path.join(path, 'EGG-INFO') - self.egg_root = path - 
break - old = path - path, base = os.path.split(path) - - - - - - -class DefaultProvider(EggProvider): - """Provides access to package resources in the filesystem""" - - def _has(self, path): - return os.path.exists(path) - - def _isdir(self,path): - return os.path.isdir(path) - - def _listdir(self,path): - return os.listdir(path) - - def get_resource_stream(self, manager, resource_name): - return open(self._fn(self.module_path, resource_name), 'rb') - - def _get(self, path): - stream = open(path, 'rb') - try: - return stream.read() - finally: - stream.close() - -register_loader_type(type(None), DefaultProvider) - - -class EmptyProvider(NullProvider): - """Provider that returns nothing for all requests""" - - _isdir = _has = lambda self,path: False - _get = lambda self,path: '' - _listdir = lambda self,path: [] - module_path = None - - def __init__(self): - pass - -empty_provider = EmptyProvider() - - - - -class ZipProvider(EggProvider): - """Resource support for zips and eggs""" - - eagers = None - - def __init__(self, module): - EggProvider.__init__(self,module) - self.zipinfo = zipimport._zip_directory_cache[self.loader.archive] - self.zip_pre = self.loader.archive+os.sep - - def _zipinfo_name(self, fspath): - # Convert a virtual filename (full path to file) into a zipfile subpath - # usable with the zipimport directory cache for our target archive - if fspath.startswith(self.zip_pre): - return fspath[len(self.zip_pre):] - raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.zip_pre) - ) - - def _parts(self,zip_path): - # Convert a zipfile subpath into an egg-relative path part list - fspath = self.zip_pre+zip_path # pseudo-fs path - if fspath.startswith(self.egg_root+os.sep): - return fspath[len(self.egg_root)+1:].split(os.sep) - raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.egg_root) - ) - - def get_resource_filename(self, manager, resource_name): - if not self.egg_name: - raise NotImplementedError( - "resource_filename() 
only supported for .egg, not .zip" - ) - # no need to lock for extraction, since we use temp names - zip_path = self._resource_to_zip(resource_name) - eagers = self._get_eager_resources() - if '/'.join(self._parts(zip_path)) in eagers: - for name in eagers: - self._extract_resource(manager, self._eager_to_zip(name)) - return self._extract_resource(manager, zip_path) - - def _extract_resource(self, manager, zip_path): - - if zip_path in self._index(): - for name in self._index()[zip_path]: - last = self._extract_resource( - manager, os.path.join(zip_path, name) - ) - return os.path.dirname(last) # return the extracted directory name - - zip_stat = self.zipinfo[zip_path] - t,d,size = zip_stat[5], zip_stat[6], zip_stat[3] - date_time = ( - (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd - (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc. - ) - timestamp = time.mktime(date_time) - - try: - if not WRITE_SUPPORT: - raise IOError('"os.rename" and "os.unlink" are not supported ' - 'on this platform') - - real_path = manager.get_cache_path( - self.egg_name, self._parts(zip_path) - ) - - if os.path.isfile(real_path): - stat = os.stat(real_path) - if stat.st_size==size and stat.st_mtime==timestamp: - # size and stamp match, don't bother extracting - return real_path - - outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) - os.write(outf, self.loader.get_data(zip_path)) - os.close(outf) - utime(tmpnam, (timestamp,timestamp)) - manager.postprocess(tmpnam, real_path) - - try: - rename(tmpnam, real_path) - - except os.error: - if os.path.isfile(real_path): - stat = os.stat(real_path) - - if stat.st_size==size and stat.st_mtime==timestamp: - # size and stamp match, somebody did it just ahead of - # us, so we're done - return real_path - elif os.name=='nt': # Windows, del old file and retry - unlink(real_path) - rename(tmpnam, real_path) - return real_path - raise - - except os.error: - manager.extraction_error() # report a user-friendly error - - return 
real_path - - def _get_eager_resources(self): - if self.eagers is None: - eagers = [] - for name in ('native_libs.txt', 'eager_resources.txt'): - if self.has_metadata(name): - eagers.extend(self.get_metadata_lines(name)) - self.eagers = eagers - return self.eagers - - def _index(self): - try: - return self._dirindex - except AttributeError: - ind = {} - for path in self.zipinfo: - parts = path.split(os.sep) - while parts: - parent = os.sep.join(parts[:-1]) - if parent in ind: - ind[parent].append(parts[-1]) - break - else: - ind[parent] = [parts.pop()] - self._dirindex = ind - return ind - - def _has(self, fspath): - zip_path = self._zipinfo_name(fspath) - return zip_path in self.zipinfo or zip_path in self._index() - - def _isdir(self,fspath): - return self._zipinfo_name(fspath) in self._index() - - def _listdir(self,fspath): - return list(self._index().get(self._zipinfo_name(fspath), ())) - - def _eager_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.egg_root,resource_name)) - - def _resource_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.module_path,resource_name)) - -register_loader_type(zipimport.zipimporter, ZipProvider) - - - - - - - - - - - - - - - - - - - - - - - - -class FileMetadata(EmptyProvider): - """Metadata handler for standalone PKG-INFO files - - Usage:: - - metadata = FileMetadata("/path/to/PKG-INFO") - - This provider rejects all data and metadata requests except for PKG-INFO, - which is treated as existing, and will be the contents of the file at - the provided location. 
- """ - - def __init__(self,path): - self.path = path - - def has_metadata(self,name): - return name=='PKG-INFO' - - def get_metadata(self,name): - if name=='PKG-INFO': - f = open(self.path,'rU') - metadata = f.read() - f.close() - return metadata - raise KeyError("No metadata except PKG-INFO is available") - - def get_metadata_lines(self,name): - return yield_lines(self.get_metadata(name)) - - - - - - - - - - - - - - - - -class PathMetadata(DefaultProvider): - """Metadata provider for egg directories - - Usage:: - - # Development eggs: - - egg_info = "/path/to/PackageName.egg-info" - base_dir = os.path.dirname(egg_info) - metadata = PathMetadata(base_dir, egg_info) - dist_name = os.path.splitext(os.path.basename(egg_info))[0] - dist = Distribution(basedir,project_name=dist_name,metadata=metadata) - - # Unpacked egg directories: - - egg_path = "/path/to/PackageName-ver-pyver-etc.egg" - metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) - dist = Distribution.from_filename(egg_path, metadata=metadata) - """ - - def __init__(self, path, egg_info): - self.module_path = path - self.egg_info = egg_info - - -class EggMetadata(ZipProvider): - """Metadata provider for .egg files""" - - def __init__(self, importer): - """Create a metadata provider from a zipimporter""" - - self.zipinfo = zipimport._zip_directory_cache[importer.archive] - self.zip_pre = importer.archive+os.sep - self.loader = importer - if importer.prefix: - self.module_path = os.path.join(importer.archive, importer.prefix) - else: - self.module_path = importer.archive - self._setup_prefix() - - -class ImpWrapper: - """PEP 302 Importer that wraps Python's "normal" import algorithm""" - - def __init__(self, path=None): - self.path = path - - def find_module(self, fullname, path=None): - subname = fullname.split(".")[-1] - if subname != fullname and self.path is None: - return None - if self.path is None: - path = None - else: - path = [self.path] - try: - file, filename, etc = 
imp.find_module(subname, path) - except ImportError: - return None - return ImpLoader(file, filename, etc) - - -class ImpLoader: - """PEP 302 Loader that wraps Python's "normal" import algorithm""" - - def __init__(self, file, filename, etc): - self.file = file - self.filename = filename - self.etc = etc - - def load_module(self, fullname): - try: - mod = imp.load_module(fullname, self.file, self.filename, self.etc) - finally: - if self.file: self.file.close() - # Note: we don't set __loader__ because we want the module to look - # normal; i.e. this is just a wrapper for standard import machinery - return mod - - - - -def get_importer(path_item): - """Retrieve a PEP 302 "importer" for the given path item - - If there is no importer, this returns a wrapper around the builtin import - machinery. The returned importer is only cached if it was created by a - path hook. - """ - try: - importer = sys.path_importer_cache[path_item] - except KeyError: - for hook in sys.path_hooks: - try: - importer = hook(path_item) - except ImportError: - pass - else: - break - else: - importer = None - - sys.path_importer_cache.setdefault(path_item,importer) - if importer is None: - try: - importer = ImpWrapper(path_item) - except ImportError: - pass - return importer - -try: - from pkgutil import get_importer, ImpImporter -except ImportError: - pass # Python 2.3 or 2.4, use our own implementation -else: - ImpWrapper = ImpImporter # Python 2.5, use pkgutil's implementation - del ImpLoader, ImpImporter - - - - - - -_distribution_finders = {} - -def register_finder(importer_type, distribution_finder): - """Register `distribution_finder` to find distributions in sys.path items - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `distribution_finder` is a callable that, passed a path - item and the importer instance, yields ``Distribution`` instances found on - that path item. 
See ``pkg_resources.find_on_path`` for an example.""" - _distribution_finders[importer_type] = distribution_finder - - -def find_distributions(path_item, only=False): - """Yield distributions accessible via `path_item`""" - importer = get_importer(path_item) - finder = _find_adapter(_distribution_finders, importer) - return finder(importer, path_item, only) - -def find_in_zip(importer, path_item, only=False): - metadata = EggMetadata(importer) - if metadata.has_metadata('PKG-INFO'): - yield Distribution.from_filename(path_item, metadata=metadata) - if only: - return # don't yield nested distros - for subitem in metadata.resource_listdir('/'): - if subitem.endswith('.egg'): - subpath = os.path.join(path_item, subitem) - for dist in find_in_zip(zipimport.zipimporter(subpath), subpath): - yield dist - -register_finder(zipimport.zipimporter, find_in_zip) - -def StringIO(*args, **kw): - """Thunk to load the real StringIO on demand""" - global StringIO - try: - from cStringIO import StringIO - except ImportError: - from StringIO import StringIO - return StringIO(*args,**kw) - -def find_nothing(importer, path_item, only=False): - return () -register_finder(object,find_nothing) - -def find_on_path(importer, path_item, only=False): - """Yield distributions accessible on a sys.path directory""" - path_item = _normalize_cached(path_item) - - if os.path.isdir(path_item) and os.access(path_item, os.R_OK): - if path_item.lower().endswith('.egg'): - # unpacked egg - yield Distribution.from_filename( - path_item, metadata=PathMetadata( - path_item, os.path.join(path_item,'EGG-INFO') - ) - ) - else: - # scan for .egg and .egg-info in directory - for entry in os.listdir(path_item): - lower = entry.lower() - if lower.endswith('.egg-info'): - fullpath = os.path.join(path_item, entry) - if os.path.isdir(fullpath): - # egg-info directory, allow getting metadata - metadata = PathMetadata(path_item, fullpath) - else: - metadata = FileMetadata(fullpath) - yield Distribution.from_location( 
- path_item,entry,metadata,precedence=DEVELOP_DIST - ) - elif not only and lower.endswith('.egg'): - for dist in find_distributions(os.path.join(path_item, entry)): - yield dist - elif not only and lower.endswith('.egg-link'): - for line in open(os.path.join(path_item, entry)): - if not line.strip(): continue - for item in find_distributions(os.path.join(path_item,line.rstrip())): - yield item - break -register_finder(ImpWrapper,find_on_path) - -_namespace_handlers = {} -_namespace_packages = {} - -def register_namespace_handler(importer_type, namespace_handler): - """Register `namespace_handler` to declare namespace packages - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `namespace_handler` is a callable like this:: - - def namespace_handler(importer,path_entry,moduleName,module): - # return a path_entry to use for child packages - - Namespace handlers are only called if the importer object has already - agreed that it can handle the relevant path item, and they should only - return a subpath if the module __path__ does not already contain an - equivalent subpath. For an example namespace handler, see - ``pkg_resources.file_ns_handler``. 
- """ - _namespace_handlers[importer_type] = namespace_handler - -def _handle_ns(packageName, path_item): - """Ensure that named package includes a subpath of path_item (if needed)""" - importer = get_importer(path_item) - if importer is None: - return None - loader = importer.find_module(packageName) - if loader is None: - return None - module = sys.modules.get(packageName) - if module is None: - module = sys.modules[packageName] = types.ModuleType(packageName) - module.__path__ = []; _set_parent_ns(packageName) - elif not hasattr(module,'__path__'): - raise TypeError("Not a package:", packageName) - handler = _find_adapter(_namespace_handlers, importer) - subpath = handler(importer,path_item,packageName,module) - if subpath is not None: - path = module.__path__; path.append(subpath) - loader.load_module(packageName); module.__path__ = path - return subpath - -def declare_namespace(packageName): - """Declare that package 'packageName' is a namespace package""" - - imp.acquire_lock() - try: - if packageName in _namespace_packages: - return - - path, parent = sys.path, None - if '.' 
in packageName: - parent = '.'.join(packageName.split('.')[:-1]) - declare_namespace(parent) - __import__(parent) - try: - path = sys.modules[parent].__path__ - except AttributeError: - raise TypeError("Not a package:", parent) - - # Track what packages are namespaces, so when new path items are added, - # they can be updated - _namespace_packages.setdefault(parent,[]).append(packageName) - _namespace_packages.setdefault(packageName,[]) - - for path_item in path: - # Ensure all the parent's path items are reflected in the child, - # if they apply - _handle_ns(packageName, path_item) - - finally: - imp.release_lock() - -def fixup_namespace_packages(path_item, parent=None): - """Ensure that previously-declared namespace packages include path_item""" - imp.acquire_lock() - try: - for package in _namespace_packages.get(parent,()): - subpath = _handle_ns(package, path_item) - if subpath: fixup_namespace_packages(subpath,package) - finally: - imp.release_lock() - -def file_ns_handler(importer, path_item, packageName, module): - """Compute an ns-package subpath for a filesystem or zipfile importer""" - - subpath = os.path.join(path_item, packageName.split('.')[-1]) - normalized = _normalize_cached(subpath) - for item in module.__path__: - if _normalize_cached(item)==normalized: - break - else: - # Only return the path if it's not already there - return subpath - -register_namespace_handler(ImpWrapper,file_ns_handler) -register_namespace_handler(zipimport.zipimporter,file_ns_handler) - - -def null_ns_handler(importer, path_item, packageName, module): - return None - -register_namespace_handler(object,null_ns_handler) - - -def normalize_path(filename): - """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(filename)) - -def _normalize_cached(filename,_cache={}): - try: - return _cache[filename] - except KeyError: - _cache[filename] = result = normalize_path(filename) - return result - -def _set_parent_ns(packageName): - parts = 
packageName.split('.') - name = parts.pop() - if parts: - parent = '.'.join(parts) - setattr(sys.modules[parent], name, sys.modules[packageName]) - - -def yield_lines(strs): - """Yield non-empty/non-comment lines of a ``basestring`` or sequence""" - if isinstance(strs,basestring): - for s in strs.splitlines(): - s = s.strip() - if s and not s.startswith('#'): # skip blank lines/comments - yield s - else: - for ss in strs: - for s in yield_lines(ss): - yield s - -LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment -CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation -DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra -VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info -COMMA = re.compile(r"\s*,").match # comma between items -OBRACKET = re.compile(r"\s*\[").match -CBRACKET = re.compile(r"\s*\]").match -MODULE = re.compile(r"\w+(\.\w+)*$").match -EGG_NAME = re.compile( - r"(?P[^-]+)" - r"( -(?P[^-]+) (-py(?P[^-]+) (-(?P.+))? )? )?", - re.VERBOSE | re.IGNORECASE -).match - -component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) -replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get - -def _parse_version_parts(s): - for part in component_re.split(s): - part = replace(part,part) - if not part or part=='.': - continue - if part[:1] in '0123456789': - yield part.zfill(8) # pad for numeric comparison - else: - yield '*'+part - - yield '*final' # ensure that alpha/beta/candidate are before final - -def parse_version(s): - """Convert a version string to a chronologically-sortable key - - This is a rough cross between distutils' StrictVersion and LooseVersion; - if you give it versions that would work with StrictVersion, then it behaves - the same; otherwise it acts like a slightly-smarter LooseVersion. It is - *possible* to create pathological version coding schemes that will fool - this parser, but they should be very rare in practice. 
- - The returned value will be a tuple of strings. Numeric portions of the - version are padded to 8 digits so they will compare numerically, but - without relying on how numbers compare relative to strings. Dots are - dropped, but dashes are retained. Trailing zeros between alpha segments - or dashes are suppressed, so that e.g. "2.4.0" is considered the same as - "2.4". Alphanumeric parts are lower-cased. - - The algorithm assumes that strings like "-" and any alpha string that - alphabetically follows "final" represents a "patch level". So, "2.4-1" - is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is - considered newer than "2.4-1", which in turn is newer than "2.4". - - Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that - come before "final" alphabetically) are assumed to be pre-release versions, - so that the version "2.4" is considered newer than "2.4a1". - - Finally, to handle miscellaneous cases, the strings "pre", "preview", and - "rc" are treated as if they were "c", i.e. as though they were release - candidates, and therefore are not as new as a version string that does not - contain them, and "dev" is replaced with an '@' so that it sorts lower than - than any other pre-release tag. 
- """ - parts = [] - for part in _parse_version_parts(s.lower()): - if part.startswith('*'): - if part<'*final': # remove '-' before a prerelease tag - while parts and parts[-1]=='*final-': parts.pop() - # remove trailing zeros from each series of numeric parts - while parts and parts[-1]=='00000000': - parts.pop() - parts.append(part) - return tuple(parts) - -class EntryPoint(object): - """Object representing an advertised importable object""" - - def __init__(self, name, module_name, attrs=(), extras=(), dist=None): - if not MODULE(module_name): - raise ValueError("Invalid module name", module_name) - self.name = name - self.module_name = module_name - self.attrs = tuple(attrs) - self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras - self.dist = dist - - def __str__(self): - s = "%s = %s" % (self.name, self.module_name) - if self.attrs: - s += ':' + '.'.join(self.attrs) - if self.extras: - s += ' [%s]' % ','.join(self.extras) - return s - - def __repr__(self): - return "EntryPoint.parse(%r)" % str(self) - - def load(self, require=True, env=None, installer=None): - if require: self.require(env, installer) - entry = __import__(self.module_name, globals(),globals(), ['__name__']) - for attr in self.attrs: - try: - entry = getattr(entry,attr) - except AttributeError: - raise ImportError("%r has no %r attribute" % (entry,attr)) - return entry - - def require(self, env=None, installer=None): - if self.extras and not self.dist: - raise UnknownExtra("Can't require() without a distribution", self) - map(working_set.add, - working_set.resolve(self.dist.requires(self.extras),env,installer)) - - - - #@classmethod - def parse(cls, src, dist=None): - """Parse a single entry point from string `src` - - Entry point syntax follows the form:: - - name = some.module:some.attr [extra1,extra2] - - The entry name and module name are required, but the ``:attrs`` and - ``[extras]`` parts are optional - """ - try: - attrs = extras = () - name,value = src.split('=',1) - if 
'[' in value: - value,extras = value.split('[',1) - req = Requirement.parse("x["+extras) - if req.specs: raise ValueError - extras = req.extras - if ':' in value: - value,attrs = value.split(':',1) - if not MODULE(attrs.rstrip()): - raise ValueError - attrs = attrs.rstrip().split('.') - except ValueError: - raise ValueError( - "EntryPoint must be in 'name=module:attrs [extras]' format", - src - ) - else: - return cls(name.strip(), value.strip(), attrs, extras, dist) - - parse = classmethod(parse) - - - - - - - - - #@classmethod - def parse_group(cls, group, lines, dist=None): - """Parse an entry point group""" - if not MODULE(group): - raise ValueError("Invalid group name", group) - this = {} - for line in yield_lines(lines): - ep = cls.parse(line, dist) - if ep.name in this: - raise ValueError("Duplicate entry point", group, ep.name) - this[ep.name]=ep - return this - - parse_group = classmethod(parse_group) - - #@classmethod - def parse_map(cls, data, dist=None): - """Parse a map of entry point groups""" - if isinstance(data,dict): - data = data.items() - else: - data = split_sections(data) - maps = {} - for group, lines in data: - if group is None: - if not lines: - continue - raise ValueError("Entry points must be listed in groups") - group = group.strip() - if group in maps: - raise ValueError("Duplicate group name", group) - maps[group] = cls.parse_group(group, lines, dist) - return maps - - parse_map = classmethod(parse_map) - - -def _remove_md5_fragment(location): - if not location: - return '' - parsed = urlparse(location) - if parsed[-1].startswith('md5='): - return urlunparse(parsed[:-1] + ('',)) - return location - - -class Distribution(object): - """Wrap an actual or potential sys.path entry w/metadata""" - def __init__(self, - location=None, metadata=None, project_name=None, version=None, - py_version=PY_MAJOR, platform=None, precedence = EGG_DIST - ): - self.project_name = safe_name(project_name or 'Unknown') - if version is not None: - self._version 
= safe_version(version) - self.py_version = py_version - self.platform = platform - self.location = location - self.precedence = precedence - self._provider = metadata or empty_provider - - #@classmethod - def from_location(cls,location,basename,metadata=None,**kw): - project_name, version, py_version, platform = [None]*4 - basename, ext = os.path.splitext(basename) - if ext.lower() in (".egg",".egg-info"): - match = EGG_NAME(basename) - if match: - project_name, version, py_version, platform = match.group( - 'name','ver','pyver','plat' - ) - return cls( - location, metadata, project_name=project_name, version=version, - py_version=py_version, platform=platform, **kw - ) - from_location = classmethod(from_location) - - - hashcmp = property( - lambda self: ( - getattr(self,'parsed_version',()), - self.precedence, - self.key, - _remove_md5_fragment(self.location), - self.py_version, - self.platform - ) - ) - def __hash__(self): return hash(self.hashcmp) - def __lt__(self, other): - return self.hashcmp < other.hashcmp - def __le__(self, other): - return self.hashcmp <= other.hashcmp - def __gt__(self, other): - return self.hashcmp > other.hashcmp - def __ge__(self, other): - return self.hashcmp >= other.hashcmp - def __eq__(self, other): - if not isinstance(other, self.__class__): - # It's not a Distribution, so they are not equal - return False - return self.hashcmp == other.hashcmp - def __ne__(self, other): - return not self == other - - # These properties have to be lazy so that we don't have to load any - # metadata until/unless it's actually needed. 
(i.e., some distributions - # may not know their name or version without loading PKG-INFO) - - #@property - def key(self): - try: - return self._key - except AttributeError: - self._key = key = self.project_name.lower() - return key - key = property(key) - - #@property - def parsed_version(self): - try: - return self._parsed_version - except AttributeError: - self._parsed_version = pv = parse_version(self.version) - return pv - - parsed_version = property(parsed_version) - - #@property - def version(self): - try: - return self._version - except AttributeError: - for line in self._get_metadata('PKG-INFO'): - if line.lower().startswith('version:'): - self._version = safe_version(line.split(':',1)[1].strip()) - return self._version - else: - raise ValueError( - "Missing 'Version:' header and/or PKG-INFO file", self - ) - version = property(version) - - - - - #@property - def _dep_map(self): - try: - return self.__dep_map - except AttributeError: - dm = self.__dep_map = {None: []} - for name in 'requires.txt', 'depends.txt': - for extra,reqs in split_sections(self._get_metadata(name)): - if extra: extra = safe_extra(extra) - dm.setdefault(extra,[]).extend(parse_requirements(reqs)) - return dm - _dep_map = property(_dep_map) - - def requires(self,extras=()): - """List of Requirements needed for this distro if `extras` are used""" - dm = self._dep_map - deps = [] - deps.extend(dm.get(None,())) - for ext in extras: - try: - deps.extend(dm[safe_extra(ext)]) - except KeyError: - raise UnknownExtra( - "%s has no such extra feature %r" % (self, ext) - ) - return deps - - def _get_metadata(self,name): - if self.has_metadata(name): - for line in self.get_metadata_lines(name): - yield line - - def activate(self,path=None): - """Ensure distribution is importable on `path` (default=sys.path)""" - if path is None: path = sys.path - self.insert_on(path) - if path is sys.path: - fixup_namespace_packages(self.location) - map(declare_namespace, 
self._get_metadata('namespace_packages.txt')) - - - def egg_name(self): - """Return what this distribution's standard .egg filename should be""" - filename = "%s-%s-py%s" % ( - to_filename(self.project_name), to_filename(self.version), - self.py_version or PY_MAJOR - ) - - if self.platform: - filename += '-'+self.platform - return filename - - def __repr__(self): - if self.location: - return "%s (%s)" % (self,self.location) - else: - return str(self) - - def __str__(self): - try: version = getattr(self,'version',None) - except ValueError: version = None - version = version or "[unknown version]" - return "%s %s" % (self.project_name,version) - - def __getattr__(self,attr): - """Delegate all unrecognized public attributes to .metadata provider""" - if attr.startswith('_'): - raise AttributeError,attr - return getattr(self._provider, attr) - - #@classmethod - def from_filename(cls,filename,metadata=None, **kw): - return cls.from_location( - _normalize_cached(filename), os.path.basename(filename), metadata, - **kw - ) - from_filename = classmethod(from_filename) - - def as_requirement(self): - """Return a ``Requirement`` that matches this distribution exactly""" - return Requirement.parse('%s==%s' % (self.project_name, self.version)) - - def load_entry_point(self, group, name): - """Return the `name` entry point of `group` or raise ImportError""" - ep = self.get_entry_info(group,name) - if ep is None: - raise ImportError("Entry point %r not found" % ((group,name),)) - return ep.load() - - def get_entry_map(self, group=None): - """Return the entry point map for `group`, or the full entry map""" - try: - ep_map = self._ep_map - except AttributeError: - ep_map = self._ep_map = EntryPoint.parse_map( - self._get_metadata('entry_points.txt'), self - ) - if group is not None: - return ep_map.get(group,{}) - return ep_map - - def get_entry_info(self, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return self.get_entry_map(group).get(name) 
- - - - - - - - - - - - - - - - - - - - def insert_on(self, path, loc = None): - """Insert self.location in path before its nearest parent directory""" - - loc = loc or self.location - - if self.project_name == 'setuptools': - try: - version = self.version - except ValueError: - version = '' - if version.startswith('0.7'): - raise ValueError( - "A 0.7-series setuptools cannot be installed " - "with distribute. Found one at %s" % str(self.location)) - - if not loc: - return - - if path is sys.path: - self.check_version_conflict() - - nloc = _normalize_cached(loc) - bdir = os.path.dirname(nloc) - npath= map(_normalize_cached, path) - - bp = None - for p, item in enumerate(npath): - if item==nloc: - break - elif item==bdir and self.precedence==EGG_DIST: - # if it's an .egg, give it precedence over its directory - path.insert(p, loc) - npath.insert(p, nloc) - break - else: - path.append(loc) - return - - # p is the spot where we found or inserted loc; now remove duplicates - while 1: - try: - np = npath.index(nloc, p+1) - except ValueError: - break - else: - del npath[np], path[np] - p = np # ha! 
- - return - - - - def check_version_conflict(self): - if self.key=='distribute': - return # ignore the inevitable setuptools self-conflicts :( - - nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) - loc = normalize_path(self.location) - for modname in self._get_metadata('top_level.txt'): - if (modname not in sys.modules or modname in nsp - or modname in _namespace_packages - ): - continue - if modname in ('pkg_resources', 'setuptools', 'site'): - continue - fn = getattr(sys.modules[modname], '__file__', None) - if fn and (normalize_path(fn).startswith(loc) or - fn.startswith(self.location)): - continue - issue_warning( - "Module %s was already imported from %s, but %s is being added" - " to sys.path" % (modname, fn, self.location), - ) - - def has_version(self): - try: - self.version - except ValueError: - issue_warning("Unbuilt egg for "+repr(self)) - return False - return True - - def clone(self,**kw): - """Copy this distribution, substituting in any changed keyword args""" - for attr in ( - 'project_name', 'version', 'py_version', 'platform', 'location', - 'precedence' - ): - kw.setdefault(attr, getattr(self,attr,None)) - kw.setdefault('metadata', self._provider) - return self.__class__(**kw) - - - - - #@property - def extras(self): - return [dep for dep in self._dep_map if dep] - extras = property(extras) - - -def issue_warning(*args,**kw): - level = 1 - g = globals() - try: - # find the first stack frame that is *not* code in - # the pkg_resources module, to use for the warning - while sys._getframe(level).f_globals is g: - level += 1 - except ValueError: - pass - from warnings import warn - warn(stacklevel = level+1, *args, **kw) - - - - - - - - - - - - - - - - - - - - - - - -def parse_requirements(strs): - """Yield ``Requirement`` objects for each specification in `strs` - - `strs` must be an instance of ``basestring``, or a (possibly-nested) - iterable thereof. 
- """ - # create a steppable iterator, so we can handle \-continuations - lines = iter(yield_lines(strs)) - - def scan_list(ITEM,TERMINATOR,line,p,groups,item_name): - - items = [] - - while not TERMINATOR(line,p): - if CONTINUE(line,p): - try: - line = lines.next(); p = 0 - except StopIteration: - raise ValueError( - "\\ must not appear on the last nonblank line" - ) - - match = ITEM(line,p) - if not match: - raise ValueError("Expected "+item_name+" in",line,"at",line[p:]) - - items.append(match.group(*groups)) - p = match.end() - - match = COMMA(line,p) - if match: - p = match.end() # skip the comma - elif not TERMINATOR(line,p): - raise ValueError( - "Expected ',' or end-of-list in",line,"at",line[p:] - ) - - match = TERMINATOR(line,p) - if match: p = match.end() # skip the terminator, if any - return line, p, items - - for line in lines: - match = DISTRO(line) - if not match: - raise ValueError("Missing distribution spec", line) - project_name = match.group(1) - p = match.end() - extras = [] - - match = OBRACKET(line,p) - if match: - p = match.end() - line, p, extras = scan_list( - DISTRO, CBRACKET, line, p, (1,), "'extra' name" - ) - - line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec") - specs = [(op,safe_version(val)) for op,val in specs] - yield Requirement(project_name, specs, extras) - - -def _sort_dists(dists): - tmp = [(dist.hashcmp,dist) for dist in dists] - tmp.sort() - dists[::-1] = [d for hc,d in tmp] - - - - - - - - - - - - - - - - - -class Requirement: - def __init__(self, project_name, specs, extras): - """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" - self.unsafe_name, project_name = project_name, safe_name(project_name) - self.project_name, self.key = project_name, project_name.lower() - index = [(parse_version(v),state_machine[op],op,v) for op,v in specs] - index.sort() - self.specs = [(op,ver) for parsed,trans,op,ver in index] - self.index, self.extras = index, tuple(map(safe_extra,extras)) - 
self.hashCmp = ( - self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]), - frozenset(self.extras) - ) - self.__hash = hash(self.hashCmp) - - def __str__(self): - specs = ','.join([''.join(s) for s in self.specs]) - extras = ','.join(self.extras) - if extras: extras = '[%s]' % extras - return '%s%s%s' % (self.project_name, extras, specs) - - def __eq__(self,other): - return isinstance(other,Requirement) and self.hashCmp==other.hashCmp - - def __contains__(self,item): - if isinstance(item,Distribution): - if item.key <> self.key: return False - if self.index: item = item.parsed_version # only get if we need it - elif isinstance(item,basestring): - item = parse_version(item) - last = None - compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1 - for parsed,trans,op,ver in self.index: - action = trans[compare(item,parsed)] # Indexing: 0, 1, -1 - if action=='F': return False - elif action=='T': return True - elif action=='+': last = True - elif action=='-' or last is None: last = False - if last is None: last = True # no rules encountered - return last - - - def __hash__(self): - return self.__hash - - def __repr__(self): return "Requirement.parse(%r)" % str(self) - - #@staticmethod - def parse(s, replacement=True): - reqs = list(parse_requirements(s)) - if reqs: - if len(reqs) == 1: - founded_req = reqs[0] - # if asked for setuptools distribution - # and if distribute is installed, we want to give - # distribute instead - if _override_setuptools(founded_req) and replacement: - distribute = list(parse_requirements('distribute')) - if len(distribute) == 1: - return distribute[0] - return founded_req - else: - return founded_req - - raise ValueError("Expected only one requirement", s) - raise ValueError("No requirements found", s) - - parse = staticmethod(parse) - -state_machine = { - # =>< - '<' : '--T', - '<=': 'T-T', - '>' : 'F+F', - '>=': 'T+F', - '==': 'T..', - '!=': 'F++', -} - - -def _override_setuptools(req): - """Return True when distribute wants to 
override a setuptools dependency. - - We want to override when the requirement is setuptools and the version is - a variant of 0.6. - - """ - if req.project_name == 'setuptools': - if not len(req.specs): - # Just setuptools: ok - return True - for comparator, version in req.specs: - if comparator in ['==', '>=', '>']: - if version.startswith('0.7'): - # We want some setuptools not from the 0.6 series. - return False - return True - return False - - -def _get_mro(cls): - """Get an mro for a type or classic class""" - if not isinstance(cls,type): - class cls(cls,object): pass - return cls.__mro__[1:] - return cls.__mro__ - -def _find_adapter(registry, ob): - """Return an adapter factory for `ob` from `registry`""" - for t in _get_mro(getattr(ob, '__class__', type(ob))): - if t in registry: - return registry[t] - - -def ensure_directory(path): - """Ensure that the parent directory of `path` exists""" - dirname = os.path.dirname(path) - if not os.path.isdir(dirname): - os.makedirs(dirname) - -def split_sections(s): - """Split a string or iterable thereof into (section,content) pairs - - Each ``section`` is a stripped version of the section header ("[section]") - and each ``content`` is a list of stripped lines excluding blank lines and - comment-only lines. If there are any such lines before the first section - header, they're returned in a first ``section`` of ``None``. 
- """ - section = None - content = [] - for line in yield_lines(s): - if line.startswith("["): - if line.endswith("]"): - if section or content: - yield section, content - section = line[1:-1].strip() - content = [] - else: - raise ValueError("Invalid section heading", line) - else: - content.append(line) - - # wrap up last segment - yield section, content - -def _mkstemp(*args,**kw): - from tempfile import mkstemp - old_open = os.open - try: - os.open = os_open # temporarily bypass sandboxing - return mkstemp(*args,**kw) - finally: - os.open = old_open # and then put it back - - -# Set up global resource manager -_manager = ResourceManager() -def _initialize(g): - for name in dir(_manager): - if not name.startswith('_'): - g[name] = getattr(_manager, name) -_initialize(globals()) - -# Prepare the master working set and make the ``require()`` API available -working_set = WorkingSet() -try: - # Does the main program list any requirements? - from __main__ import __requires__ -except ImportError: - pass # No: just use the default working set based on sys.path -else: - # Yes: ensure the requirements are met, by prefixing sys.path if necessary - try: - working_set.require(__requires__) - except VersionConflict: # try it without defaults already on sys.path - working_set = WorkingSet([]) # by starting with an empty path - for dist in working_set.resolve( - parse_requirements(__requires__), Environment() - ): - working_set.add(dist) - for entry in sys.path: # add any missing entries from sys.path - if entry not in working_set.entries: - working_set.add_entry(entry) - sys.path[:] = working_set.entries # then copy back to sys.path - -require = working_set.require -iter_entry_points = working_set.iter_entry_points -add_activation_listener = working_set.subscribe -run_script = working_set.run_script -run_main = run_script # backward compatibility -# Activate all distributions already on sys.path, and ensure that -# all distributions added to the working set in the future 
(e.g. by -# calling ``require()``) will get activated as well. -add_activation_listener(lambda dist: dist.activate()) -working_set.entries=[]; map(working_set.add_entry,sys.path) # match order - diff --git a/shell/make_shell_tarball.sh b/shell/make_shell_tarball.sh index 0fcc08b7a..227ec3daf 100755 --- a/shell/make_shell_tarball.sh +++ b/shell/make_shell_tarball.sh @@ -31,152 +31,49 @@ if [ "x${IMPALA_HOME}" == "x" ]; then exit 1 fi -if [ $# -eq 0 ]; then - echo "Must specify at least one python interpreter" +if [ $# -lt 2 ]; then + echo "Must provide pypi package and at least one python interpreter" exit 1 fi -# Detect whether IMPALA_HOME is a git repository. This is used below to allow extra -# checks when building ext-py. -pushd ${IMPALA_HOME} -IS_GIT_CHECKOUT=false -if git ls-files --error-unmatch > /dev/null 2>&1 ; then - IS_GIT_CHECKOUT=true - echo "IMPALA_HOME is a git repository" -else - echo "IMPALA_HOME is not a git repository" -fi; -popd - -IMPALA_VERSION_INFO_FILE=${IMPALA_HOME}/bin/version.info - -if [ ! -f ${IMPALA_VERSION_INFO_FILE} ]; then - echo "No version.info file found. Generating new version info" - ${IMPALA_HOME}/bin/save-version.sh -else - echo "Using existing version.info file." 
-fi - -VERSION=$(grep "VERSION: " ${IMPALA_VERSION_INFO_FILE} | awk '{print $2}') -GIT_HASH=$(grep "GIT_HASH: " ${IMPALA_VERSION_INFO_FILE} | awk '{print $2}') -BUILD_DATE=$(grep "BUILD_TIME: " ${IMPALA_VERSION_INFO_FILE} | cut -f 2- -d ' ') -cat ${IMPALA_VERSION_INFO_FILE} - +PYPI_PACKAGE=$1 +shift SHELL_HOME=${IMPALA_HOME}/shell BUILD_DIR=${SHELL_HOME}/build -TARBALL_ROOT=${BUILD_DIR}/impala-shell-${VERSION} +TARBALL_ROOT=${BUILD_DIR}/impala-shell-${IMPALA_VERSION} -THRIFT_GEN_PY_DIR="${SHELL_HOME}/gen-py" - -echo "Deleting all files in ${TARBALL_ROOT}/{gen-py,lib,ext-py*,legacy}" -rm -rf ${TARBALL_ROOT}/lib/* 2>&1 > /dev/null -rm -rf ${TARBALL_ROOT}/gen-py/* 2>&1 > /dev/null -rm -rf ${TARBALL_ROOT}/ext-py* 2>&1 > /dev/null -rm -rf ${TARBALL_ROOT}/legacy/* 2>&1 > /dev/null -mkdir -p ${TARBALL_ROOT}/lib -mkdir -p ${TARBALL_ROOT}/legacy - -rm -f ${THRIFT_GEN_PY_DIR}/impala_build_version.py -cat > ${THRIFT_GEN_PY_DIR}/impala_build_version.py < /dev/null 2>&1 ; then - echo "WARNING: ${MODULE} is not tracked by the git repository, skipping..." - continue; - fi - pushd ${MODULE} > /dev/null 2>&1 - echo "Cleaning up old build artifacts." 
- rm -rf dist 2>&1 > /dev/null - rm -rf build 2>&1 > /dev/null - echo "Building ${MODULE} with ${PYTHON_EXE}" - if [[ "$MODULE" == *"/bitarray"* ]]; then - # Need to use setuptools to build wheel for bitarray module - python -c "import setuptools; exec(open('setup.py').read())" -q bdist_wheel - else - python setup.py -q bdist_wheel clean - fi - pip install --no-deps --cache "${PIP_CACHE}" \ - --target ${TARBALL_ROOT}/ext-py${PYTHON_VERSION} dist/*.whl - done - popd 2>&1 > /dev/null + BUILD_TMP_DIR="$(mktemp -d)" + + echo "Deleting all files in ${TARBALL_ROOT}/install_py${PYTHON_VERSION}" + rm -rf ${TARBALL_ROOT}/install_py${PYTHON_VERSION} 2>&1 > /dev/null + echo "Installing for python ${PYTHON_VERSION}" + # Use pip that matches the major version + if [[ $PYTHON_MAJOR_VERSION == 2 ]]; then + source ${IMPALA_HOME}/shell/build/python2_venv/bin/activate + else + source ${IMPALA_HOME}/shell/build/python3_venv/bin/activate + fi + mkdir -p ${TARBALL_ROOT}/install_py${PYTHON_VERSION} + pip install --cache ${PIP_CACHE} \ + --target ${TARBALL_ROOT}/install_py${PYTHON_VERSION} ${PYPI_PACKAGE} + # We don't need the impala-shell binary for the installation. It contains + # a weird shebang from the virtualenv, so it is worth removing it. 
+ rm ${TARBALL_ROOT}/install_py${PYTHON_VERSION}/bin/impala-shell + # Cleanup temp build directory + rm -rf ${BUILD_TMP_DIR} done -# Copy all the shell files into the build dir - -cp -r ${THRIFT_GEN_PY_DIR} ${TARBALL_ROOT} -cp ${SHELL_HOME}/option_parser.py ${TARBALL_ROOT}/lib -cp ${SHELL_HOME}/impala_shell_config_defaults.py ${TARBALL_ROOT}/lib -cp ${SHELL_HOME}/impala_client.py ${TARBALL_ROOT}/lib -cp ${SHELL_HOME}/TSSLSocketWithWildcardSAN.py ${TARBALL_ROOT}/lib -cp ${SHELL_HOME}/ImpalaHttpClient.py ${TARBALL_ROOT}/lib -cp ${SHELL_HOME}/shell_exceptions.py ${TARBALL_ROOT}/lib -cp ${SHELL_HOME}/shell_output.py ${TARBALL_ROOT}/lib -cp ${SHELL_HOME}/cookie_util.py ${TARBALL_ROOT}/lib -cp ${SHELL_HOME}/kerberos_util.py ${TARBALL_ROOT}/lib -cp ${SHELL_HOME}/value_converter.py ${TARBALL_ROOT}/lib +# Copy the impala-shell driver script into the tarball root cp ${SHELL_HOME}/impala-shell ${TARBALL_ROOT} -cp ${SHELL_HOME}/impala_shell.py ${TARBALL_ROOT} -cp ${SHELL_HOME}/compatibility.py ${TARBALL_ROOT} -cp ${SHELL_HOME}/thrift_printer.py ${TARBALL_ROOT} - -cp ${SHELL_HOME}/legacy/pkg_resources.py ${TARBALL_ROOT}/legacy pushd ${BUILD_DIR} > /dev/null echo "Making tarball in ${BUILD_DIR}" -tar czf ${BUILD_DIR}/impala-shell-${VERSION}.tar.gz --exclude="*.pyc" \ - ./impala-shell-${VERSION}/ || popd 2>&1 > /dev/null +tar czf ${BUILD_DIR}/impala-shell-${IMPALA_VERSION}.tar.gz --exclude="*.pyc" \ + ./impala-shell-${IMPALA_VERSION}/ || popd 2>&1 > /dev/null diff --git a/shell/packaging/make_python_package.sh b/shell/packaging/make_python_package.sh index a5af9d1b3..5ec12047a 100755 --- a/shell/packaging/make_python_package.sh +++ b/shell/packaging/make_python_package.sh @@ -27,8 +27,8 @@ # Until the thrift-generated python files in ${IMPALA_HOME}/shell/gen-py # have been created by the build process, this script will not work. 
# It also relies upon the impala_build_version.py file created by the -# parent packaging script, ${IMPALA_HOME}/shell/make_shell_tarball.sh, -# which needs to be run before this script will work. +# ${IMPALA_HOME}/shell/gen_impala_build_version.sh script, which needs +# to run before this script will work. # # After those files exist, however, this script can be run again at will.