diff --git a/.asf.yaml b/.asf.yaml new file mode 100644 index 0000000000..0bacf232d1 --- /dev/null +++ b/.asf.yaml @@ -0,0 +1,41 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +notifications: + commits: commits@cassandra.apache.org + issues: commits@cassandra.apache.org + pullrequests: pr@cassandra.apache.org + jira_options: link worklog + +github: + description: "Python Driver for Apache Cassandra®" + homepage: https://docs.datastax.com/en/developer/python-driver/3.29/index.html + enabled_merge_buttons: + squash: false + merge: false + rebase: true + features: + wiki: false + issues: false + projects: false + discussions: false + autolink_jira: + - CASSANDRA + - CASSPYTHON + protected_branches: + trunk: + required_linear_history: true diff --git a/.gitignore b/.gitignore index 5c9cbec957..7983f44b87 100644 --- a/.gitignore +++ b/.gitignore @@ -11,7 +11,6 @@ build MANIFEST dist .coverage -nosetests.xml cover/ docs/_build/ tests/integration/ccm @@ -42,3 +41,6 @@ tests/unit/cython/bytesio_testhelper.c #iPython *.ipynb +venv +docs/venv +.eggs \ No newline at end of file diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index b485e21227..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,33 +0,0 @@ -dist: xenial -sudo: false - -language: python -python: - - "2.7" - - "3.5" - - "3.6" - - "3.7" - - "pypy2.7-6.0" - - "pypy3.5" - -env: - - CASS_DRIVER_NO_CYTHON=1 - -addons: - apt: - packages: - - build-essential - - python-dev - - pypy-dev - - libc-ares-dev - - libev4 - - libev-dev - -install: - - pip install tox-travis - - if [[ $TRAVIS_PYTHON_VERSION != pypy3.5 ]]; then pip install lz4; fi - -script: - - tox - - tox -e gevent_loop - - tox -e eventlet_loop diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 53a5e22436..6da84ae7a4 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,3 +1,160 @@ +3.29.3 +====== +October 20, 2025 + +Features +-------- +* Upgraded cython to 3.0.x (PR 1221 & PYTHON-1390) +* Add support for DSE 6.9.x and HCD releases to CI (PYTHON-1402) +* Add execute_concurrent_async and expose execute_concurrent_* in Session (PR 1229) + +Bug Fixes +--------- +* Update geomet to align with requirements.txt (PR 1236) +* Connection failure to SNI endpoint when first host is unavailable (PYTHON-1419) +* Maintain compatibility with CPython 3.13 (PR 1242) + +Others +------ +* Remove duplicated condition in primary key check (PR 1240) +* Remove Python 3.8 which reached EOL on Oct 2024, update cryptography lib to 42 (PR 1247) +* Remove obsolete urllib2 from ez_setup.py (PR 1248) +* Remove stale dependency on sure (PR 1227) +* Removed 2.7 Cpython defines (PR 1252) + +3.29.2 +====== +September 9, 2024 + +Features +-------- +* Convert to pytest for running unit and integration tests (PYTHON-1297) +* Add 
support for Cassandra 4.1.x and 5.0 releases to CI (PYTHON-1393) +* Extend driver vector support to arbitrary subtypes and fix handling of variable length types (PYTHON-1369) + +Bug Fixes +--------- +* Python NumpyProtocolHandler does not work with NumPy 1.24.0 or greater (PYTHON-1359) +* cibuildwheel appears to not be stripping Cython-generated shared objects (PYTHON-1387) +* Windows build for Python 3.12 compiled without libev support (PYTHON-1386) + +Others +------ +* Update README.rst with badges for version and license (PR 1210) +* Remove dependency on old mock external module (PR 1201) +* Removed future print_function, division, and with and some pre 3.7 handling (PR 1208) +* Update geomet dependency (PR 1207) +* Remove problematic escape sequences in some docstrings to avoid SyntaxWarning in Python 3.12 (PR 1205) +* Use timezone-aware API to avoid deprecated warning (PR 1213) + +3.29.1 +====== +March 19, 2024 + +Bug Fixes +--------- +* cassandra-driver for Python 3.12 Linux is compiled without libev support (PYTHON-1378) +* Consider moving to native wheel builds for OS X and removing universal2 wheels (PYTHON-1379) + +3.29.0 +====== +December 19, 2023 + +Features +-------- +* Add support for Python 3.9 through 3.12, drop support for 3.7 (PYTHON-1283) +* Removal of dependency on six module (PR 1172) +* Raise explicit exception when deserializing a vector with a subtype that isn’t a constant size (PYTHON-1371) + +Others +------ +* Remove outdated Python pre-3.7 references (PR 1186) +* Remove backup(.bak) files (PR 1185) +* Fix doc typo in add_callbacks (PR 1177) + +3.28.0 +====== +June 5, 2023 + +Features +-------- +* Add support for vector type (PYTHON-1352) +* Cryptography module is now an optional dependency (PYTHON-1351) + +Bug Fixes +--------- +* Store IV along with encrypted text when using column-level encryption (PYTHON-1350) +* Create session-specific protocol handlers to contain session-specific CLE policies (PYTHON-1356) + +Others +------ +* Use Cython for smoke builds (PYTHON-1343) +* Don't fail when inserting UDTs with prepared queries with some missing fields (PR 1151) +* Convert print statement to function in docs (PR 1157) +* Update comment for retry policy (DOC-3278) +* Added error handling blog reference (DOC-2813) + +3.27.0 +====== +May 1, 2023 + +Features +-------- +* Add support for client-side encryption (PYTHON-1341) + +3.26.0 +====== +March 13, 2023 + +Features +-------- +* Add support for execution profiles in execute_concurrent (PR 1122) + +Bug Fixes +--------- +* Handle empty non-final result pages (PR 1110) +* Do not re-use stream IDs for in-flight requests (PR 1114) +* Asyncore race condition cause logging exception on shutdown (PYTHON-1266) + +Others +------ +* Fix deprecation warning in query tracing (PR 1103) +* Remove mutable default values from some tests (PR 1116) +* Remove dependency on unittest2 (PYTHON-1289) +* Fix deprecation warnings for asyncio.coroutine annotation in asyncioreactor (PYTHON-1290) +* Fix typos in source files (PR 1126) +* HostFilterPolicyInitTest fix for Python 3.11 (PR 1131) +* Fix for DontPrepareOnIgnoredHostsTest (PYTHON-1287) +* tests.integration.simulacron.test_connection failures (PYTHON-1304) +* tests.integration.standard.test_single_interface.py appears to be failing for C* 4.0 (PYTHON-1329) +* Authentication tests appear to be failing fraudulently (PYTHON-1328) +* PreparedStatementTests.test_fail_if_different_query_id_on_reprepare() failing unexpectedly (PTYHON-1327) +* Refactor deprecated unittest aliases for Python 
3.11 compatibility (PR 1112) + +Deprecations +------------ +* This release removes support for Python 2.7.x as well as Python 3.5.x and 3.6.x + +3.25.0 +====== +March 18, 2021 + +Features +-------- +* Ensure the driver can connect when invalid peer hosts are in system.peers (PYTHON-1260) +* Implement protocol v5 checksumming (PYTHON-1258) +* Fix the default cqlengine connection mechanism to work with Astra (PYTHON-1265) + +Bug Fixes +--------- +* Asyncore race condition cause logging exception on shutdown (PYTHON-1266) +* Update list of reserved keywords (PYTHON-1269) + +Others +------ +* Drop Python 3.4 support (PYTHON-1220) +* Update security documentation and examples to use PROTOCOL_TLS (PYTHON-1264) + 3.24.0 ====== June 18, 2020 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index cdd742c063..f71ebabdbb 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -5,7 +5,8 @@ Contributions are welcome in the form of bug reports or pull requests. Bug Reports ----------- -Quality bug reports are welcome at the `DataStax Python Driver JIRA `_. +Quality bug reports are welcome at the `CASSPYTHON project `_ +of the ASF JIRA. There are plenty of `good resources `_ describing how to create good bug reports. They will not be repeated in detail here, but in general, the bug report include where appropriate: @@ -18,15 +19,10 @@ good bug reports. They will not be repeated in detail here, but in general, the Pull Requests ------------- If you're able to fix a bug yourself, you can `fork the repository `_ and submit a `Pull Request `_ with the fix. -Please include tests demonstrating the issue and fix. For examples of how to run the tests, consult the `dev README `_. - -Contribution License Agreement ------------------------------- -To protect the community, all contributors are required to `sign the DataStax Contribution License Agreement `_. The process is completely electronic and should only take a few minutes. +Please include tests demonstrating the issue and fix. For examples of how to run the tests, consult the `dev README `_. Design and Implementation Guidelines ------------------------------------ -- We support Python 2.7+, so any changes must work in any of these runtimes (we use ``six``, ``futures``, and some internal backports for compatability) - We have integrations (notably Cassandra cqlsh) that require pure Python and minimal external dependencies. We try to avoid new external dependencies. Where compiled extensions are concerned, there should always be a pure Python fallback implementation. - This project follows `semantic versioning `_, so breaking API changes will only be introduced in major versions. - Legacy ``cqlengine`` has varying degrees of overreaching client-side validation. Going forward, we will avoid client validation where server feedback is adequate and not overly expensive. diff --git a/Jenkinsfile b/Jenkinsfile index 87b20804ca..dc70acc6c5 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,43 +1,195 @@ #!groovy - -def initializeEnvironment() { - env.DRIVER_DISPLAY_NAME = 'Cassandra Python Driver' - env.DRIVER_METRIC_TYPE = 'oss' +/* + +There are multiple combinations to test the python driver. + +Test Profiles: + + Full: Execute all unit and integration tests, including long tests. + Standard: Execute unit and integration tests. + Smoke Tests: Execute a small subset of tests. + EVENT_LOOP: Execute a small subset of tests selected to test EVENT_LOOPs. + +Matrix Types: + + Full: All server versions, python runtimes tested with and without Cython. 
+ Cassandra: All cassandra server versions. + Dse: All dse server versions. + Hcd: All hcd server versions. + Smoke: CI-friendly configurations. Currently-supported Python version + modern Cassandra/DSE instances. + We also avoid cython since it's tested as part of the nightlies + +Parameters: + + EVENT_LOOP: 'LIBEV' (Default), 'GEVENT', 'EVENTLET', 'ASYNCIO', 'ASYNCORE', 'TWISTED' + CYTHON: Default, 'True', 'False' + +*/ + +@Library('dsdrivers-pipeline-lib@develop') +import com.datastax.jenkins.drivers.python.Slack + +slack = new Slack() + +DEFAULT_CASSANDRA = ['3.11', '4.0', '4.1', '5.0'] +DEFAULT_DSE = ['dse-5.1.35', 'dse-6.8.30', 'dse-6.9.0'] +DEFAULT_HCD = ['hcd-1.0.0'] +DEFAULT_RUNTIME = ['3.9.23', '3.10.18', '3.11.13', '3.12.11', '3.13.5'] +DEFAULT_CYTHON = ["True", "False"] +matrices = [ + "FULL": [ + "SERVER": DEFAULT_CASSANDRA + DEFAULT_DSE, + "RUNTIME": DEFAULT_RUNTIME, + "CYTHON": DEFAULT_CYTHON + ], + "CASSANDRA": [ + "SERVER": DEFAULT_CASSANDRA, + "RUNTIME": DEFAULT_RUNTIME, + "CYTHON": DEFAULT_CYTHON + ], + "DSE": [ + "SERVER": DEFAULT_DSE, + "RUNTIME": DEFAULT_RUNTIME, + "CYTHON": DEFAULT_CYTHON + ], + "SMOKE": [ + "SERVER": DEFAULT_CASSANDRA.takeRight(2) + DEFAULT_DSE.takeRight(2) + DEFAULT_HCD.takeRight(1), + "RUNTIME": DEFAULT_RUNTIME.take(1) + DEFAULT_RUNTIME.takeRight(1), + "CYTHON": ["True"] + ] +] + +def initializeSlackContext() { + /* + Based on git branch/commit, configure the build context and env vars. + */ + + def driver_display_name = 'Cassandra Python Driver' if (env.GIT_URL.contains('riptano/python-driver')) { - env.DRIVER_DISPLAY_NAME = 'private ' + env.DRIVER_DISPLAY_NAME - env.DRIVER_METRIC_TYPE = 'oss-private' + driver_display_name = 'private ' + driver_display_name } else if (env.GIT_URL.contains('python-dse-driver')) { - env.DRIVER_DISPLAY_NAME = 'DSE Python Driver' - env.DRIVER_METRIC_TYPE = 'dse' + driver_display_name = 'DSE Python Driver' } - + env.DRIVER_DISPLAY_NAME = driver_display_name env.GIT_SHA = "${env.GIT_COMMIT.take(7)}" env.GITHUB_PROJECT_URL = "https://${GIT_URL.replaceFirst(/(git@|http:\/\/|https:\/\/)/, '').replace(':', '/').replace('.git', '')}" - env.GITHUB_BRANCH_URL = "${GITHUB_PROJECT_URL}/tree/${env.BRANCH_NAME}" - env.GITHUB_COMMIT_URL = "${GITHUB_PROJECT_URL}/commit/${env.GIT_COMMIT}" + env.GITHUB_BRANCH_URL = "${env.GITHUB_PROJECT_URL}/tree/${env.BRANCH_NAME}" + env.GITHUB_COMMIT_URL = "${env.GITHUB_PROJECT_URL}/commit/${env.GIT_COMMIT}" +} - sh label: 'Assign Python global environment', script: '''#!/bin/bash -lex - pyenv global ${PYTHON_VERSION} - ''' +def getBuildContext() { + /* + Based on schedule and parameters, configure the build context and env vars. + */ - sh label: 'Install socat; required for unix socket tests', script: '''#!/bin/bash -lex - sudo apt-get install socat - ''' + def PROFILE = "${params.PROFILE}" + def EVENT_LOOP = "${params.EVENT_LOOP.toLowerCase()}" + + matrixType = params.MATRIX != "DEFAULT" ? 
params.MATRIX : "SMOKE" + matrix = matrices[matrixType].clone() + + // Check if parameters were set explicitly + if (params.CYTHON != "DEFAULT") { + matrix["CYTHON"] = [params.CYTHON] + } + + if (params.SERVER_VERSION != "DEFAULT") { + matrix["SERVER"] = [params.SERVER_VERSION] + } + + if (params.PYTHON_VERSION != "DEFAULT") { + matrix["RUNTIME"] = [params.PYTHON_VERSION] + } + + if (params.CI_SCHEDULE == "WEEKNIGHTS") { + matrix["SERVER"] = params.CI_SCHEDULE_SERVER_VERSION.split(' ') + matrix["RUNTIME"] = params.CI_SCHEDULE_PYTHON_VERSION.split(' ') + } + + context = [ + vars: [ + "PROFILE=${PROFILE}", + "EVENT_LOOP=${EVENT_LOOP}" + ], + matrix: matrix + ] + + return context +} + +def buildAndTest(context) { + initializeEnvironment() + installDriverAndCompileExtensions() - sh label: 'Install the latest setuptools', script: '''#!/bin/bash -lex + try { + executeTests() + } finally { + junit testResults: '*_results.xml' + } +} + +def getMatrixBuilds(buildContext) { + def tasks = [:] + matrix = buildContext.matrix + + matrix["SERVER"].each { serverVersion -> + matrix["RUNTIME"].each { runtimeVersion -> + matrix["CYTHON"].each { cythonFlag -> + def taskVars = [ + "CASSANDRA_VERSION=${serverVersion}", + "PYTHON_VERSION=${runtimeVersion}", + "CYTHON_ENABLED=${cythonFlag}" + ] + def cythonDesc = cythonFlag == "True" ? ", Cython": "" + tasks["${serverVersion}, py${runtimeVersion}${cythonDesc}"] = { + node("${OS_VERSION}") { + scm_variables = checkout scm + env.GIT_COMMIT = scm_variables.get('GIT_COMMIT') + env.GIT_URL = scm_variables.get('GIT_URL') + initializeSlackContext() + + if (env.BUILD_STATED_SLACK_NOTIFIED != 'true') { + slack.notifyChannel() + } + + withEnv(taskVars) { + buildAndTest(context) + } + } + } + } + } + } + return tasks +} + +def initializeEnvironment() { + sh label: 'Initialize the environment', script: '''#!/bin/bash -lex + pyenv global ${PYTHON_VERSION} + sudo apt-get install socat pip install --upgrade pip pip install -U setuptools - ''' - sh label: 'Install CCM', script: '''#!/bin/bash -lex + # install a version of pyyaml<6.0 compatible with ccm-3.1.5 as of Aug 2023 + # this works around the python-3.10+ compatibility problem as described in DSP-23524 + pip install wheel + pip install "Cython<3.0" "pyyaml<6.0" --no-build-isolation pip install ${HOME}/ccm ''' - // Determine if server version is Apache Cassandra� or DataStax Enterprise + // Determine if server version is Apache CassandraⓇ or DataStax Enterprise if (env.CASSANDRA_VERSION.split('-')[0] == 'dse') { - sh label: 'Install DataStax Enterprise requirements', script: '''#!/bin/bash -lex - pip install -r test-datastax-requirements.txt - ''' + if (env.PYTHON_VERSION =~ /3\.12\.\d+/) { + echo "Cannot install DSE dependencies for Python 3.12.x; installing Apache CassandraⓇ requirements only. See PYTHON-1368 for more detail." 
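+          // The 3.12 guard above matches any 3.12.x patch release; when it fires, only the
+          // OSS test requirements are installed, and the DSE integration tests are likewise
+          // skipped by the matching check in executeStandardTests() (PYTHON-1368).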
+ sh label: 'Install Apache CassandraⓇ requirements', script: '''#!/bin/bash -lex + pip install -r test-requirements.txt + ''' + } + else { + sh label: 'Install DataStax Enterprise requirements', script: '''#!/bin/bash -lex + pip install -r test-datastax-requirements.txt + ''' + } } else { sh label: 'Install Apache CassandraⓇ requirements', script: '''#!/bin/bash -lex pip install -r test-requirements.txt @@ -46,11 +198,11 @@ def initializeEnvironment() { sh label: 'Uninstall the geomet dependency since it is not required for Cassandra', script: '''#!/bin/bash -lex pip uninstall -y geomet ''' - } sh label: 'Install unit test modules', script: '''#!/bin/bash -lex - pip install nose-ignore-docstring nose-exclude service_identity + pip install --no-deps nose-ignore-docstring nose-exclude + pip install service_identity ''' if (env.CYTHON_ENABLED == 'True') { @@ -63,6 +215,34 @@ def initializeEnvironment() { . ${CCM_ENVIRONMENT_SHELL} ${CASSANDRA_VERSION} ''' + if (env.CASSANDRA_VERSION.split('-')[0] == 'dse') { + env.DSE_FIXED_VERSION = env.CASSANDRA_VERSION.split('-')[1] + sh label: 'Update environment for DataStax Enterprise', script: '''#!/bin/bash -le + cat >> ${HOME}/environment.txt << ENVIRONMENT_EOF +CCM_CASSANDRA_VERSION=${DSE_FIXED_VERSION} # maintain for backwards compatibility +CCM_VERSION=${DSE_FIXED_VERSION} +CCM_SERVER_TYPE=dse +DSE_VERSION=${DSE_FIXED_VERSION} +CCM_IS_DSE=true +CCM_BRANCH=${DSE_FIXED_VERSION} +DSE_BRANCH=${DSE_FIXED_VERSION} +ENVIRONMENT_EOF + ''' + } else if (env.CASSANDRA_VERSION.split('-')[0] == 'hcd') { + env.HCD_FIXED_VERSION = env.CASSANDRA_VERSION.split('-')[1] + sh label: 'Update environment for DataStax Enterprise', script: '''#!/bin/bash -le + cat >> ${HOME}/environment.txt << ENVIRONMENT_EOF +CCM_CASSANDRA_VERSION=${HCD_FIXED_VERSION} # maintain for backwards compatibility +CCM_VERSION=${HCD_FIXED_VERSION} +CCM_SERVER_TYPE=hcd +HCD_VERSION=${HCD_FIXED_VERSION} +CCM_IS_HCD=true +CCM_BRANCH=${HCD_FIXED_VERSION} +HCD_BRANCH=${HCD_FIXED_VERSION} +ENVIRONMENT_EOF + ''' + } + sh label: 'Display Python and environment information', script: '''#!/bin/bash -le # Load CCM environment variables set -o allexport @@ -71,6 +251,7 @@ def initializeEnvironment() { python --version pip --version + pip freeze printenv | sort ''' } @@ -89,80 +270,135 @@ def installDriverAndCompileExtensions() { def executeStandardTests() { - sh label: 'Execute unit tests', script: '''#!/bin/bash -lex - # Load CCM environment variables - set -o allexport - . ${HOME}/environment.txt - set +o allexport + try { + sh label: 'Execute unit tests', script: '''#!/bin/bash -lex + # Load CCM environment variables + set -o allexport + . 
${HOME}/environment.txt + set +o allexport - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_results.xml tests/unit/ || true - EVENT_LOOP_MANAGER=eventlet VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_eventlet_results.xml tests/unit/io/test_eventletreactor.py || true - EVENT_LOOP_MANAGER=gevent VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_gevent_results.xml tests/unit/io/test_geventreactor.py || true - ''' + failure=0 + EVENT_LOOP=${EVENT_LOOP} VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --junit-xml=unit_results.xml tests/unit/ || failure=1 + EVENT_LOOP_MANAGER=eventlet VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --junit-xml=unit_eventlet_results.xml tests/unit/io/test_eventletreactor.py || failure=1 + EVENT_LOOP_MANAGER=gevent VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --junit-xml=unit_gevent_results.xml tests/unit/io/test_geventreactor.py || failure=1 + exit $failure + ''' + } catch (err) { + currentBuild.result = 'UNSTABLE' + } - sh label: 'Execute Simulacron integration tests', script: '''#!/bin/bash -lex - # Load CCM environment variables - set -o allexport - . ${HOME}/environment.txt - set +o allexport + try { + sh label: 'Execute Simulacron integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variables + set -o allexport + . ${HOME}/environment.txt + set +o allexport - SIMULACRON_JAR="${HOME}/simulacron.jar" - SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_results.xml tests/integration/simulacron/ || true + . 
${JABBA_SHELL} + jabba use 1.8 - # Run backpressure tests separately to avoid memory issue - SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_1_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_paused_connections || true - SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_2_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_queued_requests_timeout || true - SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_3_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_cluster_busy || true - SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_4_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_node_busy || true - ''' + failure=0 + SIMULACRON_JAR="${HOME}/simulacron.jar" + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP=${EVENT_LOOP} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} HCD_VERSION=${HCD_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --ignore=test_backpressure.py --junit-xml=simulacron_results.xml tests/integration/simulacron/ || true - sh label: 'Execute CQL engine integration tests', script: '''#!/bin/bash -lex - # Load CCM environment variables - set -o allexport - . 
${HOME}/environment.txt - set +o allexport + # Run backpressure tests separately to avoid memory issue + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP=${EVENT_LOOP} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} HCD_VERSION=${HCD_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --ignore=test_backpressure.py --junit-xml=simulacron_backpressure_1_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_paused_connections || failure=1 + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP=${EVENT_LOOP} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} HCD_VERSION=${HCD_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --ignore=test_backpressure.py --junit-xml=simulacron_backpressure_2_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_queued_requests_timeout || failure=1 + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP=${EVENT_LOOP} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} HCD_VERSION=${HCD_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --ignore=test_backpressure.py --junit-xml=simulacron_backpressure_3_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_cluster_busy || failure=1 + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP=${EVENT_LOOP} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} HCD_VERSION=${HCD_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --ignore=test_backpressure.py --junit-xml=simulacron_backpressure_4_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_node_busy || failure=1 + exit $failure + ''' + } catch (err) { + currentBuild.result = 'UNSTABLE' + } - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=cqle_results.xml tests/integration/cqlengine/ || true - ''' + try { + sh label: 'Execute CQL engine integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variables + set -o allexport + . ${HOME}/environment.txt + set +o allexport - sh label: 'Execute Apache CassandraⓇ integration tests', script: '''#!/bin/bash -lex - # Load CCM environment variables - set -o allexport - . ${HOME}/environment.txt - set +o allexport + . 
${JABBA_SHELL} + jabba use 1.8 - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml tests/integration/standard/ || true - ''' + EVENT_LOOP=${EVENT_LOOP} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} HCD_VERSION=${HCD_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --junit-xml=cqle_results.xml tests/integration/cqlengine/ + ''' + } catch (err) { + currentBuild.result = 'UNSTABLE' + } - if (env.CASSANDRA_VERSION.split('-')[0] == 'dse' && env.CASSANDRA_VERSION.split('-')[1] != '4.8') { - sh label: 'Execute DataStax Enterprise integration tests', script: '''#!/bin/bash -lex - # Load CCM environment variable + try { + sh label: 'Execute Apache CassandraⓇ integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variables set -o allexport . ${HOME}/environment.txt set +o allexport - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} DSE_VERSION=${DSE_VERSION} ADS_HOME="${HOME}/" VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=dse_results.xml tests/integration/advanced/ || true + . ${JABBA_SHELL} + jabba use 1.8 + + EVENT_LOOP=${EVENT_LOOP} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} HCD_VERSION=${HCD_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --junit-xml=standard_results.xml tests/integration/standard/ ''' + } catch (err) { + currentBuild.result = 'UNSTABLE' } - sh label: 'Execute DataStax Constellation integration tests', script: '''#!/bin/bash -lex - # Load CCM environment variable - set -o allexport - . ${HOME}/environment.txt - set +o allexport + if (env.CASSANDRA_VERSION.split('-')[0] == 'dse' && env.CASSANDRA_VERSION.split('-')[1] != '4.8') { + if (env.PYTHON_VERSION =~ /3\.12\.\d+/) { + echo "Cannot install DSE dependencies for Python 3.12.x. See PYTHON-1368 for more detail." + } + else { + try { + sh label: 'Execute DataStax Enterprise integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variable + set -o allexport + . ${HOME}/environment.txt + set +o allexport - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CLOUD_PROXY_PATH="${HOME}/proxy/" CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=advanced_results.xml tests/integration/cloud/ || true - ''' + . 
${JABBA_SHELL} + jabba use 1.8 - if (env.EXECUTE_LONG_TESTS == 'True') { - sh label: 'Execute long running integration tests', script: '''#!/bin/bash -lex + EVENT_LOOP=${EVENT_LOOP} CASSANDRA_DIR=${CCM_INSTALL_DIR} DSE_VERSION=${DSE_VERSION} HCD_VERSION=${HCD_VERSION} ADS_HOME="${HOME}/" VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --junit-xml=dse_results.xml tests/integration/advanced/ + ''' + } catch (err) { + currentBuild.result = 'UNSTABLE' + } + } + } + + try { + sh label: 'Execute DataStax Astra integration tests', script: '''#!/bin/bash -lex # Load CCM environment variable set -o allexport . ${HOME}/environment.txt set +o allexport - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --exclude-dir=tests/integration/long/upgrade --with-ignore-docstrings --with-xunit --xunit-file=long_results.xml tests/integration/long/ || true + . ${JABBA_SHELL} + jabba use 1.8 + + EVENT_LOOP=${EVENT_LOOP} CLOUD_PROXY_PATH="${HOME}/proxy/" CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --junit-xml=advanced_results.xml tests/integration/cloud/ ''' + } catch (err) { + currentBuild.result = 'UNSTABLE' + } + + if (env.PROFILE == 'FULL') { + try { + sh label: 'Execute long running integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variable + set -o allexport + . ${HOME}/environment.txt + set +o allexport + + . ${JABBA_SHELL} + jabba use 1.8 + + EVENT_LOOP=${EVENT_LOOP} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} HCD_VERSION=${HCD_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --ignore=tests/integration/long/upgrade --junit-xml=long_results.xml tests/integration/long/ + ''' + } catch (err) { + currentBuild.result = 'UNSTABLE' + } } } @@ -173,7 +409,10 @@ def executeDseSmokeTests() { . ${HOME}/environment.txt set +o allexport - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} DSE_VERSION=${DSE_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml tests/integration/standard/test_dse.py || true + . ${JABBA_SHELL} + jabba use 1.8 + + EVENT_LOOP=${EVENT_LOOP} CCM_ARGS="${CCM_ARGS}" CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} DSE_VERSION=${DSE_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --junit-xml=standard_results.xml tests/integration/standard/test_dse.py ''' } @@ -184,6 +423,9 @@ def executeEventLoopTests() { . ${HOME}/environment.txt set +o allexport + . 
${JABBA_SHELL} + jabba use 1.8 + EVENT_LOOP_TESTS=( "tests/integration/standard/test_cluster.py" "tests/integration/standard/test_concurrent.py" @@ -194,192 +436,58 @@ def executeEventLoopTests() { "tests/integration/simulacron/test_endpoint.py" "tests/integration/long/test_ssl.py" ) - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml ${EVENT_LOOP_TESTS[@]} || true - ''' -} - -def executeUpgradeTests() { - sh label: 'Execute profile upgrade integration tests', script: '''#!/bin/bash -lex - # Load CCM environment variable - set -o allexport - . ${HOME}/environment.txt - set +o allexport - - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=upgrade_results.xml tests/integration/upgrade || true + EVENT_LOOP=${EVENT_LOOP} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} HCD_VERSION=${HCD_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} JVM_EXTRA_OPTS="$JVM_EXTRA_OPTS -Xss384k" pytest -s -v --log-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --junit-xml=standard_results.xml ${EVENT_LOOP_TESTS[@]} ''' } def executeTests() { - switch(params.PROFILE) { + switch(env.PROFILE) { case 'DSE-SMOKE-TEST': executeDseSmokeTests() break - case 'EVENT-LOOP': + case 'EVENT_LOOP': executeEventLoopTests() break - case 'UPGRADE': - executeUpgradeTests() - break default: executeStandardTests() break } } -def notifySlack(status = 'started') { - // Set the global pipeline scoped environment (this is above each matrix) - env.BUILD_STATED_SLACK_NOTIFIED = 'true' - def buildType = 'Commit' - if (params.CI_SCHEDULE != 'DO-NOT-CHANGE-THIS-SELECTION') { - buildType = "${params.CI_SCHEDULE.toLowerCase().capitalize()}" - } - - def color = 'good' // Green - if (status.equalsIgnoreCase('aborted')) { - color = '808080' // Grey - } else if (status.equalsIgnoreCase('unstable')) { - color = 'warning' // Orange - } else if (status.equalsIgnoreCase('failed')) { - color = 'danger' // Red - } - - def message = """Build ${status} for ${env.DRIVER_DISPLAY_NAME} [${buildType}] -<${env.GITHUB_BRANCH_URL}|${env.BRANCH_NAME}> - <${env.RUN_DISPLAY_URL}|#${env.BUILD_NUMBER}> - <${env.GITHUB_COMMIT_URL}|${env.GIT_SHA}>""" - if (params.CI_SCHEDULE != 'DO-NOT-CHANGE-THIS-SELECTION') { - message += " - ${params.CI_SCHEDULE_PYTHON_VERSION} - ${params.EVENT_LOOP_MANAGER}" - } - if (!status.equalsIgnoreCase('Started')) { - message += """ -${status} after ${currentBuild.durationString - ' and counting'}""" - } - - slackSend color: "${color}", - channel: "#python-driver-dev-bots", - message: "${message}" -} - -def submitCIMetrics(buildType) { - long durationMs = currentBuild.duration - long durationSec = durationMs / 1000 - long nowSec = (currentBuild.startTimeInMillis + durationMs) / 1000 - def branchNameNoPeriods = env.BRANCH_NAME.replaceAll('\\.', '_') - def durationMetric = "okr.ci.python.${env.DRIVER_METRIC_TYPE}.${buildType}.${branchNameNoPeriods} ${durationSec} ${nowSec}" - - timeout(time: 1, unit: 'MINUTES') { - withCredentials([string(credentialsId: 
'lab-grafana-address', variable: 'LAB_GRAFANA_ADDRESS'), - string(credentialsId: 'lab-grafana-port', variable: 'LAB_GRAFANA_PORT')]) { - withEnv(["DURATION_METRIC=${durationMetric}"]) { - sh label: 'Send runtime metrics to labgrafana', script: '''#!/bin/bash -lex - echo "${DURATION_METRIC}" | nc -q 5 ${LAB_GRAFANA_ADDRESS} ${LAB_GRAFANA_PORT} - ''' - } - } - } -} - -def describePerCommitStage() { - script { - def type = 'standard' - def serverDescription = 'current Apache CassandaraⓇ and supported DataStax Enterprise versions' - if (env.BRANCH_NAME ==~ /long-python.*/) { - type = 'long' - } else if (env.BRANCH_NAME ==~ /dev-python.*/) { - type = 'dev' - } - - currentBuild.displayName = "Per-Commit (${env.EVENT_LOOP_MANAGER} | ${type.capitalize()})" - currentBuild.description = "Per-Commit build and ${type} testing of ${serverDescription} against Python v2.7.18 and v3.5.9 using ${env.EVENT_LOOP_MANAGER} event loop manager" +// TODO move this in the shared lib +def getDriverMetricType() { + metric_type = 'oss' + if (env.GIT_URL.contains('riptano/python-driver')) { + metric_type = 'oss-private' + } else if (env.GIT_URL.contains('python-dse-driver')) { + metric_type = 'dse' } - - sh label: 'Describe the python environment', script: '''#!/bin/bash -lex - python -V - pip freeze - ''' + return metric_type } -def describeScheduledTestingStage() { +def describeBuild(buildContext) { script { - def type = params.CI_SCHEDULE.toLowerCase().capitalize() - def displayName = "${type} schedule (${env.EVENT_LOOP_MANAGER}" - if (env.CYTHON_ENABLED == 'True') { - displayName += " | Cython" - } - if (params.PROFILE != 'NONE') { - displayName += " | ${params.PROFILE}" - } - displayName += ")" - currentBuild.displayName = displayName - - def serverVersionDescription = "${params.CI_SCHEDULE_SERVER_VERSION.replaceAll(' ', ', ')} server version(s) in the matrix" - def pythonVersionDescription = "${params.CI_SCHEDULE_PYTHON_VERSION.replaceAll(' ', ', ')} Python version(s) in the matrix" - def description = "${type} scheduled testing using ${env.EVENT_LOOP_MANAGER} event loop manager" - if (env.CYTHON_ENABLED == 'True') { - description += ", with Cython enabled" - } - if (params.PROFILE != 'NONE') { - description += ", ${params.PROFILE} profile" - } - description += ", ${serverVersionDescription}, and ${pythonVersionDescription}" - currentBuild.description = description + def runtimes = buildContext.matrix["RUNTIME"] + def serverVersions = buildContext.matrix["SERVER"] + def numBuilds = runtimes.size() * serverVersions.size() * buildContext.matrix["CYTHON"].size() + currentBuild.displayName = "${env.PROFILE} (${env.EVENT_LOOP} | ${numBuilds} builds)" + currentBuild.description = "${env.PROFILE} build testing servers (${serverVersions.join(', ')}) against Python (${runtimes.join(', ')}) using ${env.EVENT_LOOP} event loop manager" } } -def describeAdhocTestingStage() { - script { - def serverType = params.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION.split('-')[0] - def serverDisplayName = 'Apache CassandaraⓇ' - def serverVersion = " v${serverType}" - if (serverType == 'ALL') { - serverDisplayName = "all ${serverDisplayName} and DataStax Enterprise server versions" - serverVersion = '' - } else { - try { - serverVersion = " v${env.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION.split('-')[1]}" - } catch (e) { - ;; // no-op - } - if (serverType == 'dse') { - serverDisplayName = 'DataStax Enterprise' - } - } - def displayName = "${params.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION} for 
v${params.ADHOC_BUILD_AND_EXECUTE_TESTS_PYTHON_VERSION} (${env.EVENT_LOOP_MANAGER}" - if (env.CYTHON_ENABLED == 'True') { - displayName += " | Cython" - } - if (params.PROFILE != 'NONE') { - displayName += " | ${params.PROFILE}" - } - displayName += ")" - currentBuild.displayName = displayName - - def description = "Testing ${serverDisplayName} ${serverVersion} using ${env.EVENT_LOOP_MANAGER} against Python ${params.ADHOC_BUILD_AND_EXECUTE_TESTS_PYTHON_VERSION}" - if (env.CYTHON_ENABLED == 'True') { - description += ", with Cython" - } - if (params.PROFILE == 'NONE') { - if (params.EXECUTE_LONG_TESTS) { - description += ", with" - } else { - description += ", without" - } - description += " long tests executed" - } else { - description += ", ${params.PROFILE} profile" - } - currentBuild.description = description - } +// branch pattern for cron +def branchPatternCron() { + ~"(master)" } -def branchPatternCron = ~"(master)" -def riptanoPatternCron = ~"(riptano)" - pipeline { agent none // Global pipeline timeout options { - timeout(time: 10, unit: 'HOURS') + disableConcurrentBuilds() + timeout(time: 10, unit: 'HOURS') // TODO timeout should be per build buildDiscarder(logRotator(artifactNumToKeepStr: '10', // Keep only the last 10 artifacts numToKeepStr: '50')) // Keep only the last 50 build records } @@ -406,22 +514,76 @@ pipeline { ''') choice( - name: 'ADHOC_BUILD_AND_EXECUTE_TESTS_PYTHON_VERSION', - choices: ['2.7.18', '3.4.10', '3.5.9', '3.6.10', '3.7.7', '3.8.3'], - description: 'Python version to use for adhoc BUILD-AND-EXECUTE-TESTS ONLY!') + name: 'PROFILE', + choices: ['STANDARD', 'FULL', 'DSE-SMOKE-TEST', 'EVENT_LOOP'], + description: '''
Profile to utilize for scheduled or adhoc builds
Choice         | Description
STANDARD       | Execute the standard tests for the driver
FULL           | Execute all tests for the driver, including long tests.
DSE-SMOKE-TEST | Execute only the DataStax Enterprise smoke tests
EVENT_LOOP     | Execute only the event loop tests for the specified event loop manager (see: EVENT_LOOP)
''') + choice( + name: 'MATRIX', + choices: ['DEFAULT', 'SMOKE', 'FULL', 'CASSANDRA', 'DSE', 'HCD'], + description: '''
The matrix for the build.
Choice    | Description
DEFAULT   | Default to the build context.
SMOKE     | Basic smoke tests for current Python runtimes + C*/DSE versions, no Cython
FULL      | All server versions, Python runtimes tested with and without Cython.
CASSANDRA | All Cassandra server versions.
DSE       | All DSE server versions.
HCD       | All HCD server versions.
''') + choice( + name: 'PYTHON_VERSION', + choices: ['DEFAULT'] + DEFAULT_RUNTIME, + description: 'Python runtime version. Default to the build context.') choice( - name: 'ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION', - choices: ['2.1', // Legacy Apache CassandraⓇ - '2.2', // Legacy Apache CassandraⓇ - '3.0', // Previous Apache CassandraⓇ - '3.11', // Current Apache CassandraⓇ - '4.0', // Development Apache CassandraⓇ - 'dse-5.0', // Long Term Support DataStax Enterprise - 'dse-5.1', // Legacy DataStax Enterprise - 'dse-6.0', // Previous DataStax Enterprise - 'dse-6.7', // Previous DataStax Enterprise - 'dse-6.8', // Current DataStax Enterprise - 'ALL'], + name: 'SERVER_VERSION', + choices: ['DEFAULT'] + DEFAULT_CASSANDRA + DEFAULT_DSE + DEFAULT_HCD, description: '''Apache CassandraⓇ and DataStax Enterprise server version to use for adhoc BUILD-AND-EXECUTE-TESTS ONLY! @@ -430,57 +592,69 @@ pipeline { - - - - - - - + + + - + - + - + - - + + - + - - + + - - + + - - + +
Choice | Description
-2.1 | Apache CassandaraⓇ v2.1.x
-2.2 | Apache CassandarⓇ v2.2.x
+DEFAULT | Default to the build context.
-3.0 | Apache CassandaraⓇ v3.0.x
+3.0 | Apache CassandraⓇ v3.0.x
-3.11 | Apache CassandaraⓇ v3.11.x
+3.11 | Apache CassandraⓇ v3.11.x
-4.0 | Apache CassandaraⓇ v4.x (CURRENTLY UNDER DEVELOPMENT)
+4.0 | Apache CassandraⓇ v4.0.x
-dse-5.0 | DataStax Enterprise v5.0.x (Long Term Support)
+5.0 | Apache CassandraⓇ v5.0.x
-dse-5.1 | DataStax Enterprise v5.1.x
+dse-5.1.35 | DataStax Enterprise v5.1.x
-dse-6.0 | DataStax Enterprise v6.0.x
+dse-6.8.30 | DataStax Enterprise v6.8.x
-dse-6.7 | DataStax Enterprise v6.7.x
+dse-6.9.0 | DataStax Enterprise v6.9.x (CURRENTLY UNDER DEVELOPMENT)
-dse-6.8 | DataStax Enterprise v6.8.x (CURRENTLY UNDER DEVELOPMENT)
+hcd-1.0.0 | DataStax HCD v1.0.x (CURRENTLY UNDER DEVELOPMENT)
''') - booleanParam( + choice( name: 'CYTHON', - defaultValue: false, - description: 'Flag to determine if Cython should be enabled for scheduled or adhoc builds') - booleanParam( - name: 'EXECUTE_LONG_TESTS', - defaultValue: false, - description: 'Flag to determine if long integration tests should be executed for scheduled or adhoc builds') + choices: ['DEFAULT'] + DEFAULT_CYTHON, + description: '''
Flag to determine if Cython should be enabled
Choice  | Description
Default | Default to the build context.
True    | Enable Cython
False   | Disable Cython
''') choice( - name: 'EVENT_LOOP_MANAGER', + name: 'EVENT_LOOP', choices: ['LIBEV', 'GEVENT', 'EVENTLET', 'ASYNCIO', 'ASYNCORE', 'TWISTED'], description: '''
Event loop manager to utilize for scheduled or adhoc builds
@@ -515,34 +689,6 @@ pipeline {
An event-driven networking engine written in Python and licensed under the open source MIT license
''') - choice( - name: 'PROFILE', - choices: ['NONE', 'DSE-SMOKE-TEST', 'EVENT-LOOP', 'UPGRADE'], - description: '''
Profile to utilize for scheduled or adhoc builds
Choice         | Description
NONE           | Execute the standard tests for the driver
DSE-SMOKE-TEST | Execute only the DataStax Enterprise smoke tests
EVENT-LOOP     | Execute only the event loop tests for the specified event loop manager (see: EVENT_LOOP_MANAGER)
UPGRADE        | Execute only the upgrade tests
''') choice( name: 'CI_SCHEDULE', choices: ['DO-NOT-CHANGE-THIS-SELECTION', 'WEEKNIGHTS', 'WEEKENDS'], @@ -558,316 +704,43 @@ pipeline { } triggers { - parameterizedCron((branchPatternCron.matcher(env.BRANCH_NAME).matches() && !riptanoPatternCron.matcher(GIT_URL).find()) ? """ + parameterizedCron(branchPatternCron().matcher(env.BRANCH_NAME).matches() ? """ # Every weeknight (Monday - Friday) around 4:00 AM - # These schedules will run with and without Cython enabled for Python v2.7.18 and v3.5.9 - H 4 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.2 3.11 dse-5.1 dse-6.0 dse-6.7 - H 4 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.2 3.11 dse-5.1 dse-6.0 dse-6.7 - - # Every Saturday around 12:00, 4:00 and 8:00 PM - # These schedules are for weekly libev event manager runs with and without Cython for most of the Python versions (excludes v3.5.9.x) - H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 - H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.4.10;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 - H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 - H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 - H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 - # These schedules are for weekly gevent event manager event loop only runs with and without Cython for most of the Python versions (excludes v3.4.10.x) - H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - # These schedules are for weekly eventlet event manager event loop only runs with and without Cython for most of the Python versions (excludes v3.4.10.x) - H 20 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 20 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 20 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 20 * * 6 
%CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 20 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - - # Every Sunday around 12:00 and 4:00 AM - # These schedules are for weekly asyncore event manager event loop only runs with and without Cython for most of the Python versions (excludes v3.4.10.x) - H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - # These schedules are for weekly twisted event manager event loop only runs with and without Cython for most of the Python versions (excludes v3.4.10.x) - H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + # These schedules will run with and without Cython enabled for Python 3.9.23 and 3.13.5 + H 4 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;EVENT_LOOP=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.9.23 3.13.5;CI_SCHEDULE_SERVER_VERSION=3.11 4.0 5.0 dse-5.1.35 dse-6.8.30 dse-6.9.0 hcd-1.0.0 """ : "") } environment { - OS_VERSION = 'ubuntu/bionic64/python-driver' - CYTHON_ENABLED = "${params.CYTHON ? 'True' : 'False'}" - EVENT_LOOP_MANAGER = "${params.EVENT_LOOP_MANAGER.toLowerCase()}" - EXECUTE_LONG_TESTS = "${params.EXECUTE_LONG_TESTS ? 
'True' : 'False'}" + OS_VERSION = 'ubuntu/focal64/python-driver' CCM_ENVIRONMENT_SHELL = '/usr/local/bin/ccm_environment.sh' CCM_MAX_HEAP_SIZE = '1536M' + JABBA_SHELL = '/usr/lib/jabba/jabba.sh' } stages { - stage ('Per-Commit') { - options { - timeout(time: 2, unit: 'HOURS') - } + stage ('Build and Test') { when { beforeAgent true - branch pattern: '((dev|long)-)?python-.*', comparator: 'REGEXP' allOf { - expression { params.ADHOC_BUILD_TYPE == 'BUILD' } - expression { params.CI_SCHEDULE == 'DO-NOT-CHANGE-THIS-SELECTION' } not { buildingTag() } } } - matrix { - axes { - axis { - name 'CASSANDRA_VERSION' - values '3.11', // Current Apache Cassandra - 'dse-6.8' // Current DataStax Enterprise - } - axis { - name 'PYTHON_VERSION' - values '2.7.18', '3.5.9' - } - axis { - name 'CYTHON_ENABLED' - values 'False' - } - } - - agent { - label "${OS_VERSION}" - } - - stages { - stage('Initialize-Environment') { - steps { - initializeEnvironment() - script { - if (env.BUILD_STATED_SLACK_NOTIFIED != 'true') { - notifySlack() - } - } - } - } - stage('Describe-Build') { - steps { - describePerCommitStage() - } - } - stage('Install-Driver-And-Compile-Extensions') { - steps { - installDriverAndCompileExtensions() - } - } - stage('Execute-Tests') { - steps { - - script { - if (env.BRANCH_NAME ==~ /long-python.*/) { - withEnv(["EXECUTE_LONG_TESTS=True"]) { - executeTests() - } - } - else { - executeTests() - } - } - } - post { - always { - junit testResults: '*_results.xml' - } - } - } - } - } - post { - always { - node('master') { - submitCIMetrics('commit') - } - } - aborted { - notifySlack('aborted') - } - success { - notifySlack('completed') - } - unstable { - notifySlack('unstable') - } - failure { - notifySlack('FAILED') - } - } - } - - stage ('Scheduled-Testing') { - when { - beforeAgent true - allOf { - expression { params.ADHOC_BUILD_TYPE == 'BUILD' } - expression { params.CI_SCHEDULE != 'DO-NOT-CHANGE-THIS-SELECTION' } - not { buildingTag() } - } - } - matrix { - axes { - axis { - name 'CASSANDRA_VERSION' - values '2.1', // Legacy Apache Cassandra - '2.2', // Legacy Apache Cassandra - '3.0', // Previous Apache Cassandra - '3.11', // Current Apache Cassandra - 'dse-5.1', // Legacy DataStax Enterprise - 'dse-6.0', // Previous DataStax Enterprise - 'dse-6.7' // Current DataStax Enterprise - } - axis { - name 'CYTHON_ENABLED' - values 'True', 'False' - } - } - when { - beforeAgent true - allOf { - expression { return params.CI_SCHEDULE_SERVER_VERSION.split(' ').any { it =~ /(ALL|${env.CASSANDRA_VERSION})/ } } - } - } + steps { + script { + context = getBuildContext() + withEnv(context.vars) { + describeBuild(context) - environment { - PYTHON_VERSION = "${params.CI_SCHEDULE_PYTHON_VERSION}" - } - agent { - label "${OS_VERSION}" - } + // build and test all builds + parallel getMatrixBuilds(context) - stages { - stage('Initialize-Environment') { - steps { - initializeEnvironment() - script { - if (env.BUILD_STATED_SLACK_NOTIFIED != 'true') { - notifySlack() - } - } - } + slack.notifyChannel(currentBuild.currentResult) } - stage('Describe-Build') { - steps { - describeScheduledTestingStage() - } - } - stage('Install-Driver-And-Compile-Extensions') { - steps { - installDriverAndCompileExtensions() - } - } - stage('Execute-Tests') { - steps { - executeTests() - } - post { - always { - junit testResults: '*_results.xml' - } - } - } - } - } - post { - aborted { - notifySlack('aborted') - } - success { - notifySlack('completed') - } - unstable { - notifySlack('unstable') - } - failure { - notifySlack('FAILED') 
} } } - - stage('Adhoc-Testing') { - when { - beforeAgent true - allOf { - expression { params.ADHOC_BUILD_TYPE == 'BUILD-AND-EXECUTE-TESTS' } - not { buildingTag() } - } - } - - environment { - CYTHON_ENABLED = "${params.CYTHON ? 'True' : 'False'}" - PYTHON_VERSION = "${params.ADHOC_BUILD_AND_EXECUTE_TESTS_PYTHON_VERSION}" - } - - matrix { - axes { - axis { - name 'CASSANDRA_VERSION' - values '2.1', // Legacy Apache Cassandra - '2.2', // Legacy Apache Cassandra - '3.0', // Previous Apache Cassandra - '3.11', // Current Apache Cassandra - '4.0', // Development Apache Cassandra - 'dse-5.0', // Long Term Support DataStax Enterprise - 'dse-5.1', // Legacy DataStax Enterprise - 'dse-6.0', // Previous DataStax Enterprise - 'dse-6.7', // Current DataStax Enterprise - 'dse-6.8' // Development DataStax Enterprise - } - } - when { - beforeAgent true - allOf { - expression { params.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION ==~ /(ALL|${env.CASSANDRA_VERSION})/ } - } - } - - agent { - label "${OS_VERSION}" - } - - stages { - stage('Describe-Build') { - steps { - describeAdhocTestingStage() - } - } - stage('Initialize-Environment') { - steps { - initializeEnvironment() - } - } - stage('Install-Driver-And-Compile-Extensions') { - steps { - installDriverAndCompileExtensions() - } - } - stage('Execute-Tests') { - steps { - executeTests() - } - post { - always { - junit testResults: '*_results.xml' - } - } - } - } - } - } } } diff --git a/NOTICE b/NOTICE new file mode 100644 index 0000000000..58250f616b --- /dev/null +++ b/NOTICE @@ -0,0 +1,115 @@ +Apache Cassandra Python Driver +Copyright 2013 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + + +This product originates, before git sha +5d4fd2349119a3237ad351a96e7f2b3317159305, from software from DataStax and other +individual contributors. All work was previously copyrighted to DataStax. + +Non-DataStax contributors are listed below. Those marked with asterisk have +explicitly consented to their contributions being donated to the ASF. 
+ +a-detiste Alexandre Detiste * +a-lst Andrey Istochkin * +aboudreault Alan Boudreault alan@alanb.ca +advance512 Alon Diamant diamant.alon@gmail.com * +alanjds Alan Justino da Silva alan.justino@yahoo.com.br * +alistair-broomhead Alistair Broomhead * +amygdalama Amy Hanlon * +andy-slac Andy Salnikov * +andy8zhao Andy Zhao +anthony-cervantes Anthony Cervantes anthony@cervantes.io * +BackEndTea Gert de Pagter * +barvinograd Bar Vinograd +bbirand Berk Birand +bergundy Roey Berman roey.berman@gmail.com * +bohdantan * +codesnik Alexey Trofimenko aronaxis@gmail.com * +coldeasy coldeasy +DanieleSalatti Daniele Salatti me@danielesalatti.com * +daniloarodrigues Danilo de Araújo Rodrigues * +daubman Aaron Daubman github@ajd.us * +dcosson Danny Cosson dcosson@gmail.com * +detzgk Eli Green eli@zigr.org * +devdazed Russ Bradberry * +dizpers Dmitry Belaventsev dizpers@gmail.com +dkropachev Dmitry Kropachev dmitry.kropachev@gmail.com * +dmglab Daniel dmg.lab@outlook.com +dokai Kai Lautaportti * +eamanu Emmanuel Arias eamanu@yaerobi.com +figpope Andrew FigPope andrew.figpope@gmail.com * +flupke Luper Rouch * +frensjan Frens Jan Rumph * +frew Fred Wulff frew@cs.stanford.edu * +gdoermann Greg Doermann +haaawk Piotr Jastrzębski +ikapl Irina Kaplounova +ittus Thang Minh Vu * +JeremyOT Jeremy Olmsted-Thompson * +jeremyschlatter Jeremy Schlatter * +jpuerta Ernesto Puerta * +julien-duponchelle Julien Duponchelle julien@duponchelle.info * +justinsb Justin Santa Barbara justinsb@google.com * +Kami Tomaz Muraus tomaz@tomaz.me +kandul Michał Kandulski michal.kandulski@gmail.com +kdeldycke Kevin Deldycke * +kishkaru Kishan Karunaratne kishan@karu.io * +kracekumar Kracekumar kracethekingmaker@gmail.com +lenards Andrew Lenards andrew.lenards@gmail.com * +lenolib +Lifto Ellis Low +Lorak-mmk Karol Baryła git@baryla.org * +lukaselmer Lukas Elmer lukas.elmer@gmail.com * +mahall Michael Hall +markflorisson Mark Florisson * +mattrobenolt Matt Robenolt m@robenolt.com * +mattstibbs Matt Stibbs * +Mhs-220 Mo Shahmohammadi hos1377@gmail.com * +mikeokner Mike Okner * +Mishail Mikhail Stepura mstepura@apple.com * +mission-liao mission.liao missionaryliao@gmail.com * +mkocikowski Mik Kocikowski +Mokto Théo Mathieu * +mrk-its Mariusz Kryński * +multani Jonathan Ballet jon@multani.info * +niklaskorz Niklas Korz * +nisanharamati nisanharamati +nschrader Nick Schrader nick.schrader@mailbox.org * +Orenef11 Oren Efraimov * +oz123 Oz Tiram * +pistolero Sergii Kyryllov * +pmcnett Paul McNett p@ulmcnett.com * +psarna Piotr Sarna * +r4fek Rafał Furmański * +raopm +rbranson Rick Branson * +rqx Roman Khanenko * +rtb-zla-karma xyz * +sigmunau +silviot Silvio Tomatis +sontek John Anderson sontek@gmail.com * +stanhu Stan Hu +stefanor Stefano Rivera stefanor@debian.org * +strixcuriosus Ash Hoover strixcuriosus@gmail.com +tarzanjw Học Đỗ hoc3010@gmail.com +tbarbugli Tommaso Barbugli +tchaikov Kefu Chai tchaikov@gmail.com * +tglines Travis Glines +thoslin Tom Lin +tigrus Nikolay Fominykh nikolayfn@gmail.com +timgates42 Tim Gates +timsavage Tim Savage * +tirkarthi Karthikeyan Singaravelan tir.karthi@gmail.com * +Trundle Andreas Stührk andy@hammerhartes.de +ubombi Vitalii Kozlovskyi vitalii@kozlovskyi.dev * +ultrabug Ultrabug * +vetal4444 Shevchenko Vitaliy * +victorpoluceno Victor Godoy Poluceno victorpoluceno@gmail.com +weisslj Johannes Weißl * +wenheping wenheping wenheping2000@hotmail.com +yi719 +yinyin Yinyin * +yriveiro Yago Riveiro * \ No newline at end of file diff --git a/README-dev.rst b/README-dev.rst index 8294d4efb8..939d3fa480 100644 
--- a/README-dev.rst +++ b/README-dev.rst @@ -1,5 +1,7 @@ Releasing ========= +Note: the precise details of some of these steps have changed. Leaving this here as a guide only. + * Run the tests and ensure they all pass * Update CHANGELOG.rst * Check for any missing entries @@ -13,7 +15,8 @@ Releasing * Tag the release. For example: ``git tag -a 1.0.0 -m 'version 1.0.0'`` * Push the tag and new ``master``: ``git push origin 1.0.0 ; git push origin master`` * Update the `python-driver` submodule of `python-driver-wheels`, - commit then push. This will trigger TravisCI and the wheels building. + commit then push. +* Trigger the Github Actions necessary to build wheels for the various platforms * For a GA release, upload the package to pypi:: # Clean the working directory @@ -49,85 +52,12 @@ Releasing * this is typically a matter of merging or rebasing onto master * test and push updated branch to origin -* Update the JIRA versions: https://datastax-oss.atlassian.net/plugins/servlet/project-config/PYTHON/versions +* Update the JIRA releases: https://issues.apache.org/jira/projects/CASSPYTHON?selectedItem=com.atlassian.jira.jira-projects-plugin:release-page * add release dates and set version as "released" * Make an announcement on the mailing list -Building the Docs -================= -Sphinx is required to build the docs. You probably want to install through apt, -if possible:: - - sudo apt-get install python-sphinx - -pip may also work:: - - sudo pip install -U Sphinx - -To build the docs, run:: - - python setup.py doc - -Upload the Docs -================= - -This is deprecated. The docs is now only published on https://docs.datastax.com. - -To upload the docs, checkout the ``gh-pages`` branch and copy the entire -contents all of ``docs/_build/X.Y.Z/*`` into the root of the ``gh-pages`` branch -and then push that branch to github. - -For example:: - - git checkout 1.0.0 - python setup.py doc - git checkout gh-pages - cp -R docs/_build/1.0.0/* . - git add --update # add modified files - # Also make sure to add any new documentation files! - git commit -m 'Update docs (version 1.0.0)' - git push origin gh-pages - -If docs build includes errors, those errors may not show up in the next build unless -you have changed the files with errors. It's good to occassionally clear the build -directory and build from scratch:: - - rm -rf docs/_build/* - -Documentor -========== -We now also use another tool called Documentor with Sphinx source to build docs. -This gives us versioned docs with nice integrated search. This is a private tool -of DataStax. - -Dependencies ------------- -Sphinx -~~~~~~ -Installed as described above - -Documentor -~~~~~~~~~~ -Clone and setup Documentor as specified in `the project `_. -This tool assumes Ruby, bundler, and npm are present. - -Building --------- -The setup script expects documentor to be in the system path. You can either add it permanently or run with something -like this:: - - PATH=$PATH:/bin python setup.py doc - -The docs will not display properly just browsing the filesystem in a browser. To view the docs as they would be in most -web servers, use the SimpleHTTPServer module:: - - cd docs/_build/ - python -m SimpleHTTPServer - -Then, browse to `localhost:8000 `_. 
- Tests ===== @@ -135,49 +65,38 @@ Running Unit Tests ------------------ Unit tests can be run like so:: - nosetests -w tests/unit/ + pytest tests/unit/ You can run a specific test method like so:: - nosetests -w tests/unit/test_connection.py:ConnectionTest.test_bad_protocol_version + pytest tests/unit/test_connection.py::ConnectionTest::test_bad_protocol_version Running Integration Tests ------------------------- In order to run integration tests, you must specify a version to run using the ``CASSANDRA_VERSION`` or ``DSE_VERSION`` environment variable:: - CASSANDRA_VERSION=2.0.9 nosetests -w tests/integration/standard + CASSANDRA_VERSION=2.0.9 pytest tests/integration/standard Or you can specify a cassandra directory (to test unreleased versions):: - CASSANDRA_DIR=/home/thobbs/cassandra nosetests -w tests/integration/standard/ + CASSANDRA_DIR=/path/to/cassandra pytest tests/integration/standard/ Specifying the usage of an already running Cassandra cluster ------------------------------------------------------------ The test will start the appropriate Cassandra clusters when necessary but if you don't want this to happen because a Cassandra cluster is already running the flag ``USE_CASS_EXTERNAL`` can be used, for example:: - USE_CASS_EXTERNAL=1 CASSANDRA_VERSION=2.0.9 nosetests -w tests/integration/standard + USE_CASS_EXTERNAL=1 CASSANDRA_VERSION=2.0.9 pytest tests/integration/standard Specify a Protocol Version for Tests ------------------------------------ The protocol version defaults to 1 for cassandra 1.2 and 2 otherwise. You can explicitly set it with the ``PROTOCOL_VERSION`` environment variable:: - PROTOCOL_VERSION=3 nosetests -w tests/integration/standard - -Seeing Test Logs in Real Time ------------------------------ -Sometimes it's useful to output logs for the tests as they run:: - - nosetests -w tests/unit/ --nocapture --nologcapture - -Use tee to capture logs and see them on your terminal:: - - nosetests -w tests/unit/ --nocapture --nologcapture 2>&1 | tee test.log + PROTOCOL_VERSION=3 pytest tests/integration/standard Testing Multiple Python Versions -------------------------------- -If you want to test all of python 2.7, 3.4, 3.5, 3.6, 3.7, and pypy, use tox (this is what -TravisCI runs):: +Use tox to test all of Python 3.9 through 3.13 and pypy:: tox @@ -234,18 +153,3 @@ An EAP release is only uploaded on a private server and it is not published on p python setup.py doc * Upload the docs on the EAP download server. - -Adding a New Python Runtime Support -=================================== - -* Add the new python version to our jenkins image: - https://github.com/riptano/openstack-jenkins-drivers/ - -* Add the new python version in job-creator: - https://github.com/riptano/job-creator/ - -* Run the tests and ensure they all pass - * also test all event loops - -* Update the wheels building repo to support that version: - https://github.com/riptano/python-dse-driver-wheels diff --git a/README.rst b/README.rst index 358f588d32..47b5593ee9 100644 --- a/README.rst +++ b/README.rst @@ -1,27 +1,35 @@ -DataStax Driver for Apache Cassandra -==================================== -.. image:: https://travis-ci.com/datastax/python-driver.png?branch=master - :target: https://travis-ci.com/github/datastax/python-driver +.. 
|license| image:: https://img.shields.io/badge/License-Apache%202.0-blue.svg + :target: https://opensource.org/licenses/Apache-2.0 +.. |version| image:: https://badge.fury.io/py/cassandra-driver.svg + :target: https://badge.fury.io/py/cassandra-driver +.. |pyversion| image:: https://img.shields.io/pypi/pyversions/cassandra-driver.svg +.. |travis| image:: https://api.travis-ci.com/datastax/python-driver.svg?branch=master + :target: https://travis-ci.com/github/datastax/python-driver + +|license| |version| |pyversion| |travis| + +Apache Cassandra Python Driver +============================== A modern, `feature-rich `_ and highly-tunable Python client library for Apache Cassandra (2.1+) and DataStax Enterprise (4.7+) using exclusively Cassandra's binary protocol and Cassandra Query Language v3. -The driver supports Python 2.7, 3.4, 3.5, 3.6, 3.7 and 3.8. +The driver supports Python 3.9 through 3.13. **Note:** DataStax products do not support big-endian systems. Features -------- -* `Synchronous `_ and `Asynchronous `_ APIs -* `Simple, Prepared, and Batch statements `_ +* `Synchronous `_ and `Asynchronous `_ APIs +* `Simple, Prepared, and Batch statements `_ * Asynchronous IO, parallel execution, request pipelining -* `Connection pooling `_ +* `Connection pooling `_ * Automatic node discovery -* `Automatic reconnection `_ -* Configurable `load balancing `_ and `retry policies `_ -* `Concurrent execution utilities `_ -* `Object mapper `_ +* `Automatic reconnection `_ +* Configurable `load balancing `_ and `retry policies `_ +* `Concurrent execution utilities `_ +* `Object mapper `_ * `Connecting to DataStax Astra database (cloud) `_ * DSE Graph execution API * DSE Geometric type serialization @@ -34,45 +42,45 @@ Installation through pip is recommended:: $ pip install cassandra-driver For more complete installation instructions, see the -`installation guide `_. +`installation guide `_. Documentation ------------- -The documentation can be found online `here `_. +The documentation can be found online `here `_. A couple of links for getting up to speed: -* `Installation `_ -* `Getting started guide `_ -* `API docs `_ -* `Performance tips `_ +* `Installation `_ +* `Getting started guide `_ +* `API docs `_ +* `Performance tips `_ Object Mapper ------------- cqlengine (originally developed by Blake Eggleston and Jon Haddad, with contributions from the community) is now maintained as an integral part of this package. Refer to -`documentation here `_. +`documentation here `_. Contributing ------------ -See `CONTRIBUTING.md `_. +See `CONTRIBUTING.rst `_. Reporting Problems ------------------ Please report any bugs and make any feature requests on the -`JIRA `_ issue tracker. +`CASSPYTHON project `_ +of the ASF JIRA. If you would like to contribute, please feel free to open a pull request. Getting Help ------------ -Your best options for getting help with the driver are the -`mailing list `_ -and the `DataStax Community `_. +You can talk about the driver, ask questions and get help in the #cassandra-drivers channel on +`ASF Slack `_. License ------- -Copyright DataStax, Inc. +Copyright 2013 The Apache Software Foundation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
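A minimal sketch illustrating the synchronous API the README describes, assuming a reachable local node; the keyspace, table, and id value here are hypothetical::

    from cassandra.cluster import Cluster

    # Contact points may be omitted, in which case localhost is tried.
    cluster = Cluster(['127.0.0.1'])
    session = cluster.connect('mykeyspace')

    # Prepare once, then execute many times with different bind values.
    lookup = session.prepare("SELECT name FROM users WHERE id=?")
    row = session.execute(lookup, [42]).one()
    if row:
        print(row.name)

    cluster.shutdown()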
diff --git a/appveyor.yml b/appveyor.yml index d1daaa6ec6..12c43d57a0 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,9 +1,6 @@ environment: matrix: - - PYTHON: "C:\\Python27-x64" - cassandra_version: 3.11.2 - ci_type: standard - - PYTHON: "C:\\Python35-x64" + - PYTHON: "C:\\Python38-x64" cassandra_version: 3.11.2 ci_type: standard os: Visual Studio 2015 diff --git a/appveyor/run_test.ps1 b/appveyor/run_test.ps1 index fc95ec7e52..9b8c23fd8b 100644 --- a/appveyor/run_test.ps1 +++ b/appveyor/run_test.ps1 @@ -15,12 +15,12 @@ $wc = New-Object 'System.Net.WebClient' if($env:ci_type -eq 'unit'){ echo "Running Unit tests" - nosetests -s -v --with-ignore-docstrings --with-xunit --xunit-file=unit_results.xml .\tests\unit + pytest -s -v --junit-xml=unit_results.xml .\tests\unit $env:EVENT_LOOP_MANAGER="gevent" - nosetests -s -v --with-ignore-docstrings --with-xunit --xunit-file=unit_results.xml .\tests\unit\io\test_geventreactor.py + pytest -s -v --junit-xml=unit_results.xml .\tests\unit\io\test_geventreactor.py $env:EVENT_LOOP_MANAGER="eventlet" - nosetests -s -v --with-ignore-docstrings --with-xunit --xunit-file=unit_results.xml .\tests\unit\io\test_eventletreactor.py + pytest -s -v --junit-xml=unit_results.xml .\tests\unit\io\test_eventletreactor.py $env:EVENT_LOOP_MANAGER="asyncore" echo "uploading unit results" @@ -31,13 +31,13 @@ if($env:ci_type -eq 'unit'){ if($env:ci_type -eq 'standard'){ echo "Running CQLEngine integration tests" - nosetests -s -v --with-ignore-docstrings --with-xunit --xunit-file=cqlengine_results.xml .\tests\integration\cqlengine + pytest -s -v --junit-xml=cqlengine_results.xml .\tests\integration\cqlengine $cqlengine_tests_result = $lastexitcode $wc.UploadFile("https://ci.appveyor.com/api/testresults/junit/$($env:APPVEYOR_JOB_ID)", (Resolve-Path .\cqlengine_results.xml)) echo "uploading CQLEngine test results" echo "Running standard integration tests" - nosetests -s -v --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml .\tests\integration\standard + pytest -s -v --junit-xml=standard_results.xml .\tests\integration\standard $integration_tests_result = $lastexitcode $wc.UploadFile("https://ci.appveyor.com/api/testresults/junit/$($env:APPVEYOR_JOB_ID)", (Resolve-Path .\standard_results.xml)) echo "uploading standard integration test results" diff --git a/benchmarks/base.py b/benchmarks/base.py index 47a03bbd68..290ba28788 100644 --- a/benchmarks/base.py +++ b/benchmarks/base.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/benchmarks/callback_full_pipeline.py b/benchmarks/callback_full_pipeline.py index e3ecfe3be5..5eafa5df8b 100644 --- a/benchmarks/callback_full_pipeline.py +++ b/benchmarks/callback_full_pipeline.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -18,7 +20,6 @@ from threading import Event from base import benchmark, BenchmarkThread -from six.moves import range log = logging.getLogger(__name__) diff --git a/benchmarks/future_batches.py b/benchmarks/future_batches.py index 8cd915ebab..112cc24981 100644 --- a/benchmarks/future_batches.py +++ b/benchmarks/future_batches.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +16,7 @@ import logging from base import benchmark, BenchmarkThread -from six.moves import queue +import queue log = logging.getLogger(__name__) diff --git a/benchmarks/future_full_pipeline.py b/benchmarks/future_full_pipeline.py index 9a9fcfcd50..ca95b742d2 100644 --- a/benchmarks/future_full_pipeline.py +++ b/benchmarks/future_full_pipeline.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +16,7 @@ import logging from base import benchmark, BenchmarkThread -from six.moves import queue +import queue log = logging.getLogger(__name__) diff --git a/benchmarks/future_full_throttle.py b/benchmarks/future_full_throttle.py index b4ba951c28..f85eb99b0d 100644 --- a/benchmarks/future_full_throttle.py +++ b/benchmarks/future_full_throttle.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/benchmarks/sync.py b/benchmarks/sync.py index f2a45fcd7d..090a265579 100644 --- a/benchmarks/sync.py +++ b/benchmarks/sync.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +15,6 @@ # limitations under the License. 
from base import benchmark, BenchmarkThread -from six.moves import range class Runner(BenchmarkThread): diff --git a/build.yaml.bak b/build.yaml.bak deleted file mode 100644 index bd40809ef3..0000000000 --- a/build.yaml.bak +++ /dev/null @@ -1,279 +0,0 @@ -schedules: - nightly_master: - schedule: nightly - disable_pull_requests: true - branches: - include: [master] - env_vars: | - EVENT_LOOP_MANAGER='libev' - matrix: - exclude: - - python: [3.4, 3.6, 3.7, 3.8] - - cassandra: ['2.1', '3.0', '4.0', 'test-dse'] - - commit_long_test: - schedule: per_commit - disable_pull_requests: true - branches: - include: [/long-python.*/] - env_vars: | - EVENT_LOOP_MANAGER='libev' - matrix: - exclude: - - python: [3.4, 3.6, 3.7, 3.8] - - cassandra: ['2.1', '3.0', 'test-dse'] - - commit_branches: - schedule: per_commit - disable_pull_requests: true - branches: - include: [/python.*/] - env_vars: | - EVENT_LOOP_MANAGER='libev' - EXCLUDE_LONG=1 - matrix: - exclude: - - python: [3.4, 3.6, 3.7, 3.8] - - cassandra: ['2.1', '3.0', 'test-dse'] - - commit_branches_dev: - schedule: per_commit - disable_pull_requests: true - branches: - include: [/dev-python.*/] - env_vars: | - EVENT_LOOP_MANAGER='libev' - EXCLUDE_LONG=1 - matrix: - exclude: - - python: [2.7, 3.4, 3.7, 3.6, 3.8] - - cassandra: ['2.0', '2.1', '2.2', '3.0', '4.0', 'test-dse', 'dse-4.8', 'dse-5.0', 'dse-6.0', 'dse-6.8'] - - release_test: - schedule: per_commit - disable_pull_requests: true - branches: - include: [/release-.+/] - env_vars: | - EVENT_LOOP_MANAGER='libev' - - weekly_master: - schedule: 0 10 * * 6 - disable_pull_requests: true - branches: - include: [master] - env_vars: | - EVENT_LOOP_MANAGER='libev' - matrix: - exclude: - - python: [3.5] - - cassandra: ['2.2', '3.1'] - - weekly_gevent: - schedule: 0 14 * * 6 - disable_pull_requests: true - branches: - include: [master] - env_vars: | - EVENT_LOOP_MANAGER='gevent' - JUST_EVENT_LOOP=1 - matrix: - exclude: - - python: [3.4] - - weekly_eventlet: - schedule: 0 18 * * 6 - disable_pull_requests: true - branches: - include: [master] - env_vars: | - EVENT_LOOP_MANAGER='eventlet' - JUST_EVENT_LOOP=1 - matrix: - exclude: - - python: [3.4] - - weekly_asyncio: - schedule: 0 22 * * 6 - disable_pull_requests: true - branches: - include: [master] - env_vars: | - EVENT_LOOP_MANAGER='asyncio' - JUST_EVENT_LOOP=1 - matrix: - exclude: - - python: [2.7] - - weekly_async: - schedule: 0 10 * * 7 - disable_pull_requests: true - branches: - include: [master] - env_vars: | - EVENT_LOOP_MANAGER='asyncore' - JUST_EVENT_LOOP=1 - matrix: - exclude: - - python: [3.4] - - weekly_twister: - schedule: 0 14 * * 7 - disable_pull_requests: true - branches: - include: [master] - env_vars: | - EVENT_LOOP_MANAGER='twisted' - JUST_EVENT_LOOP=1 - matrix: - exclude: - - python: [3.4] - - upgrade_tests: - schedule: adhoc - branches: - include: [master, python-546] - env_vars: | - EVENT_LOOP_MANAGER='libev' - JUST_UPGRADE=True - matrix: - exclude: - - python: [3.4, 3.6, 3.7, 3.8] - - cassandra: ['2.0', '2.1', '2.2', '3.0', '4.0', 'test-dse'] - -python: - - 2.7 - - 3.4 - - 3.5 - - 3.6 - - 3.7 - - 3.8 - -os: - - ubuntu/bionic64/python-driver - -cassandra: - - '2.1' - - '2.2' - - '3.0' - - '3.11' - - '4.0' - - 'dse-4.8' - - 'dse-5.0' - - 'dse-5.1' - - 'dse-6.0' - - 'dse-6.7' - - 'dse-6.8.0' - -env: - CYTHON: - - CYTHON - - NO_CYTHON - -build: - - script: | - export JAVA_HOME=$CCM_JAVA_HOME - export PATH=$JAVA_HOME/bin:$PATH - export PYTHONPATH="" - export CCM_MAX_HEAP_SIZE=1024M - - # Required for unix socket tests - sudo apt-get install 
socat - - # Install latest setuptools - pip install --upgrade pip - pip install -U setuptools - - pip install git+ssh://git@github.com/riptano/ccm-private.git@cassandra-7544-native-ports-with-dse-fix - - # Remove this pyyaml installation when removing Python 3.4 support - pip install PyYAML==5.2 - #pip install $HOME/ccm - - if [ -n "$CCM_IS_DSE" ]; then - pip install -r test-datastax-requirements.txt - else - pip install -r test-requirements.txt - fi - - pip install nose-ignore-docstring - pip install nose-exclude - pip install service_identity - - FORCE_CYTHON=False - if [[ $CYTHON == 'CYTHON' ]]; then - FORCE_CYTHON=True - pip install cython - pip install numpy - # Install the driver & compile C extensions - python setup.py build_ext --inplace - else - # Install the driver & compile C extensions with no cython - python setup.py build_ext --inplace --no-cython - fi - - echo "JUST_UPGRADE: $JUST_UPGRADE" - if [[ $JUST_UPGRADE == 'True' ]]; then - EVENT_LOOP_MANAGER=$EVENT_LOOP_MANAGER VERIFY_CYTHON=$FORCE_CYTHON nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=upgrade_results.xml tests/integration/upgrade || true - exit 0 - fi - - if [[ $JUST_SMOKE == 'true' ]]; then - # When we ONLY want to run the smoke tests - echo "JUST_SMOKE: $JUST_SMOKE" - echo "==========RUNNING SMOKE TESTS===========" - EVENT_LOOP_MANAGER=$EVENT_LOOP_MANAGER CCM_ARGS="$CCM_ARGS" CASSANDRA_VERSION=$CCM_CASSANDRA_VERSION DSE_VERSION='6.7.0' MAPPED_CASSANDRA_VERSION=$MAPPED_CASSANDRA_VERSION VERIFY_CYTHON=$FORCE_CYTHON nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml tests/integration/standard/test_dse.py || true - exit 0 - fi - - # Run the unit tests, this is not done in travis because - # it takes too much time for the whole matrix to build with cython - if [[ $CYTHON == 'CYTHON' ]]; then - EVENT_LOOP_MANAGER=$EVENT_LOOP_MANAGER VERIFY_CYTHON=1 nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_results.xml tests/unit/ || true - EVENT_LOOP_MANAGER=eventlet VERIFY_CYTHON=1 nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_eventlet_results.xml tests/unit/io/test_eventletreactor.py || true - EVENT_LOOP_MANAGER=gevent VERIFY_CYTHON=1 nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_gevent_results.xml tests/unit/io/test_geventreactor.py || true - fi - - if [ -n "$JUST_EVENT_LOOP" ]; then - echo "Running integration event loop subset with $EVENT_LOOP_MANAGER" - EVENT_LOOP_TESTS=( - "tests/integration/standard/test_cluster.py" - "tests/integration/standard/test_concurrent.py" - "tests/integration/standard/test_connection.py" - "tests/integration/standard/test_control_connection.py" - "tests/integration/standard/test_metrics.py" - "tests/integration/standard/test_query.py" - "tests/integration/simulacron/test_endpoint.py" - "tests/integration/long/test_ssl.py" - ) - EVENT_LOOP_MANAGER=$EVENT_LOOP_MANAGER CCM_ARGS="$CCM_ARGS" DSE_VERSION=$DSE_VERSION CASSANDRA_VERSION=$CCM_CASSANDRA_VERSION MAPPED_CASSANDRA_VERSION=$MAPPED_CASSANDRA_VERSION VERIFY_CYTHON=$FORCE_CYTHON nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" 
--with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml ${EVENT_LOOP_TESTS[@]} || true - exit 0 - fi - - echo "Running with event loop manager: $EVENT_LOOP_MANAGER" - echo "==========RUNNING SIMULACRON TESTS==========" - SIMULACRON_JAR="$HOME/simulacron.jar" - SIMULACRON_JAR=$SIMULACRON_JAR EVENT_LOOP_MANAGER=$EVENT_LOOP_MANAGER CASSANDRA_DIR=$CCM_INSTALL_DIR CCM_ARGS="$CCM_ARGS" DSE_VERSION=$DSE_VERSION CASSANDRA_VERSION=$CCM_CASSANDRA_VERSION MAPPED_CASSANDRA_VERSION=$MAPPED_CASSANDRA_VERSION VERIFY_CYTHON=$FORCE_CYTHON nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=simulacron_results.xml tests/integration/simulacron/ || true - - echo "Running with event loop manager: $EVENT_LOOP_MANAGER" - echo "==========RUNNING CQLENGINE TESTS==========" - EVENT_LOOP_MANAGER=$EVENT_LOOP_MANAGER CCM_ARGS="$CCM_ARGS" DSE_VERSION=$DSE_VERSION CASSANDRA_VERSION=$CCM_CASSANDRA_VERSION MAPPED_CASSANDRA_VERSION=$MAPPED_CASSANDRA_VERSION VERIFY_CYTHON=$FORCE_CYTHON nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=cqle_results.xml tests/integration/cqlengine/ || true - - echo "==========RUNNING INTEGRATION TESTS==========" - EVENT_LOOP_MANAGER=$EVENT_LOOP_MANAGER CCM_ARGS="$CCM_ARGS" DSE_VERSION=$DSE_VERSION CASSANDRA_VERSION=$CCM_CASSANDRA_VERSION MAPPED_CASSANDRA_VERSION=$MAPPED_CASSANDRA_VERSION VERIFY_CYTHON=$FORCE_CYTHON nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml tests/integration/standard/ || true - - if [ -n "$DSE_VERSION" ] && ! [[ $DSE_VERSION == "4.8"* ]]; then - echo "==========RUNNING DSE INTEGRATION TESTS==========" - EVENT_LOOP_MANAGER=$EVENT_LOOP_MANAGER CASSANDRA_DIR=$CCM_INSTALL_DIR DSE_VERSION=$DSE_VERSION ADS_HOME=$HOME/ VERIFY_CYTHON=$FORCE_CYTHON nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=dse_results.xml tests/integration/advanced/ || true - fi - - echo "==========RUNNING CLOUD TESTS==========" - EVENT_LOOP_MANAGER=$EVENT_LOOP_MANAGER CLOUD_PROXY_PATH="$HOME/proxy/" CASSANDRA_VERSION=$CCM_CASSANDRA_VERSION MAPPED_CASSANDRA_VERSION=$MAPPED_CASSANDRA_VERSION VERIFY_CYTHON=$FORCE_CYTHON nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=advanced_results.xml tests/integration/cloud/ || true - - if [ -z "$EXCLUDE_LONG" ]; then - echo "==========RUNNING LONG INTEGRATION TESTS==========" - EVENT_LOOP_MANAGER=$EVENT_LOOP_MANAGER CCM_ARGS="$CCM_ARGS" DSE_VERSION=$DSE_VERSION CASSANDRA_VERSION=$CCM_CASSANDRA_VERSION MAPPED_CASSANDRA_VERSION=$MAPPED_CASSANDRA_VERSION VERIFY_CYTHON=$FORCE_CYTHON nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --exclude-dir=tests/integration/long/upgrade --with-ignore-docstrings --with-xunit --xunit-file=long_results.xml tests/integration/long/ || true - fi - - - xunit: - - "*_results.xml" diff --git a/cassandra/__init__.py b/cassandra/__init__.py index f2bf696035..6d0744aa6e 100644 --- a/cassandra/__init__.py +++ b/cassandra/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -22,13 +24,13 @@ def emit(self, record): logging.getLogger('cassandra').addHandler(NullHandler()) -__version_info__ = (3, 24, 0) +__version_info__ = (3, 29, 3) __version__ = '.'.join(map(str, __version_info__)) class ConsistencyLevel(object): """ - Spcifies how many replicas must respond for an operation to be considered + Specifies how many replicas must respond for an operation to be considered a success. By default, ``ONE`` is used for all operations. """ @@ -55,7 +57,7 @@ class ConsistencyLevel(object): QUORUM = 4 """ - ``ceil(RF/2)`` replicas must respond to consider the operation a success + ``ceil(RF/2) + 1`` replicas must respond to consider the operation a success """ ALL = 5 @@ -161,7 +163,12 @@ class ProtocolVersion(object): V5 = 5 """ - v5, in beta from 3.x+ + v5, in beta from 3.x+. Finalised in 4.0-beta5 + """ + + V6 = 6 + """ + v6, in beta from 4.0-beta5 """ DSE_V1 = 0x41 @@ -174,12 +181,12 @@ class ProtocolVersion(object): DSE private protocol v2, supported in DSE 6.0+ """ - SUPPORTED_VERSIONS = (DSE_V2, DSE_V1, V5, V4, V3, V2, V1) + SUPPORTED_VERSIONS = (DSE_V2, DSE_V1, V6, V5, V4, V3, V2, V1) """ A tuple of all supported protocol versions """ - BETA_VERSIONS = (V5,) + BETA_VERSIONS = (V6,) """ A tuple of all beta protocol versions """ @@ -235,10 +242,14 @@ def has_continuous_paging_support(cls, version): def has_continuous_paging_next_pages(cls, version): return version >= cls.DSE_V2 + @classmethod + def has_checksumming_support(cls, version): + return cls.V5 <= version < cls.DSE_V1 + class WriteType(object): """ - For usage with :class:`.RetryPolicy`, this describe a type + For usage with :class:`.RetryPolicy`, this describes a type of write operation. """ @@ -263,7 +274,7 @@ class WriteType(object): COUNTER = 3 """ A counter write (for one or multiple partition keys). Such writes should - not be replayed in order to avoid overcount. + not be replayed in order to avoid over counting. """ BATCH_LOG = 4 @@ -274,7 +285,7 @@ class WriteType(object): CAS = 5 """ - A lighweight-transaction write, such as "DELETE ... IF EXISTS". + A lightweight-transaction write, such as "DELETE ... IF EXISTS". 
""" VIEW = 6 @@ -719,3 +730,19 @@ class UnresolvableContactPoints(DriverException): contact points, only when lookup fails for all hosts """ pass + +class DependencyException(Exception): + """ + Specific exception class for handling issues with driver dependencies + """ + + excs = [] + """ + A sequence of child exceptions + """ + + def __init__(self, msg, excs=[]): + complete_msg = msg + if excs: + complete_msg += ("\nThe following exceptions were observed: \n - " + '\n - '.join(str(e) for e in excs)) + Exception.__init__(self, complete_msg) diff --git a/cassandra/auth.py b/cassandra/auth.py index 3d2f751ac0..86759afe4d 100644 --- a/cassandra/auth.py +++ b/cassandra/auth.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -32,8 +34,6 @@ except ImportError: SASLClient = None -import six - log = logging.getLogger(__name__) # Custom payload keys related to DSE Unified Auth @@ -79,7 +79,7 @@ class Authenticator(object): 3) When the server indicates that authentication is successful, :meth:`~.on_authentication_success` will be called a token string that - that the server may optionally have sent. + the server may optionally have sent. The exact nature of the negotiation between the client and server is specific to the authentication mechanism configured server-side. @@ -92,7 +92,7 @@ class Authenticator(object): def initial_response(self): """ - Returns an message to send to the server to initiate the SASL handshake. + Returns a message to send to the server to initiate the SASL handshake. :const:`None` may be returned to send an empty message. 
""" return None @@ -270,15 +270,15 @@ def __init__(self, username, password): self.password = password def get_mechanism(self): - return six.b("PLAIN") + return b"PLAIN" def get_initial_challenge(self): - return six.b("PLAIN-START") + return b"PLAIN-START" def evaluate_challenge(self, challenge): - if challenge == six.b('PLAIN-START'): + if challenge == b'PLAIN-START': data = "\x00%s\x00%s" % (self.username, self.password) - return data if six.PY2 else data.encode() + return data.encode() raise Exception('Did not receive a valid challenge response from server') @@ -297,13 +297,13 @@ def __init__(self, host, service, qops, properties): self.sasl = SASLClient(host, service, 'GSSAPI', qops=qops, **properties) def get_mechanism(self): - return six.b("GSSAPI") + return b"GSSAPI" def get_initial_challenge(self): - return six.b("GSSAPI-START") + return b"GSSAPI-START" def evaluate_challenge(self, challenge): - if challenge == six.b('GSSAPI-START'): + if challenge == b'GSSAPI-START': return self.sasl.process() else: return self.sasl.process(challenge) diff --git a/cassandra/buffer.pxd b/cassandra/buffer.pxd index 0bbb1d5f57..3383fcd272 100644 --- a/cassandra/buffer.pxd +++ b/cassandra/buffer.pxd @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/bytesio.pxd b/cassandra/bytesio.pxd index d52d3fa8fe..24320f0ae1 100644 --- a/cassandra/bytesio.pxd +++ b/cassandra/bytesio.pxd @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/bytesio.pyx b/cassandra/bytesio.pyx index 1a57911fcf..d9781035ef 100644 --- a/cassandra/bytesio.pyx +++ b/cassandra/bytesio.pyx @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/cluster.py b/cassandra/cluster.py index c4d6de124d..43066f73f0 100644 --- a/cassandra/cluster.py +++ b/cassandra/cluster.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -21,16 +23,17 @@ import atexit from binascii import hexlify from collections import defaultdict +from collections.abc import Mapping from concurrent.futures import ThreadPoolExecutor, FIRST_COMPLETED, wait as wait_futures from copy import copy -from functools import partial, wraps +from functools import partial, reduce, wraps from itertools import groupby, count, chain import json import logging from warnings import warn from random import random -import six -from six.moves import filter, range, queue as Queue +import re +import queue import socket import sys import time @@ -43,7 +46,7 @@ from cassandra import (ConsistencyLevel, AuthenticationFailed, OperationTimedOut, UnsupportedOperation, SchemaTargetType, DriverException, ProtocolVersion, - UnresolvableContactPoints) + UnresolvableContactPoints, DependencyException) from cassandra.auth import _proxy_execute_key, PlainTextAuthProvider from cassandra.connection import (ConnectionException, ConnectionShutdown, ConnectionHeartbeat, ProtocolVersionUnsupported, @@ -63,7 +66,7 @@ BatchMessage, RESULT_KIND_PREPARED, RESULT_KIND_SET_KEYSPACE, RESULT_KIND_ROWS, RESULT_KIND_SCHEMA_CHANGE, ProtocolHandler, - RESULT_KIND_VOID) + RESULT_KIND_VOID, ProtocolException) from cassandra.metadata import Metadata, protect_name, murmur3, _NodeInfo from cassandra.policies import (TokenAwarePolicy, DCAwareRoundRobinPolicy, SimpleConvictionPolicy, ExponentialReconnectionPolicy, HostDistance, @@ -79,7 +82,6 @@ HostTargetingStatement) from cassandra.marshal import int64_pack from cassandra.timestamps import MonotonicTimestampGenerator -from cassandra.compat import Mapping from cassandra.util import _resolve_contact_points_to_string_map, Version from 
cassandra.datastax.insights.reporter import MonitorReporter @@ -99,7 +101,11 @@ try: from cassandra.io.eventletreactor import EventletConnection -except ImportError: +# PYTHON-1364 +# +# At the moment eventlet initialization is chucking AttributeErrors due to its dependence on pyOpenSSL +# and some changes in Python 3.12 which have some knock-on effects there. +except (ImportError, AttributeError): EventletConnection = None try: @@ -107,44 +113,74 @@ except ImportError: from cassandra.util import WeakSet # NOQA -if six.PY3: - long = int - -def _is_eventlet_monkey_patched(): - if 'eventlet.patcher' not in sys.modules: - return False - import eventlet.patcher - return eventlet.patcher.is_monkey_patched('socket') - - def _is_gevent_monkey_patched(): if 'gevent.monkey' not in sys.modules: return False import gevent.socket return socket.socket is gevent.socket.socket +def _try_gevent_import(): + if _is_gevent_monkey_patched(): + from cassandra.io.geventreactor import GeventConnection + return (GeventConnection,None) + else: + return (None,None) -# default to gevent when we are monkey patched with gevent, eventlet when -# monkey patched with eventlet, otherwise if libev is available, use that as -# the default because it's fastest. Otherwise, use asyncore. -if _is_gevent_monkey_patched(): - from cassandra.io.geventreactor import GeventConnection as DefaultConnection -elif _is_eventlet_monkey_patched(): - from cassandra.io.eventletreactor import EventletConnection as DefaultConnection -else: +def _is_eventlet_monkey_patched(): + if 'eventlet.patcher' not in sys.modules: + return False try: - from cassandra.io.libevreactor import LibevConnection as DefaultConnection # NOQA - except ImportError: - from cassandra.io.asyncorereactor import AsyncoreConnection as DefaultConnection # NOQA + import eventlet.patcher + return eventlet.patcher.is_monkey_patched('socket') + # Another case related to PYTHON-1364 + except AttributeError: + return False + +def _try_eventlet_import(): + if _is_eventlet_monkey_patched(): + from cassandra.io.eventletreactor import EventletConnection + return (EventletConnection,None) + else: + return (None,None) + +def _try_libev_import(): + try: + from cassandra.io.libevreactor import LibevConnection + return (LibevConnection,None) + except DependencyException as e: + return (None, e) + +def _try_asyncore_import(): + try: + from cassandra.io.asyncorereactor import AsyncoreConnection + return (AsyncoreConnection,None) + except DependencyException as e: + return (None, e) + +def _connection_reduce_fn(val,import_fn): + (rv, excs) = val + # If we've already found a workable Connection class return immediately + if rv: + return val + (import_result, exc) = import_fn() + if exc: + excs.append(exc) + return (rv or import_result, excs) + +log = logging.getLogger(__name__) + +conn_fns = (_try_gevent_import, _try_eventlet_import, _try_libev_import, _try_asyncore_import) +(conn_class, excs) = reduce(_connection_reduce_fn, conn_fns, (None,[])) +if not conn_class: + raise DependencyException("Unable to load a default connection class", excs) +DefaultConnection = conn_class # Forces load of utf8 encoding module to avoid deadlock that occurs -# if code that is being imported tries to import the module in a seperate +# if code that is being imported tries to import the module in a separate # thread. 
# See http://bugs.python.org/issue10923 "".encode('utf8') -log = logging.getLogger(__name__) - DEFAULT_MIN_REQUESTS = 5 DEFAULT_MAX_REQUESTS = 100 @@ -777,15 +813,15 @@ def default_retry_policy(self, policy): Using ssl_options without ssl_context is deprecated and will be removed in the next major release. - An optional dict which will be used as kwargs for ``ssl.SSLContext.wrap_socket`` (or - ``ssl.wrap_socket()`` if used without ssl_context) when new sockets are created. - This should be used when client encryption is enabled in Cassandra. + An optional dict which will be used as kwargs for ``ssl.SSLContext.wrap_socket`` + when new sockets are created. This should be used when client encryption is enabled + in Cassandra. The following documentation only applies when ssl_options is used without ssl_context. By default, a ``ca_certs`` value should be supplied (the value should be a string pointing to the location of the CA certs file), and you probably - want to specify ``ssl_version`` as ``ssl.PROTOCOL_TLSv1`` to match + want to specify ``ssl_version`` as ``ssl.PROTOCOL_TLS`` to match Cassandra's default protocol. .. versionchanged:: 3.3.0 @@ -795,6 +831,12 @@ def default_retry_policy(self, policy): should almost always require the option ``'cert_reqs': ssl.CERT_REQUIRED``. Note also that this functionality was not built into Python standard library until (2.7.9, 3.2). To enable this mechanism in earlier versions, patch ``ssl.match_hostname`` with a custom or `back-ported function `_. + + .. versionchanged:: 3.29.0 + + ``ssl.match_hostname`` has been deprecated since Python 3.7 (and removed in Python 3.12). This functionality is now implemented + via ``ssl.SSLContext.check_hostname``. All options specified above (including ``check_hostname``) should continue to behave in a + way that is consistent with prior implementations. """ ssl_context = None @@ -984,13 +1026,13 @@ def default_retry_policy(self, policy): application_version = '' """ - A string identifiying this application's version to Insights + A string identifying this application's version to Insights """ cloud = None """ A dict of the cloud configuration. Example:: - + { # path to the secure connect bundle 'secure_connect_bundle': '/path/to/secure-connect-dbname.zip', @@ -1003,6 +1045,12 @@ def default_retry_policy(self, policy): load the configuration and certificates. """ + column_encryption_policy = None + """ + An instance of :class:`cassandra.policies.ColumnEncryptionPolicy` specifying encryption materials to be + used for columns in this cluster. + """ + @property def schema_metadata_enabled(self): """ @@ -1104,10 +1152,11 @@ def __init__(self, monitor_reporting_enabled=True, monitor_reporting_interval=30, client_id=None, - cloud=None): + cloud=None, + column_encryption_policy=None): """ ``executor_threads`` defines the number of threads in a pool for handling asynchronous tasks such as - extablishing connection pools or refreshing metadata. + establishing connection pools or refreshing metadata. Any of the mutable Cluster attributes may be set as keyword arguments to the constructor. """ @@ -1143,7 +1192,7 @@ def __init__(self, else: self._contact_points_explicit = True - if isinstance(contact_points, six.string_types): + if isinstance(contact_points, str): raise TypeError("contact_points should not be a string, it should be a sequence (e.g. 
list) of strings") if None in contact_points: @@ -1152,6 +1201,9 @@ def __init__(self, self.port = port + if column_encryption_policy is not None: + self.column_encryption_policy = column_encryption_policy + self.endpoint_factory = endpoint_factory or DefaultEndPointFactory(port=self.port) self.endpoint_factory.configure(self) @@ -1411,7 +1463,7 @@ def register_user_type(self, keyspace, user_type, klass): for. `klass` should be a class with attributes whose names match the - fields of the user-defined type. The constructor must accepts kwargs + fields of the user-defined type. The constructor must accept kwargs for each of the fields in the UDT. This method should only be called after the type has been created @@ -1441,7 +1493,7 @@ def __init__(self, street, zipcode): # results will include Address instances results = session.execute("SELECT * FROM users") row = results[0] - print row.id, row.location.street, row.location.zipcode + print(row.id, row.location.street, row.location.zipcode) """ if self.protocol_version < 3: @@ -1570,7 +1622,7 @@ def set_core_connections_per_host(self, host_distance, core_connections): If :attr:`~.Cluster.protocol_version` is set to 3 or higher, this is not supported (there is always one connection per host, unless the host is remote and :attr:`connect_to_remote_hosts` is :const:`False`) - and using this will result in an :exc:`~.UnsupporteOperation`. + and using this will result in an :exc:`~.UnsupportedOperation`. """ if self.protocol_version >= 3: raise UnsupportedOperation( @@ -1603,7 +1655,7 @@ def set_max_connections_per_host(self, host_distance, max_connections): If :attr:`~.Cluster.protocol_version` is set to 3 or higher, this is not supported (there is always one connection per host, unless the host is remote and :attr:`connect_to_remote_hosts` is :const:`False`) - and using this will result in an :exc:`~.UnsupporteOperation`. + and using this will result in an :exc:`~.UnsupportedOperation`. """ if self.protocol_version >= 3: raise UnsupportedOperation( @@ -1650,7 +1702,7 @@ def protocol_downgrade(self, host_endpoint, previous_version): log.warning("Downgrading core protocol version from %d to %d for %s. " "To avoid this, it is best practice to explicitly set Cluster(protocol_version) to the version supported by your cluster. " - "http://datastax.github.io/python-driver/api/cassandra/cluster.html#cassandra.cluster.Cluster.protocol_version", self.protocol_version, new_version, host_endpoint) + "https://docs.datastax.com/en/developer/python-driver/latest/api/cassandra/cluster.html#cassandra.cluster.Cluster.protocol_version", self.protocol_version, new_version, host_endpoint) self.protocol_version = new_version def connect(self, keyspace=None, wait_for_all_pools=False): @@ -1775,8 +1827,8 @@ def _new_session(self, keyspace): return session def _session_register_user_types(self, session): - for keyspace, type_map in six.iteritems(self._user_types): - for udt_name, klass in six.iteritems(type_map): + for keyspace, type_map in self._user_types.items(): + for udt_name, klass in type_map.items(): session.user_type_registered(keyspace, udt_name, klass) def _cleanup_failed_on_up_handling(self, host): @@ -2391,7 +2443,7 @@ def default_consistency_level(self, cl): *Deprecated:* use execution profiles instead """ warn("Setting the consistency level at the session level will be removed in 4.0. Consider using " - "execution profiles and setting the desired consitency level to the EXEC_PROFILE_DEFAULT profile." 
+ "execution profiles and setting the desired consistency level to the EXEC_PROFILE_DEFAULT profile." , DeprecationWarning) self._validate_set_legacy_config('default_consistency_level', cl) @@ -2555,6 +2607,15 @@ def __init__(self, cluster, hosts, keyspace=None): self.session_id = uuid.uuid4() self._graph_paging_available = self._check_graph_paging_available() + if self.cluster.column_encryption_policy is not None: + try: + self.client_protocol_handler = type( + str(self.session_id) + "-ProtocolHandler", + (ProtocolHandler,), + {"column_encryption_policy": self.cluster.column_encryption_policy}) + except AttributeError: + log.info("Unable to set column encryption policy for session") + if self.cluster.monitor_reporting_enabled: cc_host = self.cluster.get_control_connection_host() valid_insights_version = (cc_host and version_supports_insights(cc_host.dse_version)) @@ -2656,7 +2717,7 @@ def execute_async(self, query, parameters=None, trace=False, custom_payload=None """ custom_payload = custom_payload if custom_payload else {} if execute_as: - custom_payload[_proxy_execute_key] = six.b(execute_as) + custom_payload[_proxy_execute_key] = execute_as.encode() future = self._create_response_future( query, parameters, trace, custom_payload, timeout, @@ -2666,6 +2727,98 @@ def execute_async(self, query, parameters=None, trace=False, custom_payload=None future.send_request() return future + def execute_concurrent(self, statements_and_parameters, concurrency=100, raise_on_first_error=True, results_generator=False, execution_profile=EXEC_PROFILE_DEFAULT): + """ + Executes a sequence of (statement, parameters) tuples concurrently. Each + ``parameters`` item must be a sequence or :const:`None`. + + The `concurrency` parameter controls how many statements will be executed + concurrently. When :attr:`.Cluster.protocol_version` is set to 1 or 2, + it is recommended that this be kept below 100 times the number of + core connections per host times the number of connected hosts (see + :meth:`.Cluster.set_core_connections_per_host`). If that amount is exceeded, + the event loop thread may attempt to block on new connection creation, + substantially impacting throughput. If :attr:`~.Cluster.protocol_version` + is 3 or higher, you can safely experiment with higher levels of concurrency. + + If `raise_on_first_error` is left as :const:`True`, execution will stop + after the first failed statement and the corresponding exception will be + raised. + + `results_generator` controls how the results are returned. + + * If :const:`False`, the results are returned only after all requests have completed. + * If :const:`True`, a generator expression is returned. Using a generator results in a constrained + memory footprint when the results set will be large -- results are yielded + as they return instead of materializing the entire list at once. The trade for lower memory + footprint is marginal CPU overhead (more thread coordination and sorting out-of-order results + on-the-fly). + + `execution_profile` argument is the execution profile to use for this + request, it is passed directly to :meth:`Session.execute_async`. + + A sequence of ``ExecutionResult(success, result_or_exc)`` namedtuples is returned + in the same order that the statements were passed in. If ``success`` is :const:`False`, + there was an error executing the statement, and ``result_or_exc`` + will be an :class:`Exception`. If ``success`` is :const:`True`, ``result_or_exc`` + will be the query result. 
+
+        Example usage::
+
+            select_statement = session.prepare("SELECT * FROM users WHERE id=?")
+
+            statements_and_params = []
+            for user_id in user_ids:
+                params = (user_id, )
+                statements_and_params.append((select_statement, params))
+
+            results = session.execute_concurrent(statements_and_params, raise_on_first_error=False)
+
+            for (success, result) in results:
+                if not success:
+                    handle_error(result)  # result will be an Exception
+                else:
+                    process_user(result[0])  # result will be a list of rows
+
+        Note: when ``results_generator`` is used, it is important to ensure the consumers do not
+        block or attempt further synchronous requests, because no further IO will be processed until
+        the consumer returns. This may also produce a deadlock in the IO event thread.
+        """
+        from cassandra.concurrent import execute_concurrent
+        return execute_concurrent(self, statements_and_parameters, concurrency, raise_on_first_error, results_generator, execution_profile)
+
+    def execute_concurrent_with_args(self, statement, parameters, *args, **kwargs):
+        """
+        Like :meth:`~cassandra.concurrent.execute_concurrent()`, but takes a single
+        statement and a sequence of parameters. Each item in ``parameters``
+        should be a sequence or :const:`None`.
+
+        Example usage::
+
+            statement = session.prepare("INSERT INTO mytable (a, b) VALUES (1, ?)")
+            parameters = [(x,) for x in range(1000)]
+            session.execute_concurrent_with_args(statement, parameters, concurrency=50)
+        """
+        from cassandra.concurrent import execute_concurrent_with_args
+        return execute_concurrent_with_args(self, statement, parameters, *args, **kwargs)
+
+    def execute_concurrent_async(self, statements_and_parameters, concurrency=100, raise_on_first_error=False, execution_profile=EXEC_PROFILE_DEFAULT):
+        """
+        Asynchronously executes a sequence of (statement, parameters) tuples concurrently.
+
+        Args:
+            statements_and_parameters: Iterable of (prepared CQL statement, bind parameters) tuples.
+            concurrency (int, optional): Number of concurrent operations. Default is 100.
+            raise_on_first_error (bool, optional): If True, execution stops on the first error. Default is False.
+            execution_profile (ExecutionProfile, optional): Execution profile to use. Default is EXEC_PROFILE_DEFAULT.
+
+        Returns:
+            A `Future` object that will be completed when all operations are done.
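A minimal consumption sketch for the ``execute_concurrent_async`` API documented above, assuming a connected ``session`` and the ``statements_and_params`` list from the earlier example (``handle_error`` and ``process_user`` are placeholder callbacks)::

    from concurrent.futures import wait

    future = session.execute_concurrent_async(statements_and_params)
    wait([future])  # block until every statement has completed
    for success, result_or_exc in future.result():
        if not success:
            handle_error(result_or_exc)  # result_or_exc is an Exception
        else:
            process_user(result_or_exc[0])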
+        """
+        from cassandra.concurrent import execute_concurrent_async
+        return execute_concurrent_async(self, statements_and_parameters, concurrency, raise_on_first_error, execution_profile)
+
     def execute_graph(self, query, parameters=None, trace=False, execution_profile=EXEC_PROFILE_GRAPH_DEFAULT, execute_as=None):
         """
         Executes a Gremlin query string or GraphStatement synchronously,
@@ -2720,8 +2873,8 @@ def execute_graph_async(self, query, parameters=None, trace=False, execution_pro
         custom_payload = execution_profile.graph_options.get_options_map()
         if execute_as:
-            custom_payload[_proxy_execute_key] = six.b(execute_as)
-            custom_payload[_request_timeout_key] = int64_pack(long(execution_profile.request_timeout * 1000))
+            custom_payload[_proxy_execute_key] = execute_as.encode()
+            custom_payload[_request_timeout_key] = int64_pack(int(execution_profile.request_timeout * 1000))
 
         future = self._create_response_future(query, parameters=None, trace=trace,
                                               custom_payload=custom_payload, timeout=_NOT_SET,
                                               execution_profile=execution_profile)
@@ -2858,7 +3011,7 @@ def _create_response_future(self, query, parameters, trace, custom_payload,
 
         prepared_statement = None
 
-        if isinstance(query, six.string_types):
+        if isinstance(query, str):
             query = SimpleStatement(query)
         elif isinstance(query, PreparedStatement):
             query = query.bind(parameters)
@@ -3074,7 +3227,7 @@ def prepare(self, query, custom_payload=None, keyspace=None):
         prepared_keyspace = keyspace if keyspace else None
         prepared_statement = PreparedStatement.from_message(
             response.query_id, response.bind_metadata, response.pk_indexes, self.cluster.metadata, query, prepared_keyspace,
-            self._protocol_version, response.column_metadata, response.result_metadata_id)
+            self._protocol_version, response.column_metadata, response.result_metadata_id, self.cluster.column_encryption_policy)
 
         prepared_statement.custom_payload = future.custom_payload
         self.cluster.add_prepared(response.query_id, prepared_statement)
@@ -3109,7 +3262,7 @@ def prepare_on_all_hosts(self, query, excluded_host, keyspace=None):
                 continue
 
             if request_id is None:
-                # the error has already been logged by ResponsFuture
+                # the error has already been logged by ResponseFuture
                 log.debug("Failed to prepare query for host %s: %r",
                           host, future._errors.get(host))
                 continue
@@ -3326,10 +3479,6 @@ def user_type_registered(self, keyspace, user_type, klass):
                 'User type %s does not exist in keyspace %s' % (user_type, keyspace))
 
         field_names = type_meta.field_names
-        if six.PY2:
-            # go from unicode to string to avoid decode errors from implicit
-            # decode when formatting non-ascii values
-            field_names = [fn.encode('utf-8') for fn in field_names]
 
         def encode(val):
             return '{ %s }' % ' , '.join('%s : %s' % (
@@ -3548,6 +3697,14 @@ def _try_connect(self, host):
                 break
             except ProtocolVersionUnsupported as e:
                 self._cluster.protocol_downgrade(host.endpoint, e.startup_version)
+            except ProtocolException as e:
+                # protocol v5 is out of beta in C* >=4.0-beta5 and is now the default driver
+                # protocol version. If the protocol version was not explicitly specified,
+                # and the server raises a beta protocol error, we should downgrade.
+ if not self._cluster._protocol_version_explicit and e.is_beta_protocol_error: + self._cluster.protocol_downgrade(host.endpoint, self._cluster.protocol_version) + else: + raise log.debug("[control connection] Established new connection %r, " "registering watchers and refreshing schema and topology", @@ -3788,12 +3945,14 @@ def _refresh_node_list_and_token_map(self, connection, preloaded_results=None, # any new nodes, so we need this additional check. (See PYTHON-90) should_rebuild_token_map = force_token_rebuild or self._cluster.metadata.partitioner is None for row in peers_result: + if not self._is_valid_peer(row): + log.warning( + "Found an invalid row for peer (%s). Ignoring host." % + _NodeInfo.get_broadcast_rpc_address(row)) + continue + endpoint = self._cluster.endpoint_factory.create(row) - tokens = row.get("tokens", None) - if 'tokens' in row and not tokens: # it was selected, but empty - log.warning("Excluding host (%s) with no tokens in system.peers table of %s." % (endpoint, connection.endpoint)) - continue if endpoint in found_hosts: log.warning("Found multiple hosts with the same endpoint (%s). Excluding peer %s", endpoint, row.get("peer")) continue @@ -3820,6 +3979,7 @@ def _refresh_node_list_and_token_map(self, connection, preloaded_results=None, host.dse_workload = row.get("workload") host.dse_workloads = row.get("workloads") + tokens = row.get("tokens", None) if partitioner and tokens and self._token_meta_enabled: token_map[host] = tokens @@ -3834,6 +3994,12 @@ def _refresh_node_list_and_token_map(self, connection, preloaded_results=None, log.debug("[control connection] Rebuilding token map due to topology changes") self._cluster.metadata.rebuild_token_map(partitioner, token_map) + @staticmethod + def _is_valid_peer(row): + return bool(_NodeInfo.get_broadcast_rpc_address(row) and row.get("host_id") and + row.get("data_center") and row.get("rack") and + ('tokens' not in row or row.get('tokens'))) + def _update_location_info(self, host, datacenter, rack): if host.datacenter == datacenter and host.rack == rack: return False @@ -3893,7 +4059,7 @@ def _handle_status_change(self, event): elif change_type == "DOWN": # Note that there is a slight risk we can receive the event late and thus # mark the host down even though we already had reconnected successfully. - # But it is unlikely, and don't have too much consequence since we'll try reconnecting + # This is unlikely, and will not have much consequence because we'll try reconnecting # right away, so we favor the detection to make the Host.is_up more accurate. 
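The downgrade path above only triggers when no protocol version was pinned. A hedged configuration sketch, assuming a locally reachable node: pinning ``protocol_version`` explicitly avoids the negotiation (and the warning) entirely::

    from cassandra import ProtocolVersion
    from cassandra.cluster import Cluster

    # With an explicit version the driver raises instead of silently
    # downgrading if the server cannot speak it.
    cluster = Cluster(['127.0.0.1'], protocol_version=ProtocolVersion.V4)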
if host is not None:
             # this will be run by the scheduler
@@ -3991,7 +4157,7 @@ def _get_schema_mismatches(self, peers_result, local_result, local_address):
             log.debug("[control connection] Schemas match")
             return None
 
-        return dict((version, list(nodes)) for version, nodes in six.iteritems(versions))
+        return dict((version, list(nodes)) for version, nodes in versions.items())
 
     def _get_peers_query(self, peers_query_type, connection=None):
         """
@@ -4111,7 +4277,7 @@ class _Scheduler(Thread):
     is_shutdown = False
 
     def __init__(self, executor):
-        self._queue = Queue.PriorityQueue()
+        self._queue = queue.PriorityQueue()
         self._scheduled_tasks = set()
         self._count = count()
         self._executor = executor
@@ -4169,7 +4335,7 @@ def run(self):
                     else:
                         self._queue.put_nowait((run_at, i, task))
                         break
-            except Queue.Empty:
+            except queue.Empty:
                 pass
 
             time.sleep(0.1)
@@ -4344,10 +4510,17 @@ def _on_timeout(self, _attempts=0):
 
         pool = self.session._pools.get(self._current_host)
         if pool and not pool.is_shutdown:
+            # Do not return the stream ID to the pool yet. We cannot reuse it
+            # because the node might still be processing the query and will
+            # return a late response to that query - if we used such a stream
+            # before the response to the previous query has arrived, the new
+            # query could get a response from the old query
             with self._connection.lock:
-                self._connection.request_ids.append(self._req_id)
+                self._connection.orphaned_request_ids.add(self._req_id)
+                if len(self._connection.orphaned_request_ids) >= self._connection.orphaned_threshold:
+                    self._connection.orphaned_threshold_reached = True
 
-            pool.return_connection(self._connection)
+            pool.return_connection(self._connection, stream_was_orphaned=True)
 
         errors = self._errors
         if not errors:
@@ -4368,7 +4541,7 @@ def _on_speculative_execute(self):
         # PYTHON-836, the speculative queries must be after
         # the query is sent from the main thread, otherwise the
         # query from the main thread may raise NoHostAvailable
-        # if the _query_plan has been exhausted by the specualtive queries.
+        # if the _query_plan has been exhausted by the speculative queries.
         # This also prevents a race condition accessing the iterator.
         # We reschedule this call until the main thread has succeeded
         # making a query
@@ -4480,7 +4653,7 @@ def warnings(self):
 
         Ensure the future is complete before trying to access this property
         (call :meth:`.result()`, or after callback is invoked).
-        Otherwise it may throw if the response has not been received.
+        Otherwise, it may throw if the response has not been received.
         """
         # TODO: When timers are introduced, just make this wait
         if not self._event.is_set():
@@ -4496,7 +4669,7 @@ def custom_payload(self):
 
         Ensure the future is complete before trying to access this property
         (call :meth:`.result()`, or after callback is invoked).
-        Otherwise it may throw if the response has not been received.
+        Otherwise, it may throw if the response has not been received.
 
         :return: :ref:`custom_payload`.
         """
@@ -5125,6 +5298,12 @@ def next(self):
             self.fetch_next_page()
             self._page_iter = iter(self._current_rows)
 
+            # Some servers can return empty pages in this case; Scylla is known to do
+            # so in some circumstances. Guard against this by recursing to handle
+            # the next(iter) call. If we have an empty page in that case, it will
+            # get handled by the StopIteration handler when we recurse.
+ return self.next() + return next(self._page_iter) __next__ = next @@ -5200,7 +5379,7 @@ def cancel_continuous_paging(self): try: self.response_future._continuous_paging_session.cancel() except AttributeError: - raise DriverException("Attempted to cancel paging with no active session. This is only for requests with ContinuousdPagingOptions.") + raise DriverException("Attempted to cancel paging with no active session. This is only for requests with ContinuousPagingOptions.") @property def was_applied(self): @@ -5211,7 +5390,7 @@ def was_applied(self): a :class:`.query.BatchStatement` containing LWT. In the latter case either all the batch succeeds or fails. - Only valid when one of the of the internal row factories is in use. + Only valid when one of the internal row factories is in use. """ if self.response_future.row_factory not in (named_tuple_factory, dict_factory, tuple_factory): raise RuntimeError("Cannot determine LWT result with row factory %s" % (self.response_future.row_factory,)) diff --git a/cassandra/cmurmur3.c b/cassandra/cmurmur3.c index bce513217d..4affdad46c 100644 --- a/cassandra/cmurmur3.c +++ b/cassandra/cmurmur3.c @@ -14,12 +14,6 @@ #include #include -#if PY_VERSION_HEX < 0x02050000 -typedef int Py_ssize_t; -#define PY_SSIZE_T_MAX INT_MAX -#define PY_SSIZE_T_MIN INT_MIN -#endif - #ifdef PYPY_VERSION #define COMPILING_IN_PYPY 1 #define COMPILING_IN_CPYTHON 0 @@ -216,8 +210,6 @@ static PyMethodDef cmurmur3_methods[] = { {NULL, NULL, 0, NULL} }; -#if PY_MAJOR_VERSION >= 3 - static int cmurmur3_traverse(PyObject *m, visitproc visit, void *arg) { Py_VISIT(GETSTATE(m)->error); return 0; @@ -245,18 +237,8 @@ static struct PyModuleDef moduledef = { PyObject * PyInit_cmurmur3(void) -#else -#define INITERROR return - -void -initcmurmur3(void) -#endif { -#if PY_MAJOR_VERSION >= 3 PyObject *module = PyModule_Create(&moduledef); -#else - PyObject *module = Py_InitModule("cmurmur3", cmurmur3_methods); -#endif struct module_state *st = NULL; if (module == NULL) @@ -269,7 +251,5 @@ initcmurmur3(void) INITERROR; } -#if PY_MAJOR_VERSION >= 3 return module; -#endif } diff --git a/cassandra/column_encryption/_policies.py b/cassandra/column_encryption/_policies.py new file mode 100644 index 0000000000..e1519f6b79 --- /dev/null +++ b/cassandra/column_encryption/_policies.py @@ -0,0 +1,141 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
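Before the implementation below, a usage sketch for the policy this file introduces; ``my_ks``/``my_table``/``my_col`` are placeholder names, and ``ColDesc`` is assumed to come from ``cassandra.policies``::

    import os

    from cassandra.cluster import Cluster
    from cassandra.policies import ColDesc
    from cassandra.column_encryption.policies import AES256ColumnEncryptionPolicy

    key = os.urandom(32)  # AES-256 requires a 256-bit (32-byte) key; store it safely
    policy = AES256ColumnEncryptionPolicy()
    policy.add_column(ColDesc('my_ks', 'my_table', 'my_col'), key, 'int')
    cluster = Cluster(column_encryption_policy=policy)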
+
+from collections import namedtuple
+from functools import lru_cache
+
+import logging
+import os
+
+log = logging.getLogger(__name__)
+
+from cassandra.cqltypes import _cqltypes
+from cassandra.policies import ColumnEncryptionPolicy
+
+from cryptography.hazmat.primitives import padding
+from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
+
+AES256_BLOCK_SIZE = 128
+AES256_BLOCK_SIZE_BYTES = int(AES256_BLOCK_SIZE / 8)
+AES256_KEY_SIZE = 256
+AES256_KEY_SIZE_BYTES = int(AES256_KEY_SIZE / 8)
+
+ColData = namedtuple('ColData', ['key', 'type'])
+
+class AES256ColumnEncryptionPolicy(ColumnEncryptionPolicy):
+
+    # Fix block cipher mode for now. IV size is a function of the block cipher used,
+    # so fixing this avoids (possibly unnecessary) validation logic here.
+    mode = modes.CBC
+
+    # "iv" param here expects a bytearray that's the same size as the block
+    # size for AES-256 (128 bits or 16 bytes). If none is provided a new one
+    # will be randomly generated, but in this case the IV should be recorded and
+    # preserved or else you will not be able to decrypt any data encrypted by this
+    # policy.
+    def __init__(self, iv=None):
+
+        # CBC uses an IV that's the same size as the block size
+        #
+        # Avoid defining IV with a default arg in order to stay away from
+        # any issues around the caching of default args
+        self.iv = iv
+        if self.iv:
+            if not len(self.iv) == AES256_BLOCK_SIZE_BYTES:
+                raise ValueError("This policy uses AES-256 with CBC mode and therefore expects a 128-bit initialization vector")
+        else:
+            self.iv = os.urandom(AES256_BLOCK_SIZE_BYTES)
+
+        # ColData for a given ColDesc is always preserved. We only create a Cipher
+        # when there's an actual need to do so for a given ColDesc
+        self.coldata = {}
+        self.ciphers = {}
+
+    def encrypt(self, coldesc, obj_bytes):
+
+        # AES256 has a 128-bit block size so if the input bytes don't align perfectly on
+        # those blocks we have to pad them.
There's plenty of room for optimization here:
+        #
+        # * Instances of the PKCS7 padder should be managed in a bounded pool
+        # * It would be nice if we could get a flag from encrypted data to indicate
+        #   whether it was padded or not
+        # * Might be able to make this happen with a leading block of flags in encrypted data
+        padder = padding.PKCS7(AES256_BLOCK_SIZE).padder()
+        padded_bytes = padder.update(obj_bytes) + padder.finalize()
+
+        cipher = self._get_cipher(coldesc)
+        encryptor = cipher.encryptor()
+        return self.iv + encryptor.update(padded_bytes) + encryptor.finalize()
+
+    def decrypt(self, coldesc, bytes):
+
+        iv = bytes[:AES256_BLOCK_SIZE_BYTES]
+        encrypted_bytes = bytes[AES256_BLOCK_SIZE_BYTES:]
+        cipher = self._get_cipher(coldesc, iv=iv)
+        decryptor = cipher.decryptor()
+        padded_bytes = decryptor.update(encrypted_bytes) + decryptor.finalize()
+
+        unpadder = padding.PKCS7(AES256_BLOCK_SIZE).unpadder()
+        return unpadder.update(padded_bytes) + unpadder.finalize()
+
+    def add_column(self, coldesc, key, type):
+
+        if not coldesc:
+            raise ValueError("ColDesc supplied to add_column cannot be None")
+        if not key:
+            raise ValueError("Key supplied to add_column cannot be None")
+        if not type:
+            raise ValueError("Type supplied to add_column cannot be None")
+        if type not in _cqltypes.keys():
+            raise ValueError("Type {} is not a supported type".format(type))
+        if not len(key) == AES256_KEY_SIZE_BYTES:
+            raise ValueError("AES256 column encryption policy expects a 256-bit encryption key")
+        self.coldata[coldesc] = ColData(key, _cqltypes[type])
+
+    def contains_column(self, coldesc):
+        return coldesc in self.coldata
+
+    def encode_and_encrypt(self, coldesc, obj):
+        if not coldesc:
+            raise ValueError("ColDesc supplied to encode_and_encrypt cannot be None")
+        if obj is None:
+            raise ValueError("Object supplied to encode_and_encrypt cannot be None")
+        coldata = self.coldata.get(coldesc)
+        if not coldata:
+            raise ValueError("Could not find ColData for ColDesc {}".format(coldesc))
+        return self.encrypt(coldesc, coldata.type.serialize(obj, None))
+
+    def cache_info(self):
+        return AES256ColumnEncryptionPolicy._build_cipher.cache_info()
+
+    def column_type(self, coldesc):
+        return self.coldata[coldesc].type
+
+    def _get_cipher(self, coldesc, iv=None):
+        """
+        Access relevant state from this instance necessary to create a Cipher and then get one,
+        hopefully returning a cached instance if we've already done so (and it hasn't been evicted)
+        """
+        try:
+            coldata = self.coldata[coldesc]
+            return AES256ColumnEncryptionPolicy._build_cipher(coldata.key, iv or self.iv)
+        except KeyError:
+            raise ValueError("Could not find column {}".format(coldesc))
+
+    # Deliberately defined without self so that lru_cache does not cache policy instances
+    @lru_cache(maxsize=128)
+    def _build_cipher(key, iv):
+        return Cipher(algorithms.AES256(key), AES256ColumnEncryptionPolicy.mode(iv))
diff --git a/cassandra/column_encryption/policies.py b/cassandra/column_encryption/policies.py
new file mode 100644
index 0000000000..a1bd25d3e6
--- /dev/null
+++ b/cassandra/column_encryption/policies.py
@@ -0,0 +1,22 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import cryptography + from cassandra.column_encryption._policies import * +except ImportError: + # Cryptography is not installed + pass diff --git a/cassandra/compat.py b/cassandra/compat.py deleted file mode 100644 index 83c1b104e5..0000000000 --- a/cassandra/compat.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright DataStax, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import six - -if six.PY2: - from collections import Mapping -elif six.PY3: - from collections.abc import Mapping diff --git a/cassandra/concurrent.py b/cassandra/concurrent.py index a8bddcbdab..012f52f954 100644 --- a/cassandra/concurrent.py +++ b/cassandra/concurrent.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,76 +15,23 @@ # limitations under the License. +import logging from collections import namedtuple +from concurrent.futures import Future from heapq import heappush, heappop from itertools import cycle -import six -from six.moves import xrange, zip from threading import Condition -import sys -from cassandra.cluster import ResultSet +from cassandra.cluster import ResultSet, EXEC_PROFILE_DEFAULT -import logging log = logging.getLogger(__name__) ExecutionResult = namedtuple('ExecutionResult', ['success', 'result_or_exc']) -def execute_concurrent(session, statements_and_parameters, concurrency=100, raise_on_first_error=True, results_generator=False): +def execute_concurrent(session, statements_and_parameters, concurrency=100, raise_on_first_error=True, results_generator=False, execution_profile=EXEC_PROFILE_DEFAULT): """ - Executes a sequence of (statement, parameters) tuples concurrently. Each - ``parameters`` item must be a sequence or :const:`None`. 
- - The `concurrency` parameter controls how many statements will be executed - concurrently. When :attr:`.Cluster.protocol_version` is set to 1 or 2, - it is recommended that this be kept below 100 times the number of - core connections per host times the number of connected hosts (see - :meth:`.Cluster.set_core_connections_per_host`). If that amount is exceeded, - the event loop thread may attempt to block on new connection creation, - substantially impacting throughput. If :attr:`~.Cluster.protocol_version` - is 3 or higher, you can safely experiment with higher levels of concurrency. - - If `raise_on_first_error` is left as :const:`True`, execution will stop - after the first failed statement and the corresponding exception will be - raised. - - `results_generator` controls how the results are returned. - - * If :const:`False`, the results are returned only after all requests have completed. - * If :const:`True`, a generator expression is returned. Using a generator results in a constrained - memory footprint when the results set will be large -- results are yielded - as they return instead of materializing the entire list at once. The trade for lower memory - footprint is marginal CPU overhead (more thread coordination and sorting out-of-order results - on-the-fly). - - A sequence of ``ExecutionResult(success, result_or_exc)`` namedtuples is returned - in the same order that the statements were passed in. If ``success`` is :const:`False`, - there was an error executing the statement, and ``result_or_exc`` will be - an :class:`Exception`. If ``success`` is :const:`True`, ``result_or_exc`` - will be the query result. - - Example usage:: - - select_statement = session.prepare("SELECT * FROM users WHERE id=?") - - statements_and_params = [] - for user_id in user_ids: - params = (user_id, ) - statements_and_params.append((select_statement, params)) - - results = execute_concurrent( - session, statements_and_params, raise_on_first_error=False) - - for (success, result) in results: - if not success: - handle_error(result) # result will be an Exception - else: - process_user(result[0]) # result will be a list of rows - - Note: in the case that `generators` are used, it is important to ensure the consumers do not - block or attempt further synchronous requests, because no further IO will be processed until - the consumer returns. This may also produce a deadlock in the IO event thread. + See :meth:`.Session.execute_concurrent`. 
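The module-level functions remain importable and now accept the same ``execution_profile`` keyword as the Session methods; a short sketch, assuming a connected ``session``::

    from cassandra.cluster import EXEC_PROFILE_DEFAULT
    from cassandra.concurrent import execute_concurrent_with_args

    statement = session.prepare("INSERT INTO mytable (a, b) VALUES (1, ?)")
    results = execute_concurrent_with_args(
        session, statement, [(x,) for x in range(1000)],
        concurrency=50, execution_profile=EXEC_PROFILE_DEFAULT)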
""" if concurrency <= 0: raise ValueError("concurrency must be greater than 0") @@ -90,7 +39,8 @@ def execute_concurrent(session, statements_and_parameters, concurrency=100, rais if not statements_and_parameters: return [] - executor = ConcurrentExecutorGenResults(session, statements_and_parameters) if results_generator else ConcurrentExecutorListResults(session, statements_and_parameters) + executor = ConcurrentExecutorGenResults(session, statements_and_parameters, execution_profile) \ + if results_generator else ConcurrentExecutorListResults(session, statements_and_parameters, execution_profile) return executor.execute(concurrency, raise_on_first_error) @@ -98,9 +48,10 @@ class _ConcurrentExecutor(object): max_error_recursion = 100 - def __init__(self, session, statements_and_params): + def __init__(self, session, statements_and_params, execution_profile): self.session = session self._enum_statements = enumerate(iter(statements_and_params)) + self._execution_profile = execution_profile self._condition = Condition() self._fail_fast = False self._results_queue = [] @@ -114,7 +65,7 @@ def execute(self, concurrency, fail_fast): self._current = 0 self._exec_count = 0 with self._condition: - for n in xrange(concurrency): + for n in range(concurrency): if not self._execute_next(): break return self._results() @@ -132,23 +83,19 @@ def _execute_next(self): def _execute(self, idx, statement, params): self._exec_depth += 1 try: - future = self.session.execute_async(statement, params, timeout=None) + future = self.session.execute_async(statement, params, execution_profile=self._execution_profile) args = (future, idx) future.add_callbacks( callback=self._on_success, callback_args=args, errback=self._on_error, errback_args=args) except Exception as exc: - # exc_info with fail_fast to preserve stack trace info when raising on the client thread - # (matches previous behavior -- not sure why we wouldn't want stack trace in the other case) - e = sys.exc_info() if self._fail_fast and six.PY2 else exc - # If we're not failing fast and all executions are raising, there is a chance of recursing # here as subsequent requests are attempted. If we hit this threshold, schedule this result/retry # and let the event loop thread return. 
if self._exec_depth < self.max_error_recursion: - self._put_result(e, idx, False) + self._put_result(exc, idx, False) else: - self.session.submit(self._put_result, e, idx, False) + self.session.submit(self._put_result, exc, idx, False) self._exec_depth -= 1 def _on_success(self, result, future, idx): @@ -158,14 +105,6 @@ def _on_success(self, result, future, idx): def _on_error(self, result, future, idx): self._put_result(result, idx, False) - @staticmethod - def _raise(exc): - if six.PY2 and isinstance(exc, tuple): - (exc_type, value, traceback) = exc - six.reraise(exc_type, value, traceback) - else: - raise exc - class ConcurrentExecutorGenResults(_ConcurrentExecutor): @@ -185,7 +124,7 @@ def _results(self): try: self._condition.release() if self._fail_fast and not res[0]: - self._raise(res[1]) + raise res[1] yield res finally: self._condition.acquire() @@ -216,23 +155,59 @@ def _results(self): while self._current < self._exec_count: self._condition.wait() if self._exception and self._fail_fast: - self._raise(self._exception) + raise self._exception if self._exception and self._fail_fast: # raise the exception even if there was no wait - self._raise(self._exception) + raise self._exception return [r[1] for r in sorted(self._results_queue)] def execute_concurrent_with_args(session, statement, parameters, *args, **kwargs): """ - Like :meth:`~cassandra.concurrent.execute_concurrent()`, but takes a single - statement and a sequence of parameters. Each item in ``parameters`` - should be a sequence or :const:`None`. + See :meth:`.Session.execute_concurrent_with_args`. + """ + return execute_concurrent(session, zip(cycle((statement,)), parameters), *args, **kwargs) + - Example usage:: +class ConcurrentExecutorFutureResults(ConcurrentExecutorListResults): + def __init__(self, session, statements_and_params, execution_profile, future): + super().__init__(session, statements_and_params, execution_profile) + self.future = future - statement = session.prepare("INSERT INTO mytable (a, b) VALUES (1, ?)") - parameters = [(x,) for x in range(1000)] - execute_concurrent_with_args(session, statement, parameters, concurrency=50) + def _put_result(self, result, idx, success): + super()._put_result(result, idx, success) + with self._condition: + if self._current == self._exec_count: + if self._exception and self._fail_fast: + self.future.set_exception(self._exception) + else: + sorted_results = [r[1] for r in sorted(self._results_queue)] + self.future.set_result(sorted_results) + + +def execute_concurrent_async( + session, + statements_and_parameters, + concurrency=100, + raise_on_first_error=False, + execution_profile=EXEC_PROFILE_DEFAULT +): """ - return execute_concurrent(session, zip(cycle((statement,)), parameters), *args, **kwargs) + See :meth:`.Session.execute_concurrent_async`. + """ + # Create a Future object and initialize the custom ConcurrentExecutor with the Future + future = Future() + executor = ConcurrentExecutorFutureResults( + session=session, + statements_and_params=statements_and_parameters, + execution_profile=execution_profile, + future=future + ) + + # Execute concurrently + try: + executor.execute(concurrency=concurrency, fail_fast=raise_on_first_error) + except Exception as e: + future.set_exception(e) + + return future diff --git a/cassandra/connection.py b/cassandra/connection.py index 6ce3e44a30..4a16c46ab4 100644 --- a/cassandra/connection.py +++ b/cassandra/connection.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. 
+# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -19,19 +21,19 @@ from heapq import heappush, heappop import io import logging -import six -from six.moves import range import socket import struct import sys from threading import Thread, Event, RLock, Condition import time import ssl +import weakref + if 'gevent.monkey' in sys.modules: from gevent.queue import Queue, Empty else: - from six.moves.queue import Queue, Empty # noqa + from queue import Queue, Empty # noqa from cassandra import ConsistencyLevel, AuthenticationFailed, OperationTimedOut, ProtocolVersion from cassandra.marshal import int32_pack @@ -42,11 +44,15 @@ AuthResponseMessage, AuthChallengeMessage, AuthSuccessMessage, ProtocolException, RegisterMessage, ReviseRequestMessage) +from cassandra.segment import SegmentCodec, CrcException from cassandra.util import OrderedDict log = logging.getLogger(__name__) +segment_codec_no_compression = SegmentCodec() +segment_codec_lz4 = None + # We use an ordered dictionary and specifically add lz4 before # snappy so that lz4 will be preferred. Changing the order of this # will change the compression preferences for the driver. 
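Because lz4 is registered first, it wins whenever both codecs are installed. A hedged configuration sketch (``compression=True``, the default, lets the driver negotiate; a string pins one codec)::

    from cassandra.cluster import Cluster

    # Request lz4 explicitly; note that snappy is rejected later in this
    # patch when protocol-level checksumming (v5+) is active.
    cluster = Cluster(['127.0.0.1'], compression='lz4')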
@@ -88,6 +94,7 @@ def lz4_decompress(byts):
         return lz4_block.decompress(byts[3::-1] + byts[4:])
 
     locally_supported_compressions['lz4'] = (lz4_compress, lz4_decompress)
+    segment_codec_lz4 = SegmentCodec(lz4_compress, lz4_decompress)
 
 try:
     import snappy
@@ -240,9 +247,9 @@ def create(self, row):
 class SniEndPoint(EndPoint):
     """SNI Proxy EndPoint implementation."""
 
-    def __init__(self, proxy_address, server_name, port=9042):
+    def __init__(self, proxy_address, server_name, port=9042, init_index=0):
         self._proxy_address = proxy_address
-        self._index = 0
+        self._index = init_index
         self._resolved_address = None  # resolved address
         self._port = port
         self._server_name = server_name
@@ -262,8 +269,7 @@ def ssl_options(self):
 
     def resolve(self):
         try:
-            resolved_addresses = socket.getaddrinfo(self._proxy_address, self._port,
-                                                    socket.AF_UNSPEC, socket.SOCK_STREAM)
+            resolved_addresses = self._resolve_proxy_addresses()
         except socket.gaierror:
             log.debug('Could not resolve sni proxy hostname "%s" '
                       'with port %d' % (self._proxy_address, self._port))
@@ -275,6 +281,10 @@ def resolve(self):
 
         return self._resolved_address, self._port
 
+    def _resolve_proxy_addresses(self):
+        return socket.getaddrinfo(self._proxy_address, self._port,
+                                  socket.AF_UNSPEC, socket.SOCK_STREAM)
+
     def __eq__(self, other):
         return (isinstance(other, SniEndPoint) and
                 self.address == other.address and self.port == other.port and
@@ -300,16 +310,24 @@ class SniEndPointFactory(EndPointFactory):
     def __init__(self, proxy_address, port):
         self._proxy_address = proxy_address
         self._port = port
+        # Initial lookup index to prevent all SNI endpoints from being resolved
+        # to the same starting IP address (which might not be available currently).
+        # If SNI resolves to 3 IPs, the first endpoint will connect to the first
+        # IP address, and subsequent resolutions to the next IPs in round-robin
+        # fashion.
+        self._init_index = -1
 
     def create(self, row):
         host_id = row.get("host_id")
         if host_id is None:
             raise ValueError("No host_id to create the SniEndPoint")
 
-        return SniEndPoint(self._proxy_address, str(host_id), self._port)
+        self._init_index += 1
+        return SniEndPoint(self._proxy_address, str(host_id), self._port, self._init_index)
 
     def create_from_sni(self, sni):
-        return SniEndPoint(self._proxy_address, sni, self._port)
+        self._init_index += 1
+        return SniEndPoint(self._proxy_address, sni, self._port, self._init_index)
 
 
 @total_ordering
@@ -426,6 +444,10 @@ class ProtocolError(Exception):
     pass
 
 
+class CrcMismatchException(ConnectionException):
+    pass
+
+
 class ContinuousPagingState(object):
     """
     A class for specifying continuous paging state, only supported starting with DSE_V2.
@@ -594,11 +616,59 @@ def wrapper(self, *args, **kwargs):
 
 DEFAULT_CQL_VERSION = '3.0.0'
 
-if six.PY3:
-    def int_from_buf_item(i):
-        return i
-else:
-    int_from_buf_item = ord
+
+class _ConnectionIOBuffer(object):
+    """
+    Abstraction class to ease the use of the different connection io buffers. With
+    protocol V5 and checksumming, the data is read, validated and copied to another
+    cql frame buffer.
+    """
+    _io_buffer = None
+    _cql_frame_buffer = None
+    _connection = None
+    _segment_consumed = False
+
+    def __init__(self, connection):
+        self._io_buffer = io.BytesIO()
+        self._connection = weakref.proxy(connection)
+
+    @property
+    def io_buffer(self):
+        return self._io_buffer
+
+    @property
+    def cql_frame_buffer(self):
+        return self._cql_frame_buffer if self.is_checksumming_enabled else \
+            self._io_buffer
+
+    def set_checksumming_buffer(self):
+        self.reset_io_buffer()
+        self._cql_frame_buffer = io.BytesIO()
+
+    @property
+    def is_checksumming_enabled(self):
+        return self._connection._is_checksumming_enabled
+
+    @property
+    def has_consumed_segment(self):
+        return self._segment_consumed
+
+    def readable_io_bytes(self):
+        return self.io_buffer.tell()
+
+    def readable_cql_frame_bytes(self):
+        return self.cql_frame_buffer.tell()
+
+    def reset_io_buffer(self):
+        self._io_buffer = io.BytesIO(self._io_buffer.read())
+        self._io_buffer.seek(0, 2)  # 2 == SEEK_END
+
+    def reset_cql_frame_buffer(self):
+        if self.is_checksumming_enabled:
+            self._cql_frame_buffer = io.BytesIO(self._cql_frame_buffer.read())
+            self._cql_frame_buffer.seek(0, 2)  # 2 == SEEK_END
+        else:
+            self.reset_io_buffer()
 
 
 class Connection(object):
@@ -625,6 +695,7 @@ class Connection(object):
 
     # The current number of operations that are in flight. More precisely,
     # the number of request IDs that are currently in use.
+    # This includes orphaned requests.
     in_flight = 0
 
     # Max concurrent requests allowed per connection. This is set optimistically high, allowing
@@ -642,6 +713,20 @@ class Connection(object):
     # request_ids set
     highest_request_id = 0
 
+    # Tracks the request IDs which are no longer waited on (timed out), but
+    # cannot be reused yet because the node might still send a response
+    # on this stream
+    orphaned_request_ids = None
+
+    # Set to true if the orphaned stream ID count crosses the configured threshold
+    # and the connection will be replaced
+    orphaned_threshold_reached = False
+
+    # If the number of orphaned streams reaches this threshold, this connection
+    # will become marked and will be replaced with a new connection by the
+    # owning pool (currently, only HostConnection supports this)
+    orphaned_threshold = 3 * max_in_flight // 4
+
     is_defunct = False
     is_closed = False
     lock = None
@@ -656,28 +741,35 @@ class Connection(object):
 
     allow_beta_protocol_version = False
 
-    _iobuf = None
     _current_frame = None
 
     _socket = None
     _socket_impl = socket
-    _ssl_impl = ssl
 
     _check_hostname = False
    _product_type = None
 
+    _is_checksumming_enabled = False
+
+    _on_orphaned_stream_released = None
+
+    @property
+    def _iobuf(self):
+        # backward compatibility, to avoid any change in the reactors
+        return self._io_buffer.io_buffer
+
     def __init__(self, host='127.0.0.1', port=9042, authenticator=None,
                  ssl_options=None, sockopts=None, compression=True,
                  cql_version=None, protocol_version=ProtocolVersion.MAX_SUPPORTED, is_control_connection=False,
                  user_type_map=None, connect_timeout=None, allow_beta_protocol_version=False, no_compact=False,
-                 ssl_context=None):
+                 ssl_context=None, on_orphaned_stream_released=None):
 
         # TODO next major rename host to endpoint and remove port kwarg.
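As a quick sanity check on the threshold arithmetic above (a standalone illustration, assuming the class default of ``max_in_flight = 2 ** 15`` on protocol v3+)::

    max_in_flight = 2 ** 15                       # 32768 stream IDs per connection
    orphaned_threshold = 3 * max_in_flight // 4   # 24576; ~3/4 orphaned triggers replacement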
         self.endpoint = host if isinstance(host, EndPoint) else DefaultEndPoint(host, port)
 
         self.authenticator = authenticator
-        self.ssl_options = ssl_options.copy() if ssl_options else None
+        self.ssl_options = ssl_options.copy() if ssl_options else {}
         self.ssl_context = ssl_context
         self.sockopts = sockopts
         self.compression = compression
@@ -690,20 +782,27 @@ def __init__(self, host='127.0.0.1', port=9042, authenticator=None,
         self.no_compact = no_compact
         self._push_watchers = defaultdict(set)
         self._requests = {}
-        self._iobuf = io.BytesIO()
+        self._io_buffer = _ConnectionIOBuffer(self)
         self._continuous_paging_sessions = {}
         self._socket_writable = True
+        self.orphaned_request_ids = set()
+        self._on_orphaned_stream_released = on_orphaned_stream_released
 
         if ssl_options:
-            self._check_hostname = bool(self.ssl_options.pop('check_hostname', False))
-            if self._check_hostname:
-                if not getattr(ssl, 'match_hostname', None):
-                    raise RuntimeError("ssl_options specify 'check_hostname', but ssl.match_hostname is not provided. "
                                       "Patch or upgrade Python to use this option.")
             self.ssl_options.update(self.endpoint.ssl_options or {})
         elif self.endpoint.ssl_options:
             self.ssl_options = self.endpoint.ssl_options
 
+        # PYTHON-1331
+        #
+        # We always use SSLContext.wrap_socket() now, but legacy configs may have other params that were passed to ssl.wrap_socket()...
+        # and either source could include 'check_hostname'. Split those params out into a separate map and use them to build an
+        # SSLContext if we need to do so.
+        #
+        # Note that _build_ssl_context_from_options() only selects the SSLContext-related params; ssl_options itself is left
+        # intact, and _wrap_socket_from_context() later picks out only the args needed for the ssl_context.wrap_socket() call.
+        if not self.ssl_context and self.ssl_options:
+            self.ssl_context = self._build_ssl_context_from_options()
 
         if protocol_version >= 3:
             self.max_request_id = min(self.max_in_flight - 1, (2 ** 15) - 1)
@@ -739,7 +838,7 @@ def initialize_reactor(cls):
     @classmethod
     def handle_fork(cls):
         """
-        Called after a forking. This should cleanup any remaining reactor state
+        Called after a forking. This should clean up any remaining reactor state
         from the parent process.
""" pass @@ -770,21 +869,57 @@ def factory(cls, endpoint, timeout, *args, **kwargs): else: return conn + def _build_ssl_context_from_options(self): + + # Extract a subset of names from self.ssl_options which apply to SSLContext creation + ssl_context_opt_names = ['ssl_version', 'cert_reqs', 'check_hostname', 'keyfile', 'certfile', 'ca_certs', 'ciphers'] + opts = {k:self.ssl_options.get(k, None) for k in ssl_context_opt_names if k in self.ssl_options} + + # Python >= 3.10 requires either PROTOCOL_TLS_CLIENT or PROTOCOL_TLS_SERVER, so we'll get ahead of things by always + # being explicit + ssl_version = opts.get('ssl_version', None) or ssl.PROTOCOL_TLS_CLIENT + cert_reqs = opts.get('cert_reqs', None) or ssl.CERT_REQUIRED + rv = ssl.SSLContext(protocol=int(ssl_version)) + rv.check_hostname = bool(opts.get('check_hostname', False)) + rv.options = int(cert_reqs) + + certfile = opts.get('certfile', None) + keyfile = opts.get('keyfile', None) + if certfile: + rv.load_cert_chain(certfile, keyfile) + ca_certs = opts.get('ca_certs', None) + if ca_certs: + rv.load_verify_locations(ca_certs) + ciphers = opts.get('ciphers', None) + if ciphers: + rv.set_ciphers(ciphers) + + return rv + def _wrap_socket_from_context(self): - ssl_options = self.ssl_options or {} + + # Extract a subset of names from self.ssl_options which apply to SSLContext.wrap_socket (or at least the parts + # of it that don't involve building an SSLContext under the covers) + wrap_socket_opt_names = ['server_side', 'do_handshake_on_connect', 'suppress_ragged_eofs', 'server_hostname'] + opts = {k:self.ssl_options.get(k, None) for k in wrap_socket_opt_names if k in self.ssl_options} + # PYTHON-1186: set the server_hostname only if the SSLContext has - # check_hostname enabled and it is not already provided by the EndPoint ssl options - if (self.ssl_context.check_hostname and - 'server_hostname' not in ssl_options): - ssl_options = ssl_options.copy() - ssl_options['server_hostname'] = self.endpoint.address - self._socket = self.ssl_context.wrap_socket(self._socket, **ssl_options) + # check_hostname enabled, and it is not already provided by the EndPoint ssl options + #opts['server_hostname'] = self.endpoint.address + if (self.ssl_context.check_hostname and 'server_hostname' not in opts): + server_hostname = self.endpoint.address + opts['server_hostname'] = server_hostname + + return self.ssl_context.wrap_socket(self._socket, **opts) def _initiate_connection(self, sockaddr): self._socket.connect(sockaddr) - def _match_hostname(self): - ssl.match_hostname(self._socket.getpeercert(), self.endpoint.address) + # PYTHON-1331 + # + # Allow implementations specific to an event loop to add additional behaviours + def _validate_hostname(self): + pass def _get_socket_addresses(self): address, port = self.endpoint.resolve() @@ -805,16 +940,18 @@ def _connect_socket(self): try: self._socket = self._socket_impl.socket(af, socktype, proto) if self.ssl_context: - self._wrap_socket_from_context() - elif self.ssl_options: - if not self._ssl_impl: - raise RuntimeError("This version of Python was not compiled with SSL support") - self._socket = self._ssl_impl.wrap_socket(self._socket, **self.ssl_options) + self._socket = self._wrap_socket_from_context() self._socket.settimeout(self.connect_timeout) self._initiate_connection(sockaddr) self._socket.settimeout(None) + + # PYTHON-1331 + # + # Most checking is done via the check_hostname param on the SSLContext. + # Subclasses can add additional behaviours via _validate_hostname() so + # run that here. 
if self._check_hostname: - self._match_hostname() + self._validate_hostname() sockerr = None break except socket.error as err: @@ -831,6 +968,16 @@ def _connect_socket(self): for args in self.sockopts: self._socket.setsockopt(*args) + def _enable_compression(self): + if self._compressor: + self.compressor = self._compressor + + def _enable_checksumming(self): + self._io_buffer.set_checksumming_buffer() + self._is_checksumming_enabled = True + self._segment_codec = segment_codec_lz4 if self.compressor else segment_codec_no_compression + log.debug("Enabling protocol checksumming on connection (%s).", id(self)) + def close(self): raise NotImplementedError() @@ -933,7 +1080,14 @@ def send_msg(self, msg, request_id, cb, encoder=ProtocolHandler.encode_message, # queue the decoder function with the request # this allows us to inject custom functions per request to encode, decode messages self._requests[request_id] = (cb, decoder, result_metadata) - msg = encoder(msg, request_id, self.protocol_version, compressor=self.compressor, allow_beta_protocol_version=self.allow_beta_protocol_version) + msg = encoder(msg, request_id, self.protocol_version, compressor=self.compressor, + allow_beta_protocol_version=self.allow_beta_protocol_version) + + if self._is_checksumming_enabled: + buffer = io.BytesIO() + self._segment_codec.encode(buffer, msg) + msg = buffer.getvalue() + self.push(msg) return len(msg) @@ -1012,10 +1166,10 @@ def control_conn_disposed(self): @defunct_on_error def _read_frame_header(self): - buf = self._iobuf.getvalue() + buf = self._io_buffer.cql_frame_buffer.getvalue() pos = len(buf) if pos: - version = int_from_buf_item(buf[0]) & PROTOCOL_VERSION_MASK + version = buf[0] & PROTOCOL_VERSION_MASK if version not in ProtocolVersion.SUPPORTED_VERSIONS: raise ProtocolError("This version of the driver does not support protocol version %d" % version) frame_header = frame_header_v3 if version >= 3 else frame_header_v1_v2 @@ -1028,29 +1182,62 @@ def _read_frame_header(self): self._current_frame = _Frame(version, flags, stream, op, header_size, body_len + header_size) return pos - def _reset_frame(self): - self._iobuf = io.BytesIO(self._iobuf.read()) - self._iobuf.seek(0, 2) # io.SEEK_END == 2 (constant not present in 2.6) - self._current_frame = None + @defunct_on_error + def _process_segment_buffer(self): + readable_bytes = self._io_buffer.readable_io_bytes() + if readable_bytes >= self._segment_codec.header_length_with_crc: + try: + self._io_buffer.io_buffer.seek(0) + segment_header = self._segment_codec.decode_header(self._io_buffer.io_buffer) + + if readable_bytes >= segment_header.segment_length: + segment = self._segment_codec.decode(self._iobuf, segment_header) + self._io_buffer._segment_consumed = True + self._io_buffer.cql_frame_buffer.write(segment.payload) + else: + # not enough data to read the segment. reset the buffer pointer at the + # beginning to not lose what we previously read (header). 
+ self._io_buffer._segment_consumed = False + self._io_buffer.io_buffer.seek(0) + except CrcException as exc: + # re-raise an exception that inherits from ConnectionException + raise CrcMismatchException(str(exc), self.endpoint) + else: + self._io_buffer._segment_consumed = False def process_io_buffer(self): while True: + if self._is_checksumming_enabled and self._io_buffer.readable_io_bytes(): + self._process_segment_buffer() + self._io_buffer.reset_io_buffer() + + if self._is_checksumming_enabled and not self._io_buffer.has_consumed_segment: + # We couldn't read an entire segment from the io buffer, so return + # control to allow more bytes to be read off the wire + return + if not self._current_frame: pos = self._read_frame_header() else: - pos = self._iobuf.tell() + pos = self._io_buffer.readable_cql_frame_bytes() if not self._current_frame or pos < self._current_frame.end_pos: - # we don't have a complete header yet or we + if self._is_checksumming_enabled and self._io_buffer.readable_io_bytes(): + # We have a multi-segments message, and we need to read more + # data to complete the current cql frame + continue + + # we don't have a complete header yet, or we # already saw a header, but we don't have a # complete message yet return else: frame = self._current_frame - self._iobuf.seek(frame.body_offset) - msg = self._iobuf.read(frame.end_pos - frame.body_offset) + self._io_buffer.cql_frame_buffer.seek(frame.body_offset) + msg = self._io_buffer.cql_frame_buffer.read(frame.end_pos - frame.body_offset) self.process_msg(frame, msg) - self._reset_frame() + self._io_buffer.reset_cql_frame_buffer() + self._current_frame = None @defunct_on_error def process_msg(self, header, body): @@ -1067,11 +1254,22 @@ def process_msg(self, header, body): decoder = paging_session.decoder result_metadata = None else: + need_notify_of_release = False + with self.lock: + if stream_id in self.orphaned_request_ids: + self.in_flight -= 1 + self.orphaned_request_ids.remove(stream_id) + need_notify_of_release = True + if need_notify_of_release and self._on_orphaned_stream_released: + self._on_orphaned_stream_released() + try: callback, decoder, result_metadata = self._requests.pop(stream_id) # This can only happen if the stream_id was # removed due to an OperationTimedOut except KeyError: + with self.lock: + self.request_ids.append(stream_id) return try: @@ -1170,7 +1368,7 @@ def _handle_options_response(self, options_response): remote_supported_compressions) else: compression_type = None - if isinstance(self.compression, six.string_types): + if isinstance(self.compression, str): # the user picked a specific compression type ('snappy' or 'lz4') if self.compression not in remote_supported_compressions: raise ProtocolError( @@ -1185,11 +1383,19 @@ def _handle_options_response(self, options_response): compression_type = k break - # set the decompressor here, but set the compressor only after - # a successful Ready message - self._compression_type = compression_type - self._compressor, self.decompressor = \ - locally_supported_compressions[compression_type] + # If snappy compression is selected with v5+checksumming, the connection + # will fail with OTO. Only lz4 is supported + if (compression_type == 'snappy' and + ProtocolVersion.has_checksumming_support(self.protocol_version)): + log.debug("Snappy compression is not supported with protocol version %s and " + "checksumming. Consider installing lz4. 
Disabling compression.", self.protocol_version) + compression_type = None + else: + # set the decompressor here, but set the compressor only after + # a successful Ready message + self._compression_type = compression_type + self._compressor, self.decompressor = \ + locally_supported_compressions[compression_type] self._send_startup_message(compression_type, no_compact=self.no_compact) @@ -1210,6 +1416,7 @@ def _send_startup_message(self, compression=None, no_compact=False): def _handle_startup_response(self, startup_response, did_authenticate=False): if self.is_defunct: return + if isinstance(startup_response, ReadyMessage): if self.authenticator: log.warning("An authentication challenge was not sent, " @@ -1218,8 +1425,11 @@ def _handle_startup_response(self, startup_response, did_authenticate=False): self.authenticator.__class__.__name__) log.debug("Got ReadyMessage on new connection (%s) from %s", id(self), self.endpoint) - if self._compressor: - self.compressor = self._compressor + self._enable_compression() + + if ProtocolVersion.has_checksumming_support(self.protocol_version): + self._enable_checksumming() + self.connected_event.set() elif isinstance(startup_response, AuthenticateMessage): log.debug("Got AuthenticateMessage on new connection (%s) from %s: %s", @@ -1231,6 +1441,10 @@ def _handle_startup_response(self, startup_response, did_authenticate=False): "if DSE authentication is configured with transitional mode" % (self.host,)) raise AuthenticationFailed('Remote end requires authentication') + self._enable_compression() + if ProtocolVersion.has_checksumming_support(self.protocol_version): + self._enable_checksumming() + if isinstance(self.authenticator, dict): log.debug("Sending credentials-based auth response on %s", self) cm = CredentialsMessage(creds=self.authenticator) @@ -1512,7 +1726,7 @@ def run(self): else: log.debug("Cannot send heartbeat message on connection (%s) to %s", id(connection), connection.endpoint) - # make sure the owner sees this defunt/closed connection + # make sure the owner sees this defunct/closed connection owner.return_connection(connection) self._raise_if_stopped() diff --git a/cassandra/cqlengine/__init__.py b/cassandra/cqlengine/__init__.py index e2a952d682..200d04b831 100644 --- a/cassandra/cqlengine/__init__.py +++ b/cassandra/cqlengine/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,9 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import six - - # Caching constants. 
CACHING_ALL = "ALL" CACHING_KEYS_ONLY = "KEYS_ONLY" @@ -31,7 +30,4 @@ class ValidationError(CQLEngineException): class UnicodeMixin(object): - if six.PY3: - __str__ = lambda x: x.__unicode__() - else: - __str__ = lambda x: six.text_type(x).encode('utf-8') + __str__ = lambda x: x.__unicode__() diff --git a/cassandra/cqlengine/columns.py b/cassandra/cqlengine/columns.py index 49116129fc..7d50687d95 100644 --- a/cassandra/cqlengine/columns.py +++ b/cassandra/cqlengine/columns.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,9 +15,8 @@ # limitations under the License. from copy import deepcopy, copy -from datetime import date, datetime, timedelta +from datetime import date, datetime, timedelta, timezone import logging -import six from uuid import UUID as _UUID from cassandra import util @@ -327,7 +328,7 @@ class Blob(Column): def to_database(self, value): - if not isinstance(value, (six.binary_type, bytearray)): + if not isinstance(value, (bytes, bytearray)): raise Exception("expecting a binary, got a %s" % type(value)) val = super(Bytes, self).to_database(value) @@ -381,7 +382,7 @@ def __init__(self, min_length=None, max_length=None, **kwargs): def validate(self, value): value = super(Text, self).validate(value) - if not isinstance(value, (six.string_types, bytearray)) and value is not None: + if not isinstance(value, (str, bytearray)) and value is not None: raise ValidationError('{0} {1} is not a string'.format(self.column_name, type(value))) if self.max_length is not None: if value and len(value) > self.max_length: @@ -552,7 +553,7 @@ def to_python(self, value): elif isinstance(value, date): return datetime(*(value.timetuple()[:6])) - return datetime.utcfromtimestamp(value) + return datetime.fromtimestamp(value, tz=timezone.utc).replace(tzinfo=None) def to_database(self, value): value = super(DateTime, self).to_database(value) @@ -655,7 +656,7 @@ def validate(self, value): return if isinstance(val, _UUID): return val - if isinstance(val, six.string_types): + if isinstance(val, str): try: return _UUID(val) except ValueError: diff --git a/cassandra/cqlengine/connection.py b/cassandra/cqlengine/connection.py index 884e04ed74..55437d7b7f 100644 --- a/cassandra/cqlengine/connection.py +++ b/cassandra/cqlengine/connection.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +16,6 @@ from collections import defaultdict import logging -import six import threading from cassandra.cluster import Cluster, _ConfigMode, _NOT_SET, NoHostAvailable, UserTypeDoesNotExist, ConsistencyLevel @@ -89,7 +90,7 @@ def from_session(cls, name, session): return instance def setup(self): - """Setup the connection""" + """Set up the connection""" global cluster, session if 'username' in self.cluster_options or 'password' in self.cluster_options: @@ -98,7 +99,13 @@ def setup(self): if self.lazy_connect: return - self.cluster = Cluster(self.hosts, **self.cluster_options) + if 'cloud' in self.cluster_options: + if self.hosts: + log.warning("Ignoring hosts %s because a cloud config was provided.", self.hosts) + self.cluster = Cluster(**self.cluster_options) + else: + self.cluster = Cluster(self.hosts, **self.cluster_options) + try: self.session = self.cluster.connect() log.debug(format_log_context("connection initialized with internally created session", connection=self.name)) @@ -129,7 +136,7 @@ def setup_session(self): def handle_lazy_connect(self): - # if lazy_connect is False, it means the cluster is setup and ready + # if lazy_connect is False, it means the cluster is set up and ready # No need to acquire the lock if not self.lazy_connect: return @@ -275,7 +282,7 @@ def set_session(s): try: conn = get_connection() except CQLEngineException: - # no default connection set; initalize one + # no default connection set; initialize one register_connection('default', session=s, default=True) conn = get_connection() else: @@ -301,6 +308,8 @@ def set_session(s): log.debug("cqlengine default connection initialized with %s", s) +# TODO next major: if a cloud config is specified in kwargs, hosts will be ignored. +# This function should be refactored to reflect this change. 
PYTHON-1265 def setup( hosts, default_keyspace, @@ -309,14 +318,14 @@ def setup( retry_connect=False, **kwargs): """ - Setup a the driver connection used by the mapper + Set up the driver connection used by the mapper :param list hosts: list of hosts, (``contact_points`` for :class:`cassandra.cluster.Cluster`) :param str default_keyspace: The default keyspace to use :param int consistency: The global default :class:`~.ConsistencyLevel` - default is the same as :attr:`.Session.default_consistency_level` :param bool lazy_connect: True if should not connect until first use :param bool retry_connect: True if we should retry to connect even if there was a connection failure initially - :param \*\*kwargs: Pass-through keyword arguments for :class:`cassandra.cluster.Cluster` + :param kwargs: Pass-through keyword arguments for :class:`cassandra.cluster.Cluster` """ from cassandra.cqlengine import models @@ -338,7 +347,7 @@ def execute(query, params=None, consistency_level=None, timeout=NOT_SET, connect elif isinstance(query, BaseCQLStatement): params = query.get_context() query = SimpleStatement(str(query), consistency_level=consistency_level, fetch_size=query.fetch_size) - elif isinstance(query, six.string_types): + elif isinstance(query, str): query = SimpleStatement(query, consistency_level=consistency_level) log.debug(format_log_context('Query: {}, Params: {}'.format(query.query_string, params), connection=connection)) diff --git a/cassandra/cqlengine/functions.py b/cassandra/cqlengine/functions.py index 5cb0f673d1..69bdc3feb4 100644 --- a/cassandra/cqlengine/functions.py +++ b/cassandra/cqlengine/functions.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,21 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import division from datetime import datetime from cassandra.cqlengine import UnicodeMixin, ValidationError -import sys - -if sys.version_info >= (2, 7): - def get_total_seconds(td): - return td.total_seconds() -else: - def get_total_seconds(td): - # integer division used here to emulate built-in total_seconds - return ((86400 * td.days + td.seconds) * 10 ** 6 + td.microseconds) / 10 ** 6 - +def get_total_seconds(td): + return td.total_seconds() class QueryValue(UnicodeMixin): """ diff --git a/cassandra/cqlengine/management.py b/cassandra/cqlengine/management.py index 536bde6349..66b391b714 100644 --- a/cassandra/cqlengine/management.py +++ b/cassandra/cqlengine/management.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +18,6 @@ import json import logging import os -import six import warnings from itertools import product @@ -232,7 +233,7 @@ def _sync_table(model, connection=None): except CQLEngineException as ex: # 1.2 doesn't return cf names, so we have to examine the exception # and ignore if it says the column family already exists - if "Cannot add already existing column family" not in six.text_type(ex): + if "Cannot add already existing column family" not in str(ex): raise else: log.debug(format_log_context("sync_table checking existing table %s", keyspace=ks_name, connection=connection), cf_name) @@ -257,7 +258,7 @@ def _sync_table(model, connection=None): continue - if col.primary_key or col.primary_key: + if col.primary_key: msg = format_log_context("Cannot add primary key '{0}' (with db_field '{1}') to existing table {2}", keyspace=ks_name, connection=connection) raise CQLEngineException(msg.format(model_name, db_name, cf_name)) @@ -477,7 +478,7 @@ def _update_options(model, connection=None): except KeyError: msg = format_log_context("Invalid table option: '%s'; known options: %s", keyspace=ks_name, connection=connection) raise KeyError(msg % (name, existing_options.keys())) - if isinstance(existing_value, six.string_types): + if isinstance(existing_value, str): if value != existing_value: update_options[name] = value else: @@ -526,7 +527,7 @@ def _drop_table(model, connection=None): connection = connection or model._get_connection() - # don't try to delete non existant tables + # don't try to delete non-existent tables meta = get_cluster(connection).metadata ks_name = model._get_keyspace() diff --git a/cassandra/cqlengine/models.py b/cassandra/cqlengine/models.py index b3c7c9e37f..f0f5a207ec 100644 --- a/cassandra/cqlengine/models.py +++ b/cassandra/cqlengine/models.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +16,6 @@ import logging import re -import six from warnings import warn from cassandra.cqlengine import CQLEngineException, ValidationError @@ -184,7 +185,7 @@ def __call__(self, *args, **kwargs): class IfNotExistsDescriptor(object): """ - return a query set descriptor with a if_not_exists flag specified + return a query set descriptor with an if_not_exists flag specified """ def __get__(self, instance, model): if instance: @@ -202,7 +203,7 @@ def __call__(self, *args, **kwargs): class IfExistsDescriptor(object): """ - return a query set descriptor with a if_exists flag specified + return a query set descriptor with an if_exists flag specified """ def __get__(self, instance, model): if instance: @@ -399,7 +400,7 @@ def __init__(self, **values): self._values = {} for name, column in self._columns.items(): # Set default values on instantiation. Thanks to this, we don't have - # to wait anylonger for a call to validate() to have CQLengine set + # to wait any longer for a call to validate() to have CQLengine set # default columns values. column_default = column.get_default() if column.has_default else None value = values.get(name, column_default) @@ -614,7 +615,7 @@ def __iter__(self): def __getitem__(self, key): """ Returns column's value. """ - if not isinstance(key, six.string_types): + if not isinstance(key, str): raise TypeError if key not in self._columns.keys(): raise KeyError @@ -622,7 +623,7 @@ def __getitem__(self, key): def __setitem__(self, key, val): """ Sets a column's value. """ - if not isinstance(key, six.string_types): + if not isinstance(key, str): raise TypeError if key not in self._columns.keys(): raise KeyError @@ -1042,8 +1043,7 @@ def _transform_column(col_name, col_obj): return klass -@six.add_metaclass(ModelMetaClass) -class Model(BaseModel): +class Model(BaseModel, metaclass=ModelMetaClass): __abstract__ = True """ *Optional.* Indicates that this model is only intended to be used as a base class for other models. diff --git a/cassandra/cqlengine/named.py b/cassandra/cqlengine/named.py index 265d5c91e4..219155818c 100644 --- a/cassandra/cqlengine/named.py +++ b/cassandra/cqlengine/named.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/cqlengine/operators.py b/cassandra/cqlengine/operators.py index bba505583c..a9e7db2545 100644 --- a/cassandra/cqlengine/operators.py +++ b/cassandra/cqlengine/operators.py @@ -1,17 +1,18 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import six from cassandra.cqlengine import UnicodeMixin @@ -44,8 +45,7 @@ def __init__(cls, name, bases, dct): super(OpMapMeta, cls).__init__(name, bases, dct) -@six.add_metaclass(OpMapMeta) -class BaseWhereOperator(BaseQueryOperator): +class BaseWhereOperator(BaseQueryOperator, metaclass=OpMapMeta): """ base operator used for where clauses """ @classmethod def get_operator(cls, symbol): diff --git a/cassandra/cqlengine/query.py b/cassandra/cqlengine/query.py index 11f664ec02..329bc7fade 100644 --- a/cassandra/cqlengine/query.py +++ b/cassandra/cqlengine/query.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +18,6 @@ from datetime import datetime, timedelta from functools import partial import time -import six from warnings import warn from cassandra.query import SimpleStatement, BatchType as CBatchType, BatchStatement @@ -103,29 +104,29 @@ def in_(self, item): used where you'd typically want to use python's `in` operator """ - return WhereClause(six.text_type(self), InOperator(), item) + return WhereClause(str(self), InOperator(), item) def contains_(self, item): """ Returns a CONTAINS operator """ - return WhereClause(six.text_type(self), ContainsOperator(), item) + return WhereClause(str(self), ContainsOperator(), item) def __eq__(self, other): - return WhereClause(six.text_type(self), EqualsOperator(), self._to_database(other)) + return WhereClause(str(self), EqualsOperator(), self._to_database(other)) def __gt__(self, other): - return WhereClause(six.text_type(self), GreaterThanOperator(), self._to_database(other)) + return WhereClause(str(self), GreaterThanOperator(), self._to_database(other)) def __ge__(self, other): - return WhereClause(six.text_type(self), GreaterThanOrEqualOperator(), self._to_database(other)) + return WhereClause(str(self), GreaterThanOrEqualOperator(), self._to_database(other)) def __lt__(self, other): - return WhereClause(six.text_type(self), LessThanOperator(), self._to_database(other)) + return WhereClause(str(self), LessThanOperator(), self._to_database(other)) def __le__(self, other): - return WhereClause(six.text_type(self), LessThanOrEqualOperator(), self._to_database(other)) + return WhereClause(str(self), LessThanOrEqualOperator(), self._to_database(other)) class BatchType(object): @@ -206,8 +207,8 @@ def add_callback(self, fn, *args, **kwargs): :param fn: Callable object :type fn: callable - :param \*args: Positional arguments to be passed to the callback at the time of execution - :param \*\*kwargs: Named arguments to be passed to the callback at the time of execution + :param args: Positional arguments to be passed to the callback at the time of execution + :param kwargs: Named arguments to be passed to the callback at the time of execution """ if not callable(fn): raise ValueError("Value for argument 'fn' is {0} and is not a callable object.".format(type(fn))) @@ -231,7 +232,7 @@ def execute(self): opener = 'BEGIN ' + (str(batch_type) + ' ' if batch_type else '') + ' BATCH' if self.timestamp: - if isinstance(self.timestamp, six.integer_types): + if isinstance(self.timestamp, int): ts = self.timestamp elif isinstance(self.timestamp, (datetime, timedelta)): ts = self.timestamp @@ -277,8 +278,8 @@ class ContextQuery(object): A Context manager to allow a Model to switch context easily. Presently, the context only specifies a keyspace for model IO. - :param \*args: One or more models. A model should be a class type, not an instance. - :param \*\*kwargs: (optional) Context parameters: can be *keyspace* or *connection* + :param args: One or more models. A model should be a class type, not an instance. 
+ :param kwargs: (optional) Context parameters: can be *keyspace* or *connection* For example: @@ -286,15 +287,15 @@ class ContextQuery(object): with ContextQuery(Automobile, keyspace='test2') as A: A.objects.create(manufacturer='honda', year=2008, model='civic') - print len(A.objects.all()) # 1 result + print(len(A.objects.all())) # 1 result with ContextQuery(Automobile, keyspace='test4') as A: - print len(A.objects.all()) # 0 result + print(len(A.objects.all())) # 0 result # Multiple models with ContextQuery(Automobile, Automobile2, connection='cluster2') as (A, A2): - print len(A.objects.all()) - print len(A2.objects.all()) + print(len(A.objects.all())) + print(len(A2.objects.all())) """ @@ -407,7 +408,7 @@ def _execute(self, statement): return result def __unicode__(self): - return six.text_type(self._select_query()) + return str(self._select_query()) def __str__(self): return str(self.__unicode__()) @@ -604,7 +605,7 @@ def batch(self, batch_obj): def first(self): try: - return six.next(iter(self)) + return next(iter(self)) except StopIteration: return None @@ -809,11 +810,11 @@ class Comment(Model): print("Normal") for comment in Comment.objects(photo_id=u): - print comment.comment_id + print(comment.comment_id) print("Reversed") for comment in Comment.objects(photo_id=u).order_by("-comment_id"): - print comment.comment_id + print(comment.comment_id) """ if len(colnames) == 0: clone = copy.deepcopy(self) @@ -901,7 +902,7 @@ def limit(self, v): if v is None: v = 0 - if not isinstance(v, six.integer_types): + if not isinstance(v, int): raise TypeError if v == self._limit: return self @@ -925,7 +926,7 @@ def fetch_size(self, v): print(user) """ - if not isinstance(v, six.integer_types): + if not isinstance(v, int): raise TypeError if v == self._fetch_size: return self @@ -939,7 +940,7 @@ def fetch_size(self, v): def allow_filtering(self): """ - Enables the (usually) unwise practive of querying on a clustering key without also defining a partition key + Enables the (usually) unwise practice of querying on a clustering key without also defining a partition key """ clone = copy.deepcopy(self) clone._allow_filtering = True @@ -1418,7 +1419,7 @@ def update(self): prior to calling this. """ if self.instance is None: - raise CQLEngineException("DML Query intance attribute is None") + raise CQLEngineException("DML Query instance attribute is None") assert type(self.instance) == self.model null_clustering_key = False if len(self.instance._clustering_keys) == 0 else True static_changed_only = True @@ -1430,7 +1431,7 @@ def update(self): updated_columns = set() # get defined fields and their column names for name, col in self.model._columns.items(): - # if clustering key is null, don't include non static columns + # if clustering key is null, don't include non-static columns if null_clustering_key and not col.static and not col.partition_key: continue if not col.is_primary_key: @@ -1449,7 +1450,7 @@ def update(self): if statement.assignments: for name, col in self.model._primary_keys.items(): - # only include clustering key if clustering key is not null, and non static columns are changed to avoid cql error + # only include clustering key if clustering key is not null, and non-static columns are changed to avoid cql error if (null_clustering_key or static_changed_only) and (not col.partition_key): continue statement.add_where(col, EqualsOperator(), getattr(self.instance, name)) @@ -1469,7 +1470,7 @@ def save(self): prior to calling this. 
""" if self.instance is None: - raise CQLEngineException("DML Query intance attribute is None") + raise CQLEngineException("DML Query instance attribute is None") assert type(self.instance) == self.model nulled_fields = set() diff --git a/cassandra/cqlengine/statements.py b/cassandra/cqlengine/statements.py index c6ceb16607..b20b07ef56 100644 --- a/cassandra/cqlengine/statements.py +++ b/cassandra/cqlengine/statements.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,8 +16,6 @@ from datetime import datetime, timedelta import time -import six -from six.moves import filter from cassandra.query import FETCH_SIZE_UNSET from cassandra.cqlengine import columns @@ -114,7 +114,7 @@ def __init__(self, field, operator, value, quote_field=True): def __unicode__(self): field = ('"{0}"' if self.quote_field else '{0}').format(self.field) - return u'{0} {1} {2}'.format(field, self.operator, six.text_type(self.query_value)) + return u'{0} {1} {2}'.format(field, self.operator, str(self.query_value)) def __hash__(self): return super(WhereClause, self).__hash__() ^ hash(self.operator) @@ -186,8 +186,7 @@ def __init__(cls, name, bases, dct): super(ContainerUpdateTypeMapMeta, cls).__init__(name, bases, dct) -@six.add_metaclass(ContainerUpdateTypeMapMeta) -class ContainerUpdateClause(AssignmentClause): +class ContainerUpdateClause(AssignmentClause, metaclass=ContainerUpdateTypeMapMeta): def __init__(self, field, value, operation=None, previous=None): super(ContainerUpdateClause, self).__init__(field, value) @@ -553,7 +552,7 @@ def get_context(self): def add_conditional_clause(self, clause): """ - Adds a iff clause to this statement + Adds an iff clause to this statement :param clause: The clause that will be added to the iff statement :type clause: ConditionalClause @@ -563,7 +562,7 @@ def add_conditional_clause(self, clause): self.conditionals.append(clause) def _get_conditionals(self): - return 'IF {0}'.format(' AND '.join([six.text_type(c) for c in self.conditionals])) + return 'IF {0}'.format(' AND '.join([str(c) for c in self.conditionals])) def get_context_size(self): return len(self.get_context()) @@ -578,13 +577,13 @@ def update_context_id(self, i): @property def timestamp_normalized(self): """ - we're expecting self.timestamp to be either a long, int, a datetime, or a timedelta + We're expecting self.timestamp to be either a long, int, a datetime, or a timedelta :return: """ if not self.timestamp: return None - if isinstance(self.timestamp, six.integer_types): + if isinstance(self.timestamp, int): return self.timestamp if isinstance(self.timestamp, timedelta): @@ -602,7 +601,7 @@ def __repr__(self): @property def _where(self): - return 
'WHERE {0}'.format(' AND '.join([six.text_type(c) for c in self.where_clauses])) + return 'WHERE {0}'.format(' AND '.join([str(c) for c in self.where_clauses])) class SelectStatement(BaseCQLStatement): @@ -629,10 +628,10 @@ def __init__(self, fetch_size=fetch_size ) - self.fields = [fields] if isinstance(fields, six.string_types) else (fields or []) + self.fields = [fields] if isinstance(fields, str) else (fields or []) self.distinct_fields = distinct_fields self.count = count - self.order_by = [order_by] if isinstance(order_by, six.string_types) else order_by + self.order_by = [order_by] if isinstance(order_by, str) else order_by self.limit = limit self.allow_filtering = allow_filtering @@ -653,7 +652,7 @@ def __unicode__(self): qs += [self._where] if self.order_by and not self.count: - qs += ['ORDER BY {0}'.format(', '.join(six.text_type(o) for o in self.order_by))] + qs += ['ORDER BY {0}'.format(', '.join(str(o) for o in self.order_by))] if self.limit: qs += ['LIMIT {0}'.format(self.limit)] @@ -798,7 +797,7 @@ def __unicode__(self): qs += ["USING {0}".format(" AND ".join(using_options))] qs += ['SET'] - qs += [', '.join([six.text_type(c) for c in self.assignments])] + qs += [', '.join([str(c) for c in self.assignments])] if self.where_clauses: qs += [self._where] @@ -849,7 +848,7 @@ def __init__(self, table, fields=None, where=None, timestamp=None, conditionals= conditionals=conditionals ) self.fields = [] - if isinstance(fields, six.string_types): + if isinstance(fields, str): fields = [fields] for field in fields or []: self.add_field(field) @@ -874,7 +873,7 @@ def get_context(self): return ctx def add_field(self, field): - if isinstance(field, six.string_types): + if isinstance(field, str): field = FieldDeleteClause(field) if not isinstance(field, BaseClause): raise StatementException("only instances of AssignmentClause can be added to statements") diff --git a/cassandra/cqlengine/usertype.py b/cassandra/cqlengine/usertype.py index 155068d99e..e96534f9c6 100644 --- a/cassandra/cqlengine/usertype.py +++ b/cassandra/cqlengine/usertype.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +15,6 @@ # limitations under the License. 
import re -import six from cassandra.util import OrderedDict from cassandra.cqlengine import CQLEngineException @@ -72,7 +73,7 @@ def __ne__(self, other): return not self.__eq__(other) def __str__(self): - return "{{{0}}}".format(', '.join("'{0}': {1}".format(k, getattr(self, k)) for k, v in six.iteritems(self._values))) + return "{{{0}}}".format(', '.join("'{0}': {1}".format(k, getattr(self, k)) for k, v in self._values.items())) def has_changed_fields(self): return any(v.changed for v in self._values.values()) @@ -93,14 +94,14 @@ def __getattr__(self, attr): raise AttributeError(attr) def __getitem__(self, key): - if not isinstance(key, six.string_types): + if not isinstance(key, str): raise TypeError if key not in self._fields.keys(): raise KeyError return getattr(self, key) def __setitem__(self, key, val): - if not isinstance(key, six.string_types): + if not isinstance(key, str): raise TypeError if key not in self._fields.keys(): raise KeyError @@ -198,8 +199,7 @@ def _transform_column(field_name, field_obj): return klass -@six.add_metaclass(UserTypeMetaClass) -class UserType(BaseUserType): +class UserType(BaseUserType, metaclass=UserTypeMetaClass): """ This class is used to model User Defined Types. To define a type, declare a class inheriting from this, and assign field types as class attributes: diff --git a/cassandra/cqltypes.py b/cassandra/cqltypes.py index 7946a63af8..5e063a0141 100644 --- a/cassandra/cqltypes.py +++ b/cassandra/cqltypes.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -39,8 +41,6 @@ import re import socket import time -import six -from six.moves import range import struct import sys from uuid import UUID @@ -50,14 +50,11 @@ int32_pack, int32_unpack, int64_pack, int64_unpack, float_pack, float_unpack, double_pack, double_unpack, varint_pack, varint_unpack, point_be, point_le, - vints_pack, vints_unpack) + vints_pack, vints_unpack, uvint_unpack, uvint_pack) from cassandra import util _little_endian_flag = 1 # we always serialize LE -if six.PY3: - import ipaddress - -_ord = ord if six.PY2 else lambda x: x +import ipaddress apache_cassandra_type_prefix = 'org.apache.cassandra.db.marshal.' 
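Aside on the _ord shim removed in the hunk above: in Python 3, indexing a bytes object already yields an int, so the ord() wrapper is no longer needed. A minimal standalone sketch (illustrative values, not part of the patch) of the idiom the geometry deserializers later in this file now rely on:

    # Python 3: byts[0] is already an int, so bool(byts[0]) replaces bool(_ord(byts[0]))
    byts = b'\x01\x00\x00\x00\x00'   # hypothetical serialized prefix; first byte is the endian flag
    assert isinstance(byts[0], int)
    is_little_endian = bool(byts[0])
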
@@ -66,16 +63,12 @@ log = logging.getLogger(__name__) -if six.PY3: - _number_types = frozenset((int, float)) - long = int +_number_types = frozenset((int, float)) + - def _name_from_hex_string(encoded_name): - bin_str = unhexlify(encoded_name) - return bin_str.decode('ascii') -else: - _number_types = frozenset((int, long, float)) - _name_from_hex_string = unhexlify +def _name_from_hex_string(encoded_name): + bin_str = unhexlify(encoded_name) + return bin_str.decode('ascii') def trim_if_startswith(s, prefix): @@ -235,13 +228,15 @@ def parse_casstype_args(typestring): else: names.append(None) - ctype = lookup_casstype_simple(tok) + try: + ctype = int(tok) + except ValueError: + ctype = lookup_casstype_simple(tok) types.append(ctype) # return the first (outer) type, which will have all parameters applied return args[0][0][0] - def lookup_casstype(casstype): """ Given a Cassandra type as a string (possibly including parameters), hand @@ -276,8 +271,7 @@ def __str__(self): EMPTY = EmptyValue() -@six.add_metaclass(CassandraTypeType) -class _CassandraType(object): +class _CassandraType(object, metaclass=CassandraTypeType): subtypes = () num_subtypes = 0 empty_binary_ok = False @@ -296,7 +290,7 @@ class _CassandraType(object): """ def __repr__(self): - return '<%s( %r )>' % (self.cql_parameterized_type(), self.val) + return '<%s>' % (self.cql_parameterized_type()) @classmethod def from_binary(cls, byts, protocol_version): @@ -380,8 +374,6 @@ def apply_parameters(cls, subtypes, names=None): raise ValueError("%s types require %d subtypes (%d given)" % (cls.typename, cls.num_subtypes, len(subtypes))) newname = cls.cass_parameterized_type_with(subtypes) - if six.PY2 and isinstance(newname, unicode): - newname = newname.encode('utf-8') return type(newname, (cls,), {'subtypes': subtypes, 'cassname': cls.cassname, 'fieldnames': names}) @classmethod @@ -402,6 +394,9 @@ def cass_parameterized_type(cls, full=False): """ return cls.cass_parameterized_type_with(cls.subtypes, full=full) + @classmethod + def serial_size(cls): + return None # it's initially named with a _ to avoid registering it as a real type, but # client programs may want to use the name still for isinstance(), etc @@ -412,16 +407,10 @@ class _UnrecognizedType(_CassandraType): num_subtypes = 'UNKNOWN' -if six.PY3: - def mkUnrecognizedType(casstypename): - return CassandraTypeType(casstypename, - (_UnrecognizedType,), - {'typename': "'%s'" % casstypename}) -else: - def mkUnrecognizedType(casstypename): # noqa - return CassandraTypeType(casstypename.encode('utf8'), - (_UnrecognizedType,), - {'typename': "'%s'" % casstypename}) +def mkUnrecognizedType(casstypename): + return CassandraTypeType(casstypename, + (_UnrecognizedType,), + {'typename': "'%s'" % casstypename}) class BytesType(_CassandraType): @@ -430,7 +419,7 @@ class BytesType(_CassandraType): @staticmethod def serialize(val, protocol_version): - return six.binary_type(val) + return bytes(val) class DecimalType(_CassandraType): @@ -473,6 +462,9 @@ def serialize(uuid, protocol_version): except AttributeError: raise TypeError("Got a non-UUID object for a UUID value") + @classmethod + def serial_size(cls): + return 16 class BooleanType(_CassandraType): typename = 'boolean' @@ -485,6 +477,10 @@ def deserialize(byts, protocol_version): def serialize(truth, protocol_version): return int8_pack(truth) + @classmethod + def serial_size(cls): + return 1 + class ByteType(_CassandraType): typename = 'tinyint' @@ -497,25 +493,20 @@ def serialize(byts, protocol_version): return int8_pack(byts) -if 
six.PY2: - class AsciiType(_CassandraType): - typename = 'ascii' - empty_binary_ok = True -else: - class AsciiType(_CassandraType): - typename = 'ascii' - empty_binary_ok = True +class AsciiType(_CassandraType): + typename = 'ascii' + empty_binary_ok = True - @staticmethod - def deserialize(byts, protocol_version): - return byts.decode('ascii') + @staticmethod + def deserialize(byts, protocol_version): + return byts.decode('ascii') - @staticmethod - def serialize(var, protocol_version): - try: - return var.encode('ascii') - except UnicodeDecodeError: - return var + @staticmethod + def serialize(var, protocol_version): + try: + return var.encode('ascii') + except UnicodeDecodeError: + return var class FloatType(_CassandraType): @@ -529,6 +520,9 @@ def deserialize(byts, protocol_version): def serialize(byts, protocol_version): return float_pack(byts) + @classmethod + def serial_size(cls): + return 4 class DoubleType(_CassandraType): typename = 'double' @@ -541,6 +535,9 @@ def deserialize(byts, protocol_version): def serialize(byts, protocol_version): return double_pack(byts) + @classmethod + def serial_size(cls): + return 8 class LongType(_CassandraType): typename = 'bigint' @@ -553,6 +550,9 @@ def deserialize(byts, protocol_version): def serialize(byts, protocol_version): return int64_pack(byts) + @classmethod + def serial_size(cls): + return 8 class Int32Type(_CassandraType): typename = 'int' @@ -565,6 +565,9 @@ def deserialize(byts, protocol_version): def serialize(byts, protocol_version): return int32_pack(byts) + @classmethod + def serial_size(cls): + return 4 class IntegerType(_CassandraType): typename = 'varint' @@ -600,7 +603,7 @@ def serialize(addr, protocol_version): # since we've already determined the AF return socket.inet_aton(addr) except: - if six.PY3 and isinstance(addr, (ipaddress.IPv4Address, ipaddress.IPv6Address)): + if isinstance(addr, (ipaddress.IPv4Address, ipaddress.IPv6Address)): return addr.packed raise ValueError("can't interpret %r as an inet address" % (addr,)) @@ -659,8 +662,11 @@ def serialize(v, protocol_version): raise TypeError('DateType arguments must be a datetime, date, or timestamp') timestamp = v - return int64_pack(long(timestamp)) + return int64_pack(int(timestamp)) + @classmethod + def serial_size(cls): + return 8 class TimestampType(DateType): pass @@ -683,6 +689,9 @@ def serialize(timeuuid, protocol_version): except AttributeError: raise TypeError("Got a non-UUID object for a UUID value") + @classmethod + def serial_size(cls): + return 16 class SimpleDateType(_CassandraType): typename = 'date' @@ -703,7 +712,7 @@ def serialize(val, protocol_version): try: days = val.days_from_epoch except AttributeError: - if isinstance(val, six.integer_types): + if isinstance(val, int): # the DB wants offset int values, but util.Date init takes days from epoch # here we assume int values are offset, as they would appear in CQL # short circuit to avoid subtracting just to add offset @@ -723,9 +732,14 @@ def deserialize(byts, protocol_version): def serialize(byts, protocol_version): return int16_pack(byts) - class TimeType(_CassandraType): typename = 'time' + # Time should be a fixed size 8 byte type but Cassandra 5.0 code marks it as + # variable size... and we have to match what the server expects since the server + # uses that specification to encode data of that type. 
+ #@classmethod + #def serial_size(cls): + # return 8 @staticmethod def deserialize(byts, protocol_version): @@ -823,7 +837,7 @@ def deserialize_safe(cls, byts, protocol_version): @classmethod def serialize_safe(cls, items, protocol_version): - if isinstance(items, six.string_types): + if isinstance(items, str): raise TypeError("Received a string for a type that expects a sequence") subtype, = cls.subtypes @@ -897,7 +911,7 @@ def serialize_safe(cls, themap, protocol_version): buf = io.BytesIO() buf.write(pack(len(themap))) try: - items = six.iteritems(themap) + items = themap.items() except AttributeError: raise TypeError("Got a non-map object for a map value") inner_proto = max(3, protocol_version) @@ -972,9 +986,6 @@ class UserType(TupleType): def make_udt_class(cls, keyspace, udt_name, field_names, field_types): assert len(field_names) == len(field_types) - if six.PY2 and isinstance(udt_name, unicode): - udt_name = udt_name.encode('utf-8') - instance = cls._cache.get((keyspace, udt_name)) if not instance or instance.fieldnames != field_names or instance.subtypes != field_types: instance = type(udt_name, (cls,), {'subtypes': field_types, @@ -989,8 +1000,6 @@ def make_udt_class(cls, keyspace, udt_name, field_names, field_types): @classmethod def evict_udt_class(cls, keyspace, udt_name): - if six.PY2 and isinstance(udt_name, unicode): - udt_name = udt_name.encode('utf-8') try: del cls._cache[(keyspace, udt_name)] except KeyError: @@ -1026,7 +1035,9 @@ def serialize_safe(cls, val, protocol_version): try: item = val[i] except TypeError: - item = getattr(val, fieldname) + item = getattr(val, fieldname, None) + if item is None and not hasattr(val, fieldname): + log.warning(f"field {fieldname} is part of the UDT {cls.typename} but is not present in the value {val}") if item is not None: packed_item = subtype.to_binary(item, proto_version) @@ -1145,7 +1156,7 @@ def serialize_safe(cls, val, protocol_version): def is_counter_type(t): - if isinstance(t, six.string_types): + if isinstance(t, str): t = lookup_casstype(t) return issubclass(t, CounterColumnType) @@ -1181,7 +1192,7 @@ def serialize(val, protocol_version): @staticmethod def deserialize(byts, protocol_version): - is_little_endian = bool(_ord(byts[0])) + is_little_endian = bool(byts[0]) point = point_le if is_little_endian else point_be return util.Point(*point.unpack_from(byts, 5)) # ofs = endian byte + int type @@ -1198,7 +1209,7 @@ def serialize(val, protocol_version): @staticmethod def deserialize(byts, protocol_version): - is_little_endian = bool(_ord(byts[0])) + is_little_endian = bool(byts[0]) point = point_le if is_little_endian else point_be coords = ((point.unpack_from(byts, offset) for offset in range(1 + 4 + 4, len(byts), point.size))) # start = endian + int type + int count return util.LineString(coords) @@ -1227,7 +1238,7 @@ def serialize(val, protocol_version): @staticmethod def deserialize(byts, protocol_version): - is_little_endian = bool(_ord(byts[0])) + is_little_endian = bool(byts[0]) if is_little_endian: int_fmt = '" % (cls.typename, cls.subtype.cql_parameterized_type(), cls.vector_size) diff --git a/cassandra/cython_marshal.pyx b/cassandra/cython_marshal.pyx index e4f30e6a85..4733a47935 100644 --- a/cassandra/cython_marshal.pyx +++ b/cassandra/cython_marshal.pyx @@ -1,12 +1,14 @@ # -- cython: profile=True # -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,8 +16,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import six - from libc.stdint cimport (int8_t, int16_t, int32_t, int64_t, uint8_t, uint16_t, uint32_t, uint64_t) from libc.string cimport memcpy @@ -24,8 +24,6 @@ from cassandra.buffer cimport Buffer, buf_read, to_bytes cdef bint is_little_endian from cassandra.util import is_little_endian -cdef bint PY3 = six.PY3 - ctypedef fused num_t: int64_t int32_t @@ -57,10 +55,7 @@ cdef inline num_t unpack_num(Buffer *buf, num_t *dummy=NULL): # dummy pointer be cdef varint_unpack(Buffer *term): """Unpack a variable-sized integer""" - if PY3: - return varint_unpack_py3(to_bytes(term)) - else: - return varint_unpack_py2(to_bytes(term)) + return varint_unpack_py3(to_bytes(term)) # TODO: Optimize these two functions cdef varint_unpack_py3(bytes term): @@ -70,13 +65,6 @@ cdef varint_unpack_py3(bytes term): val -= 1 << shift return val -cdef varint_unpack_py2(bytes term): # noqa - val = int(term.encode('hex'), 16) - if (ord(term[0]) & 128) != 0: - shift = len(term) * 8 # * Note below - val = val - (1 << shift) - return val - # * Note * # '1 << (len(term) * 8)' Cython tries to do native # integer shifts, which overflows. We need this to diff --git a/cassandra/cython_utils.pyx b/cassandra/cython_utils.pyx index 7539f33f31..1b6a136c69 100644 --- a/cassandra/cython_utils.pyx +++ b/cassandra/cython_utils.pyx @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/datastax/__init__.py b/cassandra/datastax/__init__.py index 2c9ca172f8..635f0d9e60 100644 --- a/cassandra/datastax/__init__.py +++ b/cassandra/datastax/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/datastax/cloud/__init__.py b/cassandra/datastax/cloud/__init__.py index ecb4a73fd4..e175b2928b 100644 --- a/cassandra/datastax/cloud/__init__.py +++ b/cassandra/datastax/cloud/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -18,8 +20,7 @@ import sys import tempfile import shutil -import six -from six.moves.urllib.request import urlopen +from urllib.request import urlopen _HAS_SSL = True try: @@ -182,11 +183,9 @@ def _pyopenssl_context_from_cert(ca_cert_location, cert_location, key_location): try: from OpenSSL import SSL except ImportError as e: - six.reraise( - ImportError, - ImportError("PyOpenSSL must be installed to connect to Astra with the Eventlet or Twisted event loops"), - sys.exc_info()[2] - ) + raise ImportError( + "PyOpenSSL must be installed to connect to Astra with the Eventlet or Twisted event loops")\ + .with_traceback(e.__traceback__) ssl_context = SSL.Context(SSL.TLSv1_METHOD) ssl_context.set_verify(SSL.VERIFY_PEER, callback=lambda _1, _2, _3, _4, ok: ok) ssl_context.use_certificate_file(cert_location) diff --git a/cassandra/datastax/graph/__init__.py b/cassandra/datastax/graph/__init__.py index 11785c84f6..8315843a36 100644 --- a/cassandra/datastax/graph/__init__.py +++ b/cassandra/datastax/graph/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/datastax/graph/fluent/__init__.py b/cassandra/datastax/graph/fluent/__init__.py index 44a0d136e0..0dfd5230e5 100644 --- a/cassandra/datastax/graph/fluent/__init__.py +++ b/cassandra/datastax/graph/fluent/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -257,7 +259,7 @@ def traversal_source(session=None, graph_name=None, execution_profile=EXEC_PROFI session = c.connect() g = DseGraph.traversal_source(session, 'my_graph') - print g.V().valueMap().toList() + print(g.V().valueMap().toList()) """ diff --git a/cassandra/datastax/graph/fluent/_predicates.py b/cassandra/datastax/graph/fluent/_predicates.py index 95bd533d5e..1c7825455a 100644 --- a/cassandra/datastax/graph/fluent/_predicates.py +++ b/cassandra/datastax/graph/fluent/_predicates.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/datastax/graph/fluent/_query.py b/cassandra/datastax/graph/fluent/_query.py index bd89046852..c476653541 100644 --- a/cassandra/datastax/graph/fluent/_query.py +++ b/cassandra/datastax/graph/fluent/_query.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import six import logging from cassandra.graph import SimpleGraphStatement, GraphProtocol @@ -55,7 +56,7 @@ def get_serializer(self, value): if self.user_types is None: try: user_types = self.context['cluster']._user_types[self.context['graph_name']] - self.user_types = dict(map(reversed, six.iteritems(user_types))) + self.user_types = dict(map(reversed, user_types.items())) except KeyError: self.user_types = {} diff --git a/cassandra/datastax/graph/fluent/_serializers.py b/cassandra/datastax/graph/fluent/_serializers.py index db8e715ef8..b6c705771f 100644 --- a/cassandra/datastax/graph/fluent/_serializers.py +++ b/cassandra/datastax/graph/fluent/_serializers.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,8 +16,6 @@ from collections import OrderedDict -import six - from gremlin_python.structure.io.graphsonV2d0 import ( GraphSONReader as GraphSONReaderV2, GraphSONUtil as GraphSONUtil, # no difference between v2 and v3 @@ -175,7 +175,7 @@ def dictify(cls, p, writer): class DistanceIO(object): @classmethod def dictify(cls, v, _): - return GraphSONUtil.typedValue('Distance', six.text_type(v), prefix='dse') + return GraphSONUtil.typedValue('Distance', str(v), prefix='dse') GremlinUserTypeIO = _GremlinGraphSONTypeSerializer(UserTypeIO) @@ -183,7 +183,7 @@ def dictify(cls, v, _): # GraphSON2 dse_graphson2_serializers = OrderedDict([ (t, _GremlinGraphSONTypeSerializer(s)) - for t, s in six.iteritems(GraphSON2Serializer.get_type_definitions()) + for t, s in GraphSON2Serializer.get_type_definitions().items() ]) dse_graphson2_serializers.update(OrderedDict([ @@ -197,7 +197,7 @@ def dictify(cls, v, _): dse_graphson2_deserializers = { k: _make_gremlin_graphson2_deserializer(v) - for k, v in six.iteritems(GraphSON2Deserializer.get_type_definitions()) + for k, v in GraphSON2Deserializer.get_type_definitions().items() } dse_graphson2_deserializers.update({ @@ -228,7 +228,7 @@ def dictify(cls, v, _): # GraphSON3 dse_graphson3_serializers = OrderedDict([ (t, _GremlinGraphSONTypeSerializer(s)) - for t, s in six.iteritems(GraphSON3Serializer.get_type_definitions()) + for t, s in GraphSON3Serializer.get_type_definitions().items() ]) dse_graphson3_serializers.update(OrderedDict([ @@ -239,7 +239,7 @@ def dictify(cls, v, _): dse_graphson3_deserializers = { k: _make_gremlin_graphson3_deserializer(v) - for k, v in six.iteritems(GraphSON3Deserializer.get_type_definitions()) + for k, v in GraphSON3Deserializer.get_type_definitions().items() } dse_graphson3_deserializers.update({ diff --git a/cassandra/datastax/graph/fluent/predicates.py b/cassandra/datastax/graph/fluent/predicates.py index 6bfd6b3113..8dca8b84ce 100644 --- a/cassandra/datastax/graph/fluent/predicates.py +++ b/cassandra/datastax/graph/fluent/predicates.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/datastax/graph/fluent/query.py b/cassandra/datastax/graph/fluent/query.py index c5026cc046..f599f2c979 100644 --- a/cassandra/datastax/graph/fluent/query.py +++ b/cassandra/datastax/graph/fluent/query.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. 
+# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/datastax/graph/fluent/serializers.py b/cassandra/datastax/graph/fluent/serializers.py index 680e613edf..3c175f92d4 100644 --- a/cassandra/datastax/graph/fluent/serializers.py +++ b/cassandra/datastax/graph/fluent/serializers.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/datastax/graph/graphson.py b/cassandra/datastax/graph/graphson.py index 4b333eb1bf..7b284c4c26 100644 --- a/cassandra/datastax/graph/graphson.py +++ b/cassandra/datastax/graph/graphson.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -23,12 +25,7 @@ import itertools from functools import partial -import six - -try: - import ipaddress -except: - ipaddress = None +import ipaddress from cassandra.cqltypes import cql_types_from_string @@ -95,8 +92,7 @@ def graphson_type(cls): return "{0}:{1}".format(cls.prefix, cls.graphson_base_type) -@six.add_metaclass(_GraphSONTypeType) -class GraphSONTypeIO(object): +class GraphSONTypeIO(object, metaclass=_GraphSONTypeType): """Represent a serializable GraphSON type""" prefix = 'g' @@ -109,7 +105,7 @@ def definition(cls, value, writer=None): @classmethod def serialize(cls, value, writer=None): - return six.text_type(value) + return str(value) @classmethod def deserialize(cls, value, reader=None): @@ -141,7 +137,7 @@ def serialize(cls, value, writer=None): @classmethod def get_specialized_serializer(cls, value): - if type(value) in six.integer_types and (value > MAX_INT32 or value < MIN_INT32): + if type(value) is int and (value > MAX_INT32 or value < MIN_INT32): return Int64TypeIO return Int32TypeIO @@ -164,9 +160,7 @@ class Int64TypeIO(IntegerTypeIO): @classmethod def deserialize(cls, value, reader=None): - if six.PY3: - return value - return long(value) + return value class FloatTypeIO(GraphSONTypeIO): @@ -274,8 +268,7 @@ class BlobTypeIO(GraphSONTypeIO): @classmethod def serialize(cls, value, writer=None): value = base64.b64encode(value) - if six.PY3: - value = value.decode('utf-8') + value = value.decode('utf-8') return value @classmethod @@ -343,7 +336,7 @@ def deserialize(cls, value, reader=None): raise ValueError('Invalid duration: {0}'.format(value)) duration = {k: float(v) if v is not None else 0 - for k, v in six.iteritems(duration.groupdict())} + for k, v in duration.groupdict().items()} return datetime.timedelta(days=duration['days'], hours=duration['hours'], minutes=duration['minutes'], seconds=duration['seconds']) @@ -512,7 +505,7 @@ class JsonMapTypeIO(GraphSONTypeIO): @classmethod def serialize(cls, value, writer=None): out = {} - for k, v in six.iteritems(value): + for k, v in value.items(): out[k] = writer.serialize(v, writer) return out @@ -528,7 +521,7 @@ class MapTypeIO(GraphSONTypeIO): def definition(cls, value, writer=None): out = OrderedDict([('cqlType', cls.cql_type)]) out['definition'] = [] - for k, v in six.iteritems(value): + for k, v in value.items(): # we just need the first pair to write the def out['definition'].append(writer.definition(k)) out['definition'].append(writer.definition(v)) @@ -538,7 +531,7 @@ def definition(cls, value, writer=None): @classmethod def serialize(cls, value, writer=None): out = [] - for k, v in six.iteritems(value): + for k, v in value.items(): out.append(writer.serialize(k, writer)) out.append(writer.serialize(v, writer)) @@ -841,16 +834,10 @@ class GraphSON1Serializer(_BaseGraphSONSerializer): ]) -if ipaddress: - GraphSON1Serializer.register(ipaddress.IPv4Address, InetTypeIO) - GraphSON1Serializer.register(ipaddress.IPv6Address, InetTypeIO) - -if six.PY2: - GraphSON1Serializer.register(buffer, ByteBufferTypeIO) - GraphSON1Serializer.register(unicode, TextTypeIO) -else: - GraphSON1Serializer.register(memoryview, ByteBufferTypeIO) - GraphSON1Serializer.register(bytes, ByteBufferTypeIO) 
+GraphSON1Serializer.register(ipaddress.IPv4Address, InetTypeIO) +GraphSON1Serializer.register(ipaddress.IPv6Address, InetTypeIO) +GraphSON1Serializer.register(memoryview, ByteBufferTypeIO) +GraphSON1Serializer.register(bytes, ByteBufferTypeIO) class _BaseGraphSONDeserializer(object): @@ -922,9 +909,7 @@ def deserialize_int(cls, value): @classmethod def deserialize_bigint(cls, value): - if six.PY3: - return cls.deserialize_int(value) - return long(value) + return cls.deserialize_int(value) @classmethod def deserialize_double(cls, value): @@ -1007,8 +992,6 @@ def serialize(self, value, writer=None): GraphSON2Serializer.register(int, IntegerTypeIO) -if six.PY2: - GraphSON2Serializer.register(long, IntegerTypeIO) class GraphSON2Deserializer(_BaseGraphSONDeserializer): @@ -1055,7 +1038,7 @@ def deserialize(self, obj): except KeyError: pass # list and map are treated as normal json objs (could be isolated deserializers) - return {self.deserialize(k): self.deserialize(v) for k, v in six.iteritems(obj)} + return {self.deserialize(k): self.deserialize(v) for k, v in obj.items()} elif isinstance(obj, list): return [self.deserialize(o) for o in obj] else: @@ -1109,7 +1092,7 @@ def get_serializer(self, value): if self.user_types is None: try: user_types = self.context['cluster']._user_types[self.context['graph_name']] - self.user_types = dict(map(reversed, six.iteritems(user_types))) + self.user_types = dict(map(reversed, user_types.items())) except KeyError: self.user_types = {} diff --git a/cassandra/datastax/graph/query.py b/cassandra/datastax/graph/query.py index 7c0e265dbf..d5f2a594b3 100644 --- a/cassandra/datastax/graph/query.py +++ b/cassandra/datastax/graph/query.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +17,6 @@ import json from warnings import warn -import six - from cassandra import ConsistencyLevel from cassandra.query import Statement, SimpleStatement from cassandra.datastax.graph.types import Vertex, Edge, Path, VertexProperty @@ -77,7 +77,7 @@ def __init__(self, **kwargs): self._graph_options = {} kwargs.setdefault('graph_source', 'g') kwargs.setdefault('graph_language', GraphOptions.DEFAULT_GRAPH_LANGUAGE) - for attr, value in six.iteritems(kwargs): + for attr, value in kwargs.items(): if attr not in _graph_option_names: warn("Unknown keyword argument received for GraphOptions: {0}".format(attr)) setattr(self, attr, value) @@ -103,7 +103,7 @@ def get_options_map(self, other_options=None): for cl in ('graph-write-consistency', 'graph-read-consistency'): cl_enum = options.get(cl) if cl_enum is not None: - options[cl] = six.b(ConsistencyLevel.value_to_name[cl_enum]) + options[cl] = ConsistencyLevel.value_to_name[cl_enum].encode() return options def set_source_default(self): @@ -157,8 +157,8 @@ def get(self, key=opt[2]): def set(self, value, key=opt[2]): if value is not None: # normalize text here so it doesn't have to be done every time we get options map - if isinstance(value, six.text_type) and not isinstance(value, six.binary_type): - value = six.b(value) + if isinstance(value, str): + value = value.encode() self._graph_options[key] = value else: self._graph_options.pop(key, None) @@ -278,7 +278,7 @@ def __getattr__(self, attr): raise AttributeError("Result has no top-level attribute %r" % (attr,)) def __getitem__(self, item): - if isinstance(self.value, dict) and isinstance(item, six.string_types): + if isinstance(self.value, dict) and isinstance(item, str): return self.value[item] elif isinstance(self.value, list) and isinstance(item, int): return self.value[item] diff --git a/cassandra/datastax/graph/types.py b/cassandra/datastax/graph/types.py index 9817c99d7d..75902c6622 100644 --- a/cassandra/datastax/graph/types.py +++ b/cassandra/datastax/graph/types.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/datastax/insights/__init__.py b/cassandra/datastax/insights/__init__.py index 2c9ca172f8..635f0d9e60 100644 --- a/cassandra/datastax/insights/__init__.py +++ b/cassandra/datastax/insights/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. 
+# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/datastax/insights/registry.py b/cassandra/datastax/insights/registry.py index 3dd1d255ae..523af4dc84 100644 --- a/cassandra/datastax/insights/registry.py +++ b/cassandra/datastax/insights/registry.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import six from collections import OrderedDict from warnings import warn @@ -59,7 +60,7 @@ def _get_serializer(self, cls): try: return self._mapping_dict[cls] except KeyError: - for registered_cls, serializer in six.iteritems(self._mapping_dict): + for registered_cls, serializer in self._mapping_dict.items(): if issubclass(cls, registered_cls): return self._mapping_dict[registered_cls] raise ValueError diff --git a/cassandra/datastax/insights/reporter.py b/cassandra/datastax/insights/reporter.py index b05a88deb0..607c723a1a 100644 --- a/cassandra/datastax/insights/reporter.py +++ b/cassandra/datastax/insights/reporter.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -24,7 +26,6 @@ import sys from threading import Event, Thread import time -import six from cassandra.policies import HostDistance from cassandra.util import ms_timestamp_from_datetime @@ -199,9 +200,9 @@ def _get_startup_data(self): }, 'platformInfo': { 'os': { - 'name': uname_info.system if six.PY3 else uname_info[0], - 'version': uname_info.release if six.PY3 else uname_info[2], - 'arch': uname_info.machine if six.PY3 else uname_info[4] + 'name': uname_info.system, + 'version': uname_info.release, + 'arch': uname_info.machine }, 'cpus': { 'length': multiprocessing.cpu_count(), diff --git a/cassandra/datastax/insights/serializers.py b/cassandra/datastax/insights/serializers.py index aec4467a6a..b1fe0ac5e9 100644 --- a/cassandra/datastax/insights/serializers.py +++ b/cassandra/datastax/insights/serializers.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,8 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import six - def initialize_registry(insights_registry): # This will be called from the cluster module, so we put all this behavior @@ -203,8 +203,8 @@ def graph_options_insights_serializer(options): 'language': options.graph_language, 'graphProtocol': options.graph_protocol } - updates = {k: v.decode('utf-8') for k, v in six.iteritems(rv) - if isinstance(v, six.binary_type)} + updates = {k: v.decode('utf-8') for k, v in rv.items() + if isinstance(v, bytes)} rv.update(updates) return rv diff --git a/cassandra/datastax/insights/util.py b/cassandra/datastax/insights/util.py index a483b3f64d..0ce96c7edf 100644 --- a/cassandra/datastax/insights/util.py +++ b/cassandra/datastax/insights/util.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/deserializers.pxd b/cassandra/deserializers.pxd index 7b307226ad..c8408a57b6 100644 --- a/cassandra/deserializers.pxd +++ b/cassandra/deserializers.pxd @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/deserializers.pyx b/cassandra/deserializers.pyx index 7de6949099..c07d67be91 100644 --- a/cassandra/deserializers.pyx +++ b/cassandra/deserializers.pyx @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -29,8 +31,6 @@ from uuid import UUID from cassandra import cqltypes from cassandra import util -cdef bint PY2 = six.PY2 - cdef class Deserializer: """Cython-based deserializer class for a cqltype""" @@ -90,8 +90,6 @@ cdef class DesAsciiType(Deserializer): cdef deserialize(self, Buffer *buf, int protocol_version): if buf.size == 0: return "" - if PY2: - return to_bytes(buf) return to_bytes(buf).decode('ascii') diff --git a/cassandra/encoder.py b/cassandra/encoder.py index f2c3f8dfed..94093e85b6 100644 --- a/cassandra/encoder.py +++ b/cassandra/encoder.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -21,34 +23,22 @@ log = logging.getLogger(__name__) from binascii import hexlify +from decimal import Decimal import calendar import datetime import math import sys import types from uuid import UUID -import six +import ipaddress from cassandra.util import (OrderedDict, OrderedMap, OrderedMapSerializedKey, sortedset, Time, Date, Point, LineString, Polygon) -if six.PY3: - import ipaddress - -if six.PY3: - long = int - def cql_quote(term): - # The ordering of this method is important for the result of this method to - # be a native str type (for both Python 2 and 3) - if isinstance(term, str): return "'%s'" % str(term).replace("'", "''") - # This branch of the if statement will only be used by Python 2 to catch - # unicode strings, text_type is used to prevent type errors with Python 3. - elif isinstance(term, six.text_type): - return "'%s'" % term.encode('utf8').replace("'", "''") else: return str(term) @@ -72,6 +62,7 @@ class Encoder(object): def __init__(self): self.mapping = { float: self.cql_encode_float, + Decimal: self.cql_encode_decimal, bytearray: self.cql_encode_bytes, str: self.cql_encode_str, int: self.cql_encode_object, @@ -97,21 +88,13 @@ def __init__(self): Polygon: self.cql_encode_str_quoted } - if six.PY2: - self.mapping.update({ - unicode: self.cql_encode_unicode, - buffer: self.cql_encode_bytes, - long: self.cql_encode_object, - types.NoneType: self.cql_encode_none, - }) - else: - self.mapping.update({ - memoryview: self.cql_encode_bytes, - bytes: self.cql_encode_bytes, - type(None): self.cql_encode_none, - ipaddress.IPv4Address: self.cql_encode_ipaddress, - ipaddress.IPv6Address: self.cql_encode_ipaddress - }) + self.mapping.update({ + memoryview: self.cql_encode_bytes, + bytes: self.cql_encode_bytes, + type(None): self.cql_encode_none, + ipaddress.IPv4Address: self.cql_encode_ipaddress, + ipaddress.IPv6Address: self.cql_encode_ipaddress + }) def cql_encode_none(self, val): """ @@ -134,16 +117,8 @@ def cql_encode_str(self, val): def cql_encode_str_quoted(self, val): return "'%s'" % val - if six.PY3: - def cql_encode_bytes(self, val): - return (b'0x' + hexlify(val)).decode('utf-8') - elif sys.version_info >= (2, 7): - def cql_encode_bytes(self, val): # noqa - return b'0x' + hexlify(val) - else: - # python 2.6 requires string or read-only buffer for hexlify - def cql_encode_bytes(self, val): # noqa - return b'0x' + hexlify(buffer(val)) + def cql_encode_bytes(self, val): + return (b'0x' + hexlify(val)).decode('utf-8') def cql_encode_object(self, val): """ @@ -169,7 +144,7 @@ def cql_encode_datetime(self, val): with millisecond precision. 
""" timestamp = calendar.timegm(val.utctimetuple()) - return str(long(timestamp * 1e3 + getattr(val, 'microsecond', 0) / 1e3)) + return str(int(timestamp * 1e3 + getattr(val, 'microsecond', 0) / 1e3)) def cql_encode_date(self, val): """ @@ -214,7 +189,7 @@ def cql_encode_map_collection(self, val): return '{%s}' % ', '.join('%s: %s' % ( self.mapping.get(type(k), self.cql_encode_object)(k), self.mapping.get(type(v), self.cql_encode_object)(v) - ) for k, v in six.iteritems(val)) + ) for k, v in val.items()) def cql_encode_list_collection(self, val): """ @@ -236,14 +211,16 @@ def cql_encode_all_types(self, val, as_text_type=False): if :attr:`~Encoder.mapping` does not contain an entry for the type. """ encoded = self.mapping.get(type(val), self.cql_encode_object)(val) - if as_text_type and not isinstance(encoded, six.text_type): + if as_text_type and not isinstance(encoded, str): return encoded.decode('utf-8') return encoded - if six.PY3: - def cql_encode_ipaddress(self, val): - """ - Converts an ipaddress (IPV4Address, IPV6Address) to a CQL string. This - is suitable for ``inet`` type columns. - """ - return "'%s'" % val.compressed + def cql_encode_ipaddress(self, val): + """ + Converts an ipaddress (IPV4Address, IPV6Address) to a CQL string. This + is suitable for ``inet`` type columns. + """ + return "'%s'" % val.compressed + + def cql_encode_decimal(self, val): + return self.cql_encode_float(float(val)) \ No newline at end of file diff --git a/cassandra/graph/__init__.py b/cassandra/graph/__init__.py index 51bd1de16a..1d33345aad 100644 --- a/cassandra/graph/__init__.py +++ b/cassandra/graph/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/graph/graphson.py b/cassandra/graph/graphson.py index d37c172a6b..576d5063fe 100644 --- a/cassandra/graph/graphson.py +++ b/cassandra/graph/graphson.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/graph/query.py b/cassandra/graph/query.py index 50eef72ad0..9003fe280f 100644 --- a/cassandra/graph/query.py +++ b/cassandra/graph/query.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/graph/types.py b/cassandra/graph/types.py index c8b613f8e4..53febe7e9c 100644 --- a/cassandra/graph/types.py +++ b/cassandra/graph/types.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/io/__init__.py b/cassandra/io/__init__.py index 386372eb4a..588a655d98 100644 --- a/cassandra/io/__init__.py +++ b/cassandra/io/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
diff --git a/cassandra/io/asyncioreactor.py b/cassandra/io/asyncioreactor.py
index 7cb0444a32..95f92e26e0 100644
--- a/cassandra/io/asyncioreactor.py
+++ b/cassandra/io/asyncioreactor.py
@@ -41,14 +41,12 @@ def end(self):

     def __init__(self, timeout, callback, loop):
         delayed = self._call_delayed_coro(timeout=timeout,
-                                          callback=callback,
-                                          loop=loop)
+                                          callback=callback)
         self._handle = asyncio.run_coroutine_threadsafe(delayed, loop=loop)

     @staticmethod
-    @asyncio.coroutine
-    def _call_delayed_coro(timeout, callback, loop):
-        yield from asyncio.sleep(timeout, loop=loop)
+    async def _call_delayed_coro(timeout, callback):
+        await asyncio.sleep(timeout)
         return callback()

     def __lt__(self, other):
@@ -91,8 +89,8 @@ def __init__(self, *args, **kwargs):
         self._connect_socket()
         self._socket.setblocking(0)

-        self._write_queue = asyncio.Queue(loop=self._loop)
-        self._write_queue_lock = asyncio.Lock(loop=self._loop)
+        self._write_queue = asyncio.Queue()
+        self._write_queue_lock = asyncio.Lock()

         # see initialize_reactor -- loop is running in a separate thread, so we
         # have to use a threadsafe call
@@ -136,8 +134,7 @@ def close(self):
             self._close(), loop=self._loop
         )

-    @asyncio.coroutine
-    def _close(self):
+    async def _close(self):
         log.debug("Closing connection (%s) to %s" % (id(self), self.endpoint))
         if self._write_watcher:
             self._write_watcher.cancel()
@@ -174,21 +171,19 @@ def push(self, data):
             # avoid races/hangs by just scheduling this, not using threadsafe
             self._loop.create_task(self._push_msg(chunks))

-    @asyncio.coroutine
-    def _push_msg(self, chunks):
+    async def _push_msg(self, chunks):
         # This lock ensures all chunks of a message are sequential in the Queue
-        with (yield from self._write_queue_lock):
+        async with self._write_queue_lock:
             for chunk in chunks:
                 self._write_queue.put_nowait(chunk)

-    @asyncio.coroutine
-    def handle_write(self):
+    async def handle_write(self):
         while True:
             try:
-                next_msg = yield from self._write_queue.get()
+                next_msg = await self._write_queue.get()
                 if next_msg:
-                    yield from self._loop.sock_sendall(self._socket, next_msg)
+                    await self._loop.sock_sendall(self._socket, next_msg)
             except socket.error as err:
                 log.debug("Exception in send for %s: %s", self, err)
                 self.defunct(err)
@@ -196,18 +191,19 @@ def handle_write(self):
             except asyncio.CancelledError:
                 return

-    @asyncio.coroutine
-    def handle_read(self):
+    async def handle_read(self):
         while True:
             try:
-                buf = yield from self._loop.sock_recv(self._socket, self.in_buffer_size)
+                buf = await self._loop.sock_recv(self._socket, self.in_buffer_size)
                 self._iobuf.write(buf)
             # sock_recv expects EWOULDBLOCK if socket provides no data, but
             # nonblocking ssl sockets raise these instead, so we handle them
             # ourselves by yielding to the event loop, where the socket will
             # get the reading/writing it "wants" before retrying
             except (ssl.SSLWantWriteError, ssl.SSLWantReadError):
-                yield
+                # Apparently the preferred way to yield to the event loop from within
+                # a native coroutine based on https://github.com/python/asyncio/issues/284
+                await asyncio.sleep(0)
                 continue
             except socket.error as err:
                 log.debug("Exception during socket recv for %s: %s",
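The asyncioreactor.py hunk above ports the reactor from @asyncio.coroutine generators to native async/await; note that the lock acquisition becomes `async with`, since the `with await lock` form was deprecated in 3.7 and removed in Python 3.9. As a reference for the write-path pattern the reactor uses, here is a minimal, self-contained sketch (not driver code; `push_msg` and `writer` are illustrative names): a producer enqueues all chunks of one message under a lock so they stay contiguous, and a single consumer drains them in order.

    import asyncio

    async def demo():
        queue = asyncio.Queue()
        lock = asyncio.Lock()

        async def push_msg(chunks):
            # hold the lock so chunks of one message land back-to-back in the queue
            async with lock:
                for chunk in chunks:
                    queue.put_nowait(chunk)

        async def writer():
            # single consumer: drains chunks in the order push_msg enqueued them
            while not queue.empty():
                print(await queue.get())

        await push_msg([b"header", b"body"])
        await writer()

    asyncio.run(demo())

diff --git a/cassandra/io/asyncorereactor.py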
b/cassandra/io/asyncorereactor.py index e07aab4697..e1bcafb39e 100644 --- a/cassandra/io/asyncorereactor.py +++ b/cassandra/io/asyncorereactor.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -24,32 +26,25 @@ import sys import ssl -from six.moves import range - try: from weakref import WeakSet except ImportError: from cassandra.util import WeakSet # noqa -import asyncore +from cassandra import DependencyException +try: + import asyncore +except ModuleNotFoundError: + raise DependencyException( + "Unable to import asyncore module. Note that this module has been removed in Python 3.12 " + "so when using the driver with this version (or anything newer) you will need to use one of the " + "other event loop implementations." + ) from cassandra.connection import Connection, ConnectionShutdown, NONBLOCKING, Timer, TimerManager -# TODO: Remove when Python 2 is removed -class LogWrapper(object): - """ PYTHON-1228. If our logger has disappeared, there's nothing we can do, so just execute nothing """ - def __init__(self): - self._log = logging.getLogger(__name__) - - def __getattr__(self, name): - try: - return getattr(self._log, name) - except: - return lambda *args, **kwargs: None - - -log = LogWrapper() +log = logging.getLogger(__name__) _dispatcher_map = {} @@ -261,12 +256,21 @@ def _run_loop(self): try: self._loop_dispatcher.loop(self.timer_resolution) self._timers.service_timeouts() - except Exception: - log.debug("Asyncore event loop stopped unexepectedly", exc_info=True) + except Exception as exc: + self._maybe_log_debug("Asyncore event loop stopped unexpectedly", exc_info=exc) break self._started = False - log.debug("Asyncore event loop ended") + self._maybe_log_debug("Asyncore event loop ended") + + def _maybe_log_debug(self, *args, **kwargs): + try: + log.debug(*args, **kwargs) + except Exception: + # TODO: Remove when Python 2 support is removed + # PYTHON-1266. If our logger has disappeared, there's nothing we + # can do, so just log nothing. + pass def add_timer(self, timer): self._timers.add_timer(timer) diff --git a/cassandra/io/eventletreactor.py b/cassandra/io/eventletreactor.py index 162661f468..94e1e49544 100644 --- a/cassandra/io/eventletreactor.py +++ b/cassandra/io/eventletreactor.py @@ -1,11 +1,13 @@ # Copyright 2014 Symantec Corporation -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -23,8 +25,6 @@ from threading import Event import time -from six.moves import xrange - from cassandra.connection import Connection, ConnectionShutdown, Timer, TimerManager try: from eventlet.green.OpenSSL import SSL @@ -105,11 +105,12 @@ def __init__(self, *args, **kwargs): def _wrap_socket_from_context(self): _check_pyopenssl() - self._socket = SSL.Connection(self.ssl_context, self._socket) - self._socket.set_connect_state() + rv = SSL.Connection(self.ssl_context, self._socket) + rv.set_connect_state() if self.ssl_options and 'server_hostname' in self.ssl_options: # This is necessary for SNI - self._socket.set_tlsext_host_name(self.ssl_options['server_hostname'].encode('ascii')) + rv.set_tlsext_host_name(self.ssl_options['server_hostname'].encode('ascii')) + return rv def _initiate_connection(self, sockaddr): if self.uses_legacy_ssl_options: @@ -119,14 +120,12 @@ def _initiate_connection(self, sockaddr): if self.ssl_context or self.ssl_options: self._socket.do_handshake() - def _match_hostname(self): - if self.uses_legacy_ssl_options: - super(EventletConnection, self)._match_hostname() - else: + def _validate_hostname(self): + if not self.uses_legacy_ssl_options: cert_name = self._socket.get_peer_certificate().get_subject().commonName if cert_name != self.endpoint.address: raise Exception("Hostname verification failed! Certificate name '{}' " - "doesn't endpoint '{}'".format(cert_name, self.endpoint.address)) + "doesn't match endpoint '{}'".format(cert_name, self.endpoint.address)) def close(self): with self.lock: @@ -190,5 +189,5 @@ def handle_read(self): def push(self, data): chunk_size = self.out_buffer_size - for i in xrange(0, len(data), chunk_size): + for i in range(0, len(data), chunk_size): self._write_queue.put(data[i:i + chunk_size]) diff --git a/cassandra/io/geventreactor.py b/cassandra/io/geventreactor.py index ebc664d485..8ad4ee99e7 100644 --- a/cassandra/io/geventreactor.py +++ b/cassandra/io/geventreactor.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -20,7 +22,6 @@ import logging import time -from six.moves import range from cassandra.connection import Connection, ConnectionShutdown, Timer, TimerManager diff --git a/cassandra/io/libevreactor.py b/cassandra/io/libevreactor.py index 54e2d0de03..275f79c374 100644 --- a/cassandra/io/libevreactor.py +++ b/cassandra/io/libevreactor.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -21,21 +23,21 @@ from threading import Lock, Thread import time -from six.moves import range - -from cassandra.connection import (Connection, ConnectionShutdown, - NONBLOCKING, Timer, TimerManager) +from cassandra import DependencyException try: import cassandra.io.libevwrapper as libev except ImportError: - raise ImportError( + raise DependencyException( "The C extension needed to use libev was not found. This " "probably means that you didn't have the required build dependencies " "when installing the driver. 
See "
-        "http://datastax.github.io/python-driver/installation.html#c-extensions "
+        "https://docs.datastax.com/en/developer/python-driver/latest/installation/index.html#c-extensions "
         "for instructions on installing build dependencies and building "
         "the C extension.")

+from cassandra.connection import (Connection, ConnectionShutdown,
+                                  NONBLOCKING, Timer, TimerManager)
+
 log = logging.getLogger(__name__)
diff --git a/cassandra/io/libevwrapper.c b/cassandra/io/libevwrapper.c
index 99e1df30f7..85ed551951 100644
--- a/cassandra/io/libevwrapper.c
+++ b/cassandra/io/libevwrapper.c
@@ -583,7 +583,6 @@ static PyMethodDef module_methods[] = {

 PyDoc_STRVAR(module_doc, "libev wrapper methods");

-#if PY_MAJOR_VERSION >= 3
 static struct PyModuleDef moduledef = {
     PyModuleDef_HEAD_INIT,
     "libevwrapper",
@@ -600,13 +599,6 @@ static struct PyModuleDef moduledef = {

 PyObject *
 PyInit_libevwrapper(void)
-
-# else
-# define INITERROR return
-
-void
-initlibevwrapper(void)
-#endif
 {
     PyObject *module = NULL;

@@ -629,11 +621,7 @@ initlibevwrapper(void)
     if (PyType_Ready(&libevwrapper_TimerType) < 0)
         INITERROR;

-# if PY_MAJOR_VERSION >= 3
     module = PyModule_Create(&moduledef);
-# else
-    module = Py_InitModule3("libevwrapper", module_methods, module_doc);
-# endif

     if (module == NULL)
         INITERROR;

@@ -665,11 +653,14 @@ initlibevwrapper(void)
     if (PyModule_AddObject(module, "Timer", (PyObject *)&libevwrapper_TimerType) == -1)
         INITERROR;

+#if PY_MAJOR_VERSION < 3 || (PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION < 7)
+    // Since CPython 3.7, the `Py_Initialize()` routine always initializes the GIL.
+    // The `PyEval_ThreadsInitialized()` routine has been deprecated in CPython 3.7
+    // and completely removed in CPython 3.13.
     if (!PyEval_ThreadsInitialized()) {
         PyEval_InitThreads();
     }
+#endif

-#if PY_MAJOR_VERSION >= 3
     return module;
-#endif
 }
diff --git a/cassandra/io/twistedreactor.py b/cassandra/io/twistedreactor.py
index 9b3ff09398..b55ac4d1a3 100644
--- a/cassandra/io/twistedreactor.py
+++ b/cassandra/io/twistedreactor.py
@@ -1,10 +1,12 @@
-# Copyright DataStax, Inc.
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
 #
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -102,6 +104,9 @@ def maybe_start(self):
             self._thread.start()
             atexit.register(partial(_cleanup, weakref.ref(self)))

+    def _reactor_stopped(self):
+        return reactor._stopped
+
     def _cleanup(self):
         if self._thread:
             reactor.callFromThread(reactor.stop)
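The asyncore and libev reactors above now funnel a missing optional backend through the driver's DependencyException instead of a bare ImportError or ModuleNotFoundError, so the failure message tells users which event loop to pick instead. A minimal sketch of that guarded-import pattern (assuming the cassandra package is importable; the message text here is illustrative, not the driver's):

    from cassandra import DependencyException

    try:
        import asyncore  # removed from the standard library in Python 3.12
    except ModuleNotFoundError:
        raise DependencyException(
            "The asyncore event loop is unavailable on this Python version; "
            "use another implementation such as libev, asyncio, eventlet, or gevent."
        )

diff --git a/cassandra/ioutils.pyx b/cassandra/ioutils.pyx
index b0ab4f16cb..91c2bf9542 100644
--- a/cassandra/ioutils.pyx
+++ b/cassandra/ioutils.pyx
@@ -1,10 +1,12 @@
-# Copyright DataStax, Inc.
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.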
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/marshal.py b/cassandra/marshal.py index 7533ebd307..e8733f0544 100644 --- a/cassandra/marshal.py +++ b/cassandra/marshal.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
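The marshal.py hunk that follows collapses the Python 2/3 split helpers into single Python 3 implementations. The signed-vint codecs it touches (vints_pack/vints_unpack) rely on a zig-zag mapping so that small negative integers also encode into few bytes; the decode_zig_zag referenced below is the driver's own, defined elsewhere in marshal.py. As a reference, a minimal standalone sketch of that mapping, assuming values fit in 64 bits:

    def encode_zig_zag(n):
        # interleave signs: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4, ...
        return (n << 1) ^ (n >> 63)

    def decode_zig_zag(n):
        # invert the interleaving above
        return (n >> 1) ^ -(n & 1)

    assert decode_zig_zag(encode_zig_zag(-42)) == -42

The unsigned variants added below (uvint_pack/uvint_unpack) use the same first-byte reservation scheme but skip the zig-zag step, since their values are known to be non-negative.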
-import six
 import struct


@@ -28,6 +29,7 @@ def _make_packer(format_string):
 int8_pack, int8_unpack = _make_packer('>b')
 uint64_pack, uint64_unpack = _make_packer('>Q')
 uint32_pack, uint32_unpack = _make_packer('>I')
+uint32_le_pack, uint32_le_unpack = _make_packer('<I')
 uint16_pack, uint16_unpack = _make_packer('>H')
 uint8_pack, uint8_unpack = _make_packer('>B')
 float_pack, float_unpack = _make_packer('>f')
@@ -44,35 +46,16 @@ def _make_packer(format_string):
 v3_header_unpack = v3_header_struct.unpack


-if six.PY3:
-    def byte2int(b):
-        return b
-
-
-    def varint_unpack(term):
-        val = int(''.join("%02x" % i for i in term), 16)
-        if (term[0] & 128) != 0:
-            len_term = len(term)  # pulling this out of the expression to avoid overflow in cython optimized code
-            val -= 1 << (len_term * 8)
-        return val
-else:
-    def byte2int(b):
-        return ord(b)
-
-
-    def varint_unpack(term):  # noqa
-        val = int(term.encode('hex'), 16)
-        if (ord(term[0]) & 128) != 0:
-            len_term = len(term)  # pulling this out of the expression to avoid overflow in cython optimized code
-            val = val - (1 << (len_term * 8))
-        return val
+def varint_unpack(term):
+    val = int(''.join("%02x" % i for i in term), 16)
+    if (term[0] & 128) != 0:
+        len_term = len(term)  # pulling this out of the expression to avoid overflow in cython optimized code
+        val -= 1 << (len_term * 8)
+    return val


 def bit_length(n):
-    if six.PY3 or isinstance(n, int):
-        return int.bit_length(n)
-    else:
-        return long.bit_length(n)
+    return int.bit_length(n)


 def varint_pack(big):
@@ -90,7 +73,7 @@ def varint_pack(big):
     if pos and revbytes[-1] & 0x80:
         revbytes.append(0)
     revbytes.reverse()
-    return six.binary_type(revbytes)
+    return bytes(revbytes)


 point_be = struct.Struct('>dd')
@@ -112,7 +95,7 @@ def vints_unpack(term):  # noqa
     values = []
     n = 0
     while n < len(term):
-        first_byte = byte2int(term[n])
+        first_byte = term[n]

         if (first_byte & 128) == 0:
             val = first_byte
@@ -123,14 +106,13 @@ def vints_unpack(term):  # noqa
             while n < end:
                 n += 1
                 val <<= 8
-                val |= byte2int(term[n]) & 0xff
+                val |= term[n] & 0xff

         n += 1
         values.append(decode_zig_zag(val))

     return tuple(values)

-
 def vints_pack(values):
     revbytes = bytearray()
     values = [int(v) for v in values[::-1]]
@@ -142,8 +124,8 @@ def vints_pack(values):
             num_extra_bytes = 0
             num_bits = v.bit_length()
             # We need to reserve (num_extra_bytes+1) bits in the first byte
-            # ie. with 1 extra byte, the first byte needs to be something like '10XXXXXX' # 2 bits reserved
-            # ie. with 8 extra bytes, the first byte needs to be '11111111' # 8 bits reserved
+            # i.e. with 1 extra byte, the first byte needs to be something like '10XXXXXX' # 2 bits reserved
+            # i.e. with 8 extra bytes, the first byte needs to be '11111111' # 8 bits reserved
             reserved_bits = num_extra_bytes + 1
             while num_bits > (8-(reserved_bits)):
                 num_extra_bytes += 1
                 num_bits -= 8
@@ -161,4 +143,49 @@ def vints_pack(values):
             revbytes.append(abs(v))

     revbytes.reverse()
-    return six.binary_type(revbytes)
+    return bytes(revbytes)
+
+def uvint_unpack(bytes):
+    first_byte = bytes[0]
+
+    if (first_byte & 128) == 0:
+        return (first_byte,1)
+
+    num_extra_bytes = 8 - (~first_byte & 0xff).bit_length()
+    rv = first_byte & (0xff >> num_extra_bytes)
+    for idx in range(1,num_extra_bytes + 1):
+        new_byte = bytes[idx]
+        rv <<= 8
+        rv |= new_byte & 0xff
+
+    return (rv, num_extra_bytes + 1)
+
+def uvint_pack(val):
+    rv = bytearray()
+    if val < 128:
+        rv.append(val)
+    else:
+        v = val
+        num_extra_bytes = 0
+        num_bits = v.bit_length()
+        # We need to reserve (num_extra_bytes+1) bits in the first byte
+        # i.e.
with 1 extra byte, the first byte needs to be something like '10XXXXXX' # 2 bits reserved + # i.e. with 8 extra bytes, the first byte needs to be '11111111' # 8 bits reserved + reserved_bits = num_extra_bytes + 1 + while num_bits > (8-(reserved_bits)): + num_extra_bytes += 1 + num_bits -= 8 + reserved_bits = min(num_extra_bytes + 1, 8) + rv.append(v & 0xff) + v >>= 8 + + if num_extra_bytes > 8: + raise ValueError('Value %d is too big and cannot be encoded as vint' % val) + + # We can now store the last bits in the first byte + n = 8 - num_extra_bytes + v |= (0xff >> n << n) + rv.append(abs(v)) + + rv.reverse() + return bytes(rv) diff --git a/cassandra/metadata.py b/cassandra/metadata.py index df38fc6670..2c13f92e42 100644 --- a/cassandra/metadata.py +++ b/cassandra/metadata.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +17,12 @@ from binascii import unhexlify from bisect import bisect_left from collections import defaultdict +from collections.abc import Mapping from functools import total_ordering from hashlib import md5 import json import logging import re -import six -from six.moves import zip import sys from threading import RLock import struct @@ -42,7 +43,6 @@ from cassandra.util import OrderedDict, Version from cassandra.pool import HostDistance from cassandra.connection import EndPoint -from cassandra.compat import Mapping log = logging.getLogger(__name__) @@ -52,18 +52,18 @@ 'counter', 'create', 'custom', 'date', 'decimal', 'default', 'delete', 'desc', 'describe', 'deterministic', 'distinct', 'double', 'drop', 'entries', 'execute', 'exists', 'filtering', 'finalfunc', 'float', 'from', 'frozen', 'full', 'function', 'functions', 'grant', 'if', 'in', 'index', 'inet', 'infinity', 'initcond', 'input', 'insert', 'int', 'into', 'is', 'json', - 'key', 'keys', 'keyspace', 'keyspaces', 'language', 'limit', 'list', 'login', 'map', 'materialized', 'modify', 'monotonic', 'nan', 'nologin', - 'norecursive', 'nosuperuser', 'not', 'null', 'of', 'on', 'options', 'or', 'order', 'password', 'permission', + 'key', 'keys', 'keyspace', 'keyspaces', 'language', 'limit', 'list', 'login', 'map', 'materialized', 'mbean', 'mbeans', 'modify', 'monotonic', + 'nan', 'nologin', 'norecursive', 'nosuperuser', 'not', 'null', 'of', 'on', 'options', 'or', 'order', 'password', 'permission', 'permissions', 'primary', 'rename', 'replace', 'returns', 'revoke', 'role', 'roles', 'schema', 'select', 'set', 'sfunc', 'smallint', 'static', 'storage', 'stype', 'superuser', 'table', 'text', 'time', 'timestamp', 'timeuuid', - 'tinyint', 'to', 'token', 'trigger', 'truncate', 'ttl', 'tuple', 'type', 'unlogged', 'update', 'use', 'user', + 'tinyint', 'to',
'token', 'trigger', 'truncate', 'ttl', 'tuple', 'type', 'unlogged', 'unset', 'update', 'use', 'user', 'users', 'using', 'uuid', 'values', 'varchar', 'varint', 'view', 'where', 'with', 'writetime', # DSE specifics "node", "nodes", "plan", "active", "application", "applications", "java", "executor", "executors", "std_out", "std_err", "renew", "delegation", "no", "redact", "token", "lowercasestring", "cluster", "authentication", "schemes", "scheme", "internal", "ldap", "kerberos", "remote", "object", "method", "call", "calls", "search", "schema", "config", "rows", - "columns", "profiles", "commit", "reload", "unset", "rebuild", "field", "workpool", "any", "submission", "indices", + "columns", "profiles", "commit", "reload", "rebuild", "field", "workpool", "any", "submission", "indices", "restrict", "unrestrict" )) """ @@ -292,7 +292,7 @@ def rebuild_token_map(self, partitioner, token_map): token_to_host_owner = {} ring = [] - for host, token_strings in six.iteritems(token_map): + for host, token_strings in token_map.items(): for token_string in token_strings: token = token_class.from_string(token_string) ring.append(token) @@ -350,7 +350,7 @@ def get_host(self, endpoint_or_address, port=None): return self._hosts.get(endpoint_or_address) def _get_host_by_address(self, address, port=None): - for host in six.itervalues(self._hosts): + for host in self._hosts.values(): if (host.broadcast_rpc_address == address and (port is None or host.broadcast_rpc_port is None or host.broadcast_rpc_port == port)): return host @@ -387,8 +387,7 @@ def __new__(metacls, name, bases, dct): -@six.add_metaclass(ReplicationStrategyTypeType) -class _ReplicationStrategy(object): +class _ReplicationStrategy(object, metaclass=ReplicationStrategyTypeType): options_map = None @classmethod @@ -627,7 +626,7 @@ def make_token_replica_map(self, token_to_host_owner, ring): racks_this_dc = dc_racks[dc] hosts_this_dc = len(hosts_per_dc[dc]) - for token_offset_index in six.moves.range(index, index+num_tokens): + for token_offset_index in range(index, index+num_tokens): if token_offset_index >= len(token_offsets): token_offset_index = token_offset_index - len(token_offsets) @@ -854,7 +853,7 @@ def _add_table_metadata(self, table_metadata): # note the intentional order of add before remove # this makes sure the maps are never absent something that existed before this update - for index_name, index_metadata in six.iteritems(table_metadata.indexes): + for index_name, index_metadata in table_metadata.indexes.items(): self.indexes[index_name] = index_metadata for index_name in (n for n in old_indexes if n not in table_metadata.indexes): @@ -1341,7 +1340,7 @@ def _all_as_cql(self): if self.extensions: registry = _RegisteredExtensionType._extension_registry - for k in six.viewkeys(registry) & self.extensions: # no viewkeys on OrderedMapSerializeKey + for k in registry.keys() & self.extensions: # no viewkeys on OrderedMapSerializeKey ext = registry[k] cql = ext.after_table_cql(self, k, self.extensions[k]) if cql: @@ -1557,8 +1556,7 @@ def __new__(mcs, name, bases, dct): return cls -@six.add_metaclass(_RegisteredExtensionType) -class RegisteredTableExtension(TableExtensionInterface): +class RegisteredTableExtension(TableExtensionInterface, metaclass=_RegisteredExtensionType): """ Extending this class registers it by name (associated by key in the `system_schema.tables.extensions` map). 
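For readers unfamiliar with the pattern being ported here, the `metaclass=` keyword argument is the Python 3 replacement for `@six.add_metaclass`, and registration works the same way: the metaclass records each subclass at class-creation time. A minimal, self-contained sketch (illustrative names only, not the driver's actual classes):

```python
# Registration-by-metaclass, as _RegisteredExtensionType does above:
# creating a subclass adds it to a registry keyed by its `name` attribute.
class _RegistryMeta(type):
    registry = {}

    def __new__(mcs, name, bases, dct):
        cls = super().__new__(mcs, name, bases, dct)
        if bases:  # skip the abstract base itself
            mcs.registry[dct.get('name', name)] = cls
        return cls


class Extension(metaclass=_RegistryMeta):
    """Subclasses register themselves automatically."""


class MyExt(Extension):
    name = 'my_ext'


assert _RegistryMeta.registry['my_ext'] is MyExt
```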
""" @@ -1864,7 +1862,7 @@ class MD5Token(HashToken): @classmethod def hash_fn(cls, key): - if isinstance(key, six.text_type): + if isinstance(key, str): key = key.encode('UTF-8') return abs(varint_unpack(md5(key).digest())) @@ -1878,7 +1876,7 @@ class BytesToken(Token): def from_string(cls, token_string): """ `token_string` should be the string representation from the server. """ # unhexlify works fine with unicode input in everythin but pypy3, where it Raises "TypeError: 'str' does not support the buffer interface" - if isinstance(token_string, six.text_type): + if isinstance(token_string, str): token_string = token_string.encode('ascii') # The BOP stores a hex string return cls(unhexlify(token_string)) @@ -2970,17 +2968,17 @@ def _build_table_graph_metadata(table_meta): try: # Make sure we process vertices before edges - for table_meta in [t for t in six.itervalues(keyspace_meta.tables) + for table_meta in [t for t in keyspace_meta.tables.values() if t.name in self.keyspace_table_vertex_rows[keyspace_meta.name]]: _build_table_graph_metadata(table_meta) # all other tables... - for table_meta in [t for t in six.itervalues(keyspace_meta.tables) + for table_meta in [t for t in keyspace_meta.tables.values() if t.name not in self.keyspace_table_vertex_rows[keyspace_meta.name]]: _build_table_graph_metadata(table_meta) except Exception: # schema error, remove all graph metadata for this keyspace - for t in six.itervalues(keyspace_meta.tables): + for t in keyspace_meta.tables.values(): t.edge = t.vertex = None keyspace_meta._exc_info = sys.exc_info() log.exception("Error while parsing graph metadata for keyspace %s", keyspace_meta.name) @@ -3194,7 +3192,7 @@ def as_cql_query(self, formatted=False): if self.extensions: registry = _RegisteredExtensionType._extension_registry - for k in six.viewkeys(registry) & self.extensions: # no viewkeys on OrderedMapSerializeKey + for k in registry.keys() & self.extensions: # no viewkeys on OrderedMapSerializeKey ext = registry[k] cql = ext.after_table_cql(self, k, self.extensions[k]) if cql: diff --git a/cassandra/metrics.py b/cassandra/metrics.py index 223b0c7c6e..a1eadc1fc4 100644 --- a/cassandra/metrics.py +++ b/cassandra/metrics.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -185,7 +187,7 @@ def get_stats(self): def set_stats_name(self, stats_name): """ Set the metrics stats name. - The stats_name is a string used to access the metris through scales: scales.getStats()[] + The stats_name is a string used to access the metrics through scales: scales.getStats()[] Default is 'cassandra-'. 
""" diff --git a/cassandra/murmur3.py b/cassandra/murmur3.py index 7c8d641b32..282c43578d 100644 --- a/cassandra/murmur3.py +++ b/cassandra/murmur3.py @@ -1,4 +1,3 @@ -from six.moves import range import struct diff --git a/cassandra/numpy_parser.pyx b/cassandra/numpy_parser.pyx index bb5b9a1c8c..2377258b36 100644 --- a/cassandra/numpy_parser.pyx +++ b/cassandra/numpy_parser.pyx @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -134,7 +136,7 @@ def make_array(coltype, array_size): """ try: a = np.ma.empty((array_size,), dtype=_cqltype_to_numpy[coltype]) - a.mask = np.zeros((array_size,), dtype=np.bool) + a.mask = np.zeros((array_size,), dtype=bool) except KeyError: a = np.empty((array_size,), dtype=obj_dtype) return a diff --git a/cassandra/obj_parser.pyx b/cassandra/obj_parser.pyx index a0b5316a33..f1bfb551ef 100644 --- a/cassandra/obj_parser.pyx +++ b/cassandra/obj_parser.pyx @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -17,9 +19,12 @@ include "ioutils.pyx" from cassandra import DriverException from cassandra.bytesio cimport BytesIOReader from cassandra.deserializers cimport Deserializer, from_binary +from cassandra.deserializers import find_deserializer from cassandra.parsing cimport ParseDesc, ColumnParser, RowParser from cassandra.tuple cimport tuple_new, tuple_set +from cpython.bytes cimport PyBytes_AsStringAndSize + cdef class ListParser(ColumnParser): """Decode a ResultMessage into a list of tuples (or other objects)""" @@ -58,18 +63,29 @@ cdef class TupleRowParser(RowParser): assert desc.rowsize >= 0 cdef Buffer buf + cdef Buffer newbuf cdef Py_ssize_t i, rowsize = desc.rowsize cdef Deserializer deserializer cdef tuple res = tuple_new(desc.rowsize) + ce_policy = desc.column_encryption_policy for i in range(rowsize): # Read the next few bytes get_buf(reader, &buf) # Deserialize bytes to python object deserializer = desc.deserializers[i] + coldesc = desc.coldescs[i] + uses_ce = ce_policy and ce_policy.contains_column(coldesc) try: - val = from_binary(deserializer, &buf, desc.protocol_version) + if uses_ce: + col_type = ce_policy.column_type(coldesc) + decrypted_bytes = ce_policy.decrypt(coldesc, to_bytes(&buf)) + PyBytes_AsStringAndSize(decrypted_bytes, &newbuf.ptr, &newbuf.size) + deserializer = find_deserializer(ce_policy.column_type(coldesc)) + val = from_binary(deserializer, &newbuf, desc.protocol_version) + else: + val = from_binary(deserializer, &buf, desc.protocol_version) except Exception as e: raise DriverException('Failed decoding result column "%s" of type %s: %s' % (desc.colnames[i], desc.coltypes[i].cql_parameterized_type(), diff --git a/cassandra/parsing.pxd b/cassandra/parsing.pxd index aa9478cd14..1b3ed3dcbf 100644 --- a/cassandra/parsing.pxd +++ b/cassandra/parsing.pxd @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -18,6 +20,8 @@ from cassandra.deserializers cimport Deserializer cdef class ParseDesc: cdef public object colnames cdef public object coltypes + cdef public object column_encryption_policy + cdef public list coldescs cdef Deserializer[::1] deserializers cdef public int protocol_version cdef Py_ssize_t rowsize diff --git a/cassandra/parsing.pyx b/cassandra/parsing.pyx index d2bc0a3abe..085544a362 100644 --- a/cassandra/parsing.pyx +++ b/cassandra/parsing.pyx @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -19,9 +21,11 @@ Module containing the definitions and declarations (parsing.pxd) for parsers. cdef class ParseDesc: """Description of what structure to parse""" - def __init__(self, colnames, coltypes, deserializers, protocol_version): + def __init__(self, colnames, coltypes, column_encryption_policy, coldescs, deserializers, protocol_version): self.colnames = colnames self.coltypes = coltypes + self.column_encryption_policy = column_encryption_policy + self.coldescs = coldescs self.deserializers = deserializers self.protocol_version = protocol_version self.rowsize = len(colnames) diff --git a/cassandra/policies.py b/cassandra/policies.py index fa1e8cf385..d6f7063e7a 100644 --- a/cassandra/policies.py +++ b/cassandra/policies.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,26 +14,26 @@ # See the License for the specific language governing permissions and # limitations under the License.
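The two hunks above thread the cluster's column encryption policy and a per-column `ColDesc` through `ParseDesc` so the row parsers can find encrypted columns. A plain-Python sketch of the per-column decode path this enables (assumed helper, not driver API):

```python
# For each result column: if the policy covers it, decrypt first and
# deserialize with the column type registered in the policy; otherwise
# deserialize the raw bytes with the type from the result metadata.
def decode_value(raw_bytes, col_type, col_desc, ce_policy, protocol_version):
    if ce_policy and ce_policy.contains_column(col_desc):
        col_type = ce_policy.column_type(col_desc)
        raw_bytes = ce_policy.decrypt(col_desc, raw_bytes)
    return col_type.from_binary(raw_bytes, protocol_version)
```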
+from collections import namedtuple +from functools import lru_cache from itertools import islice, cycle, groupby, repeat import logging from random import randint, shuffle from threading import Lock import socket import warnings -from cassandra import WriteType as WT +log = logging.getLogger(__name__) + +from cassandra import WriteType as WT # This is done this way because WriteType was originally # defined here and in order not to break the API. -# It may removed in the next mayor. +# It may be removed in the next major. WriteType = WT - from cassandra import ConsistencyLevel, OperationTimedOut -log = logging.getLogger(__name__) - - class HostDistance(object): """ A measure of how "distant" a node is from the client, which @@ -126,7 +128,7 @@ def populate(self, cluster, hosts): def make_query_plan(self, working_keyspace=None, query=None): """ - Given a :class:`~.query.Statement` instance, return a iterable + Given a :class:`~.query.Statement` instance, return an iterable of :class:`.Host` instances which should be queried in that order. A generator may work well for custom implementations of this method. @@ -455,7 +457,7 @@ class HostFilterPolicy(LoadBalancingPolicy): A :class:`.LoadBalancingPolicy` subclass configured with a child policy, and a single-argument predicate. This policy defers to the child policy for hosts where ``predicate(host)`` is truthy. Hosts for which - ``predicate(host)`` is falsey will be considered :attr:`.IGNORED`, and will + ``predicate(host)`` is falsy will be considered :attr:`.IGNORED`, and will not be used in a query plan. This can be used in the cases where you need a whitelist or blacklist @@ -491,7 +493,7 @@ def __init__(self, child_policy, predicate): :param child_policy: an instantiated :class:`.LoadBalancingPolicy` that this one will defer to. :param predicate: a one-parameter function that takes a :class:`.Host`. - If it returns a falsey value, the :class:`.Host` will + If it returns a falsy value, the :class:`.Host` will be :attr:`.IGNORED` and not returned in query plans. """ super(HostFilterPolicy, self).__init__() @@ -527,7 +529,7 @@ def predicate(self): def distance(self, host): """ Checks if ``predicate(host)``, then returns - :attr:`~HostDistance.IGNORED` if falsey, and defers to the child policy + :attr:`~HostDistance.IGNORED` if falsy, and defers to the child policy otherwise. """ if self.predicate(host): @@ -616,7 +618,7 @@ class ReconnectionPolicy(object): def new_schedule(self): """ This should return a finite or infinite iterable of delays (each as a - floating point number of seconds) inbetween each failed reconnection + floating point number of seconds) in-between each failed reconnection attempt. Note that if the iterable is finite, reconnection attempts will cease once the iterable is exhausted. """ @@ -626,12 +628,12 @@ def new_schedule(self): class ConstantReconnectionPolicy(ReconnectionPolicy): """ A :class:`.ReconnectionPolicy` subclass which sleeps for a fixed delay - inbetween each reconnection attempt. + in-between each reconnection attempt. """ def __init__(self, delay, max_attempts=64): """ - `delay` should be a floating point number of seconds to wait inbetween + `delay` should be a floating point number of seconds to wait in-between each attempt. 
`max_attempts` should be a total number of attempts to be made before @@ -655,7 +657,7 @@ def new_schedule(self): class ExponentialReconnectionPolicy(ReconnectionPolicy): """ A :class:`.ReconnectionPolicy` subclass which exponentially increases - the length of the delay inbetween each reconnection attempt up to + the length of the delay in-between each reconnection attempt up to a set maximum delay. A random amount of jitter (+/- 15%) will be added to the pure exponential @@ -715,7 +717,7 @@ class RetryPolicy(object): timeout and unavailable failures. These are failures reported from the server side. Timeouts are configured by `settings in cassandra.yaml `_. - Unavailable failures occur when the coordinator cannot acheive the consistency + Unavailable failures occur when the coordinator cannot achieve the consistency level for a request. For further information see the method descriptions below. @@ -809,8 +811,8 @@ def on_write_timeout(self, query, consistency, write_type, `retry_num` counts how many times the operation has been retried, so the first time this method is called, `retry_num` will be 0. - By default, failed write operations will retried at most once, and - they will only be retried if the `write_type` was + By default, a failed write operation will be retried at most once, and + will only be retried if the `write_type` was :attr:`~.WriteType.BATCH_LOG`. """ if retry_num != 0: @@ -865,7 +867,7 @@ def on_request_error(self, query, consistency, error, retry_num): `retry_num` counts how many times the operation has been retried, so the first time this method is called, `retry_num` will be 0. - The default, it triggers a retry on the next host in the query plan + By default, it triggers a retry on the next host in the query plan with the same consistency level. """ # TODO revisit this for the next major @@ -907,7 +909,7 @@ class DowngradingConsistencyRetryPolicy(RetryPolicy): policy unless you have understood the cases where this can happen and are ok with that. It is also recommended to subclass this class so that queries that required a consistency level downgrade can be - recorded (so that repairs can be made later, etc). + recorded (so that repairs can be made later, etc.). This policy implements the same retries as :class:`.RetryPolicy`, but on top of that, it also retries in the following cases: @@ -1006,7 +1008,7 @@ class AddressTranslator(object): The driver discovers nodes using server metadata and topology change events. Normally, the endpoint defined by the server is the right way to connect to a node. In some environments, these addresses may not be reachable, or not preferred (public vs. private IPs in cloud environments, - suboptimal routing, etc). This interface allows for translating from server defined endpoints to + suboptimal routing, etc.). This interface allows for translating from server defined endpoints to preferred addresses for driver connections. *Note:* :attr:`~Cluster.contact_points` provided while creating the :class:`~.Cluster` instance are not @@ -1036,7 +1038,7 @@ def translate(self, addr): Reverse DNS the public broadcast_address, then lookup that hostname to get the AWS-resolved IP, which will point to the private IP address within the same datacenter.
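A hedged sketch of a custom policy built on the `ReconnectionPolicy` contract described above: `new_schedule()` returns an iterable of delays in seconds, and a finite iterable ends reconnection attempts when exhausted. The class below is illustrative, not part of the driver:

```python
import random

from cassandra.policies import ReconnectionPolicy


class JitteredConstantReconnectionPolicy(ReconnectionPolicy):
    """Constant delay with a little jitter, for at most max_attempts tries."""

    def __init__(self, delay=2.0, max_attempts=64):
        self.delay = delay
        self.max_attempts = max_attempts

    def new_schedule(self):
        # A generator works well here; exhaustion stops reconnection.
        return (self.delay + random.uniform(-0.5, 0.5)
                for _ in range(self.max_attempts))
```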
""" - # get family of this address so we translate to the same + # get family of this address, so we translate to the same family = socket.getaddrinfo(addr, 0, socket.AF_UNSPEC, socket.SOCK_STREAM)[0][0] host = socket.getfqdn(addr) for a in socket.getaddrinfo(host, 0, family, socket.SOCK_STREAM): @@ -1181,3 +1183,62 @@ def _rethrow(self, *args, **kwargs): on_read_timeout = _rethrow on_write_timeout = _rethrow on_unavailable = _rethrow + + +ColDesc = namedtuple('ColDesc', ['ks', 'table', 'col']) + +class ColumnEncryptionPolicy(object): + """ + A policy enabling (mostly) transparent encryption and decryption of data before it is + sent to the cluster. + + Key materials and other configurations are specified on a per-column basis. This policy can + then be used by driver structures which are aware of the underlying columns involved in their + work. In practice this includes the following cases: + + * Prepared statements - data for columns specified by the cluster's policy will be transparently + encrypted before they are sent + * Rows returned from any query - data for columns specified by the cluster's policy will be + transparently decrypted before they are returned to the user + + To enable this functionality, create an instance of this class (or more likely a subclass) + before creating a cluster. This policy should then be configured and supplied to the Cluster + at creation time via the :attr:`.Cluster.column_encryption_policy` attribute. + """ + + def encrypt(self, coldesc, obj_bytes): + """ + Encrypt the specified bytes using the cryptography materials for the specified column. + Largely used internally, although this could also be used to encrypt values supplied + to non-prepared statements in a way that is consistent with this policy. + """ + raise NotImplementedError() + + def decrypt(self, coldesc, encrypted_bytes): + """ + Decrypt the specified (encrypted) bytes using the cryptography materials for the + specified column. Used internally; could be used externally as well but there's + not currently an obvious use case. + """ + raise NotImplementedError() + + def add_column(self, coldesc, key): + """ + Provide cryptography materials to be used when encrypted and/or decrypting data + for the specified column. + """ + raise NotImplementedError() + + def contains_column(self, coldesc): + """ + Predicate to determine if a specific column is supported by this policy. + Currently only used internally. + """ + raise NotImplementedError() + + def encode_and_encrypt(self, coldesc, obj): + """ + Helper function to enable use of this policy on simple (i.e. non-prepared) + statements. + """ + raise NotImplementedError() diff --git a/cassandra/pool.py b/cassandra/pool.py index cd27656046..37fdaee96b 100644 --- a/cassandra/pool.py +++ b/cassandra/pool.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -390,6 +392,10 @@ def __init__(self, host, host_distance, session): # this is used in conjunction with the connection streams. Not using the connection lock because the connection can be replaced in the lifetime of the pool. self._stream_available_condition = Condition(self._lock) self._is_replacing = False + # Contains connections which shouldn't be used anymore + # and are waiting until all requests time out or complete + # so that we can dispose of them. + self._trash = set() if host_distance == HostDistance.IGNORED: log.debug("Not opening connection to ignored host %s", self.host) @@ -399,13 +405,13 @@ def __init__(self, host, host_distance, session): return log.debug("Initializing connection for host %s", self.host) - self._connection = session.cluster.connection_factory(host.endpoint) + self._connection = session.cluster.connection_factory(host.endpoint, on_orphaned_stream_released=self.on_orphaned_stream_released) self._keyspace = session.keyspace if self._keyspace: self._connection.set_keyspace_blocking(self._keyspace) log.debug("Finished initializing connection for host %s", self.host) - def borrow_connection(self, timeout): + def _get_connection(self): if self.is_shutdown: raise ConnectionException( "Pool for %s is shutdown" % (self.host,), self.host) @@ -413,12 +419,25 @@ def borrow_connection(self, timeout): conn = self._connection if not conn: raise NoConnectionsAvailable() + return conn + + def borrow_connection(self, timeout): + conn = self._get_connection() + if conn.orphaned_threshold_reached: + with self._lock: + if not self._is_replacing: + self._is_replacing = True + self._session.submit(self._replace, conn) + log.debug( + "Connection to host %s reached orphaned stream limit, replacing...", + self.host + ) start = time.time() remaining = timeout while True: with conn.lock: - if conn.in_flight < conn.max_request_id: + if not (conn.orphaned_threshold_reached and conn.is_closed) and conn.in_flight < conn.max_request_id: conn.in_flight += 1 return conn, conn.get_request_id() if timeout is not None: @@ -426,15 +445,19 @@ def borrow_connection(self, timeout): if remaining < 0: break with self._stream_available_condition: - self._stream_available_condition.wait(remaining) + if conn.orphaned_threshold_reached and conn.is_closed: + conn = self._get_connection() + else: + self._stream_available_condition.wait(remaining) raise NoConnectionsAvailable("All request IDs are currently in use") - def return_connection(self, connection): - with connection.lock: - connection.in_flight -= 1 - with self._stream_available_condition: - self._stream_available_condition.notify() + def return_connection(self, connection, stream_was_orphaned=False): + if not stream_was_orphaned: + with connection.lock: + connection.in_flight -= 1 + with self._stream_available_condition: + self._stream_available_condition.notify() if connection.is_defunct or connection.is_closed: if connection.signaled_error and not self.shutdown_on_error: @@ -461,6 +484,24 @@ def return_connection(self, connection): return self._is_replacing = True self._session.submit(self._replace, connection) + else: + if connection in self._trash: + with connection.lock: + if connection.in_flight ==
len(connection.orphaned_request_ids): + with self._lock: + if connection in self._trash: + self._trash.remove(connection) + log.debug("Closing trashed connection (%s) to %s", id(connection), self.host) + connection.close() + return + + def on_orphaned_stream_released(self): + """ + Called when a response for an orphaned stream (timed out on the client + side) was received. + """ + with self._stream_available_condition: + self._stream_available_condition.notify() def _replace(self, connection): with self._lock: @@ -469,7 +510,7 @@ def _replace(self, connection): log.debug("Replacing connection (%s) to %s", id(connection), self.host) try: - conn = self._session.cluster.connection_factory(self.host.endpoint) + conn = self._session.cluster.connection_factory(self.host.endpoint, on_orphaned_stream_released=self.on_orphaned_stream_released) if self._keyspace: conn.set_keyspace_blocking(self._keyspace) self._connection = conn @@ -477,9 +518,15 @@ def _replace(self, connection): log.warning("Failed reconnecting %s. Retrying." % (self.host.endpoint,)) self._session.submit(self._replace, connection) else: - with self._lock: - self._is_replacing = False - self._stream_available_condition.notify() + with connection.lock: + with self._lock: + if connection.orphaned_threshold_reached: + if connection.in_flight == len(connection.orphaned_request_ids): + connection.close() + else: + self._trash.add(connection) + self._is_replacing = False + self._stream_available_condition.notify() def shutdown(self): with self._lock: @@ -493,6 +540,16 @@ def shutdown(self): self._connection.close() self._connection = None + trash_conns = None + with self._lock: + if self._trash: + trash_conns = self._trash + self._trash = set() + + if trash_conns is not None: + for conn in trash_conns: + conn.close() + def _set_keyspace_for_all_conns(self, keyspace, callback): if self.is_shutdown or not self._connection: return @@ -513,7 +570,9 @@ def get_state(self): connection = self._connection open_count = 1 if connection and not (connection.is_closed or connection.is_defunct) else 0 in_flights = [connection.in_flight] if connection else [] - return {'shutdown': self.is_shutdown, 'open_count': open_count, 'in_flights': in_flights} + orphan_requests = [connection.orphaned_request_ids] if connection else [] + return {'shutdown': self.is_shutdown, 'open_count': open_count, \ + 'in_flights': in_flights, 'orphan_requests': orphan_requests} @property def open_count(self): @@ -548,7 +607,7 @@ def __init__(self, host, host_distance, session): log.debug("Initializing new connection pool for host %s", self.host) core_conns = session.cluster.get_core_connections_per_host(host_distance) - self._connections = [session.cluster.connection_factory(host.endpoint) + self._connections = [session.cluster.connection_factory(host.endpoint, on_orphaned_stream_released=self.on_orphaned_stream_released) for i in range(core_conns)] self._keyspace = session.keyspace @@ -609,7 +668,7 @@ def borrow_connection(self, timeout): # wait_for_conn will increment in_flight on the conn least_busy, request_id = self._wait_for_conn(timeout) - # if we have too many requests on this connection but we still + # if we have too many requests on this connection, but we still # have space to open a new connection against this host, go ahead # and schedule the creation of a new connection if least_busy.in_flight >= max_reqs and len(self._connections) < max_conns: @@ -652,14 +711,14 @@ def _add_conn_if_under_max(self): log.debug("Going to open new connection to host %s", self.host)
try: - conn = self._session.cluster.connection_factory(self.host.endpoint) + conn = self._session.cluster.connection_factory(self.host.endpoint, on_orphaned_stream_released=self.on_orphaned_stream_released) if self._keyspace: conn.set_keyspace_blocking(self._session.keyspace) self._next_trash_allowed_at = time.time() + _MIN_TRASH_INTERVAL with self._lock: new_connections = self._connections[:] + [conn] self._connections = new_connections - log.debug("Added new connection (%s) to pool for host %s, signaling availablility", + log.debug("Added new connection (%s) to pool for host %s, signaling availability", id(conn), self.host) self._signal_available_conn() return True @@ -712,9 +771,10 @@ def _wait_for_conn(self, timeout): raise NoConnectionsAvailable() - def return_connection(self, connection): + def return_connection(self, connection, stream_was_orphaned=False): with connection.lock: - connection.in_flight -= 1 + if not stream_was_orphaned: + connection.in_flight -= 1 in_flight = connection.in_flight if connection.is_defunct or connection.is_closed: @@ -750,6 +810,13 @@ def return_connection(self, connection): else: self._signal_available_conn() + def on_orphaned_stream_released(self): + """ + Called when a response for an orphaned stream (timed out on the client + side) was received. + """ + self._signal_available_conn() + def _maybe_trash_connection(self, connection): core_conns = self._session.cluster.get_core_connections_per_host(self.host_distance) did_trash = False @@ -863,4 +930,6 @@ def get_connections(self): def get_state(self): in_flights = [c.in_flight for c in self._connections] - return {'shutdown': self.is_shutdown, 'open_count': self.open_count, 'in_flights': in_flights} + orphan_requests = [c.orphaned_request_ids for c in self._connections] + return {'shutdown': self.is_shutdown, 'open_count': self.open_count, \ + 'in_flights': in_flights, 'orphan_requests': orphan_requests} diff --git a/cassandra/protocol.py b/cassandra/protocol.py index eac9ebb8b5..510aea44a8 100644 --- a/cassandra/protocol.py +++ b/cassandra/protocol.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
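The pool changes above park replaced connections in a trash set until their remaining requests drain. The disposal rule, restated as a standalone predicate (assumed helper, not driver API):

```python
# A trashed connection may be closed once every remaining in-flight
# request is an orphaned stream, i.e. one that already timed out on the
# client side and is only awaited so its stream ID can be reclaimed.
def can_dispose(connection):
    return connection.in_flight == len(connection.orphaned_request_ids)
```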
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -18,8 +20,6 @@ import socket from uuid import UUID -import six -from six.moves import range import io from cassandra import ProtocolVersion @@ -29,9 +29,6 @@ AlreadyExists, InvalidRequest, Unauthorized, UnsupportedOperation, UserFunctionDescriptor, UserAggregateDescriptor, SchemaTargetType) -from cassandra.marshal import (int32_pack, int32_unpack, uint16_pack, uint16_unpack, - uint8_pack, int8_unpack, uint64_pack, header_pack, - v3_header_pack, uint32_pack) from cassandra.cqltypes import (AsciiType, BytesType, BooleanType, CounterColumnType, DateType, DecimalType, DoubleType, FloatType, Int32Type, @@ -40,6 +37,10 @@ UTF8Type, VarcharType, UUIDType, UserType, TupleType, lookup_casstype, SimpleDateType, TimeType, ByteType, ShortType, DurationType) +from cassandra.marshal import (int32_pack, int32_unpack, uint16_pack, uint16_unpack, + uint8_pack, int8_unpack, uint64_pack, header_pack, + v3_header_pack, uint32_pack, uint32_le_unpack, uint32_le_pack) +from cassandra.policies import ColDesc from cassandra import WriteType from cassandra.cython_deps import HAVE_CYTHON, HAVE_NUMPY from cassandra import util @@ -85,8 +86,7 @@ def __init__(cls, name, bases, dct): register_class(cls) -@six.add_metaclass(_RegisterMessageType) -class _MessageType(object): +class _MessageType(object, metaclass=_RegisterMessageType): tracing = False custom_payload = None @@ -136,8 +136,6 @@ def recv_body(cls, f, protocol_version, *args): def summary_msg(self): msg = 'Error from server: code=%04x [%s] message="%s"' \ % (self.code, self.summary, self.message) - if six.PY2 and isinstance(msg, six.text_type): - msg = msg.encode('utf-8') return msg def __str__(self): @@ -158,8 +156,7 @@ def __init__(cls, name, bases, dct): error_classes[cls.error_code] = cls -@six.add_metaclass(ErrorMessageSubclass) -class ErrorMessageSub(ErrorMessage): +class ErrorMessageSub(ErrorMessage, metaclass=ErrorMessageSubclass): error_code = None @@ -180,6 +177,10 @@ class ProtocolException(ErrorMessageSub): summary = 'Protocol error' error_code = 0x000A + @property + def is_beta_protocol_error(self): + return 'USE_BETA flag is unset' in str(self) + class BadCredentials(ErrorMessageSub): summary = 'Bad credentials' @@ -719,11 +720,11 @@ class ResultMessage(_MessageType): def __init__(self, kind): self.kind = kind - def recv(self, f, protocol_version, user_type_map, result_metadata): + def recv(self, f, protocol_version, user_type_map, result_metadata, column_encryption_policy): if self.kind == RESULT_KIND_VOID: return elif self.kind == RESULT_KIND_ROWS: - self.recv_results_rows(f, protocol_version, user_type_map, result_metadata) + self.recv_results_rows(f, protocol_version, user_type_map, result_metadata, column_encryption_policy) elif self.kind == RESULT_KIND_SET_KEYSPACE: self.new_keyspace = read_string(f) elif self.kind == RESULT_KIND_PREPARED: @@ -734,32 +735,40 @@ def recv(self, f, protocol_version, user_type_map, result_metadata): raise DriverException("Unknown RESULT kind: %d" % self.kind) @classmethod - def recv_body(cls, f, protocol_version, user_type_map, result_metadata): + def recv_body(cls, f, protocol_version, user_type_map, result_metadata, column_encryption_policy): kind = read_int(f) msg = cls(kind) - msg.recv(f,
protocol_version, user_type_map, result_metadata) + msg.recv(f, protocol_version, user_type_map, result_metadata, column_encryption_policy) return msg - def recv_results_rows(self, f, protocol_version, user_type_map, result_metadata): + def recv_results_rows(self, f, protocol_version, user_type_map, result_metadata, column_encryption_policy): self.recv_results_metadata(f, user_type_map) column_metadata = self.column_metadata or result_metadata rowcount = read_int(f) rows = [self.recv_row(f, len(column_metadata)) for _ in range(rowcount)] self.column_names = [c[2] for c in column_metadata] self.column_types = [c[3] for c in column_metadata] + col_descs = [ColDesc(md[0], md[1], md[2]) for md in column_metadata] + + def decode_val(val, col_md, col_desc): + uses_ce = column_encryption_policy and column_encryption_policy.contains_column(col_desc) + col_type = column_encryption_policy.column_type(col_desc) if uses_ce else col_md[3] + raw_bytes = column_encryption_policy.decrypt(col_desc, val) if uses_ce else val + return col_type.from_binary(raw_bytes, protocol_version) + + def decode_row(row): + return tuple(decode_val(val, col_md, col_desc) for val, col_md, col_desc in zip(row, column_metadata, col_descs)) + try: - self.parsed_rows = [ - tuple(ctype.from_binary(val, protocol_version) - for ctype, val in zip(self.column_types, row)) - for row in rows] + self.parsed_rows = [decode_row(row) for row in rows] except Exception: for row in rows: - for i in range(len(row)): + for val, col_md, col_desc in zip(row, column_metadata, col_descs): try: - self.column_types[i].from_binary(row[i], protocol_version) + decode_val(val, col_md, col_desc) except Exception as e: - raise DriverException('Failed decoding result column "%s" of type %s: %s' % (self.column_names[i], - self.column_types[i].cql_parameterized_type(), + raise DriverException('Failed decoding result column "%s" of type %s: %s' % (col_md[2], + col_md[3].cql_parameterized_type(), str(e))) def recv_results_prepared(self, f, protocol_version, user_type_map): @@ -1095,6 +1104,9 @@ class _ProtocolHandler(object): result decoding implementations. """ + column_encryption_policy = None + """Instance of :class:`cassandra.policies.ColumnEncryptionPolicy` in use by this handler""" + @classmethod def encode_message(cls, msg, stream_id, protocol_version, compressor, allow_beta_protocol_version): """ @@ -1115,7 +1127,9 @@ def encode_message(cls, msg, stream_id, protocol_version, compressor, allow_beta msg.send_body(body, protocol_version) body = body.getvalue() - if compressor and len(body) > 0: + # With checksumming, the compression is done at the segment frame encoding + if (not ProtocolVersion.has_checksumming_support(protocol_version) + and compressor and len(body) > 0): body = compressor(body) flags |= COMPRESSED_FLAG @@ -1155,7 +1169,8 @@ def decode_message(cls, protocol_version, user_type_map, stream_id, flags, opcod :param decompressor: optional decompression function to inflate the body :return: a message decoded from the body and frame attributes """ - if flags & COMPRESSED_FLAG: + if (not ProtocolVersion.has_checksumming_support(protocol_version) and + flags & COMPRESSED_FLAG): if decompressor is None: raise RuntimeError("No de-compressor available for compressed frame!") body = decompressor(body) @@ -1186,7 +1201,7 @@ def decode_message(cls, protocol_version, user_type_map, stream_id, flags, opcod log.warning("Unknown protocol flags set: %02x. 
May cause problems.", flags) msg_class = cls.message_types_by_opcode[opcode] - msg = msg_class.recv_body(body, protocol_version, user_type_map, result_metadata) + msg = msg_class.recv_body(body, protocol_version, user_type_map, result_metadata, cls.column_encryption_policy) msg.stream_id = stream_id msg.trace_id = trace_id msg.custom_payload = custom_payload @@ -1271,6 +1286,33 @@ def read_int(f): return int32_unpack(f.read(4)) +def read_uint_le(f, size=4): + """ + Read a sequence of little endian bytes and return an unsigned integer. + """ + + if size == 4: + value = uint32_le_unpack(f.read(4)) + else: + value = 0 + for i in range(size): + value |= (read_byte(f) & 0xFF) << 8 * i + + return value + + +def write_uint_le(f, i, size=4): + """ + Write an unsigned integer on a sequence of little endian bytes. + """ + if size == 4: + f.write(uint32_le_pack(i)) + else: + for j in range(size): + shift = j * 8 + write_byte(f, i >> shift & 0xFF) + + def write_int(f, i): f.write(int32_pack(i)) @@ -1312,7 +1354,7 @@ def read_binary_string(f): def write_string(f, s): - if isinstance(s, six.text_type): + if isinstance(s, str): s = s.encode('utf8') write_short(f, len(s)) f.write(s) @@ -1329,7 +1371,7 @@ def read_longstring(f): def write_longstring(f, s): - if isinstance(s, six.text_type): + if isinstance(s, str): s = s.encode('utf8') write_int(f, len(s)) f.write(s) diff --git a/cassandra/query.py b/cassandra/query.py index 0e7a41dc2d..40e4d63c9e 100644 --- a/cassandra/query.py +++ b/cassandra/query.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -19,18 +21,17 @@ """ from collections import namedtuple -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import re import struct import time -import six -from six.moves import range, zip import warnings from cassandra import ConsistencyLevel, OperationTimedOut from cassandra.util import unix_time_from_uuid1 from cassandra.encoder import Encoder import cassandra.encoder +from cassandra.policies import ColDesc from cassandra.protocol import _UNSET_VALUE from cassandra.util import OrderedDict, _sanitize_identifiers @@ -76,7 +77,7 @@ def tuple_factory(colnames, rows): >>> session = cluster.connect('mykeyspace') >>> session.row_factory = tuple_factory >>> rows = session.execute("SELECT name, age FROM users LIMIT 1") - >>> print rows[0] + >>> print(rows[0]) ('Bob', 42) ..
versionchanged:: 2.0.0 @@ -132,16 +133,16 @@ def named_tuple_factory(colnames, rows): >>> user = rows[0] >>> # you can access field by their name: - >>> print "name: %s, age: %d" % (user.name, user.age) + >>> print("name: %s, age: %d" % (user.name, user.age)) name: Bob, age: 42 >>> # or you can access fields by their position (like a tuple) >>> name, age = user - >>> print "name: %s, age: %d" % (name, age) + >>> print("name: %s, age: %d" % (name, age)) name: Bob, age: 42 >>> name = user[0] >>> age = user[1] - >>> print "name: %s, age: %d" % (name, age) + >>> print("name: %s, age: %d" % (name, age)) name: Bob, age: 42 .. versionchanged:: 2.0.0 @@ -187,7 +188,7 @@ def dict_factory(colnames, rows): >>> session = cluster.connect('mykeyspace') >>> session.row_factory = dict_factory >>> rows = session.execute("SELECT name, age FROM users LIMIT 1") - >>> print rows[0] + >>> print(rows[0]) {u'age': 42, u'name': u'Bob'} .. versionchanged:: 2.0.0 @@ -442,12 +443,14 @@ class PreparedStatement(object): query_string = None result_metadata = None result_metadata_id = None + column_encryption_policy = None routing_key_indexes = None _routing_key_index_set = None serial_consistency_level = None # TODO never used? def __init__(self, column_metadata, query_id, routing_key_indexes, query, - keyspace, protocol_version, result_metadata, result_metadata_id): + keyspace, protocol_version, result_metadata, result_metadata_id, + column_encryption_policy=None): self.column_metadata = column_metadata self.query_id = query_id self.routing_key_indexes = routing_key_indexes @@ -456,14 +459,17 @@ def __init__(self, column_metadata, query_id, routing_key_indexes, query, self.protocol_version = protocol_version self.result_metadata = result_metadata self.result_metadata_id = result_metadata_id + self.column_encryption_policy = column_encryption_policy self.is_idempotent = False @classmethod def from_message(cls, query_id, column_metadata, pk_indexes, cluster_metadata, query, prepared_keyspace, protocol_version, result_metadata, - result_metadata_id): + result_metadata_id, column_encryption_policy=None): if not column_metadata: - return PreparedStatement(column_metadata, query_id, None, query, prepared_keyspace, protocol_version, result_metadata, result_metadata_id) + return PreparedStatement(column_metadata, query_id, None, + query, prepared_keyspace, protocol_version, result_metadata, + result_metadata_id, column_encryption_policy) if pk_indexes: routing_key_indexes = pk_indexes @@ -489,7 +495,7 @@ def from_message(cls, query_id, column_metadata, pk_indexes, cluster_metadata, return PreparedStatement(column_metadata, query_id, routing_key_indexes, query, prepared_keyspace, protocol_version, result_metadata, - result_metadata_id) + result_metadata_id, column_encryption_policy) def bind(self, values): """ @@ -577,6 +583,7 @@ def bind(self, values): values = () proto_version = self.prepared_statement.protocol_version col_meta = self.prepared_statement.column_metadata + ce_policy = self.prepared_statement.column_encryption_policy # special case for binding dicts if isinstance(values, dict): @@ -623,7 +630,13 @@ def bind(self, values): raise ValueError("Attempt to bind UNSET_VALUE while using unsuitable protocol version (%d < 4)" % proto_version) else: try: - self.values.append(col_spec.type.serialize(value, proto_version)) + col_desc = ColDesc(col_spec.keyspace_name, col_spec.table_name, col_spec.name) + uses_ce = ce_policy and ce_policy.contains_column(col_desc) + col_type = ce_policy.column_type(col_desc) if uses_ce else 
col_spec.type + col_bytes = col_type.serialize(value, proto_version) + if uses_ce: + col_bytes = ce_policy.encrypt(col_desc, col_bytes) + self.values.append(col_bytes) except (TypeError, struct.error) as exc: actual_type = type(value) message = ('Received an argument of invalid type for column "%s". ' @@ -804,7 +817,7 @@ def add(self, statement, parameters=None): Like with other statements, parameters must be a sequence, even if there is only one item. """ - if isinstance(statement, six.string_types): + if isinstance(statement, str): if parameters: encoder = Encoder() if self._session is None else self._session.encoder statement = bind_params(statement, parameters, encoder) @@ -888,10 +901,8 @@ def __str__(self): def bind_params(query, params, encoder): - if six.PY2 and isinstance(query, six.text_type): - query = query.encode('utf-8') if isinstance(params, dict): - return query % dict((k, encoder.cql_encode_all_types(v)) for k, v in six.iteritems(params)) + return query % dict((k, encoder.cql_encode_all_types(v)) for k, v in params.items()) else: return query % tuple(encoder.cql_encode_all_types(v) for v in params) @@ -981,7 +992,7 @@ def populate(self, max_wait=2.0, wait_for_complete=True, query_cl=None): This can be used to query events from partial sessions. `query_cl` specifies a consistency level to use for polling the trace tables, - if it should be different than the session default. + if different from the session default. """ attempt = 0 start = time.time() @@ -996,7 +1007,8 @@ def populate(self, max_wait=2.0, wait_for_complete=True, query_cl=None): SimpleStatement(self._SELECT_SESSIONS_FORMAT, consistency_level=query_cl), (self.trace_id,), time_spent, max_wait) # PYTHON-730: There is race condition that the duration mutation is written before started_at the for fast queries - is_complete = session_results and session_results[0].duration is not None and session_results[0].started_at is not None + session_row = session_results.one() if session_results else None + is_complete = session_row is not None and session_row.duration is not None and session_row.started_at is not None if not session_results or (wait_for_complete and not is_complete): time.sleep(self._BASE_RETRY_SLEEP * (2 ** attempt)) attempt += 1 @@ -1004,9 +1016,8 @@ def populate(self, max_wait=2.0, wait_for_complete=True, query_cl=None): if is_complete: log.debug("Fetched trace info for trace ID: %s", self.trace_id) else: - log.debug("Fetching parital trace info for trace ID: %s", self.trace_id) + log.debug("Fetching partial trace info for trace ID: %s", self.trace_id) - session_row = session_results[0] self.request_type = session_row.request self.duration = timedelta(microseconds=session_row.duration) if is_complete else None self.started_at = session_row.started_at @@ -1076,7 +1087,7 @@ class TraceEvent(object): def __init__(self, description, timeuuid, source, source_elapsed, thread_name): self.description = description - self.datetime = datetime.utcfromtimestamp(unix_time_from_uuid1(timeuuid)) + self.datetime = datetime.fromtimestamp(unix_time_from_uuid1(timeuuid), tz=timezone.utc) self.source = source if source_elapsed is not None: self.source_elapsed = timedelta(microseconds=source_elapsed) diff --git a/cassandra/row_parser.pyx b/cassandra/row_parser.pyx index 3a4b2f4604..d172f1bcaf 100644 --- a/cassandra/row_parser.pyx +++ b/cassandra/row_parser.pyx @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,13 +15,14 @@ # limitations under the License. from cassandra.parsing cimport ParseDesc, ColumnParser +from cassandra.policies import ColDesc from cassandra.obj_parser import TupleRowParser from cassandra.deserializers import make_deserializers include "ioutils.pyx" def make_recv_results_rows(ColumnParser colparser): - def recv_results_rows(self, f, int protocol_version, user_type_map, result_metadata): + def recv_results_rows(self, f, int protocol_version, user_type_map, result_metadata, column_encryption_policy): """ Parse protocol data given as a BytesIO f into a set of columns (e.g. list of tuples) This is used as the recv_results_rows method of (Fast)ResultMessage @@ -28,11 +31,12 @@ def make_recv_results_rows(ColumnParser colparser): column_metadata = self.column_metadata or result_metadata - self.column_names = [c[2] for c in column_metadata] - self.column_types = [c[3] for c in column_metadata] + self.column_names = [md[2] for md in column_metadata] + self.column_types = [md[3] for md in column_metadata] - desc = ParseDesc(self.column_names, self.column_types, make_deserializers(self.column_types), - protocol_version) + desc = ParseDesc(self.column_names, self.column_types, column_encryption_policy, + [ColDesc(md[0], md[1], md[2]) for md in column_metadata], + make_deserializers(self.column_types), protocol_version) reader = BytesIOReader(f.read()) try: self.parsed_rows = colparser.parse_rows(reader, desc) diff --git a/cassandra/segment.py b/cassandra/segment.py new file mode 100644 index 0000000000..2d7a107566 --- /dev/null +++ b/cassandra/segment.py @@ -0,0 +1,222 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import zlib + +from cassandra import DriverException +from cassandra.marshal import int32_pack +from cassandra.protocol import write_uint_le, read_uint_le + +CRC24_INIT = 0x875060 +CRC24_POLY = 0x1974F0B +CRC24_LENGTH = 3 +CRC32_LENGTH = 4 +CRC32_INITIAL = zlib.crc32(b"\xfa\x2d\x55\xca") + + +class CrcException(Exception): + """ + CRC mismatch error.
+ + TODO: here to avoid import cycles with cassandra.connection. In the next + major, the exceptions should be declared in a separate exceptions.py + file. + """ + pass + + +def compute_crc24(data, length): + crc = CRC24_INIT + + for _ in range(length): + crc ^= (data & 0xff) << 16 + data >>= 8 + + for i in range(8): + crc <<= 1 + if crc & 0x1000000 != 0: + crc ^= CRC24_POLY + + return crc + + +def compute_crc32(data, value): + crc32 = zlib.crc32(data, value) + return crc32 + + +class SegmentHeader(object): + + payload_length = None + uncompressed_payload_length = None + is_self_contained = None + + def __init__(self, payload_length, uncompressed_payload_length, is_self_contained): + self.payload_length = payload_length + self.uncompressed_payload_length = uncompressed_payload_length + self.is_self_contained = is_self_contained + + @property + def segment_length(self): + """ + Return the total length of the segment, including the CRC. + """ + hl = SegmentCodec.UNCOMPRESSED_HEADER_LENGTH if self.uncompressed_payload_length < 1 \ + else SegmentCodec.COMPRESSED_HEADER_LENGTH + return hl + CRC24_LENGTH + self.payload_length + CRC32_LENGTH + + +class Segment(object): + + MAX_PAYLOAD_LENGTH = 128 * 1024 - 1 + + payload = None + is_self_contained = None + + def __init__(self, payload, is_self_contained): + self.payload = payload + self.is_self_contained = is_self_contained + + +class SegmentCodec(object): + + COMPRESSED_HEADER_LENGTH = 5 + UNCOMPRESSED_HEADER_LENGTH = 3 + FLAG_OFFSET = 17 + + compressor = None + decompressor = None + + def __init__(self, compressor=None, decompressor=None): + self.compressor = compressor + self.decompressor = decompressor + + @property + def header_length(self): + return self.COMPRESSED_HEADER_LENGTH if self.compression \ + else self.UNCOMPRESSED_HEADER_LENGTH + + @property + def header_length_with_crc(self): + return (self.COMPRESSED_HEADER_LENGTH if self.compression + else self.UNCOMPRESSED_HEADER_LENGTH) + CRC24_LENGTH + + @property + def compression(self): + return self.compressor and self.decompressor + + def compress(self, data): + # the uncompressed length is already encoded in the header, so + # we remove it here + return self.compressor(data)[4:] + + def decompress(self, encoded_data, uncompressed_length): + return self.decompressor(int32_pack(uncompressed_length) + encoded_data) + + def encode_header(self, buffer, payload_length, uncompressed_length, is_self_contained): + if payload_length > Segment.MAX_PAYLOAD_LENGTH: + raise DriverException('Payload length exceeds Segment.MAX_PAYLOAD_LENGTH') + + header_data = payload_length + + flag_offset = self.FLAG_OFFSET + if self.compression: + header_data |= uncompressed_length << flag_offset + flag_offset += 17 + + if is_self_contained: + header_data |= 1 << flag_offset + + write_uint_le(buffer, header_data, size=self.header_length) + header_crc = compute_crc24(header_data, self.header_length) + write_uint_le(buffer, header_crc, size=CRC24_LENGTH) + + def _encode_segment(self, buffer, payload, is_self_contained): + """ + Encode a message to a single segment.
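+ If compression is enabled but does not make the payload smaller, the payload is written uncompressed and the uncompressed-length field is set to 0 so the receiving side skips decompression.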
+ """ + uncompressed_payload = payload + uncompressed_payload_length = len(payload) + + if self.compression: + compressed_payload = self.compress(uncompressed_payload) + if len(compressed_payload) >= uncompressed_payload_length: + encoded_payload = uncompressed_payload + uncompressed_payload_length = 0 + else: + encoded_payload = compressed_payload + else: + encoded_payload = uncompressed_payload + + payload_length = len(encoded_payload) + self.encode_header(buffer, payload_length, uncompressed_payload_length, is_self_contained) + payload_crc = compute_crc32(encoded_payload, CRC32_INITIAL) + buffer.write(encoded_payload) + write_uint_le(buffer, payload_crc) + + def encode(self, buffer, msg): + """ + Encode a message to one of more segments. + """ + msg_length = len(msg) + + if msg_length > Segment.MAX_PAYLOAD_LENGTH: + payloads = [] + for i in range(0, msg_length, Segment.MAX_PAYLOAD_LENGTH): + payloads.append(msg[i:i + Segment.MAX_PAYLOAD_LENGTH]) + else: + payloads = [msg] + + is_self_contained = len(payloads) == 1 + for payload in payloads: + self._encode_segment(buffer, payload, is_self_contained) + + def decode_header(self, buffer): + header_data = read_uint_le(buffer, self.header_length) + + expected_header_crc = read_uint_le(buffer, CRC24_LENGTH) + actual_header_crc = compute_crc24(header_data, self.header_length) + if actual_header_crc != expected_header_crc: + raise CrcException('CRC mismatch on header {:x}. Received {:x}", computed {:x}.'.format( + header_data, expected_header_crc, actual_header_crc)) + + payload_length = header_data & Segment.MAX_PAYLOAD_LENGTH + header_data >>= 17 + + if self.compression: + uncompressed_payload_length = header_data & Segment.MAX_PAYLOAD_LENGTH + header_data >>= 17 + else: + uncompressed_payload_length = -1 + + is_self_contained = (header_data & 1) == 1 + + return SegmentHeader(payload_length, uncompressed_payload_length, is_self_contained) + + def decode(self, buffer, header): + encoded_payload = buffer.read(header.payload_length) + expected_payload_crc = read_uint_le(buffer) + + actual_payload_crc = compute_crc32(encoded_payload, CRC32_INITIAL) + if actual_payload_crc != expected_payload_crc: + raise CrcException('CRC mismatch on payload. Received {:x}", computed {:x}.'.format( + expected_payload_crc, actual_payload_crc)) + + payload = encoded_payload + if self.compression and header.uncompressed_payload_length > 0: + payload = self.decompress(encoded_payload, header.uncompressed_payload_length) + + return Segment(payload, header.is_self_contained) diff --git a/cassandra/timestamps.py b/cassandra/timestamps.py index d11359cf13..e2a2c1ea4c 100644 --- a/cassandra/timestamps.py +++ b/cassandra/timestamps.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/tuple.pxd b/cassandra/tuple.pxd index 08d95b6c1f..b519e177bb 100644 --- a/cassandra/tuple.pxd +++ b/cassandra/tuple.pxd @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/type_codes.pxd b/cassandra/type_codes.pxd index 076cacd3de..336263b83c 100644 --- a/cassandra/type_codes.pxd +++ b/cassandra/type_codes.pxd @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/cassandra/util.py b/cassandra/util.py index ead58c82f6..f973912574 100644 --- a/cassandra/util.py +++ b/cassandra/util.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,17 +14,22 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import with_statement +from _weakref import ref import calendar +from collections import OrderedDict +from collections.abc import Mapping import datetime from functools import total_ordering -import logging from itertools import chain +import keyword +import logging +import pickle import random import re -import six -import uuid +import socket import sys +import time +import uuid _HAS_GEOMET = True try: @@ -33,8 +40,8 @@ from cassandra import DriverException -DATETIME_EPOC = datetime.datetime(1970, 1, 1) -UTC_DATETIME_EPOC = datetime.datetime.utcfromtimestamp(0) +DATETIME_EPOC = datetime.datetime(1970, 1, 1).replace(tzinfo=None) +UTC_DATETIME_EPOC = datetime.datetime.fromtimestamp(0, tz=datetime.timezone.utc).replace(tzinfo=None) _nan = float('nan') @@ -212,147 +219,6 @@ def _resolve_contact_points_to_string_map(contact_points): ) -try: - from collections import OrderedDict -except ImportError: - # OrderedDict from Python 2.7+ - - # Copyright (c) 2009 Raymond Hettinger - # - # Permission is hereby granted, free of charge, to any person - # obtaining a copy of this software and associated documentation files - # (the "Software"), to deal in the Software without restriction, - # including without limitation the rights to use, copy, modify, merge, - # publish, distribute, sublicense, and/or sell copies of the Software, - # and to permit persons to whom the Software is furnished to do so, - # subject to the following conditions: - # - # The above copyright notice and this permission notice shall be - # included in all copies or substantial portions of the Software. - # - # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES - # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, - # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR - # OTHER DEALINGS IN THE SOFTWARE. - from UserDict import DictMixin - - class OrderedDict(dict, DictMixin): # noqa - """ A dictionary which maintains the insertion order of keys. """ - - def __init__(self, *args, **kwds): - """ A dictionary which maintains the insertion order of keys. 
""" - - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__end - except AttributeError: - self.clear() - self.update(*args, **kwds) - - def clear(self): - self.__end = end = [] - end += [None, end, end] # sentinel node for doubly linked list - self.__map = {} # key --> [key, prev, next] - dict.clear(self) - - def __setitem__(self, key, value): - if key not in self: - end = self.__end - curr = end[1] - curr[2] = end[1] = self.__map[key] = [key, curr, end] - dict.__setitem__(self, key, value) - - def __delitem__(self, key): - dict.__delitem__(self, key) - key, prev, next = self.__map.pop(key) - prev[2] = next - next[1] = prev - - def __iter__(self): - end = self.__end - curr = end[2] - while curr is not end: - yield curr[0] - curr = curr[2] - - def __reversed__(self): - end = self.__end - curr = end[1] - while curr is not end: - yield curr[0] - curr = curr[1] - - def popitem(self, last=True): - if not self: - raise KeyError('dictionary is empty') - if last: - key = next(reversed(self)) - else: - key = next(iter(self)) - value = self.pop(key) - return key, value - - def __reduce__(self): - items = [[k, self[k]] for k in self] - tmp = self.__map, self.__end - del self.__map, self.__end - inst_dict = vars(self).copy() - self.__map, self.__end = tmp - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def keys(self): - return list(self) - - setdefault = DictMixin.setdefault - update = DictMixin.update - pop = DictMixin.pop - values = DictMixin.values - items = DictMixin.items - iterkeys = DictMixin.iterkeys - itervalues = DictMixin.itervalues - iteritems = DictMixin.iteritems - - def __repr__(self): - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - - def copy(self): - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - d = cls() - for key in iterable: - d[key] = value - return d - - def __eq__(self, other): - if isinstance(other, OrderedDict): - if len(self) != len(other): - return False - for p, q in zip(self.items(), other.items()): - if p != q: - return False - return True - return dict.__eq__(self, other) - - def __ne__(self, other): - return not self == other - - -# WeakSet from Python 2.7+ (https://code.google.com/p/weakrefset) - -from _weakref import ref - - class _IterationGuard(object): # This context manager registers itself in the current iterators of the # weak container, such as to delay all removals until the context manager @@ -776,7 +642,7 @@ def _find_insertion(self, x): # could not compare a[mid] with x # start scanning to find insertion point while swallowing type errors lo = 0 - compared_one = False # flag is used to determine whether uncomparables are grouped at the front or back + compared_one = False # flag is used to determine whether un-comparables are grouped at the front or back while lo < hi: try: if a[lo] == x or a[lo] >= x: break @@ -789,15 +655,11 @@ def _find_insertion(self, x): sortedset = SortedSet # backwards-compatibility -from cassandra.compat import Mapping -from six.moves import cPickle - - class OrderedMap(Mapping): ''' An ordered map that accepts non-hashable types for keys. It also maintains the insertion order of items, behaving as OrderedDict in that regard. 
These maps - are constructed and read just as normal mapping types, exept that they may + are constructed and read just as normal mapping types, except that they may contain arbitrary collections and other non-hashable items as keys:: >>> od = OrderedMap([({'one': 1, 'two': 2}, 'value'), @@ -835,7 +697,7 @@ def __init__(self, *args, **kwargs): for k, v in e: self._insert(k, v) - for k, v in six.iteritems(kwargs): + for k, v in kwargs.items(): self._insert(k, v) def _insert(self, key, value): @@ -901,7 +763,7 @@ def popitem(self): raise KeyError() def _serialize_key(self, key): - return cPickle.dumps(key) + return pickle.dumps(key) class OrderedMapSerializedKey(OrderedMap): @@ -919,13 +781,6 @@ def _serialize_key(self, key): return self.cass_key_type.serialize(key, self.protocol_version) -import datetime -import time - -if six.PY3: - long = int - - @total_ordering class Time(object): ''' @@ -951,11 +806,11 @@ def __init__(self, value): - datetime.time: built-in time - string_type: a string time of the form "HH:MM:SS[.mmmuuunnn]" """ - if isinstance(value, six.integer_types): + if isinstance(value, int): self._from_timestamp(value) elif isinstance(value, datetime.time): self._from_time(value) - elif isinstance(value, six.string_types): + elif isinstance(value, str): self._from_timestring(value) else: raise TypeError('Time arguments must be a whole number, datetime.time, or string') @@ -1031,7 +886,7 @@ def __eq__(self, other): if isinstance(other, Time): return self.nanosecond_time == other.nanosecond_time - if isinstance(other, six.integer_types): + if isinstance(other, int): return self.nanosecond_time == other return self.nanosecond_time % Time.MICRO == 0 and \ @@ -1080,11 +935,11 @@ def __init__(self, value): - datetime.date: built-in date - string_type: a string time of the form "yyyy-mm-dd" """ - if isinstance(value, six.integer_types): + if isinstance(value, int): self.days_from_epoch = value elif isinstance(value, (datetime.date, datetime.datetime)): self._from_timetuple(value.timetuple()) - elif isinstance(value, six.string_types): + elif isinstance(value, str): self._from_datestring(value) else: raise TypeError('Date arguments must be a whole number, datetime.date, or string') @@ -1124,7 +979,7 @@ def __eq__(self, other): if isinstance(other, Date): return self.days_from_epoch == other.days_from_epoch - if isinstance(other, six.integer_types): + if isinstance(other, int): return self.days_from_epoch == other try: @@ -1151,97 +1006,9 @@ def __str__(self): # If we overflow datetime.[MIN|MAX] return str(self.days_from_epoch) -import socket -if hasattr(socket, 'inet_pton'): - inet_pton = socket.inet_pton - inet_ntop = socket.inet_ntop -else: - """ - Windows doesn't have socket.inet_pton and socket.inet_ntop until Python 3.4 - This is an alternative impl using ctypes, based on this win_inet_pton project: - https://github.com/hickeroar/win_inet_pton - """ - import ctypes - class sockaddr(ctypes.Structure): - """ - Shared struct for ipv4 and ipv6. - - https://msdn.microsoft.com/en-us/library/windows/desktop/ms740496(v=vs.85).aspx - - ``__pad1`` always covers the port. - - When being used for ``sockaddr_in6``, ``ipv4_addr`` actually covers ``sin6_flowinfo``, resulting - in proper alignment for ``ipv6_addr``. 
- """ - _fields_ = [("sa_family", ctypes.c_short), - ("__pad1", ctypes.c_ushort), - ("ipv4_addr", ctypes.c_byte * 4), - ("ipv6_addr", ctypes.c_byte * 16), - ("__pad2", ctypes.c_ulong)] - - if hasattr(ctypes, 'windll'): - WSAStringToAddressA = ctypes.windll.ws2_32.WSAStringToAddressA - WSAAddressToStringA = ctypes.windll.ws2_32.WSAAddressToStringA - else: - def not_windows(*args): - raise OSError("IPv6 addresses cannot be handled on Windows. " - "Missing ctypes.windll") - WSAStringToAddressA = not_windows - WSAAddressToStringA = not_windows - - def inet_pton(address_family, ip_string): - if address_family == socket.AF_INET: - return socket.inet_aton(ip_string) - - addr = sockaddr() - addr.sa_family = address_family - addr_size = ctypes.c_int(ctypes.sizeof(addr)) - - if WSAStringToAddressA( - ip_string, - address_family, - None, - ctypes.byref(addr), - ctypes.byref(addr_size) - ) != 0: - raise socket.error(ctypes.FormatError()) - - if address_family == socket.AF_INET6: - return ctypes.string_at(addr.ipv6_addr, 16) - - raise socket.error('unknown address family') - - def inet_ntop(address_family, packed_ip): - if address_family == socket.AF_INET: - return socket.inet_ntoa(packed_ip) - - addr = sockaddr() - addr.sa_family = address_family - addr_size = ctypes.c_int(ctypes.sizeof(addr)) - ip_string = ctypes.create_string_buffer(128) - ip_string_size = ctypes.c_int(ctypes.sizeof(ip_string)) - - if address_family == socket.AF_INET6: - if len(packed_ip) != ctypes.sizeof(addr.ipv6_addr): - raise socket.error('packed IP wrong length for inet_ntoa') - ctypes.memmove(addr.ipv6_addr, packed_ip, 16) - else: - raise socket.error('unknown address family') - - if WSAAddressToStringA( - ctypes.byref(addr), - addr_size, - None, - ip_string, - ctypes.byref(ip_string_size) - ) != 0: - raise socket.error(ctypes.FormatError()) - - return ip_string[:ip_string_size.value - 1] - - -import keyword +inet_pton = socket.inet_pton +inet_ntop = socket.inet_ntop # similar to collections.namedtuple, reproduced here because Python 2.6 did not have the rename logic @@ -1541,8 +1308,11 @@ class Duration(object): """ months = 0 + "" days = 0 + "" nanoseconds = 0 + "" def __init__(self, months=0, days=0, nanoseconds=0): self.months = months @@ -1625,7 +1395,7 @@ def _round_to_precision(cls, ms, precision, default_dt): if precision_idx <= cls._to_int(DateRangePrecision.MINUTE): replace_kwargs['second'] = default_dt.second if precision_idx <= cls._to_int(DateRangePrecision.SECOND): - # truncate to nearest 1000 so we deal in ms, not us + # truncate to nearest 1000, so we deal in ms, not us replace_kwargs['microsecond'] = (default_dt.microsecond // 1000) * 1000 if precision_idx == cls._to_int(DateRangePrecision.MILLISECOND): replace_kwargs['microsecond'] = int(round(dt.microsecond, -3)) @@ -1634,7 +1404,7 @@ def _round_to_precision(cls, ms, precision, default_dt): @classmethod def round_up_to_precision(cls, ms, precision): # PYTHON-912: this is the only case in which we can't take as upper bound - # datetime.datetime.max because the month from ms may be February and we'd + # datetime.datetime.max because the month from ms may be February, and we'd # be setting 31 as the month day if precision == cls.MONTH: date_ms = utc_datetime_from_ms_timestamp(ms) @@ -1685,7 +1455,7 @@ def __init__(self, value, precision): if value is None: milliseconds = None - elif isinstance(value, six.integer_types): + elif isinstance(value, int): milliseconds = value elif isinstance(value, datetime.datetime): value = value.replace( @@ -1953,12 +1723,10 @@ 
def __init__(self, version): try: self.major = int(parts.pop()) - except ValueError: - six.reraise( - ValueError, - ValueError("Couldn't parse version {}. Version should start with a number".format(version)), - sys.exc_info()[2] - ) + except ValueError as e: + raise ValueError( + "Couldn't parse version {}. Version should start with a number".format(version))\ + .with_traceback(e.__traceback__) try: self.minor = int(parts.pop()) if parts else 0 self.patch = int(parts.pop()) if parts else 0 @@ -1991,8 +1759,8 @@ def __str__(self): @staticmethod def _compare_version_part(version, other_version, cmp): - if not (isinstance(version, six.integer_types) and - isinstance(other_version, six.integer_types)): + if not (isinstance(version, int) and + isinstance(other_version, int)): version = str(version) other_version = str(other_version) diff --git a/docs.yaml b/docs.yaml index 3a33e5a4e8..63269a3001 100644 --- a/docs.yaml +++ b/docs.yaml @@ -2,10 +2,6 @@ title: DataStax Python Driver summary: DataStax Python Driver for Apache Cassandra® output: docs/_build/ swiftype_drivers: pythondrivers -checks: - external_links: - exclude: - - 'http://aka.ms/vcpython27' sections: - title: N/A prefix: / @@ -22,8 +18,18 @@ sections: # build extensions like libev CASS_DRIVER_NO_CYTHON=1 python setup.py build_ext --inplace --force versions: + - name: '3.29' + ref: 434b1f52 + - name: '3.28' + ref: 4325afb6 + - name: '3.27' + ref: 910f0282 + - name: '3.26' + ref: f1e9126 + - name: '3.25' + ref: a83c36a5 - name: '3.24' - ref: e0b7e73c + ref: 21cac12b - name: '3.23' ref: a40a2af7 - name: '3.22' @@ -65,9 +71,47 @@ versions: redirects: - \A\/(.*)/\Z: /\1.html rewrites: - - search: cassandra.apache.org/doc/cql3/CQL.html - replace: cassandra.apache.org/doc/cql3/CQL-3.0.html - - search: http://www.datastax.com/documentation/cql/3.1/ - replace: https://docs.datastax.com/en/archived/cql/3.1/ - search: http://www.datastax.com/docs/1.2/cql_cli/cql/BATCH replace: https://docs.datastax.com/en/dse/6.7/cql/cql/cql_reference/cql_commands/cqlBatch.html + - search: http://www.datastax.com/documentation/cql/3.1/ + replace: https://docs.datastax.com/en/archived/cql/3.1/ + - search: 'https://community.datastax.com' + replace: 'https://www.datastax.com/dev/community' + - search: 'https://docs.datastax.com/en/astra/aws/doc/index.html' + replace: 'https://docs.datastax.com/en/astra-serverless/docs/connect/drivers/connect-python.html' + - search: 'http://cassandra.apache.org/doc/cql3/CQL.html#timeuuidFun' + replace: 'https://cassandra.apache.org/doc/3.11/cassandra/cql/functions.html#timeuuid-functions' + - search: 'http://cassandra.apache.org/doc/cql3/CQL.html#tokenFun' + replace: 'https://cassandra.apache.org/doc/3.11/cassandra/cql/functions.html#token' + - search: 'http://cassandra.apache.org/doc/cql3/CQL.html#collections' + replace: 'https://cassandra.apache.org/doc/3.11/cassandra/cql/types.html#collections' + - search: 'http://cassandra.apache.org/doc/cql3/CQL.html#batchStmt' + replace: 'https://cassandra.apache.org/doc/3.11/cassandra/cql/dml.html#batch_statement' + - search: 
'http://cassandra.apache.org/doc/cql3/CQL-3.0.html#timeuuidFun' + replace: 'https://cassandra.apache.org/doc/3.11/cassandra/cql/functions.html#timeuuid-functions' + - search: 'http://cassandra.apache.org/doc/cql3/CQL-3.0.html#tokenFun' + replace: 'https://cassandra.apache.org/doc/3.11/cassandra/cql/functions.html#token' + - search: 'http://cassandra.apache.org/doc/cql3/CQL-3.0.html#collections' + replace: 'https://cassandra.apache.org/doc/3.11/cassandra/cql/types.html#collections' + - search: 'http://cassandra.apache.org/doc/cql3/CQL-3.0.html#batchStmt' + replace: 'https://cassandra.apache.org/doc/3.11/cassandra/cql/dml.html#batch_statement' +checks: + external_links: + exclude: + - 'https://twitter.com/dsJavaDriver' + - 'https://twitter.com/datastaxeng' + - 'https://twitter.com/datastax' + - 'https://projectreactor.io' + - 'https://docs.datastax.com/en/drivers/java/4.[0-9]+/com/datastax/oss/driver/internal/' + - 'http://www.planetcassandra.org/blog/user-defined-functions-in-cassandra-3-0/' + - 'http://www.planetcassandra.org/making-the-change-from-thrift-to-cql/' + - 'https://academy.datastax.com/slack' + - 'https://community.datastax.com/index.html' + - 'https://micrometer.io/docs' + - 'http://datastax.github.io/java-driver/features/shaded_jar/' + - 'http://aka.ms/vcpython27' + internal_links: + exclude: + - 'netty_pipeline/' + - '../core/' + - '%5Bguava%20eviction%5D' diff --git a/docs/.nav b/docs/.nav index 375f058817..79f3029073 100644 --- a/docs/.nav +++ b/docs/.nav @@ -3,10 +3,6 @@ getting_started execution_profiles lwt object_mapper -geo_types -graph -graph_fluent -classic_graph performance query_paging security @@ -14,5 +10,12 @@ upgrading user_defined_types dates_and_times cloud +column_encryption +geo_types +graph +classic_graph +graph_fluent +CHANGELOG faq api + diff --git a/docs/api/cassandra/cluster.rst b/docs/api/cassandra/cluster.rst index 2b3d7828a8..a9a9d378a4 100644 --- a/docs/api/cassandra/cluster.rst +++ b/docs/api/cassandra/cluster.rst @@ -215,7 +215,7 @@ .. automethod:: add_errback(fn, *args, **kwargs) - .. automethod:: add_callbacks(callback, errback, callback_args=(), callback_kwargs=None, errback_args=(), errback_args=None) + .. automethod:: add_callbacks(callback, errback, callback_args=(), callback_kwargs=None, errback_args=(), errback_kwargs=None) .. autoclass:: ResultSet () :members: diff --git a/docs/api/cassandra/cqlengine/models.rst b/docs/api/cassandra/cqlengine/models.rst index 60b1471184..ee689a2b48 100644 --- a/docs/api/cassandra/cqlengine/models.rst +++ b/docs/api/cassandra/cqlengine/models.rst @@ -103,7 +103,7 @@ Model TestIfNotExistsModel.if_not_exists().create(id=id, count=9, text='111111111111') except LWTException as e: # handle failure case - print e.existing # dict containing LWT result fields + print(e.existing) # dict containing LWT result fields This method is supported on Cassandra 2.0 or later. @@ -144,7 +144,7 @@ Model t.iff(count=5).update('other text') except LWTException as e: # handle failure case - print e.existing # existing object + print(e.existing) # existing object ..
automethod:: get diff --git a/docs/cloud.rst b/docs/cloud.rst index 7ff7693736..3230720ec9 100644 --- a/docs/cloud.rst +++ b/docs/cloud.rst @@ -38,6 +38,22 @@ the default temporary directory of the system will be used as base dir. } ... +connect_timeout ++++++++++++++++++++ + +As part of the process of connecting to Astra, the Python driver will query a service to retrieve +current information about your cluster. You can control the connection timeout for this operation +using *connect_timeout*. If you observe errors in `read_metadata_info`, you might consider increasing +this parameter. This timeout is specified in seconds. + +.. code:: python + + cloud_config = { + 'secure_connect_bundle': '/path/to/secure-connect-dbname.zip', + 'connect_timeout': 120 + } + ... + Astra Differences ================== In most circumstances, the client code for interacting with an Astra cluster will be the same as interacting with any other Cassandra cluster. The exceptions are: @@ -54,3 +70,36 @@ Limitations Event loops ^^^^^^^^^^^ Eventlet isn't yet supported for Python 3.7+ due to an `issue in Eventlet `_. + + +CqlEngine +========= + +When using the object mapper, you can configure cqlengine with :func:`~.cqlengine.connection.set_session`: + +.. code:: python + + from cassandra.cqlengine import connection + ... + + c = Cluster(cloud={'secure_connect_bundle':'/path/to/secure-connect-test.zip'}, + auth_provider=PlainTextAuthProvider('user', 'pass')) + s = c.connect('myastrakeyspace') + connection.set_session(s) + ... + +If you are using some third-party libraries (flask, django, etc.), you might not be able to change the +configuration mechanism. For this reason, the `hosts` argument of the default +:func:`~.cqlengine.connection.setup` function will be ignored if a `cloud` config is provided: + +.. code:: python + + from cassandra.cqlengine import connection + ... + + connection.setup( + None, # or anything else + "myastrakeyspace", cloud={ + 'secure_connect_bundle':'/path/to/secure-connect-test.zip' + }, + auth_provider=PlainTextAuthProvider('user', 'pass')) diff --git a/docs/column_encryption.rst b/docs/column_encryption.rst new file mode 100644 index 0000000000..ab67ef16d0 --- /dev/null +++ b/docs/column_encryption.rst @@ -0,0 +1,101 @@ +Column Encryption +================= + +Overview +-------- +Support for client-side encryption of data was added in version 3.27.0 of the Python driver. When using +this feature, data will be encrypted on-the-fly according to a specified :class:`~.ColumnEncryptionPolicy` +instance. This policy is also used to decrypt data in returned rows. If a prepared statement is used, +this decryption is transparent to the user; retrieved data will be decrypted and converted into the original +type (according to definitions in the encryption policy). Support for simple (i.e. non-prepared) queries is +also available, although in this case values must be manually encrypted and/or decrypted. The +:class:`~.ColumnEncryptionPolicy` instance provides methods to assist with these operations. + +Client-side encryption and decryption should work against all versions of Cassandra and DSE. It does not +utilize any server-side functionality to do its work. + +WARNING: Encryption format changes in 3.28.0 +------------------------------------------------ +Python driver 3.28.0 introduces a new encryption format for data written by :class:`~.AES256ColumnEncryptionPolicy`. +As a result, any encrypted data written by Python driver 3.27.0 will **NOT** be readable.
+If you upgraded from 3.27.0, you should re-encrypt your data with 3.28.0. + +Configuration +------------- +Client-side encryption is enabled by creating an instance of a subclass of :class:`~.ColumnEncryptionPolicy` +and adding information about columns to be encrypted to it. This policy is then supplied to :class:`~.Cluster` +when it's created. + +.. code-block:: python + + import os + + from cassandra.policies import ColDesc + from cassandra.column_encryption.policies import AES256ColumnEncryptionPolicy, AES256_KEY_SIZE_BYTES + + key = os.urandom(AES256_KEY_SIZE_BYTES) + cl_policy = AES256ColumnEncryptionPolicy() + col_desc = ColDesc('ks1', 'table1', 'column1') + cql_type = "int" + cl_policy.add_column(col_desc, key, cql_type) + cluster = Cluster(column_encryption_policy=cl_policy) + +:class:`~.AES256ColumnEncryptionPolicy` is a subclass of :class:`~.ColumnEncryptionPolicy` which provides +encryption and decryption via AES-256. This class is currently the only available column encryption policy +implementation, although users can certainly implement their own by subclassing :class:`~.ColumnEncryptionPolicy`. + +:class:`~.ColDesc` is a named tuple which uniquely identifies a column in a given keyspace and table. Once we +have this tuple, the encryption key, and the CQL type stored in the column, we can add the column to the policy +using :func:`~.ColumnEncryptionPolicy.add_column`. When all column definitions have been added to the policy, we +pass it along to the cluster. + +The CQL type for the column only has meaning at the client; it is never sent to Cassandra. The encryption key +is also never sent to the server; all the server ever sees are random bytes reflecting the encrypted data. As a +result, all columns containing client-side encrypted values should be declared with the CQL type "blob" at the +Cassandra server. + +Usage +----- + +Encryption +^^^^^^^^^^ +Client-side encryption shines most when used with prepared statements. A prepared statement is aware of information +about the columns in the query it was built from, and we can use this information to transparently encrypt any +supplied parameters. For example, we can create a prepared statement to insert a value into column1 (as defined above) +by executing the following code after creating a :class:`~.Cluster` in the manner described above: + +.. code-block:: python + + session = cluster.connect() + prepared = session.prepare("insert into ks1.table1 (column1) values (?)") + session.execute(prepared, (1000,)) + +Our encryption policy will detect that "column1" is an encrypted column and take appropriate action. + +As mentioned above, client-side encryption can also be used with simple queries, although such use cases are +certainly not transparent. :class:`~.ColumnEncryptionPolicy` provides a helper named +:func:`~.ColumnEncryptionPolicy.encode_and_encrypt` which will convert an input value into bytes using the +standard serialization methods employed by the driver. The result is then encrypted according to the configuration +of the policy. Using this approach, the example above could be implemented along the lines of the following: + +.. code-block:: python + + session = cluster.connect() + session.execute("insert into ks1.table1 (column1) values (%s)", (cl_policy.encode_and_encrypt(col_desc, 1000),)) + +Decryption +^^^^^^^^^^ +Decryption of values returned from the server is always transparent.
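+For example, reading the value back shows the round trip end to end. This is a minimal sketch, assuming +the ks1.table1 schema, the running cluster, and the cl_policy configured in the Configuration section above: + +.. code-block:: python + + session = cluster.connect() + row = session.execute("select column1 from ks1.table1").one() + print(row.column1) # prints 1000; the stored blob was decrypted client-side +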
Whether we're executing a simple or a prepared +statement, encrypted columns will be decrypted automatically and made available via rows just like any other +result. + +Limitations +----------- +:class:`~.AES256ColumnEncryptionPolicy` uses the implementation of AES-256 provided by the +`cryptography `_ module. Any limitations of this module should be considered +when deploying client-side encryption. Note specifically that a Rust compiler is required for modern versions +of the cryptography package, although wheels exist for many common platforms. + +Client-side encryption has been implemented for both the default Cython and pure Python row processing logic. +This functionality has not yet been ported to the NumPy Cython implementation. During testing, +the NumPy processing works on Python 3.7 but fails for Python 3.8. diff --git a/docs/cqlengine/connections.rst b/docs/cqlengine/connections.rst index 03ade27521..fd44303514 100644 --- a/docs/cqlengine/connections.rst +++ b/docs/cqlengine/connections.rst @@ -99,7 +99,7 @@ You can specify a default connection per model: year = columns.Integer(primary_key=True) model = columns.Text(primary_key=True) - print len(Automobile.objects.all()) # executed on the connection 'cluster2' + print(len(Automobile.objects.all())) # executed on the connection 'cluster2' QuerySet and model instance --------------------------- diff --git a/docs/cqlengine/models.rst b/docs/cqlengine/models.rst index c0ba390119..719513f4a9 100644 --- a/docs/cqlengine/models.rst +++ b/docs/cqlengine/models.rst @@ -201,7 +201,7 @@ are only created, persisted, and queried via table Models. A short example to in users.create(name="Joe", addr=address(street="Easy St.", zipcode=99999)) user = users.objects(name="Joe")[0] - print user.name, user.addr + print(user.name, user.addr) # Joe address(street=u'Easy St.', zipcode=99999) UDTs are modeled by inheriting :class:`~.usertype.UserType`, and setting column type attributes. Types are then used in defining diff --git a/docs/execution_profiles.rst b/docs/execution_profiles.rst index 7be1a85e3f..0965d77f3d 100644 --- a/docs/execution_profiles.rst +++ b/docs/execution_profiles.rst @@ -43,7 +43,7 @@ Default session = cluster.connect() local_query = 'SELECT rpc_address FROM system.local' for _ in cluster.metadata.all_hosts(): - print session.execute(local_query)[0] + print(session.execute(local_query)[0]) ..
parsed-literal:: @@ -123,7 +123,7 @@ New profiles can be added constructing from scratch, or deriving from default: cluster.add_execution_profile(node1_profile, locked_execution) for _ in cluster.metadata.all_hosts(): - print session.execute(local_query, execution_profile=node1_profile)[0] + print(session.execute(local_query, execution_profile=node1_profile)[0]) .. parsed-literal:: @@ -144,8 +144,8 @@ We also have the ability to pass profile instances to be used for execution, but tmp = session.execution_profile_clone_update('node1', request_timeout=100, row_factory=tuple_factory) - print session.execute(local_query, execution_profile=tmp)[0] - print session.execute(local_query, execution_profile='node1')[0] + print(session.execute(local_query, execution_profile=tmp)[0]) + print(session.execute(local_query, execution_profile='node1')[0]) .. parsed-literal:: diff --git a/docs/faq.rst b/docs/faq.rst index 56cb648a24..194d5520e8 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -44,7 +44,7 @@ Since tracing is done asynchronously to the request, this method polls until the >>> result = future.result() >>> trace = future.get_query_trace() >>> for e in trace.events: - >>> print e.source_elapsed, e.description + >>> print(e.source_elapsed, e.description) 0:00:00.000077 Parsing select * from system.local 0:00:00.000153 Preparing statement @@ -67,7 +67,7 @@ With prepared statements, the replicas are obtained by ``routing_key``, based on >>> bound = prepared.bind((1,)) >>> replicas = cluster.metadata.get_replicas(bound.keyspace, bound.routing_key) >>> for h in replicas: - >>> print h.address + >>> print(h.address) 127.0.0.1 127.0.0.2 diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 8cb86a5504..432e42ec4f 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -3,16 +3,42 @@ Getting Started First, make sure you have the driver properly :doc:`installed `. -Connecting to Cassandra +Connecting to a Cluster ----------------------- Before we can start executing any queries against a Cassandra cluster we need to setup an instance of :class:`~.Cluster`. As the name suggests, you will typically have one instance of :class:`~.Cluster` for each Cassandra cluster you want to interact with. -The simplest way to create a :class:`~.Cluster` is like this: First, make sure you have the Cassandra driver properly :doc:`installed `. +Connecting to Astra ++++++++++++++++++++ + +If you are a DataStax `Astra `_ user, +here is how to connect to your cluster: + +1. Download the secure connect bundle from your Astra account. +2. Connect to your cluster with + +.. code-block:: python + + from cassandra.cluster import Cluster + from cassandra.auth import PlainTextAuthProvider + + cloud_config = { + 'secure_connect_bundle': '/path/to/secure-connect-dbname.zip' + } + auth_provider = PlainTextAuthProvider(username='user', password='pass') + cluster = Cluster(cloud=cloud_config, auth_provider=auth_provider) + session = cluster.connect() + +See `Astra `_ and :doc:`cloud` for more details. + +Connecting to Cassandra ++++++++++++++++++++++++ +The simplest way to create a :class:`~.Cluster` is like this: + .. 
code-block:: python from cassandra.cluster import Cluster @@ -52,6 +78,8 @@ To establish connections and begin executing queries we need a cluster = Cluster() session = cluster.connect() +Session Keyspace +---------------- The :meth:`~.Cluster.connect()` method takes an optional ``keyspace`` argument which sets the default keyspace for all queries made through that :class:`~.Session`: @@ -60,7 +88,6 @@ which sets the default keyspace for all queries made through that :class:`~.Sess cluster = Cluster() session = cluster.connect('mykeyspace') - You can always change a Session's keyspace using :meth:`~.Session.set_keyspace` or by executing a ``USE `` query: @@ -70,6 +97,8 @@ by executing a ``USE `` query: # or you can do this instead session.execute('USE users') +Execution Profiles +------------------ Profiles are passed in by ``execution_profiles`` dict. In this case we can construct the base ``ExecutionProfile`` passing all attributes: @@ -113,7 +142,7 @@ way to execute a query is to use :meth:`~.Session.execute()`: rows = session.execute('SELECT name, age, email FROM users') for user_row in rows: - print user_row.name, user_row.age, user_row.email + print(user_row.name, user_row.age, user_row.email) This will transparently pick a Cassandra node to execute the query against and handle any retries that are necessary if the operation fails. @@ -129,19 +158,19 @@ examples are equivalent: rows = session.execute('SELECT name, age, email FROM users') for row in rows: - print row.name, row.age, row.email + print(row.name, row.age, row.email) .. code-block:: python rows = session.execute('SELECT name, age, email FROM users') for (name, age, email) in rows: - print name, age, email + print(name, age, email) .. code-block:: python rows = session.execute('SELECT name, age, email FROM users') for row in rows: - print row[0], row[1], row[2] + print(row[0], row[1], row[2]) If you prefer another result format, such as a ``dict`` per row, you can change the :attr:`~.Session.row_factory` attribute. @@ -329,7 +358,7 @@ For example: try: rows = future.result() user = rows[0] - print user.name, user.age + print(user.name, user.age) except ReadTimeout: log.exception("Query timed out:") @@ -346,7 +375,7 @@ This works well for executing many queries concurrently: # wait for them to complete and use the results for future in futures: rows = future.result() - print rows[0].name + print(rows[0].name) Alternatively, instead of calling :meth:`~.ResponseFuture.result()`, you can attach callback and errback functions through the diff --git a/docs/graph_fluent.rst b/docs/graph_fluent.rst index 03cf8d36c0..8d5ad5377d 100644 --- a/docs/graph_fluent.rst +++ b/docs/graph_fluent.rst @@ -83,7 +83,7 @@ to accomplish this configuration: session = cluster.connect() g = DseGraph.traversal_source(session) # Build the GraphTraversalSource - print g.V().toList() # Traverse the Graph + print(g.V().toList()) # Traverse the Graph Note that the execution profile created with :meth:`DseGraph.create_execution_profile <.datastax.graph.fluent.DseGraph.create_execution_profile>` cannot be used for any groovy string queries. @@ -231,11 +231,11 @@ Batch Queries DSE Graph supports batch queries using a :class:`TraversalBatch <.datastax.graph.fluent.query.TraversalBatch>` object instantiated with :meth:`DseGraph.batch <.datastax.graph.fluent.DseGraph.batch>`. A :class:`TraversalBatch <.datastax.graph.fluent.query.TraversalBatch>` allows -you to execute multiple graph traversals in a single atomic transaction. 
A -traversal batch is executed with :meth:`.Session.execute_graph` or using -:meth:`TraversalBatch.execute <.datastax.graph.fluent.query.TraversalBatch.execute>` if bounded to a DSE session. +you to execute multiple graph traversals in a single atomic transaction. A +traversal batch is executed with :meth:`.Session.execute_graph` or using +:meth:`TraversalBatch.execute <.datastax.graph.fluent.query.TraversalBatch.execute>` if bound to a DSE session. -Either way you choose to execute the traversal batch, you need to configure +Either way you choose to execute the traversal batch, you need to configure the execution profile accordingly. Here is an example:: from cassandra.cluster import Cluster diff --git a/docs/index.rst b/docs/index.rst index 4cdd637e0a..2fcaf43884 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -4,7 +4,7 @@ A Python client driver for `Apache Cassandra® `_. This driver works exclusively with the Cassandra Query Language v3 (CQL3) and Cassandra's native protocol. Cassandra 2.1+ is supported, including DSE 4.7+. -The driver supports Python 2.7, 3.4, 3.5, 3.6, 3.7 and 3.8. +The driver supports Python 3.9 through 3.13. This driver is open source under the `Apache v2 License `_. @@ -50,6 +50,9 @@ Contents :doc:`cloud` A guide to connecting to Datastax Astra +:doc:`column_encryption` + Transparent client-side per-column encryption and decryption + :doc:`geo_types` Working with DSE geometry types diff --git a/docs/installation.rst b/docs/installation.rst index b381425302..be31551e79 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -3,7 +3,7 @@ Installation Supported Platforms ------------------- -Python 2.7, 3.4, 3.5, 3.6, 3.7 and 3.8 are supported. Both CPython (the standard Python +Python 3.9 through 3.13 are supported. Both CPython (the standard Python implementation) and `PyPy `_ are supported and tested. Linux, OSX, and Windows are supported. @@ -24,9 +24,9 @@ Verifying your Installation --------------------------- To check if the installation was successful, you can run:: - python -c 'import cassandra; print cassandra.__version__' + python -c 'import cassandra; print(cassandra.__version__)' -It should print something like "3.22.0". +It should print something like "3.29.3". .. _installation-datastax-graph: @@ -34,7 +34,7 @@ It should print something like "3.22.0". --------------------------- The driver provides an optional fluent graph API that depends on Apache TinkerPop (gremlinpython). It is not installed by default. To be able to build Gremlin traversals, you need to install -the `graph` requirements:: +the `graph` extra:: pip install cassandra-driver[graph] @@ -67,6 +67,27 @@ support this:: pip install scales +*Optional:* Column-Level Encryption (CLE) Support +-------------------------------------------------- +The driver has built-in support for client-side encryption and +decryption of data. For more, see :doc:`column_encryption`. + +CLE depends on the Python `cryptography `_ module. +When installing Python driver 3.27.0, the `cryptography` module is +also downloaded and installed. +If you are using Python driver 3.28.0 or later and want to use CLE, you must +install the `cryptography `_ module. + +You can install this module along with the driver by specifying the `cle` extra:: + + pip install cassandra-driver[cle] + +Alternatively, you can install the module directly via `pip`:: + + pip install cryptography + +Any version of cryptography >= 35.0 will work for the CLE feature.
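+To confirm the module is importable after installation, you can run a quick optional check, mirroring +the driver version check shown earlier (any recent cryptography release exposes ``__version__``):: + + python -c 'import cryptography; print(cryptography.__version__)' +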
You can find additional +details at `PYTHON-1351 `_. Speeding Up Installation ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -190,16 +211,19 @@ If your sudo configuration does not allow SETENV, you must push the option flag applies these options to all dependencies (which break on the custom flag). Therefore, you must first install dependencies, then use install-option:: - sudo pip install six futures + sudo pip install futures sudo pip install --install-option="--no-cython" +Supported Event Loops +^^^^^^^^^^^^^^^^^^^^^ +For Python versions before 3.12, the driver uses the ``asyncore`` module for its default +event loop. Other event loops such as ``libev``, ``gevent`` and ``eventlet`` are also +available via Python modules or C extensions. Python 3.12 has removed ``asyncore`` entirely, +so on this platform one of these other event loops must be used. + libev support ^^^^^^^^^^^^^ -The driver currently uses Python's ``asyncore`` module for its default -event loop. For better performance, ``libev`` is also supported through -a C extension. - If you're on Linux, you should be able to install libev through a package manager. For example, on Debian/Ubuntu:: @@ -214,8 +238,10 @@ through `Homebrew `_. For example, on Mac OS X:: $ brew install libev -The libev extension is not built for Windows (the build process is complex, and the Windows implementation uses -select anyway). +The libev extension can now be built for Windows as of Python driver version 3.29.2. You can +install libev using any Windows package manager. For example, to install using `vcpkg `_:: + + $ vcpkg install libev If successful, you should be able to build and install the extension (just using ``setup.py build`` or ``setup.py install``) and then use diff --git a/docs/object_mapper.rst b/docs/object_mapper.rst index 50d3cbf320..21d2954f4b 100644 --- a/docs/object_mapper.rst +++ b/docs/object_mapper.rst @@ -63,7 +63,7 @@ Getting Started description = columns.Text(required=False) #next, set up the connection to your cassandra server(s)... - # see http://datastax.github.io/python-driver/api/cassandra/cluster.html for options + # see https://docs.datastax.com/en/developer/python-driver/latest/api/cassandra/cluster.html for options # the list of hosts will be passed to create a Cluster() instance connection.setup(['127.0.0.1'], "cqlengine", protocol_version=3) @@ -87,7 +87,7 @@ Getting Started >>> q.count() 4 >>> for instance in q: - >>> print instance.description + >>> print(instance.description) example5 example6 example7 @@ -101,5 +101,5 @@ Getting Started >>> q2.count() 1 >>> for instance in q2: - >>> print instance.description + >>> print(instance.description) example5 diff --git a/docs/security.rst b/docs/security.rst index 4cf3163fb0..6dd2624c24 100644 --- a/docs/security.rst +++ b/docs/security.rst @@ -119,9 +119,9 @@ The driver configuration: .. code-block:: python from cassandra.cluster import Cluster, Session - from ssl import SSLContext, PROTOCOL_TLSv1 + from ssl import SSLContext, PROTOCOL_TLS - ssl_context = SSLContext(PROTOCOL_TLSv1) + ssl_context = SSLContext(PROTOCOL_TLS) cluster = Cluster(['127.0.0.1'], ssl_context=ssl_context) session = cluster.connect() @@ -147,9 +147,9 @@ to `CERT_REQUIRED`. Otherwise, the loaded verify certificate will have no effect ..
code-block:: python from cassandra.cluster import Cluster, Session - from ssl import SSLContext, PROTOCOL_TLSv1, CERT_REQUIRED + from ssl import SSLContext, PROTOCOL_TLS, CERT_REQUIRED - ssl_context = SSLContext(PROTOCOL_TLSv1) + ssl_context = SSLContext(PROTOCOL_TLS) ssl_context.load_verify_locations('/path/to/rootca.crt') ssl_context.verify_mode = CERT_REQUIRED @@ -161,9 +161,9 @@ Additionally, you can also force the driver to verify the `hostname` of the serv .. code-block:: python from cassandra.cluster import Cluster, Session - from ssl import SSLContext, PROTOCOL_TLSv1, CERT_REQUIRED + from ssl import SSLContext, PROTOCOL_TLS, CERT_REQUIRED - ssl_context = SSLContext(PROTOCOL_TLSv1) + ssl_context = SSLContext(PROTOCOL_TLS) ssl_context.load_verify_locations('/path/to/rootca.crt') ssl_context.verify_mode = CERT_REQUIRED ssl_context.check_hostname = True @@ -228,9 +228,9 @@ Finally, you can use that configuration with the following driver code: .. code-block:: python from cassandra.cluster import Cluster, Session - from ssl import SSLContext, PROTOCOL_TLSv1 + from ssl import SSLContext, PROTOCOL_TLS - ssl_context = SSLContext(PROTOCOL_TLSv1) + ssl_context = SSLContext(PROTOCOL_TLS) ssl_context.load_cert_chain( certfile='/path/to/client.crt_signed', keyfile='/path/to/client.key') @@ -251,9 +251,9 @@ The following driver code specifies that the connection should use two-way verif .. code-block:: python from cassandra.cluster import Cluster, Session - from ssl import SSLContext, PROTOCOL_TLSv1, CERT_REQUIRED + from ssl import SSLContext, PROTOCOL_TLS, CERT_REQUIRED - ssl_context = SSLContext(PROTOCOL_TLSv1) + ssl_context = SSLContext(PROTOCOL_TLS) ssl_context.load_verify_locations('/path/to/rootca.crt') ssl_context.verify_mode = CERT_REQUIRED ssl_context.load_cert_chain( @@ -275,7 +275,7 @@ for more details about ``SSLContext`` configuration. from cassandra.cluster import Cluster from cassandra.io.twistedreactor import TwistedConnection - ssl_context = SSL.Context(SSL.TLSv1_METHOD) + ssl_context = SSL.Context(SSL.TLSv1_2_METHOD) ssl_context.set_verify(SSL.VERIFY_PEER, callback=lambda _1, _2, _3, _4, ok: ok) ssl_context.use_certificate_file('/path/to/client.crt_signed') ssl_context.use_privatekey_file('/path/to/client.key') @@ -303,7 +303,7 @@ deprecated in the next major release. By default, a ``ca_certs`` value should be supplied (the value should be a string pointing to the location of the CA certs file), and you probably -want to specify ``ssl_version`` as ``ssl.PROTOCOL_TLSv1`` to match +want to specify ``ssl_version`` as ``ssl.PROTOCOL_TLS`` to match Cassandra's default protocol. For example: @@ -311,11 +311,11 @@ For example: .. 
code-block:: python from cassandra.cluster import Cluster - from ssl import PROTOCOL_TLSv1, CERT_REQUIRED + from ssl import PROTOCOL_TLS, CERT_REQUIRED ssl_opts = { 'ca_certs': '/path/to/my/ca.certs', - 'ssl_version': PROTOCOL_TLSv1, + 'ssl_version': PROTOCOL_TLS, 'cert_reqs': CERT_REQUIRED # Certificates are required and validated } cluster = Cluster(ssl_options=ssl_opts) diff --git a/docs/upgrading.rst b/docs/upgrading.rst index 3a600e9ac0..3fd937d7bc 100644 --- a/docs/upgrading.rst +++ b/docs/upgrading.rst @@ -382,7 +382,3 @@ The following dependencies have officially been made optional: * ``scales`` * ``blist`` - -And one new dependency has been added (to enable Python 3 support): - -* ``six`` diff --git a/example_core.py b/example_core.py index 01c766e109..56e8924d1d 100644 --- a/example_core.py +++ b/example_core.py @@ -1,12 +1,14 @@ #!/usr/bin/env python -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/example_mapper.py b/example_mapper.py index 35100471c7..8105dbe2b1 100755 --- a/example_mapper.py +++ b/example_mapper.py @@ -1,12 +1,14 @@ #!/usr/bin/env python -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/examples/concurrent_executions/execute_async_with_queue.py b/examples/concurrent_executions/execute_async_with_queue.py index 60d2a69c3c..44a91a530c 100644 --- a/examples/concurrent_executions/execute_async_with_queue.py +++ b/examples/concurrent_executions/execute_async_with_queue.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -19,7 +21,7 @@ import time import uuid -from six.moves import queue +import queue from cassandra.cluster import Cluster diff --git a/examples/concurrent_executions/execute_with_threads.py b/examples/concurrent_executions/execute_with_threads.py index e3c80f5d6b..69126de6ec 100644 --- a/examples/concurrent_executions/execute_with_threads.py +++ b/examples/concurrent_executions/execute_with_threads.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/examples/request_init_listener.py b/examples/request_init_listener.py index b90bce6b64..6cce4953e2 100644 --- a/examples/request_init_listener.py +++ b/examples/request_init_listener.py @@ -1,11 +1,13 @@ #!/usr/bin/env python -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -18,7 +20,6 @@ # about the encoded request size. Note that the counts would be available using the internal 'metrics' tracking -- # this is just demonstrating a way to track a few custom attributes. 
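The execute_async_with_queue example above now needs only the standard library: a bounded ``queue.Queue`` caps how many ``execute_async`` futures are outstanding at once. A minimal sketch of that pattern, assuming an already-connected ``session`` and a table ``t`` (both illustrative, not part of the patch):

.. code-block:: python

    import queue
    import uuid

    def insert_with_bounded_queue(session, num_rows, max_in_flight=64):
        # A bounded queue caps the number of outstanding async requests.
        futures = queue.Queue(maxsize=max_in_flight)
        insert = session.prepare("INSERT INTO t (id) VALUES (?)")
        for _ in range(num_rows):
            if futures.full():
                # Wait on the oldest future before issuing another request;
                # result() blocks and re-raises any error from that query.
                futures.get_nowait().result()
            futures.put_nowait(session.execute_async(insert, (uuid.uuid4(),)))
        # Drain whatever is still in flight.
        while not futures.empty():
            futures.get_nowait().result()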
-from __future__ import print_function from cassandra.cluster import Cluster from greplin import scales diff --git a/ez_setup.py b/ez_setup.py index 2535472190..76e71057f0 100644 --- a/ez_setup.py +++ b/ez_setup.py @@ -20,6 +20,7 @@ import tarfile import optparse import subprocess +from urllib.request import urlopen from distutils import log @@ -148,10 +149,6 @@ def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, """ # making sure we use the absolute path to_dir = os.path.abspath(to_dir) - try: - from urllib.request import urlopen - except ImportError: - from urllib2 import urlopen tgz_name = "setuptools-%s.tar.gz" % version url = download_base + tgz_name saveto = os.path.join(to_dir, tgz_name) @@ -197,13 +194,7 @@ def _extractall(self, path=".", members=None): self.extract(tarinfo, path) # Reverse sort directories. - if sys.version_info < (2, 4): - def sorter(dir1, dir2): - return cmp(dir1.name, dir2.name) - directories.sort(sorter) - directories.reverse() - else: - directories.sort(key=operator.attrgetter('name'), reverse=True) + directories.sort(key=operator.attrgetter('name'), reverse=True) # Set correct owner, mtime and filemode on directories. for tarinfo in directories: diff --git a/requirements.txt b/requirements.txt index f784fba1b9..1d5f0bcfc4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1 @@ -geomet>=0.1,<0.3 -six >=1.9 -futures <=2.2.0 -# Futures is not required for Python 3, but it works up through 2.2.0 (after which it introduced breaking syntax). -# This is left here to make sure install -r works with any runtime. When installing via setup.py, futures is omitted -# for Python 3, in favor of the standard library implementation. -# see PYTHON-393 +geomet>=1.1 diff --git a/setup.py b/setup.py index 745d05dfb3..5144e90501 100644 --- a/setup.py +++ b/setup.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,21 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
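ez_setup.py above now imports ``urlopen`` unconditionally from ``urllib.request`` instead of falling back to ``urllib2``. A condensed sketch of the resulting Python-3-only download pattern (URL and filename handling are illustrative):

.. code-block:: python

    import os
    from urllib.request import urlopen  # no urllib2 fallback on Python 3

    def download(url, to_dir="."):
        # Fetch url into to_dir unless the file is already present.
        dest = os.path.join(os.path.abspath(to_dir), os.path.basename(url))
        if not os.path.exists(dest):
            with urlopen(url) as src, open(dest, "wb") as dst:
                dst.write(src.read())
        return dest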
-from __future__ import print_function import os import sys import warnings -if __name__ == '__main__' and sys.argv[1] == "gevent_nosetests": - print("Running gevent tests") - from gevent.monkey import patch_all - patch_all() - -if __name__ == '__main__' and sys.argv[1] == "eventlet_nosetests": - print("Running eventlet tests") - from eventlet import monkey_patch - monkey_patch() - import ez_setup ez_setup.use_setuptools() @@ -37,8 +28,6 @@ DistutilsExecError) from distutils.cmd import Command -PY3 = sys.version_info[0] == 3 - try: import subprocess has_subprocess = True @@ -51,19 +40,6 @@ with open("README.rst") as f: long_description = f.read() - -try: - from nose.commands import nosetests -except ImportError: - gevent_nosetests = None - eventlet_nosetests = None -else: - class gevent_nosetests(nosetests): - description = "run nosetests with gevent monkey patching" - - class eventlet_nosetests(nosetests): - description = "run nosetests with eventlet monkey patching" - has_cqlengine = False if __name__ == '__main__' and sys.argv[1] == "install": try: @@ -100,7 +76,7 @@ def run(self): try: os.makedirs(path) - except: + except OSError: pass if has_subprocess: @@ -108,7 +84,7 @@ def run(self): # http://docs.cython.org/src/userguide/special_methods.html#docstrings import glob for f in glob.glob("cassandra/*.so"): - print("Removing '%s' to allow docs to run on pure python modules." %(f,)) + print("Removing '%s' to allow docs to run on pure python modules." % (f,)) os.unlink(f) # Build io extension to make import and docstrings work @@ -141,14 +117,30 @@ def __init__(self, ext): self.ext = ext +is_windows = sys.platform.startswith('win32') +is_macos = sys.platform.startswith('darwin') + murmur3_ext = Extension('cassandra.cmurmur3', sources=['cassandra/cmurmur3.c']) + +def eval_env_var_as_array(varname): + val = os.environ.get(varname) + return None if not val else [v.strip() for v in val.split(',')] + + +DEFAULT_LIBEV_INCLUDES = ['/usr/include/libev', '/usr/local/include', '/opt/local/include', '/usr/include'] +DEFAULT_LIBEV_LIBDIRS = ['/usr/local/lib', '/opt/local/lib', '/usr/lib64'] +libev_includes = eval_env_var_as_array('CASS_DRIVER_LIBEV_INCLUDES') or DEFAULT_LIBEV_INCLUDES +libev_libdirs = eval_env_var_as_array('CASS_DRIVER_LIBEV_LIBS') or DEFAULT_LIBEV_LIBDIRS +if is_macos: + libev_includes.extend(['/opt/homebrew/include', os.path.expanduser('~/homebrew/include')]) + libev_libdirs.extend(['/opt/homebrew/lib']) libev_ext = Extension('cassandra.io.libevwrapper', sources=['cassandra/io/libevwrapper.c'], - include_dirs=['/usr/include/libev', '/usr/local/include', '/opt/local/include'], + include_dirs=libev_includes, libraries=['ev'], - library_dirs=['/usr/local/lib', '/opt/local/lib']) + library_dirs=libev_libdirs) platform_unsupported_msg = \ """ @@ -171,8 +163,6 @@ def __init__(self, ext): ================================================================================= """ -is_windows = os.name == 'nt' - is_pypy = "PyPy" in sys.version if is_pypy: sys.stderr.write(pypy_unsupported_msg) @@ -186,7 +176,7 @@ def __init__(self, ext): try_extensions = "--no-extensions" not in sys.argv and is_supported_platform and is_supported_arch and not os.environ.get('CASS_DRIVER_NO_EXTENSIONS') try_murmur3 = try_extensions and "--no-murmur3" not in sys.argv -try_libev = try_extensions and "--no-libev" not in sys.argv and not is_pypy and not is_windows +try_libev = try_extensions and "--no-libev" not in sys.argv and not is_pypy and not os.environ.get('CASS_DRIVER_NO_LIBEV') try_cython 
= try_extensions and "--no-cython" not in sys.argv and not is_pypy and not os.environ.get('CASS_DRIVER_NO_CYTHON') try_cython &= 'egg_info' not in sys.argv # bypass setup_requires for pip egg_info calls, which will never have --install-option"--no-cython" coming from pip @@ -301,6 +291,7 @@ def _setup_extensions(self): self.extensions.append(murmur3_ext) if try_libev: + sys.stderr.write("Appending libev extension %s\n" % libev_ext) self.extensions.append(libev_ext) if try_cython: @@ -352,7 +343,7 @@ def pre_build_check(): # We must be able to initialize the compiler if it has that method if hasattr(compiler, "initialize"): compiler.initialize() - except: + except OSError: return False executables = [] @@ -380,12 +371,6 @@ def pre_build_check(): def run_setup(extensions): kw = {'cmdclass': {'doc': DocCommand}} - if gevent_nosetests is not None: - kw['cmdclass']['gevent_nosetests'] = gevent_nosetests - - if eventlet_nosetests is not None: - kw['cmdclass']['eventlet_nosetests'] = eventlet_nosetests - kw['cmdclass']['build_ext'] = build_extensions kw['ext_modules'] = [Extension('DUMMY', [])] # dummy extension makes sure build_ext is called for install @@ -395,7 +380,7 @@ def run_setup(extensions): # 1.) build_ext eats errors at compile time, letting the install complete while producing useful feedback # 2.) there could be a case where the python environment has cython installed but the system doesn't have build tools if pre_build_check(): - cython_dep = 'Cython>=0.20,!=0.25,<0.30' + cython_dep = 'Cython>=3.0' user_specified_cython_version = os.environ.get('CASS_DRIVER_ALLOWED_CYTHON_VERSION') if user_specified_cython_version is not None: cython_dep = 'Cython==%s' % (user_specified_cython_version,) @@ -403,21 +388,19 @@ def run_setup(extensions): else: sys.stderr.write("Bypassing Cython setup requirement\n") - dependencies = ['six >=1.9', - 'geomet>=0.1,<0.3'] - - if not PY3: - dependencies.append('futures') + dependencies = ['geomet>=1.1'] _EXTRAS_REQUIRE = { - 'graph': ['gremlinpython==3.4.6'] + 'graph': ['gremlinpython==3.4.6'], + 'cle': ['cryptography>=42.0'] } setup( name='cassandra-driver', version=__version__, - description=' DataStax Driver for Apache Cassandra', + description='Apache Cassandra Python Driver', long_description=long_description, + long_description_content_type='text/x-rst', url='http://github.com/datastax/python-driver', project_urls={ 'Documentation': 'https://docs.datastax.com/en/developer/python-driver/latest/', @@ -428,13 +411,14 @@ def run_setup(extensions): packages=[ 'cassandra', 'cassandra.io', 'cassandra.cqlengine', 'cassandra.graph', 'cassandra.datastax', 'cassandra.datastax.insights', 'cassandra.datastax.graph', - 'cassandra.datastax.graph.fluent', 'cassandra.datastax.cloud' + 'cassandra.datastax.graph.fluent', 'cassandra.datastax.cloud', + "cassandra.column_encryption" ], keywords='cassandra,cql,orm,dse,graph', include_package_data=True, install_requires=dependencies, extras_require=_EXTRAS_REQUIRE, - tests_require=['nose', 'mock>=2.0.0', 'PyYAML', 'pytz', 'sure'], + tests_require=['pytest', 'PyYAML', 'pytz'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', @@ -442,12 +426,11 @@ def run_setup(extensions): 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 
3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', + 'Programming Language :: Python :: 3.13', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Topic :: Software Development :: Libraries :: Python Modules' diff --git a/test-datastax-requirements.txt b/test-datastax-requirements.txt index 3a47b8de16..d605f6dc51 100644 --- a/test-datastax-requirements.txt +++ b/test-datastax-requirements.txt @@ -1,3 +1,4 @@ -r test-requirements.txt kerberos gremlinpython==3.4.6 +cryptography >= 42.0 diff --git a/test-requirements.txt b/test-requirements.txt index f150a73247..513451b496 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,19 +1,13 @@ -r requirements.txt scales -nose -mock>1.1 -ccm>=2.1.2 -unittest2 +pytest +ccm>=3.1.5 pytz -sure pure-sasl -twisted[tls]; python_version >= '3.5' -twisted[tls]==19.2.1; python_version < '3.5' -gevent>=1.0 +twisted[tls] +gevent eventlet -cython>=0.20,<0.30 +cython>=3.0 packaging -backports.ssl_match_hostname; python_version < '2.7.9' -futurist; python_version >= '3.7' -asynctest; python_version > '3.4' -ipaddress; python_version < '3.3.0' +futurist +asynctest diff --git a/tests/__init__.py b/tests/__init__.py index cea5a872c6..7799b51399 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
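The libev extension in setup.py above now derives its search paths from comma-separated environment variables. A short illustration of how the ``eval_env_var_as_array`` helper added there interprets them (the paths are examples only):

.. code-block:: python

    import os

    def eval_env_var_as_array(varname):
        # As in setup.py: None when unset or empty, otherwise a list of
        # comma-separated entries with surrounding whitespace stripped.
        val = os.environ.get(varname)
        return None if not val else [v.strip() for v in val.split(',')]

    os.environ['CASS_DRIVER_LIBEV_INCLUDES'] = '/opt/libev/include, /usr/local/include'
    assert eval_env_var_as_array('CASS_DRIVER_LIBEV_INCLUDES') == [
        '/opt/libev/include', '/usr/local/include']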
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import logging import sys import socket @@ -23,6 +22,8 @@ import os from concurrent.futures import ThreadPoolExecutor +from cassandra import DependencyException + log = logging.getLogger() log.setLevel('DEBUG') # if nose didn't already attach a log handler, add one here @@ -35,9 +36,12 @@ def is_eventlet_monkey_patched(): if 'eventlet.patcher' not in sys.modules: return False - import eventlet.patcher - return eventlet.patcher.is_monkey_patched('socket') - + try: + import eventlet.patcher + return eventlet.patcher.is_monkey_patched('socket') + # Yet another case related to PYTHON-1364 + except AttributeError: + return False def is_gevent_monkey_patched(): if 'gevent.monkey' not in sys.modules: @@ -89,17 +93,18 @@ def is_monkey_patched(): elif "asyncio" in EVENT_LOOP_MANAGER: from cassandra.io.asyncioreactor import AsyncioConnection connection_class = AsyncioConnection - else: + log.debug("Using default event loop (libev)") try: from cassandra.io.libevreactor import LibevConnection connection_class = LibevConnection - except ImportError as e: + except DependencyException as e: log.debug('Could not import LibevConnection, ' 'using connection_class=None; ' 'failed with error:\n {}'.format( repr(e) )) + log.debug("Will attempt to set connection class at cluster initialization") connection_class = None diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py index 1e1f582804..3b0103db31 100644 --- a/tests/integration/__init__.py +++ b/tests/integration/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,16 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
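The monkey-patch helpers in tests/__init__.py above consult ``sys.modules`` before importing gevent or eventlet, so the check itself cannot change the environment it inspects. A hedged sketch of how a test might consume the ``is_monkey_patched`` helper (the test class is illustrative):

.. code-block:: python

    import unittest

    from tests import is_monkey_patched  # helper defined in tests/__init__.py

    @unittest.skipUnless(is_monkey_patched(),
                         "requires a gevent- or eventlet-patched runtime")
    class PatchedLoopTest(unittest.TestCase):
        def test_runtime_is_patched(self):
            # Reaching this point implies the selected event loop manager
            # patched the socket module before the tests were imported.
            self.assertTrue(is_monkey_patched())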
+import re import os from cassandra.cluster import Cluster from tests import connection_class, EVENT_LOOP_MANAGER Cluster.connection_class = connection_class -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from packaging.version import Version import logging @@ -33,7 +33,6 @@ from threading import Event from subprocess import call from itertools import groupby -import six import shutil from cassandra import OperationTimedOut, ReadTimeout, ReadFailure, WriteTimeout, WriteFailure, AlreadyExists,\ @@ -43,6 +42,7 @@ try: from ccmlib.dse_cluster import DseCluster + from ccmlib.hcd_cluster import HcdCluster from ccmlib.cluster import Cluster as CCMCluster from ccmlib.cluster_factory import ClusterFactory as CCMClusterFactory from ccmlib import common @@ -55,6 +55,14 @@ SINGLE_NODE_CLUSTER_NAME = 'single_node' MULTIDC_CLUSTER_NAME = 'multidc_test_cluster' +# When use_single_interface is specified ccm will assign distinct port numbers to each +# node in the cluster. This value specifies the default port value used for the first +# node that comes up. +# +# TODO: In the future we may want to make this configurable, but this should only apply +# if a non-standard port were specified when starting up the cluster. +DEFAULT_SINGLE_INTERFACE_PORT=9046 + CCM_CLUSTER = None path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'ccm') @@ -142,6 +150,10 @@ def _get_cass_version_from_dse(dse_version): return Version(cass_ver) +def _get_cass_version_from_hcd(hcd_version): + return Version("4.0.11") + + def _get_dse_version_from_cass(cass_version): if cass_version.startswith('2.1'): dse_ver = "4.8.15" @@ -161,13 +173,18 @@ def _get_dse_version_from_cass(cass_version): SIMULACRON_JAR = os.getenv('SIMULACRON_JAR', None) CLOUD_PROXY_PATH = os.getenv('CLOUD_PROXY_PATH', None) -# Supported Clusters: Cassandra, DDAC, DSE +# Supported Clusters: Cassandra, DDAC, DSE, HCD DSE_VERSION = None +HCD_VERSION = None if os.getenv('DSE_VERSION', None): # we are testing against DSE DSE_VERSION = Version(os.getenv('DSE_VERSION', None)) DSE_CRED = os.getenv('DSE_CREDS', None) CASSANDRA_VERSION = _get_cass_version_from_dse(DSE_VERSION.base_version) CCM_VERSION = DSE_VERSION.base_version +elif os.getenv('HCD_VERSION', None): # we are testing against HCD + HCD_VERSION = Version(os.getenv('HCD_VERSION', None)) + CASSANDRA_VERSION = _get_cass_version_from_hcd(HCD_VERSION.base_version) + CCM_VERSION = HCD_VERSION.base_version else: # we are testing against Cassandra or DDAC cv_string = os.getenv('CASSANDRA_VERSION', None) mcv_string = os.getenv('MAPPED_CASSANDRA_VERSION', None) @@ -191,6 +208,9 @@ def _get_dse_version_from_cass(cass_version): if DSE_CRED: log.info("Using DSE credentials file located at {0}".format(DSE_CRED)) CCM_KWARGS['dse_credentials_file'] = DSE_CRED +elif HCD_VERSION: + log.info('Using HCD version: %s', HCD_VERSION) + CCM_KWARGS['version'] = HCD_VERSION elif CASSANDRA_DIR: log.info("Using Cassandra dir: %s", CASSANDRA_DIR) CCM_KWARGS['install_dir'] = CASSANDRA_DIR @@ -207,8 +227,6 @@ def get_default_protocol(): if DSE_VERSION: return ProtocolVersion.DSE_V2 else: - global ALLOW_BETA_PROTOCOL - ALLOW_BETA_PROTOCOL = True return ProtocolVersion.V5 if CASSANDRA_VERSION >= Version('3.10'): if DSE_VERSION: @@ -234,9 +252,12 @@ def get_supported_protocol_versions(): 3.X -> 4, 3 3.10(C*) -> 5(beta),4,3 3.10(DSE) -> DSE_V1,4,3 - 4.0(C*) -> 5(beta),4,3 + 4.0(C*) -> 6(beta),5,4,3 4.0(DSE) -> DSE_v2, DSE_V1,4,3 ` """ + if CASSANDRA_VERSION >= Version('4.0-beta5'): + 
if not DSE_VERSION: + return (3, 4, 5, 6) if CASSANDRA_VERSION >= Version('4.0-a'): if DSE_VERSION: return (3, 4, ProtocolVersion.DSE_V1, ProtocolVersion.DSE_V2) @@ -316,7 +337,7 @@ def _id_and_mark(f): notprotocolv1 = unittest.skipUnless(PROTOCOL_VERSION > 1, 'Protocol v1 not supported') lessthenprotocolv4 = unittest.skipUnless(PROTOCOL_VERSION < 4, 'Protocol versions 4 or greater not supported') greaterthanprotocolv3 = unittest.skipUnless(PROTOCOL_VERSION >= 4, 'Protocol versions less than 4 are not supported') -protocolv5 = unittest.skipUnless(5 in get_supported_protocol_versions(), 'Protocol versions less than 5 are not supported') +protocolv6 = unittest.skipUnless(6 in get_supported_protocol_versions(), 'Protocol versions less than 6 are not supported') greaterthancass20 = unittest.skipUnless(CASSANDRA_VERSION >= Version('2.1'), 'Cassandra version 2.1 or greater required') greaterthancass21 = unittest.skipUnless(CASSANDRA_VERSION >= Version('2.2'), 'Cassandra version 2.2 or greater required') greaterthanorequalcass30 = unittest.skipUnless(CASSANDRA_VERSION >= Version('3.0'), 'Cassandra version 3.0 or greater required') @@ -324,9 +345,10 @@ def _id_and_mark(f): greaterthanorequalcass36 = unittest.skipUnless(CASSANDRA_VERSION >= Version('3.6'), 'Cassandra version 3.6 or greater required') greaterthanorequalcass3_10 = unittest.skipUnless(CASSANDRA_VERSION >= Version('3.10'), 'Cassandra version 3.10 or greater required') greaterthanorequalcass3_11 = unittest.skipUnless(CASSANDRA_VERSION >= Version('3.11'), 'Cassandra version 3.11 or greater required') -greaterthanorequalcass40 = unittest.skipUnless(CASSANDRA_VERSION >= Version('4.0-a'), 'Cassandra version 4.0 or greater required') -lessthanorequalcass40 = unittest.skipUnless(CASSANDRA_VERSION <= Version('4.0-a'), 'Cassandra version less or equal to 4.0 required') -lessthancass40 = unittest.skipUnless(CASSANDRA_VERSION < Version('4.0-a'), 'Cassandra version less than 4.0 required') +greaterthanorequalcass40 = unittest.skipUnless(CASSANDRA_VERSION >= Version('4.0'), 'Cassandra version 4.0 or greater required') +greaterthanorequalcass50 = unittest.skipUnless(CASSANDRA_VERSION >= Version('5.0-beta'), 'Cassandra version 5.0 or greater required') +lessthanorequalcass40 = unittest.skipUnless(CASSANDRA_VERSION <= Version('4.0'), 'Cassandra version less or equal to 4.0 required') +lessthancass40 = unittest.skipUnless(CASSANDRA_VERSION < Version('4.0'), 'Cassandra version less than 4.0 required') lessthancass30 = unittest.skipUnless(CASSANDRA_VERSION < Version('3.0'), 'Cassandra version less then 3.0 required') greaterthanorequaldse68 = unittest.skipUnless(DSE_VERSION and DSE_VERSION >= Version('6.8'), "DSE 6.8 or greater required for this test") greaterthanorequaldse67 = unittest.skipUnless(DSE_VERSION and DSE_VERSION >= Version('6.7'), "DSE 6.7 or greater required for this test") @@ -335,9 +357,9 @@ def _id_and_mark(f): greaterthanorequaldse50 = unittest.skipUnless(DSE_VERSION and DSE_VERSION >= Version('5.0'), "DSE 5.0 or greater required for this test") lessthandse51 = unittest.skipUnless(DSE_VERSION and DSE_VERSION < Version('5.1'), "DSE version less than 5.1 required") lessthandse60 = unittest.skipUnless(DSE_VERSION and DSE_VERSION < Version('6.0'), "DSE version less than 6.0 required") +lessthandse69 = unittest.skipUnless(DSE_VERSION and DSE_VERSION < Version('6.9'), "DSE version less than 6.9 required") pypy = unittest.skipUnless(platform.python_implementation() == "PyPy", "Test is skipped unless it's on PyPy") -notpy3 = 
unittest.skipIf(sys.version_info >= (3, 0), "Test not applicable for Python 3.x runtime") requiresmallclockgranularity = unittest.skipIf("Windows" in platform.system() or "asyncore" in EVENT_LOOP_MANAGER, "This test is not suitible for environments with large clock granularity") requiressimulacron = unittest.skipIf(SIMULACRON_JAR is None or CASSANDRA_VERSION < Version("2.1"), "Simulacron jar hasn't been specified or C* version is 2.0") @@ -382,15 +404,15 @@ def get_node(node_id): return CCM_CLUSTER.nodes['node%s' % node_id] -def use_multidc(dc_list, workloads=[]): +def use_multidc(dc_list, workloads=None): use_cluster(MULTIDC_CLUSTER_NAME, dc_list, start=True, workloads=workloads) -def use_singledc(start=True, workloads=[], use_single_interface=USE_SINGLE_INTERFACE): +def use_singledc(start=True, workloads=None, use_single_interface=USE_SINGLE_INTERFACE): use_cluster(CLUSTER_NAME, [3], start=start, workloads=workloads, use_single_interface=use_single_interface) -def use_single_node(start=True, workloads=[], configuration_options={}, dse_options={}): +def use_single_node(start=True, workloads=None, configuration_options=None, dse_options=None): use_cluster(SINGLE_NODE_CLUSTER_NAME, [1], start=start, workloads=workloads, configuration_options=configuration_options, dse_options=dse_options) @@ -452,12 +474,14 @@ def start_cluster_wait_for_up(cluster): def use_cluster(cluster_name, nodes, ipformat=None, start=True, workloads=None, set_keyspace=True, ccm_options=None, - configuration_options={}, dse_options={}, use_single_interface=USE_SINGLE_INTERFACE): + configuration_options=None, dse_options=None, use_single_interface=USE_SINGLE_INTERFACE): + configuration_options = configuration_options or {} + dse_options = dse_options or {} + workloads = workloads or [] dse_cluster = True if DSE_VERSION else False - if not workloads: - workloads = [] + hcd_cluster = True if HCD_VERSION else False - if ccm_options is None and DSE_VERSION: + if ccm_options is None and (DSE_VERSION or HCD_VERSION): ccm_options = {"version": CCM_VERSION} elif ccm_options is None: ccm_options = CCM_KWARGS.copy() @@ -555,19 +579,36 @@ def use_cluster(cluster_name, nodes, ipformat=None, start=True, workloads=None, CCM_CLUSTER.populate(nodes, ipformat=ipformat) CCM_CLUSTER.set_dse_configuration_options(dse_options) + elif hcd_cluster: + CCM_CLUSTER = HcdCluster(path, cluster_name, **ccm_options) + CCM_CLUSTER.set_configuration_options({'start_native_transport': True}) + CCM_CLUSTER.set_configuration_options({'batch_size_warn_threshold_in_kb': 5}) + CCM_CLUSTER.set_configuration_options({'enable_user_defined_functions': True}) + CCM_CLUSTER.set_configuration_options({'enable_scripted_user_defined_functions': True}) + CCM_CLUSTER.set_configuration_options({'enable_materialized_views': True}) + CCM_CLUSTER.set_configuration_options({'enable_sasi_indexes': True}) + CCM_CLUSTER.set_configuration_options({'enable_transient_replication': True}) + common.switch_cluster(path, cluster_name) + CCM_CLUSTER.set_configuration_options(configuration_options) + CCM_CLUSTER.populate(nodes, ipformat=ipformat, use_single_interface=use_single_interface) else: - CCM_CLUSTER = CCMCluster(path, cluster_name, **ccm_options) + ccm_cluster_clz = CCMCluster if Version(cassandra_version) < Version('4.1') else Cassandra41CCMCluster + CCM_CLUSTER = ccm_cluster_clz(path, cluster_name, **ccm_options) CCM_CLUSTER.set_configuration_options({'start_native_transport': True}) if Version(cassandra_version) >= Version('2.2'): 
CCM_CLUSTER.set_configuration_options({'enable_user_defined_functions': True}) if Version(cassandra_version) >= Version('3.0'): - CCM_CLUSTER.set_configuration_options({'enable_scripted_user_defined_functions': True}) - if Version(cassandra_version) >= Version('4.0-a'): + # The cassandra.yaml option below is deprecated in C* 4.0 per CASSANDRA-17280 + if Version(cassandra_version) < Version('4.0'): + CCM_CLUSTER.set_configuration_options({'enable_scripted_user_defined_functions': True}) + else: + # Cassandra version >= 4.0 CCM_CLUSTER.set_configuration_options({ 'enable_materialized_views': True, 'enable_sasi_indexes': True, 'enable_transient_replication': True, }) + common.switch_cluster(path, cluster_name) CCM_CLUSTER.set_configuration_options(configuration_options) CCM_CLUSTER.populate(nodes, ipformat=ipformat, use_single_interface=use_single_interface) @@ -594,13 +635,16 @@ def use_cluster(cluster_name, nodes, ipformat=None, start=True, workloads=None, wait_for_node_socket(node, 300) log.debug("Binary ports are open") if set_keyspace: - setup_keyspace(ipformat=ipformat) + args = {"ipformat": ipformat} + if use_single_interface: + args["port"] = DEFAULT_SINGLE_INTERFACE_PORT + setup_keyspace(**args) except Exception: log.exception("Failed to start CCM cluster; removing cluster.") if os.name == "nt": if CCM_CLUSTER: - for node in six.itervalues(CCM_CLUSTER.nodes): + for node in CCM_CLUSTER.nodes.values(): os.system("taskkill /F /PID " + str(node.pid)) else: call(["pkill", "-9", "-f", ".ccm"]) @@ -684,7 +728,7 @@ def drop_keyspace_shutdown_cluster(keyspace_name, session, cluster): try: execute_with_long_wait_retry(session, "DROP KEYSPACE {0}".format(keyspace_name)) except: - log.warning("Error encountered when droping keyspace {0}".format(keyspace_name)) + log.warning("Error encountered when dropping keyspace {0}".format(keyspace_name)) ex_type, ex, tb = sys.exc_info() log.warning("{0}: {1} Backtrace: {2}".format(ex_type.__name__, ex, traceback.extract_tb(tb))) del tb @@ -693,7 +737,7 @@ def drop_keyspace_shutdown_cluster(keyspace_name, session, cluster): cluster.shutdown() -def setup_keyspace(ipformat=None, wait=True, protocol_version=None): +def setup_keyspace(ipformat=None, wait=True, protocol_version=None, port=9042): # wait for nodes to startup if wait: time.sleep(10) @@ -704,9 +748,9 @@ def setup_keyspace(ipformat=None, wait=True, protocol_version=None): _protocol_version = PROTOCOL_VERSION if not ipformat: - cluster = TestCluster(protocol_version=_protocol_version) + cluster = TestCluster(protocol_version=_protocol_version, port=port) else: - cluster = TestCluster(contact_points=["::1"], protocol_version=_protocol_version) + cluster = TestCluster(contact_points=["::1"], protocol_version=_protocol_version, port=port) session = cluster.connect() try: @@ -989,6 +1033,7 @@ def assert_startswith(s, prefix): class TestCluster(object): + __test__ = False DEFAULT_PROTOCOL_VERSION = default_protocol_version DEFAULT_CASSANDRA_IP = CASSANDRA_IP DEFAULT_ALLOW_BETA = ALLOW_BETA_PROTOCOL @@ -1002,3 +1047,38 @@ def __new__(cls, **kwargs): kwargs['allow_beta_protocol_version'] = cls.DEFAULT_ALLOW_BETA return Cluster(**kwargs) +# Subclass of CCMCluster (i.e. 
ccmlib.cluster.Cluster) which transparently performs +# conversion of cassandra.yaml directives into something matching the new syntax +# introduced by CASSANDRA-15234 +class Cassandra41CCMCluster(CCMCluster): + __test__ = False + IN_MS_REGEX = re.compile(r'^(\w+)_in_ms$') + IN_KB_REGEX = re.compile(r'^(\w+)_in_kb$') + ENABLE_REGEX = re.compile(r'^enable_(\w+)$') + + def _get_config_key(self, k, v): + if "." in k: + return k + m = self.IN_MS_REGEX.match(k) + if m: + return m.group(1) + m = self.ENABLE_REGEX.search(k) + if m: + return "%s_enabled" % (m.group(1)) + m = self.IN_KB_REGEX.match(k) + if m: + return m.group(1) + return k + + def _get_config_val(self, k, v): + m = self.IN_MS_REGEX.match(k) + if m: + return "%sms" % (v) + m = self.IN_KB_REGEX.match(k) + if m: + return "%sKiB" % (v) + return v + + def set_configuration_options(self, values=None, *args, **kwargs): + new_values = {self._get_config_key(k, str(v)): self._get_config_val(k, str(v)) for (k, v) in values.items()} + super(Cassandra41CCMCluster, self).set_configuration_options(values=new_values, *args, **kwargs) \ No newline at end of file diff --git a/tests/integration/advanced/__init__.py b/tests/integration/advanced/__init__.py index b2820e037b..b1ed70f157 100644 --- a/tests/integration/advanced/__init__.py +++ b/tests/integration/advanced/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,12 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest -from six.moves.urllib.request import build_opener, Request, HTTPHandler +from urllib.request import build_opener, Request, HTTPHandler import re import os import time @@ -91,7 +90,7 @@ def use_singledc_wth_graph_and_spark(start=True): def use_cluster_with_graph(num_nodes): """ - This is a work around to account for the fact that spark nodes will conflict over master assignment + This is a workaround to account for the fact that spark nodes will conflict over master assignment when started all at once. """ if USE_CASS_EXTERNAL: @@ -128,7 +127,7 @@ def use_cluster_with_graph(num_nodes): class BasicGeometricUnitTestCase(BasicKeyspaceUnitTestCase): """ - This base test class is used by all the geomteric tests. It contains class level teardown and setup + This base test class is used by all the geometric tests. It contains class level teardown and setup methods. 
It also contains the test fixtures used by those tests """ diff --git a/tests/integration/advanced/graph/__init__.py b/tests/integration/advanced/graph/__init__.py index 6c9458dd02..71554c9bad 100644 --- a/tests/integration/advanced/graph/__init__.py +++ b/tests/integration/advanced/graph/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -22,7 +24,6 @@ import datetime from cassandra.util import Point, LineString, Polygon, Duration -import six from cassandra.cluster import EXEC_PROFILE_GRAPH_DEFAULT, EXEC_PROFILE_GRAPH_ANALYTICS_DEFAULT from cassandra.cluster import GraphAnalyticsExecutionProfile, GraphExecutionProfile, EXEC_PROFILE_GRAPH_SYSTEM_DEFAULT, \ @@ -51,11 +52,6 @@ def setup_module(): MIN_LONG = -9223372036854775808 ZERO_LONG = 0 -if sys.version_info < (3, 0): - MAX_LONG = long(MAX_LONG) - MIN_LONG = long(MIN_LONG) - ZERO_LONG = long(ZERO_LONG) - MAKE_STRICT = "schema.config().option('graph.schema_mode').set('production')" MAKE_NON_STRICT = "schema.config().option('graph.schema_mode').set('development')" ALLOW_SCANS = "schema.config().option('graph.allow_scan').set('true')" @@ -457,15 +453,11 @@ def datatypes(): "duration1": ["Duration()", datetime.timedelta(1, 16, 0), GraphSON1Deserializer.deserialize_duration], "duration2": ["Duration()", datetime.timedelta(days=1, seconds=16, milliseconds=15), - GraphSON1Deserializer.deserialize_duration] + GraphSON1Deserializer.deserialize_duration], + "blob3": ["Blob()", bytes(b"Hello World Again"), GraphSON1Deserializer.deserialize_blob], + "blob4": ["Blob()", memoryview(b"And Again Hello World"), GraphSON1Deserializer.deserialize_blob] } - if six.PY2: - data["blob2"] = ["Blob()", buffer(b"Hello World"), GraphSON1Deserializer.deserialize_blob] - else: - data["blob3"] = ["Blob()", bytes(b"Hello World Again"), GraphSON1Deserializer.deserialize_blob] - data["blob4"] = ["Blob()", memoryview(b"And Again Hello World"), GraphSON1Deserializer.deserialize_blob] - if DSE_VERSION >= Version("5.1"): data["time1"] = ["Time()", datetime.time(12, 6, 12, 444), GraphSON1Deserializer.deserialize_time] data["time2"] = ["Time()", datetime.time(12, 6, 12), GraphSON1Deserializer.deserialize_time] @@ -965,7 +957,7 @@ def generate_tests(cls, schema=None, graphson=None, traversal=False): """Generate tests for a graph configuration""" def decorator(klass): if DSE_VERSION: - predicate = inspect.ismethod if six.PY2 else inspect.isfunction + predicate = inspect.isfunction for name, func in inspect.getmembers(klass, predicate=predicate): if not name.startswith('_test'): continue @@ -984,7 +976,7 @@ def generate_schema_tests(cls, schema=None): """Generate schema tests for a graph configuration""" def 
decorator(klass): if DSE_VERSION: - predicate = inspect.ismethod if six.PY2 else inspect.isfunction + predicate = inspect.isfunction for name, func in inspect.getmembers(klass, predicate=predicate): if not name.startswith('_test'): continue @@ -1026,7 +1018,7 @@ def __init__(self, properties): @property def non_pk_properties(self): - return {p: v for p, v in six.iteritems(self.properties) if p != 'pkid'} + return {p: v for p, v in self.properties.items() if p != 'pkid'} class GraphSchema(object): @@ -1134,7 +1126,7 @@ def clear(session): @classmethod def create_vertex_label(cls, session, vertex_label, execution_profile=EXEC_PROFILE_GRAPH_DEFAULT): statements = ["schema.propertyKey('pkid').Int().ifNotExists().create();"] - for k, v in six.iteritems(vertex_label.non_pk_properties): + for k, v in vertex_label.non_pk_properties.items(): typ = cls.sanitize_type(v) statements.append("schema.propertyKey('{name}').{type}.create();".format( name=k, type=typ @@ -1142,7 +1134,7 @@ def create_vertex_label(cls, session, vertex_label, execution_profile=EXEC_PROFI statements.append("schema.vertexLabel('{label}').partitionKey('pkid').properties(".format( label=vertex_label.label)) - property_names = [name for name in six.iterkeys(vertex_label.non_pk_properties)] + property_names = [name for name in vertex_label.non_pk_properties.keys()] statements.append(", ".join(["'{}'".format(p) for p in property_names])) statements.append(").create();") @@ -1189,7 +1181,7 @@ def create_vertex_label(cls, session, vertex_label, execution_profile=EXEC_PROFI statements = ["schema.vertexLabel('{label}').partitionBy('pkid', Int)".format( label=vertex_label.label)] - for name, typ in six.iteritems(vertex_label.non_pk_properties): + for name, typ in vertex_label.non_pk_properties.items(): typ = cls.sanitize_type(typ) statements.append(".property('{name}', {type})".format(name=name, type=typ)) statements.append(".create();") diff --git a/tests/integration/advanced/graph/fluent/__init__.py b/tests/integration/advanced/graph/fluent/__init__.py index 3bb81e78e3..1c07cd46c0 100644 --- a/tests/integration/advanced/graph/fluent/__init__.py +++ b/tests/integration/advanced/graph/fluent/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +16,6 @@ import sys import datetime -import six import time from collections import namedtuple from packaging.version import Version @@ -35,10 +36,7 @@ VertexLabel) from tests.integration import requiredse -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import ipaddress @@ -460,10 +458,10 @@ def _test_udt_with_namedtuples(self, schema, graphson): def _write_and_read_data_types(self, schema, graphson, use_schema=True): g = self.fetch_traversal_source(graphson) ep = self.get_execution_profile(graphson) - for data in six.itervalues(schema.fixtures.datatypes()): + for data in schema.fixtures.datatypes().values(): typ, value, deserializer = data vertex_label = VertexLabel([typ]) - property_name = next(six.iterkeys(vertex_label.non_pk_properties)) + property_name = next(iter(vertex_label.non_pk_properties.keys())) if use_schema or schema is CoreGraphSchema: schema.create_vertex_label(self.session, vertex_label, execution_profile=ep) @@ -539,9 +537,9 @@ def __test_udt(self, schema, graphson, address_class, address_with_tags_class, } g = self.fetch_traversal_source(graphson) - for typ, value in six.itervalues(data): + for typ, value in data.values(): vertex_label = VertexLabel([typ]) - property_name = next(six.iterkeys(vertex_label.non_pk_properties)) + property_name = next(iter(vertex_label.non_pk_properties.keys())) schema.create_vertex_label(self.session, vertex_label, execution_profile=ep) write_traversal = g.addV(str(vertex_label.label)).property('pkid', vertex_label.id). \ @@ -600,7 +598,7 @@ def _validate_prop(key, value, unittest): elif any(key.startswith(t) for t in ('Linestring',)): typ = LineString elif any(key.startswith(t) for t in ('neg',)): - typ = six.string_types + typ = str elif any(key.startswith(t) for t in ('date',)): typ = datetime.date elif any(key.startswith(t) for t in ('time',)): diff --git a/tests/integration/advanced/graph/fluent/test_graph.py b/tests/integration/advanced/graph/fluent/test_graph.py index 02611c12c0..a2c01affb3 100644 --- a/tests/integration/advanced/graph/fluent/test_graph.py +++ b/tests/integration/advanced/graph/fluent/test_graph.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,8 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
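The six shims removed throughout these test modules all map onto Python 3 built-ins. A compact reference of the substitutions this patch applies (the dictionary is illustrative):

.. code-block:: python

    d = {'pkid': 1, 'name': 'v'}

    values = list(d.values())          # six.itervalues(d)
    first_key = next(iter(d.keys()))   # next(six.iterkeys(d))
    non_pk = {k: v for k, v in d.items() if k != 'pkid'}   # six.iteritems(d)
    assert isinstance('abc', str)      # six.string_types -> str
    assert 'text'.encode() == b'text'  # six.b('text')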
-import six - from cassandra import cluster from cassandra.cluster import ContinuousPagingOptions from cassandra.datastax.graph.fluent import DseGraph @@ -28,10 +28,7 @@ from tests.integration.advanced.graph.fluent import ( BaseExplicitExecutionTest, create_traversal_profiles, check_equality_base) -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest @greaterthanorequaldse60 @@ -123,10 +120,10 @@ def _send_batch_and_read_results(self, schema, graphson, add_all=False, use_sche ep = self.get_execution_profile(graphson) batch = DseGraph.batch(session=self.session, execution_profile=self.get_execution_profile(graphson, traversal=True)) - for data in six.itervalues(datatypes): + for data in datatypes.values(): typ, value, deserializer = data vertex_label = VertexLabel([typ]) - property_name = next(six.iterkeys(vertex_label.non_pk_properties)) + property_name = next(iter(vertex_label.non_pk_properties.keys())) values[property_name] = value if use_schema or schema is CoreGraphSchema: schema.create_vertex_label(self.session, vertex_label, execution_profile=ep) diff --git a/tests/integration/advanced/graph/fluent/test_graph_explicit_execution.py b/tests/integration/advanced/graph/fluent/test_graph_explicit_execution.py index 1a5846203d..a5dd4306c5 100644 --- a/tests/integration/advanced/graph/fluent/test_graph_explicit_execution.py +++ b/tests/integration/advanced/graph/fluent/test_graph_explicit_execution.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/advanced/graph/fluent/test_graph_implicit_execution.py b/tests/integration/advanced/graph/fluent/test_graph_implicit_execution.py index 50e6795867..1407dd1ea3 100644 --- a/tests/integration/advanced/graph/fluent/test_graph_implicit_execution.py +++ b/tests/integration/advanced/graph/fluent/test_graph_implicit_execution.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/advanced/graph/fluent/test_search.py b/tests/integration/advanced/graph/fluent/test_search.py index d50016d576..b6857f3560 100644 --- a/tests/integration/advanced/graph/fluent/test_search.py +++ b/tests/integration/advanced/graph/fluent/test_search.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/advanced/graph/test_graph.py b/tests/integration/advanced/graph/test_graph.py index a0b6534c34..3624b5e1ef 100644 --- a/tests/integration/advanced/graph/test_graph.py +++ b/tests/integration/advanced/graph/test_graph.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import six import re from cassandra import OperationTimedOut, InvalidRequest @@ -266,6 +267,6 @@ def test_graph_protocol_default_for_core_fallback_to_graphson1_if_no_graph_name( self.assertEqual(ep.row_factory, graph_object_row_factory) regex = re.compile(".*Variable.*is unknown.*", re.S) - with six.assertRaisesRegex(self, SyntaxException, regex): + with self.assertRaisesRegex(SyntaxException, regex): self.execute_graph_queries(CoreGraphSchema.fixtures.classic(), execution_profile=ep, verify_graphson=GraphProtocol.GRAPHSON_1_0) diff --git a/tests/integration/advanced/graph/test_graph_cont_paging.py b/tests/integration/advanced/graph/test_graph_cont_paging.py index 065d01d939..17c43c4e3d 100644 --- a/tests/integration/advanced/graph/test_graph_cont_paging.py +++ b/tests/integration/advanced/graph/test_graph_cont_paging.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/advanced/graph/test_graph_datatype.py b/tests/integration/advanced/graph/test_graph_datatype.py index 222b1f5ace..0fda2f0d44 100644 --- a/tests/integration/advanced/graph/test_graph_datatype.py +++ b/tests/integration/advanced/graph/test_graph_datatype.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,13 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
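``six.assertRaisesRegex(self, exc, regex)`` becomes a plain ``TestCase`` method under Python 3, as in the test_graph.py hunk above. A self-contained illustration of the idiom (``ValueError`` stands in for the driver's ``SyntaxException``):

.. code-block:: python

    import re
    import unittest

    class RegexAssertionExample(unittest.TestCase):
        def test_message_matches(self):
            regex = re.compile(".*Variable.*is unknown.*", re.S)
            # Python 3: the assertion lives on TestCase itself.
            with self.assertRaisesRegex(ValueError, regex):
                raise ValueError("Variable 'x' is unknown to this query")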
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import time -import six import logging from packaging.version import Version from collections import namedtuple @@ -70,13 +68,13 @@ def _validate_type(self, vertex): if any(type_indicator.startswith(t) for t in ('int', 'short', 'long', 'bigint', 'decimal', 'smallint', 'varint')): - typ = six.integer_types + typ = int elif any(type_indicator.startswith(t) for t in ('float', 'double')): typ = float elif any(type_indicator.startswith(t) for t in ('duration', 'date', 'negdate', 'time', 'blob', 'timestamp', 'point', 'linestring', 'polygon', 'inet', 'uuid')): - typ = six.text_type + typ = str else: pass self.fail("Received unexpected type: %s" % type_indicator) @@ -88,10 +86,10 @@ class GenericGraphDataTypeTest(GraphUnitTestCase): def _test_all_datatypes(self, schema, graphson): ep = self.get_execution_profile(graphson) - for data in six.itervalues(schema.fixtures.datatypes()): + for data in schema.fixtures.datatypes().values(): typ, value, deserializer = data vertex_label = VertexLabel([typ]) - property_name = next(six.iterkeys(vertex_label.non_pk_properties)) + property_name = next(iter(vertex_label.non_pk_properties.keys())) schema.create_vertex_label(self.session, vertex_label, execution_profile=ep) vertex = list(schema.add_vertex(self.session, vertex_label, property_name, value, execution_profile=ep))[0] @@ -170,9 +168,9 @@ def __test_udt(self, schema, graphson, address_class, address_with_tags_class, ), 'hello')] } - for typ, value in six.itervalues(data): + for typ, value in data.values(): vertex_label = VertexLabel([typ]) - property_name = next(six.iterkeys(vertex_label.non_pk_properties)) + property_name = next(iter(vertex_label.non_pk_properties.keys())) schema.create_vertex_label(self.session, vertex_label, execution_profile=ep) vertex = list(schema.add_vertex(self.session, vertex_label, property_name, value, execution_profile=ep))[0] diff --git a/tests/integration/advanced/graph/test_graph_query.py b/tests/integration/advanced/graph/test_graph_query.py index 0eda67894d..5bad1b71c5 100644 --- a/tests/integration/advanced/graph/test_graph_query.py +++ b/tests/integration/advanced/graph/test_graph_query.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +16,6 @@ import sys -import six from packaging.version import Version from copy import copy @@ -22,10 +23,7 @@ import json import time -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra import OperationTimedOut, ConsistencyLevel, InvalidRequest from cassandra.cluster import EXEC_PROFILE_GRAPH_DEFAULT, NoHostAvailable @@ -59,7 +57,7 @@ def test_consistency_passing(self): cl_attrs = ('graph_read_consistency_level', 'graph_write_consistency_level') # Iterates over the graph options and constructs an array containing - # The graph_options that correlate to graoh read and write consistency levels + # The graph_options that correlate to graph read and write consistency levels graph_params = [a[2] for a in _graph_options if a[0] in cl_attrs] s = self.session @@ -86,7 +84,7 @@ def test_consistency_passing(self): res = s.execute_graph("null") for k, v in cl.items(): - self.assertEqual(res.response_future.message.custom_payload[graph_params[k]], six.b(ConsistencyLevel.value_to_name[v])) + self.assertEqual(res.response_future.message.custom_payload[graph_params[k]], ConsistencyLevel.value_to_name[v].encode()) # passed profile values override session defaults cl = {0: ConsistencyLevel.ALL, 1: ConsistencyLevel.QUORUM} @@ -100,7 +98,7 @@ def test_consistency_passing(self): res = s.execute_graph("null", execution_profile=tmp_profile) for k, v in cl.items(): - self.assertEqual(res.response_future.message.custom_payload[graph_params[k]], six.b(ConsistencyLevel.value_to_name[v])) + self.assertEqual(res.response_future.message.custom_payload[graph_params[k]], ConsistencyLevel.value_to_name[v].encode()) finally: default_profile.graph_options = default_graph_opts @@ -248,7 +246,7 @@ def _test_range_query(self, schema, graphson): """ Test to validate range queries are handled correctly. - Creates a very large line graph script and executes it. Then proceeds to to a range + Creates a very large line graph script and executes it. Then proceeds to a range limited query against it, and ensure that the results are formatted correctly and that the result set is properly sized. 
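The six removals in the hunks above all follow the same mechanical mapping onto Python 3 builtins. A minimal standalone sketch of the equivalences this migration relies on (illustrative only, not part of any file in this patch):

    # Python 3 replacements for the six helpers dropped throughout this patch.
    d = {'pk': 1}
    assert list(d.values()) == [1]            # six.itervalues(d)
    assert next(iter(d.keys())) == 'pk'       # next(six.iterkeys(d))
    assert isinstance('text', str)            # six.text_type / six.string_types
    assert isinstance(2 ** 64, int)           # six.integer_types (int covers long)
    assert 'ALL'.encode() == b'ALL'           # six.b('ALL'), for ASCII payloads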
@@ -335,7 +333,7 @@ def _test_large_create_script(self, schema, graphson): @test_category dse graph """ self.execute_graph(schema.fixtures.line(150), graphson) - self.execute_graph(schema.fixtures.line(300), graphson) # This should passed since the queries are splitted + self.execute_graph(schema.fixtures.line(300), graphson) # This should pass since the queries are split self.assertRaises(SyntaxException, self.execute_graph, schema.fixtures.line(300, single_script=True), graphson) # this is not and too big def _test_large_result_set(self, schema, graphson): @@ -591,7 +589,7 @@ def _test_basic_query_with_type_wrapper(self, schema, graphson): vl = VertexLabel(['tupleOf(Int, Bigint)']) schema.create_vertex_label(self.session, vl, execution_profile=ep) - prop_name = next(six.iterkeys(vl.non_pk_properties)) + prop_name = next(iter(vl.non_pk_properties.keys())) with self.assertRaises(InvalidRequest): schema.add_vertex(self.session, vl, prop_name, (1, 42), execution_profile=ep) diff --git a/tests/integration/advanced/test_adv_metadata.py b/tests/integration/advanced/test_adv_metadata.py index b3af6fa5d1..80309302f0 100644 --- a/tests/integration/advanced/test_adv_metadata.py +++ b/tests/integration/advanced/test_adv_metadata.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -20,10 +22,7 @@ greaterthanorequaldse68, use_single_node, DSE_VERSION, requiredse, TestCluster) -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import logging import time diff --git a/tests/integration/advanced/test_auth.py b/tests/integration/advanced/test_auth.py index 7e9aa8c23e..df1f385f74 100644 --- a/tests/integration/advanced/test_auth.py +++ b/tests/integration/advanced/test_auth.py @@ -1,20 +1,19 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import logging import os import subprocess @@ -70,7 +69,7 @@ class BasicDseAuthTest(unittest.TestCase): @classmethod def setUpClass(self): """ - This will setup the necessary infrastructure to run our authentication tests. It requres the ADS_HOME environment variable + This will set up the necessary infrastructure to run our authentication tests. It requires the ADS_HOME environment variable and our custom embedded apache directory server jar in order to run. """ if not DSE_VERSION: @@ -89,7 +88,7 @@ def setUpClass(self): self.charlie_keytab = os.path.join(self.conf_file_dir, "charlie.keytab") actual_jar = os.path.join(ADS_HOME, "embedded-ads.jar") - # Create configuration directories if they don't already exists + # Create configuration directories if they don't already exist if not os.path.exists(self.conf_file_dir): os.makedirs(self.conf_file_dir) if not os.path.exists(actual_jar): @@ -178,7 +177,7 @@ def test_should_not_authenticate_with_bad_user_ticket(self): auth_provider = DSEGSSAPIAuthProvider(service='dse', qops=["auth"]) self.assertRaises(NoHostAvailable, self.connect_and_query, auth_provider) - def test_should_not_athenticate_without_ticket(self): + def test_should_not_authenticate_without_ticket(self): """ This tests will attempt to authenticate with a user that is valid but has no ticket @since 3.20 diff --git a/tests/integration/advanced/test_cont_paging.py b/tests/integration/advanced/test_cont_paging.py index c5f1cbfff3..0f64835674 100644 --- a/tests/integration/advanced/test_cont_paging.py +++ b/tests/integration/advanced/test_cont_paging.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -18,13 +20,9 @@ import logging log = logging.getLogger(__name__) -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from itertools import cycle, count -from six.moves import range from packaging.version import Version import time @@ -74,12 +72,12 @@ def create_cluster(cls): cls.select_all_statement = "SELECT * FROM {0}.{0}".format(cls.ks_name) - def test_continous_paging(self): + def test_continuous_paging(self): """ Test to ensure that various continuous paging schemes return the full set of results. @since 3.20 @jira_ticket PYTHON-615 - @expected_result various continous paging options should fetch all the results + @expected_result various continuous paging options should fetch all the results @test_category queries """ @@ -135,9 +133,9 @@ def test_paging_cancel(self): self.session_with_profiles.default_fetch_size = 1 # This combination should fetch one result a second. We should see a very few results results = self.session_with_profiles.execute_async(self.select_all_statement, execution_profile= "SLOW") - result_set =results.result() + result_set = results.result() result_set.cancel_continuous_paging() - result_lst =list(result_set) + result_lst = list(result_set) self.assertLess(len(result_lst), 2, "Cancel should have aborted fetch immediately") def test_con_paging_verify_writes(self): @@ -187,7 +185,7 @@ def test_con_paging_verify_writes(self): def test_can_get_results_when_no_more_pages(self): """ - Test to validate that the resutls can be fetched when + Test to validate that the results can be fetched when has_more_pages is False @since 3.20 @jira_ticket PYTHON-946 diff --git a/tests/integration/advanced/test_cqlengine_where_operators.py b/tests/integration/advanced/test_cqlengine_where_operators.py index 8ade3db09d..b39cde0f02 100644 --- a/tests/integration/advanced/test_cqlengine_where_operators.py +++ b/tests/integration/advanced/test_cqlengine_where_operators.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import os import time diff --git a/tests/integration/advanced/test_geometry.py b/tests/integration/advanced/test_geometry.py index 8bee144d19..3bbf04bb7a 100644 --- a/tests/integration/advanced/test_geometry.py +++ b/tests/integration/advanced/test_geometry.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -18,10 +20,7 @@ from cassandra.util import OrderedMap, sortedset from collections import namedtuple -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from uuid import uuid1 from cassandra.util import Point, LineString, Polygon from cassandra.cqltypes import LineStringType, PointType, PolygonType @@ -38,7 +37,7 @@ class AbstractGeometricTypeTest(): def test_should_insert_simple(self): """ - This tests will attempt to insert a point, polygon, or line, using simple inline formating. + This test will attempt to insert a point, polygon, or line, using simple inline formatting. @since 3.20 @jira_ticket PYTHON-456 @test_category dse geometric diff --git a/tests/integration/advanced/test_spark.py b/tests/integration/advanced/test_spark.py index a307913abb..197f99c934 100644 --- a/tests/integration/advanced/test_spark.py +++ b/tests/integration/advanced/test_spark.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -30,7 +32,7 @@ def setup_module(): @requiredse class SparkLBTests(BasicGraphUnitTestCase): """ - Test to validate that analtics query can run in a multi-node enviroment. Also check to to ensure + Test to validate that an analytics query can run in a multi-node environment.
Also check to ensure that the master spark node is correctly targeted when OLAP queries are run @since 3.20 @@ -42,7 +44,7 @@ def test_spark_analytic_query(self): self.session.execute_graph(ClassicGraphFixtures.classic()) spark_master = find_spark_master(self.session) - # Run multipltle times to ensure we don't round robin + # Run multiple times to ensure we don't round-robin for i in range(3): to_run = SimpleGraphStatement("g.V().count()") rs = self.session.execute_graph(to_run, execution_profile=EXEC_PROFILE_GRAPH_ANALYTICS_DEFAULT) diff --git a/tests/integration/advanced/test_unixsocketendpoint.py b/tests/integration/advanced/test_unixsocketendpoint.py index 10cbc1b362..f2795d1a68 100644 --- a/tests/integration/advanced/test_unixsocketendpoint.py +++ b/tests/integration/advanced/test_unixsocketendpoint.py @@ -11,10 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import time import subprocess diff --git a/tests/integration/cloud/__init__.py b/tests/integration/cloud/__init__.py index ca05ae4ce5..a6a4ab7a5d 100644 --- a/tests/integration/cloud/__init__.py +++ b/tests/integration/cloud/__init__.py @@ -13,10 +13,7 @@ # limitations under the License from cassandra.cluster import Cluster -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import os import subprocess diff --git a/tests/integration/cloud/conftest.py b/tests/integration/cloud/conftest.py new file mode 100644 index 0000000000..6bfda32534 --- /dev/null +++ b/tests/integration/cloud/conftest.py @@ -0,0 +1,10 @@ +import pytest + +from tests.integration.cloud import setup_package, teardown_package + + +@pytest.fixture(scope='session', autouse=True) +def setup_and_teardown_packages(): + setup_package() + yield + teardown_package() diff --git a/tests/integration/cloud/test_cloud.py b/tests/integration/cloud/test_cloud.py index 5b9b268f5c..1c1e75c1ee 100644 --- a/tests/integration/cloud/test_cloud.py +++ b/tests/integration/cloud/test_cloud.py @@ -13,14 +13,14 @@ # limitations under the License from cassandra.datastax.cloud import parse_metadata_info from cassandra.query import SimpleStatement +from cassandra.cqlengine import connection +from cassandra.cqlengine.management import sync_table, create_keyspace_simple +from cassandra.cqlengine.models import Model +from cassandra.cqlengine import columns -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest -import six -from ssl import SSLContext, PROTOCOL_TLSv1 +from ssl import SSLContext, PROTOCOL_TLS from cassandra import DriverException, ConsistencyLevel, InvalidRequest from cassandra.cluster import NoHostAvailable, ExecutionProfile, Cluster, _execution_profile_to_string @@ -28,9 +28,9 @@ from cassandra.auth import PlainTextAuthProvider from cassandra.policies import TokenAwarePolicy, DCAwareRoundRobinPolicy, ConstantReconnectionPolicy -from mock import patch +from unittest.mock import patch -from tests.integration import requirescloudproxy, TestCluster +from tests.integration import requirescloudproxy from tests.util import wait_until_not_raised from tests.integration.cloud import CloudProxyCluster, CLOUD_PROXY_SERVER @@ -88,7 +88,7 @@ def test_support_overriding_auth_provider(self): def test_error_overriding_ssl_context(self): with 
self.assertRaises(ValueError) as cm: - self.connect(self.creds, ssl_context=SSLContext(PROTOCOL_TLSv1)) + self.connect(self.creds, ssl_context=SSLContext(PROTOCOL_TLS)) self.assertIn('cannot be specified with a cloud configuration', str(cm.exception)) @@ -113,10 +113,7 @@ def test_error_when_bundle_doesnt_exist(self): try: self.connect('/invalid/path/file.zip') except Exception as e: - if six.PY2: - self.assertIsInstance(e, IOError) - else: - self.assertIsInstance(e, FileNotFoundError) + self.assertIsInstance(e, FileNotFoundError) def test_load_balancing_policy_is_dcawaretokenlbp(self): self.connect(self.creds) @@ -143,7 +140,7 @@ def test_resolve_and_reconnect_on_node_down(self): wait_until_not_raised( lambda: self.assertEqual(len(self.hosts_up()), 3), 0.02, 250) - mocked_resolve.assert_called_once() + mocked_resolve.assert_called() def test_metadata_unreachable(self): with self.assertRaises(DriverException) as cm: @@ -162,7 +159,7 @@ def test_default_consistency(self): self.assertEqual(self.session.default_consistency_level, ConsistencyLevel.LOCAL_QUORUM) # Verify EXEC_PROFILE_DEFAULT, EXEC_PROFILE_GRAPH_DEFAULT, # EXEC_PROFILE_GRAPH_SYSTEM_DEFAULT, EXEC_PROFILE_GRAPH_ANALYTICS_DEFAULT - for ep_key in six.iterkeys(self.cluster.profile_manager.profiles): + for ep_key in self.cluster.profile_manager.profiles.keys(): ep = self.cluster.profile_manager.profiles[ep_key] self.assertEqual( ep.consistency_level, @@ -234,3 +231,14 @@ def test_consistency_guardrails(self): self.session.execute(statement) except InvalidRequest: self.fail("InvalidRequest was incorrectly raised for write query at LOCAL QUORUM!") + + def test_cqlengine_can_connect(self): + class TestModel(Model): + id = columns.Integer(primary_key=True) + val = columns.Text() + + connection.setup(None, "test", cloud={'secure_connect_bundle': self.creds}) + create_keyspace_simple('test', 1) + sync_table(TestModel) + TestModel.objects.create(id=42, val='test') + self.assertEqual(len(TestModel.objects.all()), 1) diff --git a/tests/integration/cloud/test_cloud_schema.py b/tests/integration/cloud/test_cloud_schema.py index 1d52e8e428..8dff49508a 100644 --- a/tests/integration/cloud/test_cloud_schema.py +++ b/tests/integration/cloud/test_cloud_schema.py @@ -110,9 +110,9 @@ def test_for_schema_disagreement_attribute(self): self.check_and_wait_for_agreement(session, rs, True) cluster.shutdown() - def check_and_wait_for_agreement(self, session, rs, exepected): + def check_and_wait_for_agreement(self, session, rs, expected): # Wait for RESULT_KIND_SCHEMA_CHANGE message to arrive time.sleep(1) - self.assertEqual(rs.response_future.is_schema_agreed, exepected) + self.assertEqual(rs.response_future.is_schema_agreed, expected) if not rs.response_future.is_schema_agreed: session.cluster.control_connection.wait_for_schema_agreement(wait_time=1000) \ No newline at end of file diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py new file mode 100644 index 0000000000..e17ac302c8 --- /dev/null +++ b/tests/integration/conftest.py @@ -0,0 +1,10 @@ +import pytest + +from tests.integration import teardown_package + + +@pytest.fixture(scope='session', autouse=True) +def setup_and_teardown_packages(): + print('setup') + yield + teardown_package() diff --git a/tests/integration/cqlengine/__init__.py b/tests/integration/cqlengine/__init__.py index e68baaabf1..5148d6417f 100644 --- a/tests/integration/cqlengine/__init__.py +++ b/tests/integration/cqlengine/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc.
+# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,10 +16,7 @@ import os import warnings -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra import ConsistencyLevel from cassandra.cqlengine import connection diff --git a/tests/integration/cqlengine/advanced/__init__.py b/tests/integration/cqlengine/advanced/__init__.py index 386372eb4a..588a655d98 100644 --- a/tests/integration/cqlengine/advanced/__init__.py +++ b/tests/integration/cqlengine/advanced/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/advanced/test_cont_paging.py b/tests/integration/cqlengine/advanced/test_cont_paging.py index 38b4355312..82b0818fae 100644 --- a/tests/integration/cqlengine/advanced/test_cont_paging.py +++ b/tests/integration/cqlengine/advanced/test_cont_paging.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,10 +16,7 @@ -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from packaging.version import Version diff --git a/tests/integration/cqlengine/base.py b/tests/integration/cqlengine/base.py index 8a6903350f..1b99005fc4 100644 --- a/tests/integration/cqlengine/base.py +++ b/tests/integration/cqlengine/base.py @@ -1,20 +1,19 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import sys diff --git a/tests/integration/cqlengine/columns/__init__.py b/tests/integration/cqlengine/columns/__init__.py index 386372eb4a..588a655d98 100644 --- a/tests/integration/cqlengine/columns/__init__.py +++ b/tests/integration/cqlengine/columns/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/columns/test_container_columns.py b/tests/integration/cqlengine/columns/test_container_columns.py index 2acf36457b..abdbb6185b 100644 --- a/tests/integration/cqlengine/columns/test_container_columns.py +++ b/tests/integration/cqlengine/columns/test_container_columns.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +17,6 @@ from datetime import datetime, timedelta import json import logging -import six import sys import traceback from uuid import uuid4 @@ -48,7 +49,7 @@ class JsonTestColumn(columns.Column): def to_python(self, value): if value is None: return - if isinstance(value, six.string_types): + if isinstance(value, str): return json.loads(value) else: return value diff --git a/tests/integration/cqlengine/columns/test_counter_column.py b/tests/integration/cqlengine/columns/test_counter_column.py index 95792dd452..e68af62050 100644 --- a/tests/integration/cqlengine/columns/test_counter_column.py +++ b/tests/integration/cqlengine/columns/test_counter_column.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/columns/test_static_column.py b/tests/integration/cqlengine/columns/test_static_column.py index 69e222d2b9..8d16ec6227 100644 --- a/tests/integration/cqlengine/columns/test_static_column.py +++ b/tests/integration/cqlengine/columns/test_static_column.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from uuid import uuid4 diff --git a/tests/integration/cqlengine/columns/test_validation.py b/tests/integration/cqlengine/columns/test_validation.py index 69682fd68d..48ae74b5ab 100644 --- a/tests/integration/cqlengine/columns/test_validation.py +++ b/tests/integration/cqlengine/columns/test_validation.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,13 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import sys -from datetime import datetime, timedelta, date, tzinfo, time +from datetime import datetime, timedelta, date, tzinfo, time, timezone from decimal import Decimal as D from uuid import uuid4, uuid1 from packaging.version import Version @@ -100,7 +99,7 @@ def test_datetime_timestamp(self): dt_value = 1454520554 self.DatetimeTest.objects.create(test_id=5, created_at=dt_value) dt2 = self.DatetimeTest.objects(test_id=5).first() - self.assertEqual(dt2.created_at, datetime.utcfromtimestamp(dt_value)) + self.assertEqual(dt2.created_at, datetime.fromtimestamp(dt_value, tz=timezone.utc).replace(tzinfo=None)) def test_datetime_large(self): dt_value = datetime(2038, 12, 31, 10, 10, 10, 123000) @@ -812,7 +811,7 @@ def test_conversion_specific_date(self): assert isinstance(uuid, UUID) ts = (uuid.time - 0x01b21dd213814000) / 1e7 # back to a timestamp - new_dt = datetime.utcfromtimestamp(ts) + new_dt = datetime.fromtimestamp(ts, tz=timezone.utc).replace(tzinfo=None) # checks that we created a UUID1 with the proper timestamp assert new_dt == dt diff --git a/tests/integration/cqlengine/columns/test_value_io.py b/tests/integration/cqlengine/columns/test_value_io.py index 243c2b0fdb..faca854fdb 100644 --- a/tests/integration/cqlengine/columns/test_value_io.py +++ b/tests/integration/cqlengine/columns/test_value_io.py @@ -1,25 +1,23 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from datetime import datetime, timedelta, time from decimal import Decimal from uuid import uuid1, uuid4, UUID -import six from cassandra.cqlengine import columns from cassandra.cqlengine.management import sync_table @@ -104,15 +102,15 @@ def test_column_io(self): class TestBlobIO(BaseColumnIOTest): column = columns.Blob - pkey_val = six.b('blake'), uuid4().bytes - data_val = six.b('eggleston'), uuid4().bytes + pkey_val = b'blake', uuid4().bytes + data_val = b'eggleston', uuid4().bytes class TestBlobIO2(BaseColumnIOTest): column = columns.Blob - pkey_val = bytearray(six.b('blake')), uuid4().bytes - data_val = bytearray(six.b('eggleston')), uuid4().bytes + pkey_val = bytearray(b'blake'), uuid4().bytes + data_val = bytearray(b'eggleston'), uuid4().bytes class TestTextIO(BaseColumnIOTest): diff --git a/tests/integration/cqlengine/conftest.py b/tests/integration/cqlengine/conftest.py new file mode 100644 index 0000000000..2dc695828b --- /dev/null +++ b/tests/integration/cqlengine/conftest.py @@ -0,0 +1,12 @@ +import pytest + +from tests.integration import teardown_package as parent_teardown_package +from tests.integration.cqlengine import setup_package, teardown_package + + +@pytest.fixture(scope='session', autouse=True) +def setup_and_teardown_packages(): + setup_package() + yield + teardown_package() + parent_teardown_package() \ No newline at end of file diff --git a/tests/integration/cqlengine/connections/__init__.py b/tests/integration/cqlengine/connections/__init__.py index 2c9ca172f8..635f0d9e60 100644 --- a/tests/integration/cqlengine/connections/__init__.py +++ b/tests/integration/cqlengine/connections/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/connections/test_connection.py b/tests/integration/cqlengine/connections/test_connection.py index c46df31280..2235fc0c56 100644 --- a/tests/integration/cqlengine/connections/test_connection.py +++ b/tests/integration/cqlengine/connections/test_connection.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra import ConsistencyLevel @@ -105,7 +104,7 @@ def tearDown(self): def test_connection_session_switch(self): """ Test to ensure that when the default keyspace is changed in a session and that session, - is set in the connection class, that the new defaul keyspace is honored. + is set in the connection class, that the new default keyspace is honored. @since 3.1 @jira_ticket PYTHON-486 diff --git a/tests/integration/cqlengine/management/__init__.py b/tests/integration/cqlengine/management/__init__.py index 386372eb4a..588a655d98 100644 --- a/tests/integration/cqlengine/management/__init__.py +++ b/tests/integration/cqlengine/management/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/management/test_compaction_settings.py b/tests/integration/cqlengine/management/test_compaction_settings.py index d5dea12744..fbb5870ebb 100644 --- a/tests/integration/cqlengine/management/test_compaction_settings.py +++ b/tests/integration/cqlengine/management/test_compaction_settings.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,8 +15,7 @@ # limitations under the License. import copy -from mock import patch -import six +from unittest.mock import patch from cassandra.cqlengine import columns from cassandra.cqlengine.management import drop_table, sync_table, _get_table_metadata, _update_options @@ -83,7 +84,7 @@ def test_alter_actually_alters(self): table_meta = _get_table_metadata(tmp) - self.assertRegexpMatches(table_meta.export_as_string(), '.*SizeTieredCompactionStrategy.*') + self.assertRegex(table_meta.export_as_string(), '.*SizeTieredCompactionStrategy.*') def test_alter_options(self): @@ -97,11 +98,11 @@ class AlterTable(Model): drop_table(AlterTable) sync_table(AlterTable) table_meta = _get_table_metadata(AlterTable) - self.assertRegexpMatches(table_meta.export_as_string(), ".*'sstable_size_in_mb': '64'.*") + self.assertRegex(table_meta.export_as_string(), ".*'sstable_size_in_mb': '64'.*") AlterTable.__options__['compaction']['sstable_size_in_mb'] = '128' sync_table(AlterTable) table_meta = _get_table_metadata(AlterTable) - self.assertRegexpMatches(table_meta.export_as_string(), ".*'sstable_size_in_mb': '128'.*") + self.assertRegex(table_meta.export_as_string(), ".*'sstable_size_in_mb': '128'.*") class OptionsTest(BaseCassEngTestCase): @@ -110,7 +111,7 @@ def _verify_options(self, table_meta, expected_options): cql = table_meta.export_as_string() for name, value in expected_options.items(): - if isinstance(value, six.string_types): + if isinstance(value, str): self.assertIn("%s = '%s'" % (name, value), cql) else: start = cql.find("%s = {" % (name,)) diff --git a/tests/integration/cqlengine/management/test_management.py b/tests/integration/cqlengine/management/test_management.py index 7edb3e71dd..c424c187ce 100644 --- a/tests/integration/cqlengine/management/test_management.py +++ b/tests/integration/cqlengine/management/test_management.py @@ -1,22 +1,21 @@ -# Copyright DataStax, Inc. 
+# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest -import mock +from unittest import mock import logging from packaging.version import Version from cassandra.cqlengine.connection import get_session, get_cluster @@ -257,14 +256,14 @@ def test_table_property_update(self): table_options = management._get_table_metadata(ModelWithTableProperties).options - self.assertDictContainsSubset(ModelWithTableProperties.__options__, table_options) + self.assertLessEqual(ModelWithTableProperties.__options__.items(), table_options.items()) def test_bogus_option_update(self): sync_table(ModelWithTableProperties) option = 'no way will this ever be an option' try: ModelWithTableProperties.__options__[option] = 'what was I thinking?' - self.assertRaisesRegexp(KeyError, "Invalid table option.*%s.*" % option, sync_table, ModelWithTableProperties) + self.assertRaisesRegex(KeyError, "Invalid table option.*%s.*" % option, sync_table, ModelWithTableProperties) finally: ModelWithTableProperties.__options__.pop(option, None) diff --git a/tests/integration/cqlengine/model/__init__.py b/tests/integration/cqlengine/model/__init__.py index 386372eb4a..588a655d98 100644 --- a/tests/integration/cqlengine/model/__init__.py +++ b/tests/integration/cqlengine/model/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/model/test_class_construction.py b/tests/integration/cqlengine/model/test_class_construction.py index 9c5afecbfc..00051d9248 100644 --- a/tests/integration/cqlengine/model/test_class_construction.py +++ b/tests/integration/cqlengine/model/test_class_construction.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -91,7 +93,7 @@ def test_attempting_to_make_duplicate_column_names_fails(self): Tests that trying to create conflicting db column names will fail """ - with self.assertRaisesRegexp(ModelException, r".*more than once$"): + with self.assertRaisesRegex(ModelException, r".*more than once$"): class BadNames(Model): words = columns.Text(primary_key=True) content = columns.Text(db_field='words') diff --git a/tests/integration/cqlengine/model/test_equality_operations.py b/tests/integration/cqlengine/model/test_equality_operations.py index 3b40ed4bf3..89045d7714 100644 --- a/tests/integration/cqlengine/model/test_equality_operations.py +++ b/tests/integration/cqlengine/model/test_equality_operations.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/model/test_model.py b/tests/integration/cqlengine/model/test_model.py index bbd9e0cbb6..c2c4906441 100644 --- a/tests/integration/cqlengine/model/test_model.py +++ b/tests/integration/cqlengine/model/test_model.py @@ -1,22 +1,21 @@ -# Copyright DataStax, Inc. 
+# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest -from mock import patch +from unittest.mock import patch from cassandra.cqlengine import columns, CQLEngineException from cassandra.cqlengine.management import sync_table, drop_table, create_keyspace_simple, drop_keyspace diff --git a/tests/integration/cqlengine/model/test_model_io.py b/tests/integration/cqlengine/model/test_model_io.py index 32ace5363f..9cff0af6a6 100644 --- a/tests/integration/cqlengine/model/test_model_io.py +++ b/tests/integration/cqlengine/model/test_model_io.py @@ -1,24 +1,23 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from uuid import uuid4, UUID import random -from datetime import datetime, date, time +from datetime import datetime, date, time, timezone from decimal import Decimal from operator import itemgetter @@ -200,13 +199,13 @@ class AllDatatypesModel(Model): sync_table(AllDatatypesModel) - input = ['ascii', 2 ** 63 - 1, bytearray(b'hello world'), True, datetime.utcfromtimestamp(872835240), + input = ['ascii', 2 ** 63 - 1, bytearray(b'hello world'), True, datetime.fromtimestamp(872835240, tz=timezone.utc).replace(tzinfo=None), Decimal('12.3E+7'), 2.39, 3.4028234663852886e+38, '123.123.123.123', 2147483647, 'text', UUID('FE2B4360-28C6-11E2-81C1-0800200C9A66'), UUID('067e6162-3b6f-4ae2-a171-2470b63dff00'), int(str(2147483647) + '000')] AllDatatypesModel.create(id=0, a='ascii', b=2 ** 63 - 1, c=bytearray(b'hello world'), d=True, - e=datetime.utcfromtimestamp(872835240), f=Decimal('12.3E+7'), g=2.39, + e=datetime.fromtimestamp(872835240, tz=timezone.utc), f=Decimal('12.3E+7'), g=2.39, h=3.4028234663852886e+38, i='123.123.123.123', j=2147483647, k='text', l=UUID('FE2B4360-28C6-11E2-81C1-0800200C9A66'), m=UUID('067e6162-3b6f-4ae2-a171-2470b63dff00'), n=int(str(2147483647) + '000'), diff --git a/tests/integration/cqlengine/model/test_polymorphism.py b/tests/integration/cqlengine/model/test_polymorphism.py index e78fef498e..fc5e9c57ff 100644 --- a/tests/integration/cqlengine/model/test_polymorphism.py +++ b/tests/integration/cqlengine/model/test_polymorphism.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +15,7 @@ # limitations under the License. import uuid -import mock +from unittest import mock from cassandra.cqlengine import columns from cassandra.cqlengine import models diff --git a/tests/integration/cqlengine/model/test_udts.py b/tests/integration/cqlengine/model/test_udts.py index 82973436ac..bab9c51c1f 100644 --- a/tests/integration/cqlengine/model/test_udts.py +++ b/tests/integration/cqlengine/model/test_udts.py @@ -1,24 +1,23 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest -from datetime import datetime, date, time +from datetime import datetime, date, time, timezone from decimal import Decimal -from mock import Mock +from unittest.mock import Mock from uuid import UUID, uuid4 from cassandra.cqlengine.models import Model @@ -275,7 +274,7 @@ def test_can_insert_udts_with_all_datatypes(self): self.addCleanup(drop_table, AllDatatypesModel) input = AllDatatypes(a='ascii', b=2 ** 63 - 1, c=bytearray(b'hello world'), d=True, - e=datetime.utcfromtimestamp(872835240), f=Decimal('12.3E+7'), g=2.39, + e=datetime.fromtimestamp(872835240, tz=timezone.utc).replace(tzinfo=None), f=Decimal('12.3E+7'), g=2.39, h=3.4028234663852886e+38, i='123.123.123.123', j=2147483647, k='text', l=UUID('FE2B4360-28C6-11E2-81C1-0800200C9A66'), m=UUID('067e6162-3b6f-4ae2-a171-2470b63dff00'), n=int(str(2147483647) + '000')) diff --git a/tests/integration/cqlengine/model/test_updates.py b/tests/integration/cqlengine/model/test_updates.py index 17eed8ddd9..096417baac 100644 --- a/tests/integration/cqlengine/model/test_updates.py +++ b/tests/integration/cqlengine/model/test_updates.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,9 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +from unittest.mock import patch from uuid import uuid4 -from mock import patch from cassandra.cqlengine import ValidationError from tests.integration import greaterthancass21 diff --git a/tests/integration/cqlengine/model/test_value_lists.py b/tests/integration/cqlengine/model/test_value_lists.py index 0c913158cf..a6fc0b25f3 100644 --- a/tests/integration/cqlengine/model/test_value_lists.py +++ b/tests/integration/cqlengine/model/test_value_lists.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/operators/__init__.py b/tests/integration/cqlengine/operators/__init__.py index 05a41c46fd..9d1d6564dc 100644 --- a/tests/integration/cqlengine/operators/__init__.py +++ b/tests/integration/cqlengine/operators/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/operators/test_where_operators.py b/tests/integration/cqlengine/operators/test_where_operators.py index fdfce1f0b8..808e14df04 100644 --- a/tests/integration/cqlengine/operators/test_where_operators.py +++ b/tests/integration/cqlengine/operators/test_where_operators.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
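A minimal, hedged sketch of the import migration the following hunks apply across these test modules (unittest2 to unittest, mock to unittest.mock, six.text_type to str); EqualsOperator comes from cassandra.cqlengine.operators, as imported in the patched files:

    import unittest
    from unittest import mock
    from cassandra.cqlengine.operators import EqualsOperator

    # On Python 3 the standard library covers everything the old
    # unittest2/mock backports provided, and six.text_type(x) is just
    # str(x), so the rendering assertions compare plain str() output.
    assert str(EqualsOperator()) == "="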
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.cqlengine.operators import * @@ -30,8 +29,6 @@ from tests.integration.cqlengine.operators import check_lookup from tests.integration import greaterthanorequalcass30 -import six - class TestWhereOperators(unittest.TestCase): @@ -50,15 +47,15 @@ def test_symbol_lookup(self): def test_operator_rendering(self): """ tests symbols are rendered properly """ - self.assertEqual("=", six.text_type(EqualsOperator())) - self.assertEqual("!=", six.text_type(NotEqualsOperator())) - self.assertEqual("IN", six.text_type(InOperator())) - self.assertEqual(">", six.text_type(GreaterThanOperator())) - self.assertEqual(">=", six.text_type(GreaterThanOrEqualOperator())) - self.assertEqual("<", six.text_type(LessThanOperator())) - self.assertEqual("<=", six.text_type(LessThanOrEqualOperator())) - self.assertEqual("CONTAINS", six.text_type(ContainsOperator())) - self.assertEqual("LIKE", six.text_type(LikeOperator())) + self.assertEqual("=", str(EqualsOperator())) + self.assertEqual("!=", str(NotEqualsOperator())) + self.assertEqual("IN", str(InOperator())) + self.assertEqual(">", str(GreaterThanOperator())) + self.assertEqual(">=", str(GreaterThanOrEqualOperator())) + self.assertEqual("<", str(LessThanOperator())) + self.assertEqual("<=", str(LessThanOrEqualOperator())) + self.assertEqual("CONTAINS", str(ContainsOperator())) + self.assertEqual("LIKE", str(LikeOperator())) class TestIsNotNull(BaseCassEngTestCase): diff --git a/tests/integration/cqlengine/query/__init__.py b/tests/integration/cqlengine/query/__init__.py index 386372eb4a..588a655d98 100644 --- a/tests/integration/cqlengine/query/__init__.py +++ b/tests/integration/cqlengine/query/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/query/test_batch_query.py b/tests/integration/cqlengine/query/test_batch_query.py index f0c9c43266..d3cddc0c7e 100644 --- a/tests/integration/cqlengine/query/test_batch_query.py +++ b/tests/integration/cqlengine/query/test_batch_query.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import mock +from unittest import mock from cassandra.cqlengine import columns from cassandra.cqlengine.connection import NOT_SET diff --git a/tests/integration/cqlengine/query/test_datetime_queries.py b/tests/integration/cqlengine/query/test_datetime_queries.py index ba1c90bb9e..8225b2d9f3 100644 --- a/tests/integration/cqlengine/query/test_datetime_queries.py +++ b/tests/integration/cqlengine/query/test_datetime_queries.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/query/test_named.py b/tests/integration/cqlengine/query/test_named.py index 3a6f83b32e..b6ba23a2e1 100644 --- a/tests/integration/cqlengine/query/test_named.py +++ b/tests/integration/cqlengine/query/test_named.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra import ConsistencyLevel from cassandra.cqlengine import operators diff --git a/tests/integration/cqlengine/query/test_queryoperators.py b/tests/integration/cqlengine/query/test_queryoperators.py index fd148bafcf..8f0dae06e7 100644 --- a/tests/integration/cqlengine/query/test_queryoperators.py +++ b/tests/integration/cqlengine/query/test_queryoperators.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/query/test_queryset.py b/tests/integration/cqlengine/query/test_queryset.py index 6bc9d701b8..d09d7eeb04 100644 --- a/tests/integration/cqlengine/query/test_queryset.py +++ b/tests/integration/cqlengine/query/test_queryset.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,10 +15,7 @@ # limitations under the License. from __future__ import absolute_import -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from datetime import datetime from uuid import uuid4 @@ -27,7 +26,7 @@ from cassandra import InvalidRequest from tests.integration.cqlengine.base import BaseCassEngTestCase from cassandra.cqlengine.connection import NOT_SET -import mock +from unittest import mock from cassandra.cqlengine import functions from cassandra.cqlengine.management import sync_table, drop_table from cassandra.cqlengine.models import Model @@ -1365,11 +1364,21 @@ def tearDownClass(cls): super(TestModelQueryWithFetchSize, cls).tearDownClass() drop_table(TestModelSmall) - @execute_count(9) + @execute_count(19) def test_defaultFetchSize(self): + # Use smaller batch sizes to avoid hitting the max. 
We trigger an InvalidRequest + # response for Cassandra 4.1.x and 5.0.x if we just do the whole thing as one + # large batch. We're just using this to populate values for a test, however, + # so shifting to smaller batches should be fine. + for i in range(0, 5000, 500): + with BatchQuery() as b: + range_max = i + 500 + for j in range(i, range_max): + TestModelSmall.batch(b).create(test_id=j) with BatchQuery() as b: - for i in range(5100): + for i in range(5000, 5100): TestModelSmall.batch(b).create(test_id=i) + self.assertEqual(len(TestModelSmall.objects.fetch_size(1)), 5100) self.assertEqual(len(TestModelSmall.objects.fetch_size(500)), 5100) self.assertEqual(len(TestModelSmall.objects.fetch_size(4999)), 5100) diff --git a/tests/integration/cqlengine/query/test_updates.py b/tests/integration/cqlengine/query/test_updates.py index fb6082bfe2..b0b9155ea2 100644 --- a/tests/integration/cqlengine/query/test_updates.py +++ b/tests/integration/cqlengine/query/test_updates.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/statements/__init__.py b/tests/integration/cqlengine/statements/__init__.py index 2c9ca172f8..635f0d9e60 100644 --- a/tests/integration/cqlengine/statements/__init__.py +++ b/tests/integration/cqlengine/statements/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/statements/test_assignment_clauses.py b/tests/integration/cqlengine/statements/test_assignment_clauses.py index 594224d72d..c6d75a447e 100644 --- a/tests/integration/cqlengine/statements/test_assignment_clauses.py +++ b/tests/integration/cqlengine/statements/test_assignment_clauses.py @@ -1,20 +1,19 @@ -# Copyright DataStax, Inc. 
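The batch loop added to test_defaultFetchSize above chunks row creation so that no single batch trips the server-side batch size threshold (the InvalidRequest seen on Cassandra 4.1.x and 5.0.x). A minimal sketch of that pattern; populate_in_chunks and the chunk size are illustrative, not part of the patch:

    from cassandra.cqlengine.query import BatchQuery

    def populate_in_chunks(model, total_rows, chunk=500):
        # One BatchQuery per chunk keeps each batch small; a single
        # 5100-row batch is rejected by newer Cassandra releases.
        for start in range(0, total_rows, chunk):
            with BatchQuery() as b:
                for i in range(start, min(start + chunk, total_rows)):
                    model.batch(b).create(test_id=i)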
+# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.cqlengine.statements import AssignmentClause, SetUpdateClause, ListUpdateClause, MapUpdateClause, MapDeleteClause, FieldDeleteClause, CounterUpdateClause diff --git a/tests/integration/cqlengine/statements/test_base_clause.py b/tests/integration/cqlengine/statements/test_base_clause.py index 351983806b..cbba1ae36e 100644 --- a/tests/integration/cqlengine/statements/test_base_clause.py +++ b/tests/integration/cqlengine/statements/test_base_clause.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/statements/test_base_statement.py b/tests/integration/cqlengine/statements/test_base_statement.py index 474c45d02b..211d76cf5c 100644 --- a/tests/integration/cqlengine/statements/test_base_statement.py +++ b/tests/integration/cqlengine/statements/test_base_statement.py @@ -1,23 +1,21 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from uuid import uuid4 -import six from cassandra.query import FETCH_SIZE_UNSET from cassandra.cqlengine.statements import BaseCQLStatement @@ -29,7 +27,7 @@ from tests.integration.cqlengine.base import BaseCassEngTestCase, TestQueryUpdateModel from tests.integration.cqlengine import DEFAULT_KEYSPACE -from tests.integration import greaterthanorequalcass3_10, TestCluster +from tests.integration import greaterthanorequalcass3_10, lessthandse69, TestCluster from cassandra.cqlengine.connection import execute @@ -105,6 +103,7 @@ def test_insert_statement_execute(self): self.assertEqual(TestQueryUpdateModel.objects.count(), 0) @greaterthanorequalcass3_10 + @lessthandse69 def test_like_operator(self): """ Test to verify the like operator works appropriately @@ -130,7 +129,7 @@ def test_like_operator(self): ss = SelectStatement(self.table_name) like_clause = "text_for_%" ss.add_where(Column(db_field='text'), LikeOperator(), like_clause) - self.assertEqual(six.text_type(ss), + self.assertEqual(str(ss), 'SELECT * FROM {} WHERE "text" LIKE %(0)s'.format(self.table_name)) result = execute(ss) diff --git a/tests/integration/cqlengine/statements/test_delete_statement.py b/tests/integration/cqlengine/statements/test_delete_statement.py index 5e2894a06b..433fa759ac 100644 --- a/tests/integration/cqlengine/statements/test_delete_statement.py +++ b/tests/integration/cqlengine/statements/test_delete_statement.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -17,7 +19,6 @@ from cassandra.cqlengine.columns import Column from cassandra.cqlengine.statements import DeleteStatement, WhereClause, MapDeleteClause, ConditionalClause from cassandra.cqlengine.operators import * -import six class DeleteStatementTests(TestCase): @@ -31,24 +32,24 @@ def test_single_field_is_listified(self): def test_field_rendering(self): """ tests that fields are properly added to the select statement """ ds = DeleteStatement('table', ['f1', 'f2']) - self.assertTrue(six.text_type(ds).startswith('DELETE "f1", "f2"'), six.text_type(ds)) + self.assertTrue(str(ds).startswith('DELETE "f1", "f2"'), str(ds)) self.assertTrue(str(ds).startswith('DELETE "f1", "f2"'), str(ds)) def test_none_fields_rendering(self): """ tests that a '*' is added if no fields are passed in """ ds = DeleteStatement('table', None) - self.assertTrue(six.text_type(ds).startswith('DELETE FROM'), six.text_type(ds)) + self.assertTrue(str(ds).startswith('DELETE FROM'), str(ds)) self.assertTrue(str(ds).startswith('DELETE FROM'), str(ds)) def test_table_rendering(self): ds = DeleteStatement('table', None) - self.assertTrue(six.text_type(ds).startswith('DELETE FROM table'), six.text_type(ds)) + self.assertTrue(str(ds).startswith('DELETE FROM table'), str(ds)) self.assertTrue(str(ds).startswith('DELETE FROM table'), str(ds)) def test_where_clause_rendering(self): ds = DeleteStatement('table', None) ds.add_where(Column(db_field='a'), EqualsOperator(), 'b') - self.assertEqual(six.text_type(ds), 'DELETE FROM table WHERE "a" = %(0)s', six.text_type(ds)) + self.assertEqual(str(ds), 'DELETE FROM table WHERE "a" = %(0)s', str(ds)) def test_context_update(self): ds = DeleteStatement('table', None) @@ -56,7 +57,7 @@ def test_context_update(self): ds.add_where(Column(db_field='a'), EqualsOperator(), 'b') ds.update_context_id(7) - self.assertEqual(six.text_type(ds), 'DELETE "d"[%(8)s] FROM table WHERE "a" = %(7)s') + self.assertEqual(str(ds), 'DELETE "d"[%(8)s] FROM table WHERE "a" = %(7)s') self.assertEqual(ds.get_context(), {'7': 'b', '8': 3}) def test_context(self): @@ -69,23 +70,23 @@ def test_range_deletion_rendering(self): ds.add_where(Column(db_field='a'), EqualsOperator(), 'b') ds.add_where(Column(db_field='created_at'), GreaterThanOrEqualOperator(), '0') ds.add_where(Column(db_field='created_at'), LessThanOrEqualOperator(), '10') - self.assertEqual(six.text_type(ds), 'DELETE FROM table WHERE "a" = %(0)s AND "created_at" >= %(1)s AND "created_at" <= %(2)s', six.text_type(ds)) + self.assertEqual(str(ds), 'DELETE FROM table WHERE "a" = %(0)s AND "created_at" >= %(1)s AND "created_at" <= %(2)s', str(ds)) ds = DeleteStatement('table', None) ds.add_where(Column(db_field='a'), EqualsOperator(), 'b') ds.add_where(Column(db_field='created_at'), InOperator(), ['0', '10', '20']) - self.assertEqual(six.text_type(ds), 'DELETE FROM table WHERE "a" = %(0)s AND "created_at" IN %(1)s', six.text_type(ds)) + self.assertEqual(str(ds), 'DELETE FROM table WHERE "a" = %(0)s AND "created_at" IN %(1)s', str(ds)) ds = DeleteStatement('table', None) ds.add_where(Column(db_field='a'), NotEqualsOperator(), 'b') - self.assertEqual(six.text_type(ds), 'DELETE FROM table WHERE "a" != %(0)s', six.text_type(ds)) + 
self.assertEqual(str(ds), 'DELETE FROM table WHERE "a" != %(0)s', str(ds)) def test_delete_conditional(self): where = [WhereClause('id', EqualsOperator(), 1)] conditionals = [ConditionalClause('f0', 'value0'), ConditionalClause('f1', 'value1')] ds = DeleteStatement('table', where=where, conditionals=conditionals) self.assertEqual(len(ds.conditionals), len(conditionals)) - self.assertEqual(six.text_type(ds), 'DELETE FROM table WHERE "id" = %(0)s IF "f0" = %(1)s AND "f1" = %(2)s', six.text_type(ds)) + self.assertEqual(str(ds), 'DELETE FROM table WHERE "id" = %(0)s IF "f0" = %(1)s AND "f1" = %(2)s', str(ds)) fields = ['one', 'two'] ds = DeleteStatement('table', fields=fields, where=where, conditionals=conditionals) - self.assertEqual(six.text_type(ds), 'DELETE "one", "two" FROM table WHERE "id" = %(0)s IF "f0" = %(1)s AND "f1" = %(2)s', six.text_type(ds)) + self.assertEqual(str(ds), 'DELETE "one", "two" FROM table WHERE "id" = %(0)s IF "f0" = %(1)s AND "f1" = %(2)s', str(ds)) diff --git a/tests/integration/cqlengine/statements/test_insert_statement.py b/tests/integration/cqlengine/statements/test_insert_statement.py index 3bf90ec313..f3f6b4fd92 100644 --- a/tests/integration/cqlengine/statements/test_insert_statement.py +++ b/tests/integration/cqlengine/statements/test_insert_statement.py @@ -1,22 +1,19 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - -import six +import unittest from cassandra.cqlengine.columns import Column from cassandra.cqlengine.statements import InsertStatement @@ -30,7 +27,7 @@ def test_statement(self): ist.add_assignment(Column(db_field='c'), 'd') self.assertEqual( - six.text_type(ist), + str(ist), 'INSERT INTO table ("a", "c") VALUES (%(0)s, %(1)s)' ) @@ -41,7 +38,7 @@ def test_context_update(self): ist.update_context_id(4) self.assertEqual( - six.text_type(ist), + str(ist), 'INSERT INTO table ("a", "c") VALUES (%(4)s, %(5)s)' ) ctx = ist.get_context() @@ -51,4 +48,4 @@ def test_additional_rendering(self): ist = InsertStatement('table', ttl=60) ist.add_assignment(Column(db_field='a'), 'b') ist.add_assignment(Column(db_field='c'), 'd') - self.assertIn('USING TTL 60', six.text_type(ist)) + self.assertIn('USING TTL 60', str(ist)) diff --git a/tests/integration/cqlengine/statements/test_select_statement.py b/tests/integration/cqlengine/statements/test_select_statement.py index 90c14bcfb6..9478202786 100644 --- a/tests/integration/cqlengine/statements/test_select_statement.py +++ b/tests/integration/cqlengine/statements/test_select_statement.py @@ -1,25 +1,23 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.cqlengine.columns import Column from cassandra.cqlengine.statements import SelectStatement, WhereClause from cassandra.cqlengine.operators import * -import six class SelectStatementTests(unittest.TestCase): @@ -31,42 +29,42 @@ def test_single_field_is_listified(self): def test_field_rendering(self): """ tests that fields are properly added to the select statement """ ss = SelectStatement('table', ['f1', 'f2']) - self.assertTrue(six.text_type(ss).startswith('SELECT "f1", "f2"'), six.text_type(ss)) + self.assertTrue(str(ss).startswith('SELECT "f1", "f2"'), str(ss)) self.assertTrue(str(ss).startswith('SELECT "f1", "f2"'), str(ss)) def test_none_fields_rendering(self): """ tests that a '*' is added if no fields are passed in """ ss = SelectStatement('table') - self.assertTrue(six.text_type(ss).startswith('SELECT *'), six.text_type(ss)) + self.assertTrue(str(ss).startswith('SELECT *'), str(ss)) self.assertTrue(str(ss).startswith('SELECT *'), str(ss)) def test_table_rendering(self): ss = SelectStatement('table') - self.assertTrue(six.text_type(ss).startswith('SELECT * FROM table'), six.text_type(ss)) + self.assertTrue(str(ss).startswith('SELECT * FROM table'), str(ss)) self.assertTrue(str(ss).startswith('SELECT * FROM table'), str(ss)) def test_where_clause_rendering(self): ss = SelectStatement('table') ss.add_where(Column(db_field='a'), EqualsOperator(), 'b') - self.assertEqual(six.text_type(ss), 'SELECT * FROM table WHERE "a" = %(0)s', six.text_type(ss)) + self.assertEqual(str(ss), 'SELECT * FROM table WHERE "a" = %(0)s', str(ss)) def test_count(self): ss = SelectStatement('table', count=True, limit=10, order_by='d') ss.add_where(Column(db_field='a'), EqualsOperator(), 'b') - self.assertEqual(six.text_type(ss), 'SELECT COUNT(*) FROM table WHERE "a" = %(0)s LIMIT 10', six.text_type(ss)) - self.assertIn('LIMIT', six.text_type(ss)) - self.assertNotIn('ORDER', six.text_type(ss)) + self.assertEqual(str(ss), 'SELECT COUNT(*) FROM table WHERE "a" = %(0)s LIMIT 10', str(ss)) + self.assertIn('LIMIT', str(ss)) + self.assertNotIn('ORDER', str(ss)) def test_distinct(self): ss = SelectStatement('table', distinct_fields=['field2']) ss.add_where(Column(db_field='field1'), EqualsOperator(), 'b') - self.assertEqual(six.text_type(ss), 'SELECT DISTINCT "field2" FROM table WHERE "field1" = %(0)s', six.text_type(ss)) + self.assertEqual(str(ss), 'SELECT DISTINCT "field2" FROM table WHERE "field1" = %(0)s', str(ss)) ss = SelectStatement('table', distinct_fields=['field1', 'field2']) - self.assertEqual(six.text_type(ss), 'SELECT DISTINCT "field1", "field2" FROM table') + self.assertEqual(str(ss), 'SELECT DISTINCT "field1", "field2" FROM table') ss = SelectStatement('table', distinct_fields=['field1'], count=True) - self.assertEqual(six.text_type(ss), 'SELECT DISTINCT COUNT("field1") FROM table') + self.assertEqual(str(ss), 'SELECT DISTINCT COUNT("field1") FROM table') def test_context(self): ss = SelectStatement('table') @@ -92,20 +90,20 @@ def test_additional_rendering(self): limit=15, allow_filtering=True ) - qstr = six.text_type(ss) + qstr = str(ss) self.assertIn('LIMIT 15', qstr) self.assertIn('ORDER BY x, y', qstr) self.assertIn('ALLOW FILTERING', qstr) def test_limit_rendering(self): ss = SelectStatement('table', None, limit=10) - qstr = six.text_type(ss) + qstr = str(ss) self.assertIn('LIMIT 10', qstr) ss = SelectStatement('table', None, limit=0) - qstr = six.text_type(ss) + qstr = str(ss) 
self.assertNotIn('LIMIT', qstr) ss = SelectStatement('table', None, limit=None) - qstr = six.text_type(ss) + qstr = str(ss) self.assertNotIn('LIMIT', qstr) diff --git a/tests/integration/cqlengine/statements/test_update_statement.py b/tests/integration/cqlengine/statements/test_update_statement.py index c6ed228d91..4c6966b10f 100644 --- a/tests/integration/cqlengine/statements/test_update_statement.py +++ b/tests/integration/cqlengine/statements/test_update_statement.py @@ -1,27 +1,25 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.cqlengine.columns import Column, Set, List, Text from cassandra.cqlengine.operators import * from cassandra.cqlengine.statements import (UpdateStatement, WhereClause, AssignmentClause, SetUpdateClause, ListUpdateClause) -import six class UpdateStatementTests(unittest.TestCase): @@ -29,7 +27,7 @@ class UpdateStatementTests(unittest.TestCase): def test_table_rendering(self): """ tests that fields are properly added to the select statement """ us = UpdateStatement('table') - self.assertTrue(six.text_type(us).startswith('UPDATE table SET'), six.text_type(us)) + self.assertTrue(str(us).startswith('UPDATE table SET'), str(us)) self.assertTrue(str(us).startswith('UPDATE table SET'), str(us)) def test_rendering(self): @@ -37,10 +35,10 @@ def test_rendering(self): us.add_assignment(Column(db_field='a'), 'b') us.add_assignment(Column(db_field='c'), 'd') us.add_where(Column(db_field='a'), EqualsOperator(), 'x') - self.assertEqual(six.text_type(us), 'UPDATE table SET "a" = %(0)s, "c" = %(1)s WHERE "a" = %(2)s', six.text_type(us)) + self.assertEqual(str(us), 'UPDATE table SET "a" = %(0)s, "c" = %(1)s WHERE "a" = %(2)s', str(us)) us.add_where(Column(db_field='a'), NotEqualsOperator(), 'y') - self.assertEqual(six.text_type(us), 'UPDATE table SET "a" = %(0)s, "c" = %(1)s WHERE "a" = %(2)s AND "a" != %(3)s', six.text_type(us)) + self.assertEqual(str(us), 'UPDATE table SET "a" = %(0)s, "c" = %(1)s WHERE "a" = %(2)s AND "a" != %(3)s', str(us)) def test_context(self): us = UpdateStatement('table') @@ -55,19 +53,19 @@ def test_context_update(self): us.add_assignment(Column(db_field='c'), 'd') us.add_where(Column(db_field='a'), EqualsOperator(), 'x') us.update_context_id(3) - self.assertEqual(six.text_type(us), 'UPDATE table SET "a" = %(4)s, "c" = %(5)s WHERE "a" = %(3)s') + self.assertEqual(str(us), 'UPDATE table SET "a" = %(4)s, "c" = %(5)s WHERE "a" 
= %(3)s') self.assertEqual(us.get_context(), {'4': 'b', '5': 'd', '3': 'x'}) def test_additional_rendering(self): us = UpdateStatement('table', ttl=60) us.add_assignment(Column(db_field='a'), 'b') us.add_where(Column(db_field='a'), EqualsOperator(), 'x') - self.assertIn('USING TTL 60', six.text_type(us)) + self.assertIn('USING TTL 60', str(us)) def test_update_set_add(self): us = UpdateStatement('table') us.add_update(Set(Text, db_field='a'), set((1,)), 'add') - self.assertEqual(six.text_type(us), 'UPDATE table SET "a" = "a" + %(0)s') + self.assertEqual(str(us), 'UPDATE table SET "a" = "a" + %(0)s') def test_update_empty_set_add_does_not_assign(self): us = UpdateStatement('table') diff --git a/tests/integration/cqlengine/statements/test_where_clause.py b/tests/integration/cqlengine/statements/test_where_clause.py index 3173320f7c..76eab13c3e 100644 --- a/tests/integration/cqlengine/statements/test_where_clause.py +++ b/tests/integration/cqlengine/statements/test_where_clause.py @@ -1,22 +1,20 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest -import six from cassandra.cqlengine.operators import EqualsOperator from cassandra.cqlengine.statements import StatementException, WhereClause @@ -33,7 +31,7 @@ def test_where_clause_rendering(self): wc = WhereClause('a', EqualsOperator(), 'c') wc.set_context_id(5) - self.assertEqual('"a" = %(5)s', six.text_type(wc), six.text_type(wc)) + self.assertEqual('"a" = %(5)s', str(wc), str(wc)) self.assertEqual('"a" = %(5)s', str(wc), type(wc)) def test_equality_method(self): diff --git a/tests/integration/cqlengine/test_batch_query.py b/tests/integration/cqlengine/test_batch_query.py index 7b78fa9979..26f312c50a 100644 --- a/tests/integration/cqlengine/test_batch_query.py +++ b/tests/integration/cqlengine/test_batch_query.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +15,7 @@ # limitations under the License. import warnings -import sure +import pytest from cassandra.cqlengine import columns from cassandra.cqlengine.management import drop_table, sync_table @@ -21,7 +23,7 @@ from cassandra.cqlengine.query import BatchQuery from tests.integration.cqlengine.base import BaseCassEngTestCase -from mock import patch +from unittest.mock import patch class TestMultiKeyModel(Model): partition = columns.Integer(primary_key=True) @@ -217,13 +219,13 @@ def test_callbacks_work_multiple_times(self): def my_callback(*args, **kwargs): call_history.append(args) - with warnings.catch_warnings(record=True) as w: + with pytest.warns() as w: with BatchQuery() as batch: batch.add_callback(my_callback) batch.execute() batch.execute() self.assertEqual(len(w), 2) # package filter setup to warn always - self.assertRegexpMatches(str(w[0].message), r"^Batch.*multiple.*") + self.assertRegex(str(w[0].message), r"^Batch.*multiple.*") def test_disable_multiple_callback_warning(self): """ diff --git a/tests/integration/cqlengine/test_connections.py b/tests/integration/cqlengine/test_connections.py index 15adff3380..e767ece617 100644 --- a/tests/integration/cqlengine/test_connections.py +++ b/tests/integration/cqlengine/test_connections.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -79,7 +81,7 @@ def test_context_connection_priority(self): """ Tests to ensure the proper connection priority is honored. - Explicit connection should have higest priority, + Explicit connection should have the highest priority, Followed by context query connection Default connection should be honored last. @@ -458,7 +460,7 @@ def test_keyspace(self): @since 3.7 @jira_ticket PYTHON-613 - @expected_result Keyspace segration is honored + @expected_result Keyspace segregation is honored @test_category object_mapper """ diff --git a/tests/integration/cqlengine/test_consistency.py b/tests/integration/cqlengine/test_consistency.py index dc0aa32c64..3a6485eaed 100644 --- a/tests/integration/cqlengine/test_consistency.py +++ b/tests/integration/cqlengine/test_consistency.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import mock +from unittest import mock from uuid import uuid4 from cassandra import ConsistencyLevel as CL, ConsistencyLevel diff --git a/tests/integration/cqlengine/test_context_query.py b/tests/integration/cqlengine/test_context_query.py index 6f2a161352..bb226f58ce 100644 --- a/tests/integration/cqlengine/test_context_query.py +++ b/tests/integration/cqlengine/test_context_query.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/cqlengine/test_ifexists.py b/tests/integration/cqlengine/test_ifexists.py index 2797edd846..32f48b58ff 100644 --- a/tests/integration/cqlengine/test_ifexists.py +++ b/tests/integration/cqlengine/test_ifexists.py @@ -1,22 +1,20 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - -import mock +import unittest +from unittest import mock from uuid import uuid4 from cassandra.cqlengine import columns @@ -104,7 +102,7 @@ def test_update_if_exists(self): m = TestIfExistsModel.get(id=id) self.assertEqual(m.text, 'changed_again') - m = TestIfExistsModel(id=uuid4(), count=44) # do not exists + m = TestIfExistsModel(id=uuid4(), count=44) # does not exist with self.assertRaises(LWTException) as assertion: m.if_exists().update() @@ -159,7 +157,7 @@ def test_batch_update_if_exists_success(self): @unittest.skipUnless(PROTOCOL_VERSION >= 2, "only runs against the cql3 protocol v2.0") def test_batch_mixed_update_if_exists_success(self): """ - Tests that batch update with with one bad query will still fail with LWTException + Tests that batch update with one bad query will still fail with LWTException @since 3.1 @jira_ticket PYTHON-432 @@ -181,7 +179,7 @@ def test_batch_mixed_update_if_exists_success(self): @unittest.skipUnless(PROTOCOL_VERSION >= 2, "only runs against the cql3 protocol v2.0") def test_delete_if_exists(self): """ - Tests that delete with if_exists work, and throw proper LWT exception when they are are not applied + Tests that delete with if_exists works, and throws a proper LWTException when it is not applied @since 3.1 @jira_ticket PYTHON-432 @@ -197,7 +195,7 @@ def test_delete_if_exists(self): q = TestIfExistsModel.objects(id=id) self.assertEqual(len(q), 0) - m = TestIfExistsModel(id=uuid4(), count=44) # do not exists + m = TestIfExistsModel(id=uuid4(), count=44) # does not exist with self.assertRaises(LWTException) as assertion: m.if_exists().delete() @@ -216,7 +214,7 @@ def test_delete_if_exists(self): @unittest.skipUnless(PROTOCOL_VERSION >= 2, "only runs against the cql3 protocol v2.0") def test_batch_delete_if_exists_success(self): """ - Tests that batch deletes with if_exists work, and throw proper LWTException when they are are not applied + Tests that batch deletes with if_exists work, and throw a proper LWTException when they are not applied @since 3.1 @jira_ticket PYTHON-432 @@ -247,7 +245,7 @@ def test_batch_delete_if_exists_success(self): @unittest.skipUnless(PROTOCOL_VERSION >= 2, "only runs against the cql3 protocol v2.0") def test_batch_delete_mixed(self): """ - Tests that batch deletes with multiple queries and throw proper LWTException when they are are not all applicable + Tests that batch deletes with multiple queries throw a proper LWTException when they are not all applicable @since 3.1 @jira_ticket PYTHON-432 @@ -313,4 +311,3 @@ def test_instance_raise_exception(self): id = uuid4() with self.assertRaises(IfExistsWithCounterColumn): TestIfExistsWithCounterModel.if_exists() - diff --git a/tests/integration/cqlengine/test_ifnotexists.py b/tests/integration/cqlengine/test_ifnotexists.py index 206101f1b2..793ca80355 100644 --- a/tests/integration/cqlengine/test_ifnotexists.py +++ b/tests/integration/cqlengine/test_ifnotexists.py @@ -1,22 +1,20 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - -import mock +import unittest +from unittest import mock from uuid import uuid4 from cassandra.cqlengine import columns @@ -202,4 +200,3 @@ def test_instance_raise_exception(self): id = uuid4() with self.assertRaises(IfNotExistsWithCounterColumn): TestIfNotExistsWithCounterModel.if_not_exists() - diff --git a/tests/integration/cqlengine/test_lwt_conditional.py b/tests/integration/cqlengine/test_lwt_conditional.py index 1c418ae6d8..91edce44c1 100644 --- a/tests/integration/cqlengine/test_lwt_conditional.py +++ b/tests/integration/cqlengine/test_lwt_conditional.py @@ -1,23 +1,20 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - -import mock -import six +import unittest +from unittest import mock from uuid import uuid4 from cassandra.cqlengine import columns @@ -116,7 +113,7 @@ def test_conditional_clause(self): tc = ConditionalClause('some_value', 23) tc.set_context_id(3) - self.assertEqual('"some_value" = %(3)s', six.text_type(tc)) + self.assertEqual('"some_value" = %(3)s', str(tc)) self.assertEqual('"some_value" = %(3)s', str(tc)) def test_batch_update_conditional(self): @@ -163,7 +160,6 @@ def test_batch_update_conditional_several_rows(self): second_row.delete() b.execute() - def test_delete_conditional(self): # DML path t = TestConditionalModel.if_not_exists().create(text='something', count=5) diff --git a/tests/integration/cqlengine/test_timestamp.py b/tests/integration/cqlengine/test_timestamp.py index abf751ec47..c68fc8fa5b 100644 --- a/tests/integration/cqlengine/test_timestamp.py +++ b/tests/integration/cqlengine/test_timestamp.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. 
+# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,8 +15,7 @@ # limitations under the License. from datetime import timedelta, datetime -import mock -import sure +from unittest import mock from uuid import uuid4 from cassandra.cqlengine import columns @@ -44,7 +45,7 @@ def test_batch_is_included(self): with BatchQuery(timestamp=timedelta(seconds=30)) as b: TestTimestampModel.batch(b).create(count=1) - "USING TIMESTAMP".should.be.within(m.call_args[0][0].query_string) + self.assertIn("USING TIMESTAMP", m.call_args[0][0].query_string) class CreateWithTimestampTest(BaseTimestampTest): @@ -56,27 +57,27 @@ def test_batch(self): query = m.call_args[0][0].query_string - query.should.match(r"INSERT.*USING TIMESTAMP") - query.should_not.match(r"TIMESTAMP.*INSERT") + self.assertRegex(query, r"INSERT.*USING TIMESTAMP") + self.assertNotRegex(query, r"TIMESTAMP.*INSERT") def test_timestamp_not_included_on_normal_create(self): with mock.patch.object(self.session, "execute") as m: TestTimestampModel.create(count=2) - "USING TIMESTAMP".shouldnt.be.within(m.call_args[0][0].query_string) + self.assertNotIn("USING TIMESTAMP", m.call_args[0][0].query_string) def test_timestamp_is_set_on_model_queryset(self): delta = timedelta(seconds=30) tmp = TestTimestampModel.timestamp(delta) - tmp._timestamp.should.equal(delta) + self.assertEqual(tmp._timestamp, delta) def test_non_batch_syntax_integration(self): tmp = TestTimestampModel.timestamp(timedelta(seconds=30)).create(count=1) - tmp.should.be.ok + self.assertIsNotNone(tmp) def test_non_batch_syntax_with_tll_integration(self): tmp = TestTimestampModel.timestamp(timedelta(seconds=30)).ttl(30).create(count=1) - tmp.should.be.ok + self.assertIsNotNone(tmp) def test_non_batch_syntax_unit(self): @@ -85,7 +86,7 @@ def test_non_batch_syntax_unit(self): query = m.call_args[0][0].query_string - "USING TIMESTAMP".should.be.within(query) + self.assertIn("USING TIMESTAMP", query) def test_non_batch_syntax_with_ttl_unit(self): @@ -95,7 +96,7 @@ def test_non_batch_syntax_with_ttl_unit(self): query = m.call_args[0][0].query_string - query.should.match(r"USING TTL \d* AND TIMESTAMP") + self.assertRegex(query, r"USING TTL \d* AND TIMESTAMP") class UpdateWithTimestampTest(BaseTimestampTest): @@ -109,7 +110,7 @@ def test_instance_update_includes_timestamp_in_query(self): with mock.patch.object(self.session, "execute") as m: self.instance.timestamp(timedelta(seconds=30)).update(count=2) - "USING TIMESTAMP".should.be.within(m.call_args[0][0].query_string) + self.assertIn("USING TIMESTAMP", m.call_args[0][0].query_string) def test_instance_update_in_batch(self): with mock.patch.object(self.session, "execute") as m: @@ -117,7 +118,7 @@ def 
test_instance_update_in_batch(self): self.instance.batch(b).timestamp(timedelta(seconds=30)).update(count=2) query = m.call_args[0][0].query_string - "USING TIMESTAMP".should.be.within(query) + self.assertIn("USING TIMESTAMP", query) class DeleteWithTimestampTest(BaseTimestampTest): @@ -129,7 +130,7 @@ def test_non_batch(self): uid = uuid4() tmp = TestTimestampModel.create(id=uid, count=1) - TestTimestampModel.get(id=uid).should.be.ok + self.assertIsNotNone(TestTimestampModel.get(id=uid)) tmp.timestamp(timedelta(seconds=5)).delete() @@ -143,15 +144,15 @@ def test_non_batch(self): # calling .timestamp sets the TS on the model tmp.timestamp(timedelta(seconds=5)) - tmp._timestamp.should.be.ok + self.assertIsNotNone(tmp._timestamp) # calling save clears the set timestamp tmp.save() - tmp._timestamp.shouldnt.be.ok + self.assertIsNone(tmp._timestamp) tmp.timestamp(timedelta(seconds=5)) tmp.update() - tmp._timestamp.shouldnt.be.ok + self.assertIsNone(tmp._timestamp) def test_blind_delete(self): """ @@ -160,7 +161,7 @@ def test_blind_delete(self): uid = uuid4() tmp = TestTimestampModel.create(id=uid, count=1) - TestTimestampModel.get(id=uid).should.be.ok + self.assertIsNotNone(TestTimestampModel.get(id=uid)) TestTimestampModel.objects(id=uid).timestamp(timedelta(seconds=5)).delete() @@ -179,7 +180,7 @@ def test_blind_delete_with_datetime(self): uid = uuid4() tmp = TestTimestampModel.create(id=uid, count=1) - TestTimestampModel.get(id=uid).should.be.ok + self.assertIsNotNone(TestTimestampModel.get(id=uid)) plus_five_seconds = datetime.now() + timedelta(seconds=5) @@ -197,11 +198,9 @@ def test_delete_in_the_past(self): uid = uuid4() tmp = TestTimestampModel.create(id=uid, count=1) - TestTimestampModel.get(id=uid).should.be.ok + self.assertIsNotNone(TestTimestampModel.get(id=uid)) - # delete the in past, should not affect the object created above + # delete in the past, should not affect the object created above TestTimestampModel.objects(id=uid).timestamp(timedelta(seconds=-60)).delete() TestTimestampModel.get(id=uid) - - diff --git a/tests/integration/cqlengine/test_ttl.py b/tests/integration/cqlengine/test_ttl.py index a9aa32db94..0e0f8d2c28 100644 --- a/tests/integration/cqlengine/test_ttl.py +++ b/tests/integration/cqlengine/test_ttl.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,10 +15,7 @@ # limitations under the License. 
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from packaging.version import Version @@ -26,7 +25,7 @@ from cassandra.cqlengine.models import Model from uuid import uuid4 from cassandra.cqlengine import columns -import mock +from unittest import mock from cassandra.cqlengine.connection import get_session from tests.integration import CASSANDRA_VERSION, greaterthancass20 @@ -182,7 +181,7 @@ def test_default_ttl_not_set(self): self.assertEqual(default_ttl, 0) with mock.patch.object(session, 'execute') as m: - TestTTLModel.objects(id=tid).update(text="aligators") + TestTTLModel.objects(id=tid).update(text="alligators") query = m.call_args[0][0].query_string self.assertNotIn("USING TTL", query) @@ -200,7 +199,7 @@ def test_default_ttl_set(self): self.assertEqual(default_ttl, 20) with mock.patch.object(session, 'execute') as m: - TestTTLModel.objects(id=tid).update(text="aligators expired") + TestTTLModel.objects(id=tid).update(text="alligators expired") # Should not be set either query = m.call_args[0][0].query_string @@ -231,7 +230,7 @@ def test_override_default_ttl(self): self.assertEqual(o._ttl, 3600) with mock.patch.object(session, 'execute') as m: - TestDefaultTTLModel.objects(id=tid).ttl(None).update(text="aligators expired") + TestDefaultTTLModel.objects(id=tid).ttl(None).update(text="alligators expired") query = m.call_args[0][0].query_string self.assertNotIn("USING TTL", query) diff --git a/tests/integration/datatype_utils.py b/tests/integration/datatype_utils.py index 8a1c813baa..a4c4cdb4d8 100644 --- a/tests/integration/datatype_utils.py +++ b/tests/integration/datatype_utils.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,8 +16,8 @@ from decimal import Decimal from datetime import datetime, date, time +import ipaddress from uuid import uuid1, uuid4 -import six from cassandra.util import OrderedMap, Date, Time, sortedset, Duration @@ -91,11 +93,10 @@ def get_sample_data(): sample_data[datatype] = 3.4028234663852886e+38 elif datatype == 'inet': - sample_data[datatype] = ('123.123.123.123', '2001:db8:85a3:8d3:1319:8a2e:370:7348') - if six.PY3: - import ipaddress - sample_data[datatype] += (ipaddress.IPv4Address("123.123.123.123"), - ipaddress.IPv6Address('2001:db8:85a3:8d3:1319:8a2e:370:7348')) + sample_data[datatype] = ('123.123.123.123', + '2001:db8:85a3:8d3:1319:8a2e:370:7348', + ipaddress.IPv4Address("123.123.123.123"), + ipaddress.IPv6Address('2001:db8:85a3:8d3:1319:8a2e:370:7348')) elif datatype == 'int': sample_data[datatype] = 2147483647 diff --git a/tests/integration/long/__init__.py b/tests/integration/long/__init__.py index 447f4885cc..f369b97a81 100644 --- a/tests/integration/long/__init__.py +++ b/tests/integration/long/__init__.py @@ -1,20 +1,19 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest try: from ccmlib import common diff --git a/tests/integration/long/ssl/127.0.0.1.keystore b/tests/integration/long/ssl/127.0.0.1.keystore index 3855f00a1a..98193ab54e 100644 Binary files a/tests/integration/long/ssl/127.0.0.1.keystore and b/tests/integration/long/ssl/127.0.0.1.keystore differ diff --git a/tests/integration/long/ssl/cassandra.truststore b/tests/integration/long/ssl/cassandra.truststore index 4de3d31919..b31e34b8aa 100644 Binary files a/tests/integration/long/ssl/cassandra.truststore and b/tests/integration/long/ssl/cassandra.truststore differ diff --git a/tests/integration/long/ssl/client.crt_signed b/tests/integration/long/ssl/client.crt_signed index b0da180632..db3d903f19 100644 --- a/tests/integration/long/ssl/client.crt_signed +++ b/tests/integration/long/ssl/client.crt_signed @@ -1,19 +1,19 @@ -----BEGIN CERTIFICATE----- -MIIDDjCCAfYCFAdgzL+DD0fzl77VnEr2V4axiX0qMA0GCSqGSIb3DQEBCwUAMEIx +MIIDDjCCAfYCFAG4WryLorTXxNtrkEJ56zUg/XdDMA0GCSqGSIb3DQEBCwUAMEIx CzAJBgNVBAYTAlVTMREwDwYDVQQKDAhkYXRhc3RheDEPMA0GA1UECwwGZmllbGRz -MQ8wDQYDVQQDDAZyb290Q2EwHhcNMjAwMTEwMjExMTM3WhcNMjEwMTA5MjExMTM3 +MQ8wDQYDVQQDDAZyb290Q2EwHhcNMjEwMzE3MTcwNTE4WhcNMjIwMzE3MTcwNTE4 WjBFMQswCQYDVQQGEwJVUzERMA8GA1UECgwIZGF0YXN0YXgxDzANBgNVBAsMBmZp ZWxkczESMBAGA1UEAwwJMTI3LjAuMC4xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAvGQ6G4o1PYcDiRuZnWhSzq2Zh0i/73JPyUAauPzXFRjZmOQdUaro -inkyoSpJkARxL6SIGbUeKq6rD/2Std4b6T1lj5GOmmqetUtNf1tiXvgu0NFUuWh7 -KJllojuFvUgiWRN3oMEoYgN+EqIBS+wYx5Nh59UW2v+1D/zD3ckn3kHXX2c+fNjJ -UqYUbUBElpb7N9j6SouakW3UzTonvl1kxMO3UjiWyy03IVRS8zQo/zpHHOqsqYhr -G4jCWB+7JRHi8qxiA3sjA6mUPatgF46dJKlEXEP5OSsESC20CDhmOzFQFA4/PIc9 -srG9MSaZlENyhF/ZTW4CJeH9QmCmFnv4ewIDAQABMA0GCSqGSIb3DQEBCwUAA4IB -AQA5LFiDq/+2SpfmL6US0x93E4LNCut7qa7eQhqEdmnKqshO3HwmNmYzLWhpifHL -OFz3Tzz7JxuIyNNI2XPQvLcyncymw69PdIbaYiNhuwvV7DtZUuxY/z0dlDPNDOqU -NJCS6/ny2KcCK5d36b+ppvLB8DHdyQAn++7mqqvB1vyePtkKx801qC0VFvP/NVKf -R5gFWBwFX+wNz66oFhikPSxG96SaddE5XIpRWY2richacgn2/f+Z04P6AN7DyAWA -lc94hdua7X+AV5lKM7VMWcgSJq/xhGZb0GsJv9+yZwTHWnv4qOtT5FVGGIVXrl97 -yjeRAMOqzv8+w1nGGZ1Kv1Ym +MIIBCgKCAQEAnrpE3g8pbQn2tVVidX2Ww1rh/6YIH6EGW9hXMO/F506ReMruv+Al +ilc7B2sPpGRDKXupy23IcpfMIe9+Lm74/yu7pW51rJ/r2jMqg+tViFa/GQxSQLKd +AxDAvwJaAM41kro0DKmcm4RwfYAltupwc6pC7AfBtT08PBuDK7WfaNnFbhGAWkHv +MbULNWAKbPWqITHbUEvLgS/uPj+/W4SHk5GaYk0Y2mU3aWypeDOBqEfKTi2W0ix1 +O7SpOHyfA0hvXS9IilF/HWURvr9u13mnvJNe8W+uqWqlQMdyFsbPCIhbVwVwGYQp +yoyBrgz6y5SPwSyugAb2F8Yk3UpvqH30yQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB +AQB5XV+3NS5UpwpTXTYsadLL8XcdGsfITMs4MSv0N3oir++TUzTc3cOd2T6YVdEc +ypw5CKTYnFTK9oF2PZXeV+aLIjdvK4AukQurB8EdXq4Hu7y1b61OaGRqiKTVsIne +LwxCXpc42jqMFt4mMXpmU/hSCjRSvoumTcL1aHUzaPlSIasD2JDyLurO64gxQypi +wbD9gliPJ60pdhY0m9NfF5F2PdqBuJXrhF1VuxYx1/cfo/c1A4UK2slhsZCDls7/ +HbM8ri5Z74M1EtCGFcTNYvm0xlfF5arisGQSKhTw+06LnpUlQi5a8NRNBLeAmem/ +cuICJJbnSzjmq9skkp8i/ejH -----END CERTIFICATE----- diff --git a/tests/integration/long/ssl/client.key b/tests/integration/long/ssl/client.key index b942d628c1..d6b8811a94 100644 --- a/tests/integration/long/ssl/client.key +++ b/tests/integration/long/ssl/client.key @@ -1,28 +1,28 @@ -----BEGIN PRIVATE KEY----- -MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC8ZDobijU9hwOJ -G5mdaFLOrZmHSL/vck/JQBq4/NcVGNmY5B1RquiKeTKhKkmQBHEvpIgZtR4qrqsP -/ZK13hvpPWWPkY6aap61S01/W2Je+C7Q0VS5aHsomWWiO4W9SCJZE3egwShiA34S -ogFL7BjHk2Hn1Rba/7UP/MPdySfeQddfZz582MlSphRtQESWlvs32PpKi5qRbdTN -Oie+XWTEw7dSOJbLLTchVFLzNCj/Okcc6qypiGsbiMJYH7slEeLyrGIDeyMDqZQ9 
-q2AXjp0kqURcQ/k5KwRILbQIOGY7MVAUDj88hz2ysb0xJpmUQ3KEX9lNbgIl4f1C -YKYWe/h7AgMBAAECggEASoGdFY04dyfxdwUqYL2Emma/5GgaOJnOAjrPFsAwVBCq -5jO5gLYGF9XM9z5hL4sCNKRuizQ9RQYlc0KHBlRcV4dHplsbuehW8j5g3PCIXCTt -ZvqS9mzi4HCiaGIAB5cCtpXjZvldfj4BW18lAiDSwAOC4gw9aMlek38U+571nIlh -gs2rtCuWR6usJbJdFfJ4ouIPX1F4Gz5s/lYhG5jTGUAnKyE1NgqPj+PY+ZfH0qNX -AVJVNn9Ze3hT3JdEjTYFNo5c1YHui9Krlh5EMabVWgqmbI2WS8sZsRcMAluDB5lY -OaCKLFYwOIG4nGBU9TE2s7fxl3qCkBGNDhuD4n80iQKBgQDrZ233CVfSLgTyKKuV -i2niqIk1fAgLIFefHxsI1wlApLBCc58dAGhAvmHRdYSw8iBinbLXGnS9yPZvjNik -kLyWPznq1p94CZTD1KsLz5qmRlReKHJEhdajlR5LnisULvqiDc7/lk18W2fD0KeD -UnN5hUjjOeaQRjvOTghTie+1PwKBgQDM39Agd/INPr6WsYSqYPEeE9fZu27p0YUQ -4Y8zHVENeg1jUMDXgHo3weZl5n3sFMYeuYm6d9nE0k/Mjr2hL/zlnqQBG5XtJ5oW -2r7Sh1pKCxV+oYgsdFctkJKSi5Q8RyK4YrUQA55EZqDewfZlmmWW3q/fahls49Ny -uTJESV/BxQKBgDiIh10rjj64tJFfeQ2aBJzdcs44ckoRw1lAhCKUWfF/W6Ep2U2C -uobJ8f32ph5El8h3LOsBvIWTjLNvdNvYsqG2n3cpgfS3AFYjbcyRWAeUnlBakE6q -gciZWEQ6wQfA3IosnMi+1O8HmJzrMD+WforxmnaPgjKl21kJXnCJkNrLAoGAdlG3 -6Fh9UUrwVSVWgfOUrRM2sMd/yce4OsSZqCKBQfBANSBZDtxjOTphbm5MQQDKXso2 -kZtQCEyRy4iQWbvWKWKSQxWEY79gTVytofaLnYGDO2vcshfKlUUOcVXtGVbX5XcU -LJh6WfSPabbJL2qYyyX2mmezIWD+KB5uumNJyyUCgYEA6afM7Gzv/jvm8Vrqx+Gj -gDFvO0ZxkxOCtA1Qd42NYu9rgn0pMIJyukxvEXePxUu164ehE7VRN0HhofXMGbMG -R8aVqbl0w7jSN6M97vo6Xn+sRwFcgMfjo3uKgdXbdxyKnzPGxOTYUuy7Q5B0teiw -kz4fRgPkfSQ+nfVrHAgX3WA= +MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCeukTeDyltCfa1 +VWJ1fZbDWuH/pggfoQZb2Fcw78XnTpF4yu6/4CWKVzsHaw+kZEMpe6nLbchyl8wh +734ubvj/K7ulbnWsn+vaMyqD61WIVr8ZDFJAsp0DEMC/AloAzjWSujQMqZybhHB9 +gCW26nBzqkLsB8G1PTw8G4MrtZ9o2cVuEYBaQe8xtQs1YAps9aohMdtQS8uBL+4+ +P79bhIeTkZpiTRjaZTdpbKl4M4GoR8pOLZbSLHU7tKk4fJ8DSG9dL0iKUX8dZRG+ +v27Xeae8k17xb66paqVAx3IWxs8IiFtXBXAZhCnKjIGuDPrLlI/BLK6ABvYXxiTd +Sm+offTJAgMBAAECggEAN+VysRx3wy1aEvuRo7xpZjxQD/5BKBpFqfxioBogAFfb +xMT6FNnzfmc/o1ohdQvV1vr0jW4Iw8oPGfhD4Eg2KW4WM6jVicf7f6i7FR+/zDZ4 +L3L2WFBOGLFCn0FNvrDfjt9Byx/DxcR69Mc3ANZIaYMQ9Bu7LH73AlfR9oeMLpjL ++6g1qz2yz8Sm2CMCGXTyXtvUCgn2ld6nz8KlZ8FTUG9C9mAabuvV91Ko6rmTxuiv +YKvHSPnIjXRjuC+Ozjf1rYTOJ5LVMNNhlbIKBG/Nx5QzL7bA3XDtMD1BEI9pdHR+ +5HwA0tV2Ex67tBCJwlBAhYLxuPjfOj1R5KV8wriE3QKBgQDNvqOaGYiXwp9Rajoo +ltlOBPfnjshd9tPdc6tTUQR34vSbkHrg0HVJhvIP5LRbyx/M/8ACQxFkDRE4U7fJ +xVGDs8Pi0FqcqFTnm/AYQ5eZbJkPp9qe71aDOPanncrVNEGFeW26LaeLGbTLrOMM +6mTmsfGig0MKgml35IMrP+oPuwKBgQDFf56DdaFe08xSK9pDWuKxUuBIagGExQkQ +r9eYasBc336CXh3FWtpSlxl73dqtISh/HbKbv+OZfkVdbmkcTVGlWm/N/XvLqpPK +86kbKW6PY8FxIY/RxiZANf/JJ5gzPp6VQMJeSy+oepeWj11mTLcT02plvIMM0Jmg +Z5B9Hw37SwKBgDR/59lDmLI47FRnCc4fp/WbmPKSYZhwimFgyZ/p9XzuAcLMXD6P +ks4fTBc4IbmmnEfAHuu013QzTWiVHDm1SvaTYXG3/tcosPmkteBLJxz0NB5lk4io +w+eaGn5s6jv7KJj5gkFWswDwn0y1of5CtVqUn3b7jZjZ7DW2rq3TklNPAoGAIzaW +56+AfyzaQEhrWRkKVD2HmcG01Zxf+mav1RArjiOXJd1sB3UkehdQxuIOjFHeK5P6 +9YQoK4T1DyyRdydeCFJwntS0TuLyCPyaySoA+XX61pX6U5e12DsIiTATFgfzNH9g +aHmVXL/G6WRUbdn9xn4qeUs8Pnuu+IeenoB7+LMCgYBBnig9nTp81U+SGsNl2D3J +WUz4z+XzEfKU1nq2s4KNjIPB2T1ne+1x3Uso2hagtEHeuEbZoRY4dtCahAvYwrPM +8wtDFQXWmvFyN3X0Js65GZ++knuseQ1tdlbc/4C+k4u26tVe2GcwhKTjn08++L2E +UB3pLXbssswH271OjD+QkQ== -----END PRIVATE KEY----- diff --git a/tests/integration/long/ssl/client_encrypted.key b/tests/integration/long/ssl/client_encrypted.key index cb787d657f..49f475d7fe 100644 --- a/tests/integration/long/ssl/client_encrypted.key +++ b/tests/integration/long/ssl/client_encrypted.key @@ -1,30 +1,30 @@ -----BEGIN RSA PRIVATE KEY----- Proc-Type: 4,ENCRYPTED -DEK-Info: AES-256-CBC,12FC332B1EC2742035449FF59F9C5F71 +DEK-Info: AES-256-CBC,7288A409E846EBE2DE421B77598DAF98 -mjFChSmk+AX00EmSz22A+kv8X5XwtW8awkrcud2cH1tUopGv5B7PL6dfprrNp4fj 
-T9nmncH65b+GovBClEqOy6I+Tzm/WJ6aOqQnkfL48QT5KW6bM/Gzm8JSJkXSEfDZ -Cck1FvMG4ituL50Rvlw5XpFjSyFt6VEZi/s6taSLPiJmuw8cWeEkMCs3I4nZee0k -lDvsiDRcqV/+Uk7jM4umepa7fBMumI8fydtscpFxJBspBVIb4UMx3i2lFWs7NeXf -EN92S1fhq3uWBDIlQMtAAoV2qf3cPSg7+du/iLB6/Sc7mxGyQ6aVWoIyxB+dwVVX -oqmeBgyNV8xS94Qcw2aNcqyyFsU0cIitafIzBuJO9+G2/FB+UWTJwPzbp/Kk24OV -wZ1wExFcYo4UOe6FT5LAihhfkGFdpXba8XviEgQK18o8lQL1SitwGSrlBNQsohJt -3Ug9aHvTGvECBdjj7NwNP0EDs7IxMVSsOHPAJyDLdWzHe0eLqRYCWNQlWGn7LKIP -dsqJqpMJkh3G3m7Lkkb0Z4YaUoFRi5DyCB2vkBcgmvbV2QdNHwc6EWk4OnFTq4vE -kWrMGa7lQkxnXV12NUNaUoHbsX7waeO5d81IUHbfa6r3/N046QGDrkLAFnsghfiS -I76y8QklqVIp5/5t4hg96n55lSqxbDAikHMrTI5niHUajqU74xkoCgh/SpwwwndT -QwdRH8jAMLyBf4xf5PQMPJwiNltX7QE6XB9sD80f9SJdf3xHF9L3H++ANPDzhHJC -ntGd/JmsMv+G/u+VLPTF9hB83UEMuTi1OL2Fm7V41yR6ZROYDUXyf8hhUxKAGasu -Hn1Q41mVb1nFE2NaD65+6MD2UtqBBXaD8raTGzcauxgkNTg4JroMM3vc+PSanSUp -ZSN28MYp4XlGl5dDhpoY782V09YIweLKAqC/4NZK9X0r563BLj5pektuqNlNG9N3 -E+X9F4fP4tjOYGeVDhZ1sYBYsUfHrkbngacGpnD+eis+ReSaBE7T5/jfxnbW9AW7 -Cj4qRlUYvyYznvHxCrQ8Su6IleW49z0lD8jLmxUPZkzMqEX8tYe8dxz5AaG2Egb9 -bhwheQiiih1LTv9ZuLMGyARQcOZsPqsi1SRMzaln5f/PNlh4RCP+JbjvHxy5IMAN -g5ti9ejF2D7A1YOxlyqv0xvN5z4OEkXngg3HCT/FFgJWsppEG6nbpgpDlnC/xBiJ -3VgtwmU/CsJ11lkCZihl2xMxiazA0WoKMNs8N4qxjr5YEnyBwRqFWjpMgXUyjcPH -H+QBAkUL11qbr8O0Sj2cn/urVZNCQMMDpYT2VDbJK6vrj2ZyEsH3XeeQr2ohevwp -4GVyIDgzvP/+B67k65Pvj6iUXI2kXqZhhURBPKDo00XCFSz+9guDJX7HeRII1D1k -VzW3aly1SvrEUgI30FpC1L47LY/NksVybSD1kxNMMdb1g/yVJCzGeMgMyiyreaxT -+UXJ5/sZIQ+FZqzh24EShM/JmUC9epO0nl8nee8FAd9kY9kkDCjTXf3KzYTiyKN8 -ma2xnLwWWK1bqj282/dj6D/QlXYgePb90Duq5X19HOTe6wouNnr6fx8ZH/k/tAGQ +ahiUSf+k9POIEUJb5BGbQ6knk4+FtTz+e+6fouqVc4Lq+RXR9f0pFBi9eDEkFNiN +AcUjLkxh+3TmihTZJprqXSbQ3jacwbnwDOFgtZE3PxoA1heHxADaKCNr+Ph0lC/T +3cIzsoIZ6slk+3n6ERieZRdmvoMH1SY8nXKT5+bLMR4RIjw1y7h26MRhjQS+lXaX +Asd5EOGROCIgefeEBGHAbrlg0FoHy7slqVBxuZphTHKtyK/VK4fRLt6doUzBu5GJ +T2jdrqJCWr5PRn3bAqMemJWxDhZLX4DyNDQPn8riZ8jMbwPOVUSnF8B8re1tNkQ0 +CsH77sYIIjmPdizCdvj91+jH6o7MRCZPvky+PHG/9G5WsPiw5W1i/nrPemT1XJyy +oPRc/fMFfbHmW3HCGqgv2/6Wg+17un/a6UyzXsbNdhDZLCVqtAQ7PSv83z5oUazT +djzFHgxSqRknUY0lOUvP8Rni67MG+Rcksj9HgszhLoC0be64IX0Ey5oc5+pBYrf9 +FVEPsuyyu4aDSRYYATC2E1V/EQRwcvpKEZNFTbqMpQhjrWtlBM/GgQnQBeQdLAGX +yefDSzkH31y5gcdgHLElriWwbHHbcaAmf3e15W94YHgTytJBsQ9A19SmtmgUmo4h +jaFoUooM5mFA8hc/snSe2PdkEefkzS72g8qxa//61LTJAAkVk43dYjoqQ34wq6WR +OB4nn/W2xlfv/ClZJTWf8YvQTrQptJY5VQq/TTEcrXy67Uc0wRHXZK2rTjKeyRj9 +65SkyyXhMopWEl2vX25ReITVfdJ0FgjqI/ugYSf25iOfJtsk+jgrtrswZ+8F2eMq +iAQ+0JSiYmlot2Pn1QCalLjtTz8zeMfXPyo5fbKNMdp52U1cPYld90kUGHZfjqju +GmY/aHa6N8lZGxj8SC/JM36GawaGKe4S/F5BetYJOpaEzkpowqlTC8Syv529rm46 +vvgf+EJL8gRvdtnIEe/qtzbtel299VhaBpuOcApfTDSxRHZmvkCpdHo9I3KgOZB9 +Cqu9Bz+FiJmTk8rGQwmI8EYj38jneEoqA+fN7tUkzxCGacg+x6ke4nOcJzgBhd94 +8DvGclrcAwBY1mlNYRceFJKFXhwLZTKBojZlS8Q9863EAH3DOBLeP85V3YvBD/MK +O+kzPoxN/jPVNho7y4gL7skcqe/IXePzPxBcZrHJjoU7mGVDcVcouRj16XSezMbB +5Pft0/gGiItRJ2+v9DlPjzDfjTuRdS78muaZ4nNqX6B+JmyPJtkb2CdiHz6B21RO +3hjGrffM1nhmYBegyjTVc88IxzYg0T8CZLq1FYxuTZmwyahA520IpwsbfwXxLVMU +5rmou5dj1pVlvoP3l+ivPqugeY3k7UjZ33m5H9p009JR40dybr1S2RbI8Gqhe953 +0bedA4DWvPakODXgYu43al92uR/tyjazeB5t7Iu8uB5Xcm3/Mqoofe9xtdQSCWa0 +jKKvXzSpL1MM2C0bRyYHIkVR65K7Zmi/BzvTaPECo1+Uv+EwqRZRyBzUZKPP8LMq +jTCOBmYaK8+0dTRk8MEzrPW2ihVVJYVMmFyTZKW0iK7kOMKZRkhDCaNSUlPEty7j -----END RSA PRIVATE KEY----- diff --git a/tests/integration/long/ssl/rootCa.crt b/tests/integration/long/ssl/rootCa.crt index ee7805354b..a0a0ec73cf 100644 --- a/tests/integration/long/ssl/rootCa.crt +++ b/tests/integration/long/ssl/rootCa.crt @@ -1,19 +1,19 @@ -----BEGIN CERTIFICATE----- 
-MIIDCzCCAfMCFG+iEODhRSw0SUMut7vX0hPTLCe1MA0GCSqGSIb3DQEBCwUAMEIx +MIIDCzCCAfMCFCoTNYhIQpOXMBnAq8Bw72qfKwGLMA0GCSqGSIb3DQEBCwUAMEIx CzAJBgNVBAYTAlVTMREwDwYDVQQKDAhkYXRhc3RheDEPMA0GA1UECwwGZmllbGRz -MQ8wDQYDVQQDDAZyb290Q2EwHhcNMjAwMTEwMjExMTM1WhcNMzAwMTA3MjExMTM1 +MQ8wDQYDVQQDDAZyb290Q2EwHhcNMjEwMzE3MTcwNTE2WhcNMzEwMzE1MTcwNTE2 WjBCMQswCQYDVQQGEwJVUzERMA8GA1UECgwIZGF0YXN0YXgxDzANBgNVBAsMBmZp ZWxkczEPMA0GA1UEAwwGcm9vdENhMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEAvLrr+qvLvUHZe4Py2QvqlAh/SMxscWsPKfCvJLYS4SX4tDxyn/Xgn1+M -9XMlhtnpJHSdL6bzVm3PMDJmPe+8zKa8s/vsbKCOakIi9GmjxGHMdRfN3NsubRvZ -sr6rX4VFxu6ATmZU9q1vNlOBSoFntOSTYTQH9svrQxsM3dNnDOXr3eFUAvNaNp/l -/YFcjD0LBSVYBUvLf+1RvIDDXS8nTCHZLaKbCevGBMMy8tWYgDD4CD8q9gQ3xail -yc7ES94NiZ6OB+a9GbUce3308NqVmWpDhXBzXshKy5TaH4VfthkJlnnYATjsYI/1 -XkLWVCSgwI2Yl7fHmttJXvmA/zggbQIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCX -05lSh/MBzbxg6zEjQdyN3Mm/TNiZv+wIDDddYKVA8psX0gLiCgnpC0B4BK+ualYk -AQwJvgQQzbdxy/7RC/a0OEvXg4syjLs3vRq4fgCXebzfbpxhqykCO7czyS4WPZp3 -8AEeqjV0iZHE2WN1+kfWT6Dwc8rjFxWyLB4qgtKxpeW8sNdpSussNpD+IoKpIzzh -VBNRzb+g1tc87zDnvy7GfEjLRke57sZ82QN/bGZakdVgfppg1mbnrrbsOPEMSTMU -iCQo2JP64HS7oGmYUp5KQFge0fGqou0Ww/rkVp2AtR/tNKw4XwZybTQgwsdvz54S -KEmDs3I5S2S5QO5hRDG+ +CgKCAQEApFoQtNu0+XQuMBPle4WAJYIMR74HL15uk9ToKBqMEXL7ah3r23xTTeGr +NyUXicM6Owiup7DK27F4vni+MYKAn7L4uZ99mW0ATYNXBDLFB+wwy1JBk4Dw5+eZ +q9lz1TGK7uBvTOXCllOA2qxRqtMTl2aPy5OuciWQe794abwFqs5+1l9GEuzJGsp1 +P9L4yljbmijC8RmvDFAeUZoKRdKXw2G5kUOHqK9Aej5gLxIK920PezpgLxm0V/PD +ZAlwlsW0vT79RgZCF/vtKcKSLtFTHgPBNPPbkZmOdE7s/6KoAkORBV/9CIsKeTC3 +Y/YeYQ2+G0gxiq1RcMavPw8f58POTQIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQA1 +MXBlk6u2oVBM+4SyYc2nsaHyerM+omUEysAUNFJq6S6i0pu32ULcusDfrnrIQoyR +xPJ/GSYqZkIDX0s9LvPVD6A6bnugR+Z6VfEniLkG1+TkFC+JMCblgJyaF/EbuayU +3iJX+uj7ikTySjMSDvXxOHik2i0aOh90B/351+sFnSPQrFDQ0XqxeG8s0d7EiLTV +wWJmsYglSeTo1vF3ilVRwjmHO9sX6cmQhRvRNmiQrdWaM3gLS5F6yoQ2UQQ3YdFp +quhYuNwy0Ip6ZpORHYtzkCKSanz/oUh17QWvi7aaJyqD5G5hWZgn3R4RCutoOHRS +TEJ+xzhY768rpsrrNUou -----END CERTIFICATE----- diff --git a/tests/integration/long/test_consistency.py b/tests/integration/long/test_consistency.py index bbf446861a..dfb30297f0 100644 --- a/tests/integration/long/test_consistency.py +++ b/tests/integration/long/test_consistency.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -28,10 +30,7 @@ force_stop, create_schema, wait_for_down, wait_for_up, start, CoordinatorStats ) -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest ALL_CONSISTENCY_LEVELS = { ConsistencyLevel.ANY, ConsistencyLevel.ONE, ConsistencyLevel.TWO, ConsistencyLevel.QUORUM, diff --git a/tests/integration/long/test_failure_types.py b/tests/integration/long/test_failure_types.py index 6bdff8d15d..c4751657e8 100644 --- a/tests/integration/long/test_failure_types.py +++ b/tests/integration/long/test_failure_types.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -17,8 +19,7 @@ import traceback import time from packaging.version import Version - -from mock import Mock +from unittest.mock import Mock from cassandra.policies import HostFilterPolicy, RoundRobinPolicy from cassandra import ( @@ -34,10 +35,7 @@ local, CASSANDRA_VERSION, TestCluster) -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest log = logging.getLogger(__name__) diff --git a/tests/integration/long/test_ipv6.py b/tests/integration/long/test_ipv6.py index a49c1677e8..e20f11cc9c 100644 --- a/tests/integration/long/test_ipv6.py +++ b/tests/integration/long/test_ipv6.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -30,10 +32,7 @@ except ImportError: LibevConnection = None -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest # If more modules do IPV6 testing, this can be moved down to integration.__init__. @@ -85,7 +84,7 @@ def test_connect(self): def test_error(self): cluster = TestCluster(connection_class=self.connection_class, contact_points=['::1'], port=9043, connect_timeout=10) - self.assertRaisesRegexp(NoHostAvailable, '\(\'Unable to connect.*%s.*::1\', 9043.*Connection refused.*' + self.assertRaisesRegex(NoHostAvailable, '\(\'Unable to connect.*%s.*::1\', 9043.*Connection refused.*' % errno.ECONNREFUSED, cluster.connect) def test_error_multiple(self): @@ -93,7 +92,7 @@ def test_error_multiple(self): raise unittest.SkipTest('localhost only resolves one address') cluster = TestCluster(connection_class=self.connection_class, contact_points=['localhost'], port=9043, connect_timeout=10) - self.assertRaisesRegexp(NoHostAvailable, '\(\'Unable to connect.*Tried connecting to \[\(.*\(.*\].*Last error', + self.assertRaisesRegex(NoHostAvailable, '\(\'Unable to connect.*Tried connecting to \[\(.*\(.*\].*Last error', cluster.connect) diff --git a/tests/integration/long/test_large_data.py b/tests/integration/long/test_large_data.py index ce7e4398da..8ff482271e 100644 --- a/tests/integration/long/test_large_data.py +++ b/tests/integration/long/test_large_data.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -27,10 +29,7 @@ from tests.integration import use_singledc, PROTOCOL_VERSION, TestCluster from tests.integration.long.utils import create_schema -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest log = logging.getLogger(__name__) diff --git a/tests/integration/long/test_loadbalancingpolicies.py b/tests/integration/long/test_loadbalancingpolicies.py index f245569a80..7cb173643c 100644 --- a/tests/integration/long/test_loadbalancingpolicies.py +++ b/tests/integration/long/test_loadbalancingpolicies.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -36,10 +38,7 @@ wait_for_down, decommission, start, bootstrap, stop, IP_FORMAT) -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest log = logging.getLogger(__name__) diff --git a/tests/integration/long/test_policies.py b/tests/integration/long/test_policies.py index 0648e6cc93..751c6131ec 100644 --- a/tests/integration/long/test_policies.py +++ b/tests/integration/long/test_policies.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra import ConsistencyLevel, Unavailable from cassandra.cluster import ExecutionProfile, EXEC_PROFILE_DEFAULT diff --git a/tests/integration/long/test_schema.py b/tests/integration/long/test_schema.py index e2945a117b..4e6784a967 100644 --- a/tests/integration/long/test_schema.py +++ b/tests/integration/long/test_schema.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -21,10 +23,7 @@ import time -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest log = logging.getLogger(__name__) diff --git a/tests/integration/long/test_ssl.py b/tests/integration/long/test_ssl.py index 7698849945..5d86063d3e 100644 --- a/tests/integration/long/test_ssl.py +++ b/tests/integration/long/test_ssl.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest import os, sys, traceback, logging, ssl, time, math, uuid from cassandra.cluster import NoHostAvailable @@ -31,7 +30,7 @@ if not hasattr(ssl, 'match_hostname'): try: - from backports.ssl_match_hostname import match_hostname + from ssl import match_hostname ssl.match_hostname = match_hostname except ImportError: pass # tests will fail @@ -54,11 +53,11 @@ USES_PYOPENSSL = "twisted" in EVENT_LOOP_MANAGER or "eventlet" in EVENT_LOOP_MANAGER if "twisted" in EVENT_LOOP_MANAGER: import OpenSSL - ssl_version = OpenSSL.SSL.TLSv1_METHOD + ssl_version = OpenSSL.SSL.TLSv1_2_METHOD verify_certs = {'cert_reqs': SSL.VERIFY_PEER, 'check_hostname': True} else: - ssl_version = ssl.PROTOCOL_TLSv1 + ssl_version = ssl.PROTOCOL_TLS verify_certs = {'cert_reqs': ssl.CERT_REQUIRED, 'check_hostname': True} @@ -404,7 +403,7 @@ def test_can_connect_with_sslcontext_certificate(self): @test_category connection:ssl """ if USES_PYOPENSSL: - ssl_context = SSL.Context(SSL.TLSv1_METHOD) + ssl_context = SSL.Context(SSL.TLSv1_2_METHOD) ssl_context.load_verify_locations(CLIENT_CA_CERTS) else: ssl_context = ssl.SSLContext(ssl_version) @@ -428,7 +427,7 @@ def test_can_connect_with_ssl_client_auth_password_private_key(self): ssl_options = {} if USES_PYOPENSSL: - ssl_context = SSL.Context(SSL.TLSv1_METHOD) + ssl_context = SSL.Context(SSL.TLSv1_2_METHOD) ssl_context.use_certificate_file(abs_driver_certfile) with open(abs_driver_keyfile) as keyfile: key = crypto.load_privatekey(crypto.FILETYPE_PEM, keyfile.read(), b'cassandra') @@ -449,7 +448,7 @@ def test_can_connect_with_ssl_context_ca_host_match(self): """ ssl_options = {} if USES_PYOPENSSL: - ssl_context = SSL.Context(SSL.TLSv1_METHOD) + ssl_context = SSL.Context(SSL.TLSv1_2_METHOD) 
ssl_context.use_certificate_file(DRIVER_CERTFILE) with open(DRIVER_KEYFILE_ENCRYPTED) as keyfile: key = crypto.load_privatekey(crypto.FILETYPE_PEM, keyfile.read(), b'cassandra') @@ -472,7 +471,7 @@ def test_can_connect_with_ssl_context_ca_host_match(self): def test_cannot_connect_ssl_context_with_invalid_hostname(self): ssl_options = {} if USES_PYOPENSSL: - ssl_context = SSL.Context(SSL.TLSv1_METHOD) + ssl_context = SSL.Context(SSL.TLSv1_2_METHOD) ssl_context.use_certificate_file(DRIVER_CERTFILE) with open(DRIVER_KEYFILE_ENCRYPTED) as keyfile: key = crypto.load_privatekey(crypto.FILETYPE_PEM, keyfile.read(), b"cassandra") diff --git a/tests/integration/long/utils.py b/tests/integration/long/utils.py index a5b5bdd226..cdbb177ec4 100644 --- a/tests/integration/long/utils.py +++ b/tests/integration/long/utils.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import print_function import logging import time diff --git a/tests/integration/simulacron/__init__.py b/tests/integration/simulacron/__init__.py index 6543265db2..c959fd6e08 100644 --- a/tests/integration/simulacron/__init__.py +++ b/tests/integration/simulacron/__init__.py @@ -11,10 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from tests.integration import requiredse, CASSANDRA_VERSION, DSE_VERSION, SIMULACRON_JAR, PROTOCOL_VERSION from tests.integration.simulacron.utils import ( diff --git a/tests/integration/simulacron/advanced/__init__.py b/tests/integration/simulacron/advanced/__init__.py index 2c9ca172f8..635f0d9e60 100644 --- a/tests/integration/simulacron/advanced/__init__.py +++ b/tests/integration/simulacron/advanced/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/simulacron/advanced/test_insights.py b/tests/integration/simulacron/advanced/test_insights.py index 3da14659af..07005a479b 100644 --- a/tests/integration/simulacron/advanced/test_insights.py +++ b/tests/integration/simulacron/advanced/test_insights.py @@ -1,20 +1,19 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import time import json diff --git a/tests/integration/simulacron/conftest.py b/tests/integration/simulacron/conftest.py new file mode 100644 index 0000000000..a4377996bb --- /dev/null +++ b/tests/integration/simulacron/conftest.py @@ -0,0 +1,9 @@ +import pytest + +from tests.integration.simulacron import teardown_package + +@pytest.fixture(scope='session', autouse=True) +def setup_and_teardown_packages(): + print('setup') + yield + teardown_package() \ No newline at end of file diff --git a/tests/integration/simulacron/test_backpressure.py b/tests/integration/simulacron/test_backpressure.py index 69c38da8fe..0418c05814 100644 --- a/tests/integration/simulacron/test_backpressure.py +++ b/tests/integration/simulacron/test_backpressure.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/simulacron/test_cluster.py b/tests/integration/simulacron/test_cluster.py index b89f564f08..2dfbc1f786 100644 --- a/tests/integration/simulacron/test_cluster.py +++ b/tests/integration/simulacron/test_cluster.py @@ -1,20 +1,19 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import logging from packaging.version import Version diff --git a/tests/integration/simulacron/test_connection.py b/tests/integration/simulacron/test_connection.py index 4ef97247a6..de8060da2d 100644 --- a/tests/integration/simulacron/test_connection.py +++ b/tests/integration/simulacron/test_connection.py @@ -1,25 +1,23 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import logging import time - -from mock import Mock, patch +from unittest.mock import Mock, patch from cassandra import OperationTimedOut from cassandra.cluster import (EXEC_PROFILE_DEFAULT, Cluster, ExecutionProfile, @@ -265,7 +263,7 @@ def connection_factory(self, *args, **kwargs): prime_request(PrimeOptions(then={"result": "no_result", "delay_in_ms": never})) prime_request(RejectConnections("unbind")) - self.assertRaisesRegexp(OperationTimedOut, "Connection defunct by heartbeat", future.result) + self.assertRaisesRegex(OperationTimedOut, "Connection defunct by heartbeat", future.result) def test_close_when_query(self): """ diff --git a/tests/integration/simulacron/test_empty_column.py b/tests/integration/simulacron/test_empty_column.py index bd7fe6ead0..38d4c0f2a9 100644 --- a/tests/integration/simulacron/test_empty_column.py +++ b/tests/integration/simulacron/test_empty_column.py @@ -1,20 +1,19 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from collections import namedtuple, OrderedDict @@ -27,8 +26,8 @@ from cassandra.cqlengine.connection import set_session from cassandra.cqlengine.models import Model -from tests.integration import PROTOCOL_VERSION, requiressimulacron -from tests.integration.simulacron import SimulacronCluster +from tests.integration import requiressimulacron +from tests.integration.simulacron import PROTOCOL_VERSION, SimulacronCluster from tests.integration.simulacron.utils import PrimeQuery, prime_request diff --git a/tests/integration/simulacron/test_endpoint.py b/tests/integration/simulacron/test_endpoint.py index 691fcc8718..6ab190091d 100644 --- a/tests/integration/simulacron/test_endpoint.py +++ b/tests/integration/simulacron/test_endpoint.py @@ -1,20 +1,19 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from functools import total_ordering diff --git a/tests/integration/simulacron/test_policies.py b/tests/integration/simulacron/test_policies.py index 855a4de3ca..a41fd54c59 100644 --- a/tests/integration/simulacron/test_policies.py +++ b/tests/integration/simulacron/test_policies.py @@ -1,20 +1,19 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra import OperationTimedOut, WriteTimeout from cassandra.cluster import Cluster, ExecutionProfile, ResponseFuture, EXEC_PROFILE_DEFAULT, NoHostAvailable @@ -184,7 +183,7 @@ def test_delay_can_be_0(self): spec = ExecutionProfile(load_balancing_policy=RoundRobinPolicy(), speculative_execution_policy=ConstantSpeculativeExecutionPolicy(0, number_of_requests)) - cluster = Cluster(compression=False) + cluster = Cluster(protocol_version=PROTOCOL_VERSION, compression=False) cluster.add_execution_profile("spec", spec) session = cluster.connect(wait_for_all_pools=True) self.addCleanup(cluster.shutdown) diff --git a/tests/integration/simulacron/utils.py b/tests/integration/simulacron/utils.py index ba9573fd23..01d94fc539 100644 --- a/tests/integration/simulacron/utils.py +++ b/tests/integration/simulacron/utils.py @@ -15,7 +15,7 @@ import json import subprocess import time -from six.moves.urllib.request import build_opener, Request, HTTPHandler +from urllib.request import build_opener, Request, HTTPHandler from cassandra.metadata import SchemaParserV4, SchemaParserDSE68 diff --git a/tests/integration/standard/__init__.py b/tests/integration/standard/__init__.py index e54b6fd6bd..13d6eb6071 100644 --- a/tests/integration/standard/__init__.py +++ b/tests/integration/standard/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
 #
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,10 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest  # noqa
+import unittest

 try:
     from ccmlib import common
diff --git a/tests/integration/standard/column_encryption/test_policies.py b/tests/integration/standard/column_encryption/test_policies.py
new file mode 100644
index 0000000000..0d692ac5c1
--- /dev/null
+++ b/tests/integration/standard/column_encryption/test_policies.py
@@ -0,0 +1,174 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import unittest
+
+from tests.integration import use_singledc, TestCluster
+
+from cassandra.policies import ColDesc
+
+from cassandra.column_encryption.policies import AES256ColumnEncryptionPolicy, \
+    AES256_KEY_SIZE_BYTES, AES256_BLOCK_SIZE_BYTES
+
+
+def setup_module():
+    use_singledc()
+
+
+class ColumnEncryptionPolicyTest(unittest.TestCase):
+
+    def _recreate_keyspace(self, session):
+        session.execute("drop keyspace if exists foo")
+        session.execute("CREATE KEYSPACE foo WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'}")
+        session.execute("CREATE TABLE foo.bar(encrypted blob, unencrypted int, primary key(unencrypted))")
+
+    def _create_policy(self, key, iv = None):
+        cl_policy = AES256ColumnEncryptionPolicy()
+        col_desc = ColDesc('foo','bar','encrypted')
+        cl_policy.add_column(col_desc, key, "int")
+        return (col_desc, cl_policy)
+
+    def test_end_to_end_prepared(self):
+
+        # We only currently perform testing on a single type/expected value pair since CLE functionality is essentially
+        # independent of the underlying type. We intercept data after it's been encoded when it's going out and before it's
+        # decoded when coming back; the actual types of the data involved don't impact us.
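+        #
+        # In outline (recapping the helpers above): _create_policy binds
+        # ColDesc('foo', 'bar', 'encrypted') to an AES-256 key via add_column, and
+        # TestCluster(column_encryption_policy=cl_policy) installs the policy on the
+        # cluster, after which values bound to that column are encrypted client-side
+        # on write and decrypted transparently on read.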
+        expected = 0
+
+        key = os.urandom(AES256_KEY_SIZE_BYTES)
+        (_, cl_policy) = self._create_policy(key)
+        cluster = TestCluster(column_encryption_policy=cl_policy)
+        session = cluster.connect()
+        self._recreate_keyspace(session)
+
+        prepared = session.prepare("insert into foo.bar (encrypted, unencrypted) values (?,?)")
+        for i in range(100):
+            session.execute(prepared, (i, i))
+
+        # A straight select from the database will now return the decrypted bits. We select both encrypted and unencrypted
+        # values here to confirm that we don't interfere with regular processing of unencrypted vals.
+        (encrypted, unencrypted) = session.execute("select encrypted, unencrypted from foo.bar where unencrypted = %s allow filtering", (expected,)).one()
+        self.assertEqual(expected, encrypted)
+        self.assertEqual(expected, unencrypted)
+
+        # Confirm the same behaviour from a subsequent prepared statement as well
+        prepared = session.prepare("select encrypted, unencrypted from foo.bar where unencrypted = ? allow filtering")
+        (encrypted, unencrypted) = session.execute(prepared, [expected]).one()
+        self.assertEqual(expected, encrypted)
+        self.assertEqual(expected, unencrypted)
+
+    def test_end_to_end_simple(self):
+
+        expected = 1
+
+        key = os.urandom(AES256_KEY_SIZE_BYTES)
+        (col_desc, cl_policy) = self._create_policy(key)
+        cluster = TestCluster(column_encryption_policy=cl_policy)
+        session = cluster.connect()
+        self._recreate_keyspace(session)
+
+        # Use encode_and_encrypt helper function to populate data
+        for i in range(1, 100):
+            self.assertIsNotNone(i)
+            encrypted = cl_policy.encode_and_encrypt(col_desc, i)
+            session.execute("insert into foo.bar (encrypted, unencrypted) values (%s,%s)", (encrypted, i))
+
+        # A straight select from the database will now return the decrypted bits. We select both encrypted and unencrypted
+        # values here to confirm that we don't interfere with regular processing of unencrypted vals.
+        (encrypted, unencrypted) = session.execute("select encrypted, unencrypted from foo.bar where unencrypted = %s allow filtering", (expected,)).one()
+        self.assertEqual(expected, encrypted)
+        self.assertEqual(expected, unencrypted)
+
+        # Confirm the same behaviour from a subsequent prepared statement as well
+        prepared = session.prepare("select encrypted, unencrypted from foo.bar where unencrypted = ? allow filtering")
+        (encrypted, unencrypted) = session.execute(prepared, [expected]).one()
+        self.assertEqual(expected, encrypted)
+        self.assertEqual(expected, unencrypted)
+
+    def test_end_to_end_different_cle_contexts_different_ivs(self):
+        """
+        Test to validate PYTHON-1350. We should be able to decode the data from two different contexts (with two different IVs)
+        since the IV used to decrypt the data is actually now stored with the data.
+        """
+
+        expected = 2
+
+        key = os.urandom(AES256_KEY_SIZE_BYTES)
+
+        # Simulate the creation of two AES256 policies at two different times. Python evaluates
+        # default param args once, at function definition time, so a single value will be used any time
+        # the default val is used. Upshot is that within the same test we'll always have the same
+        # IV if we rely on the default args, so manually introduce some variation here to simulate
+        # what actually happens if you have two distinct sessions created at two different times.
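+        #
+        # The gotcha in miniature (an illustrative sketch, not part of the test):
+        #
+        #     def make_iv(iv=os.urandom(16)):   # default evaluated exactly once
+        #         return iv
+        #
+        #     make_iv() == make_iv()            # True: the same bytes on every call
+        #     make_iv(os.urandom(16))           # passing it explicitly restores variation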
+        iv1 = os.urandom(AES256_BLOCK_SIZE_BYTES)
+        (col_desc1, cl_policy1) = self._create_policy(key, iv=iv1)
+        cluster1 = TestCluster(column_encryption_policy=cl_policy1)
+        session1 = cluster1.connect()
+        self._recreate_keyspace(session1)
+
+        # Use encode_and_encrypt helper function to populate data
+        for i in range(1, 100):
+            self.assertIsNotNone(i)
+            encrypted = cl_policy1.encode_and_encrypt(col_desc1, i)
+            session1.execute("insert into foo.bar (encrypted, unencrypted) values (%s,%s)", (encrypted, i))
+        session1.shutdown()
+        cluster1.shutdown()
+
+        # Explicitly clear the class-level cache here; we're trying to simulate a second connection from a completely new process and
+        # that would entail not re-using any cached ciphers
+        AES256ColumnEncryptionPolicy._build_cipher.cache_clear()
+        cache_info = cl_policy1.cache_info()
+        self.assertEqual(cache_info.currsize, 0)
+
+        iv2 = os.urandom(AES256_BLOCK_SIZE_BYTES)
+        (_, cl_policy2) = self._create_policy(key, iv=iv2)
+        cluster2 = TestCluster(column_encryption_policy=cl_policy2)
+        session2 = cluster2.connect()
+        (encrypted, unencrypted) = session2.execute("select encrypted, unencrypted from foo.bar where unencrypted = %s allow filtering", (expected,)).one()
+        self.assertEqual(expected, encrypted)
+        self.assertEqual(expected, unencrypted)
+
+    def test_end_to_end_different_cle_contexts_different_policies(self):
+        """
+        Test to validate PYTHON-1356. Class variables used to pass CLE policy down to protocol handler shouldn't persist.
+        """
+
+        expected = 3
+
+        key = os.urandom(AES256_KEY_SIZE_BYTES)
+        (col_desc, cl_policy) = self._create_policy(key)
+        cluster = TestCluster(column_encryption_policy=cl_policy)
+        session = cluster.connect()
+        self._recreate_keyspace(session)
+
+        # Use encode_and_encrypt helper function to populate data
+        session.execute("insert into foo.bar (encrypted, unencrypted) values (%s,%s)", (cl_policy.encode_and_encrypt(col_desc, expected), expected))
+
+        # We now open a new session _without_ the CLE policy specified. We should _not_ be able to read decrypted bits from this session.
+        cluster2 = TestCluster()
+        session2 = cluster2.connect()
+
+        # A straight select from this session will return the still-encrypted bits. We select both encrypted and unencrypted
+        # values here to confirm that we don't interfere with regular processing of unencrypted vals.
+        (encrypted, unencrypted) = session2.execute("select encrypted, unencrypted from foo.bar where unencrypted = %s allow filtering", (expected,)).one()
+        self.assertEqual(cl_policy.encode_and_encrypt(col_desc, expected), encrypted)
+        self.assertEqual(expected, unencrypted)
+
+        # Confirm the same behaviour from a subsequent prepared statement as well
+        prepared = session2.prepare("select encrypted, unencrypted from foo.bar where unencrypted = ? allow filtering")
+        (encrypted, unencrypted) = session2.execute(prepared, [expected]).one()
+        self.assertEqual(cl_policy.encode_and_encrypt(col_desc, expected), encrypted)
diff --git a/tests/integration/standard/conftest.py b/tests/integration/standard/conftest.py
new file mode 100644
index 0000000000..6028c2a06d
--- /dev/null
+++ b/tests/integration/standard/conftest.py
@@ -0,0 +1,13 @@
+import pytest
+import logging
+
+# from https://github.com/streamlit/streamlit/pull/5047/files
+def pytest_sessionfinish():
+    # We're not waiting for scriptrunner threads to cleanly close before ending the PyTest session,
+    # which results in a raised ValueError: I/O operation on closed file.
+    # This is a well-known issue in PyTest; check out these discussions for more:
+    # * https://github.com/pytest-dev/pytest/issues/5502
+    # * https://github.com/pytest-dev/pytest/issues/5282
+    # To prevent the exception from being raised on pytest_sessionfinish
+    # we disable exception raising in the logging module
+    logging.raiseExceptions = False
\ No newline at end of file
diff --git a/tests/integration/standard/test_authentication.py b/tests/integration/standard/test_authentication.py
index 9755c5098b..2d47a93529 100644
--- a/tests/integration/standard/test_authentication.py
+++ b/tests/integration/standard/test_authentication.py
@@ -1,10 +1,12 @@
-# Copyright DataStax, Inc.
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
 #
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,20 +14,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from packaging.version import Version
 import logging
 import time

 from cassandra.cluster import NoHostAvailable
 from cassandra.auth import PlainTextAuthProvider, SASLClient, SaslAuthProvider

-from tests.integration import use_singledc, get_cluster, remove_cluster, PROTOCOL_VERSION, CASSANDRA_IP, \
-    USE_CASS_EXTERNAL, start_cluster_wait_for_up, TestCluster
+from tests.integration import use_singledc, get_cluster, remove_cluster, PROTOCOL_VERSION, \
+    CASSANDRA_IP, CASSANDRA_VERSION, USE_CASS_EXTERNAL, start_cluster_wait_for_up, TestCluster
 from tests.integration.util import assert_quiescent_pool_state

-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest

 log = logging.getLogger(__name__)

@@ -45,12 +45,19 @@ def setup_module():
         log.debug("Starting ccm test cluster with %s", config_options)
         start_cluster_wait_for_up(ccm_cluster)

+    # PYTHON-1328
+    #
+    # Give the cluster enough time to start up (and perform necessary initialization)
+    # before executing the test.
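+    #
+    # Why '4.0-a' works as the cutoff (an illustrative aside): packaging
+    # normalizes it to the pre-release 4.0a0, which sorts before every 4.0
+    # beta, rc and final build:
+    #
+    #     from packaging.version import Version
+    #     assert Version('4.0-a') == Version('4.0a0')
+    #     assert Version('4.0-a') < Version('4.0-beta5') < Version('4.0')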
+ if CASSANDRA_VERSION > Version('4.0-a'): + time.sleep(10) def teardown_module(): remove_cluster() # this test messes with config class AuthenticationTests(unittest.TestCase): + """ Tests to cover basic authentication functionality """ @@ -89,6 +96,7 @@ def cluster_as(self, usr, pwd): raise Exception('Unable to connect with creds: {}/{}'.format(usr, pwd)) def test_auth_connect(self): + user = 'u' passwd = 'password' @@ -115,7 +123,7 @@ def test_auth_connect(self): def test_connect_wrong_pwd(self): cluster = self.cluster_as('cassandra', 'wrong_pass') try: - self.assertRaisesRegexp(NoHostAvailable, + self.assertRaisesRegex(NoHostAvailable, '.*AuthenticationFailed.', cluster.connect) assert_quiescent_pool_state(self, cluster) @@ -125,7 +133,7 @@ def test_connect_wrong_pwd(self): def test_connect_wrong_username(self): cluster = self.cluster_as('wrong_user', 'cassandra') try: - self.assertRaisesRegexp(NoHostAvailable, + self.assertRaisesRegex(NoHostAvailable, '.*AuthenticationFailed.*', cluster.connect) assert_quiescent_pool_state(self, cluster) @@ -135,7 +143,7 @@ def test_connect_wrong_username(self): def test_connect_empty_pwd(self): cluster = self.cluster_as('Cassandra', '') try: - self.assertRaisesRegexp(NoHostAvailable, + self.assertRaisesRegex(NoHostAvailable, '.*AuthenticationFailed.*', cluster.connect) assert_quiescent_pool_state(self, cluster) @@ -145,7 +153,7 @@ def test_connect_empty_pwd(self): def test_connect_no_auth_provider(self): cluster = TestCluster() try: - self.assertRaisesRegexp(NoHostAvailable, + self.assertRaisesRegex(NoHostAvailable, '.*AuthenticationFailed.*', cluster.connect) assert_quiescent_pool_state(self, cluster) diff --git a/tests/integration/standard/test_authentication_misconfiguration.py b/tests/integration/standard/test_authentication_misconfiguration.py index 546141d801..a2e2c019a5 100644 --- a/tests/integration/standard/test_authentication_misconfiguration.py +++ b/tests/integration/standard/test_authentication_misconfiguration.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/integration/standard/test_client_warnings.py b/tests/integration/standard/test_client_warnings.py index c5ce5dc726..d20251772a 100644 --- a/tests/integration/standard/test_client_warnings.py +++ b/tests/integration/standard/test_client_warnings.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,10 +15,7 @@ # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest from cassandra.query import BatchStatement @@ -73,7 +72,7 @@ def test_warning_basic(self): future = self.session.execute_async(self.warn_batch) future.result() self.assertEqual(len(future.warnings), 1) - self.assertRegexpMatches(future.warnings[0], 'Batch.*exceeding.*') + self.assertRegex(future.warnings[0], 'Batch.*exceeding.*') def test_warning_with_trace(self): """ @@ -89,7 +88,7 @@ def test_warning_with_trace(self): future = self.session.execute_async(self.warn_batch, trace=True) future.result() self.assertEqual(len(future.warnings), 1) - self.assertRegexpMatches(future.warnings[0], 'Batch.*exceeding.*') + self.assertRegex(future.warnings[0], 'Batch.*exceeding.*') self.assertIsNotNone(future.get_query_trace()) @local @@ -108,7 +107,7 @@ def test_warning_with_custom_payload(self): future = self.session.execute_async(self.warn_batch, custom_payload=payload) future.result() self.assertEqual(len(future.warnings), 1) - self.assertRegexpMatches(future.warnings[0], 'Batch.*exceeding.*') + self.assertRegex(future.warnings[0], 'Batch.*exceeding.*') self.assertDictEqual(future.custom_payload, payload) @local @@ -127,6 +126,6 @@ def test_warning_with_trace_and_custom_payload(self): future = self.session.execute_async(self.warn_batch, trace=True, custom_payload=payload) future.result() self.assertEqual(len(future.warnings), 1) - self.assertRegexpMatches(future.warnings[0], 'Batch.*exceeding.*') + self.assertRegex(future.warnings[0], 'Batch.*exceeding.*') self.assertIsNotNone(future.get_query_trace()) self.assertDictEqual(future.custom_payload, payload) diff --git a/tests/integration/standard/test_cluster.py b/tests/integration/standard/test_cluster.py index cdb6f1f3b7..c6fc2a717f 100644 --- a/tests/integration/standard/test_cluster.py +++ b/tests/integration/standard/test_cluster.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,14 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from collections import deque from copy import copy -from mock import Mock, call, patch +from unittest.mock import Mock, call, patch, ANY import time from uuid import uuid4 import logging @@ -42,7 +41,7 @@ from tests import notwindows from tests.integration import use_singledc, get_server_versions, CASSANDRA_VERSION, \ execute_until_pass, execute_with_long_wait_retry, get_node, MockLoggingHandler, get_unsupported_lower_protocol, \ - get_unsupported_upper_protocol, protocolv5, local, CASSANDRA_IP, greaterthanorequalcass30, lessthanorequalcass40, \ + get_unsupported_upper_protocol, protocolv6, local, CASSANDRA_IP, greaterthanorequalcass30, lessthanorequalcass40, \ DSE_VERSION, TestCluster, PROTOCOL_VERSION from tests.integration.util import assert_quiescent_pool_state import sys @@ -150,7 +149,7 @@ def test_raise_error_on_control_connection_timeout(self): get_node(1).pause() cluster = TestCluster(contact_points=['127.0.0.1'], connect_timeout=1) - with self.assertRaisesRegexp(NoHostAvailable, "OperationTimedOut\('errors=Timed out creating connection \(1 seconds\)"): + with self.assertRaisesRegex(NoHostAvailable, "OperationTimedOut\('errors=Timed out creating connection \(1 seconds\)"): cluster.connect() cluster.shutdown() @@ -261,6 +260,18 @@ def test_protocol_negotiation(self): elif DSE_VERSION and DSE_VERSION >= Version("5.1"): self.assertEqual(updated_protocol_version, cassandra.ProtocolVersion.DSE_V1) self.assertEqual(updated_cluster_version, cassandra.ProtocolVersion.DSE_V1) + elif CASSANDRA_VERSION >= Version('4.0-beta5'): + self.assertEqual(updated_protocol_version, cassandra.ProtocolVersion.V5) + self.assertEqual(updated_cluster_version, cassandra.ProtocolVersion.V5) + elif CASSANDRA_VERSION >= Version('4.0-a'): + self.assertEqual(updated_protocol_version, cassandra.ProtocolVersion.V4) + self.assertEqual(updated_cluster_version, cassandra.ProtocolVersion.V4) + elif CASSANDRA_VERSION >= Version('3.11'): + self.assertEqual(updated_protocol_version, cassandra.ProtocolVersion.V4) + self.assertEqual(updated_cluster_version, cassandra.ProtocolVersion.V4) + elif CASSANDRA_VERSION >= Version('3.0'): + self.assertEqual(updated_protocol_version, cassandra.ProtocolVersion.V4) + self.assertEqual(updated_cluster_version, cassandra.ProtocolVersion.V4) elif CASSANDRA_VERSION >= Version('2.2'): self.assertEqual(updated_protocol_version, 4) self.assertEqual(updated_cluster_version, 4) @@ -526,7 +537,7 @@ def patched_wait_for_responses(*args, **kwargs): # cluster agreement wait used for refresh original_meta = c.metadata.keyspaces start_time = time.time() - self.assertRaisesRegexp(Exception, r"Schema metadata was not refreshed.*", c.refresh_schema_metadata) + self.assertRaisesRegex(Exception, r"Schema metadata was not refreshed.*", c.refresh_schema_metadata) end_time = time.time() self.assertGreaterEqual(end_time - start_time, agreement_timeout) self.assertIs(original_meta, c.metadata.keyspaces) @@ -563,7 +574,7 @@ def patched_wait_for_responses(*args, **kwargs): # 
refresh wait overrides cluster value
         original_meta = c.metadata.keyspaces
         start_time = time.time()
-        self.assertRaisesRegexp(Exception, r"Schema metadata was not refreshed.*", c.refresh_schema_metadata,
+        self.assertRaisesRegex(Exception, r"Schema metadata was not refreshed.*", c.refresh_schema_metadata,
                                 max_schema_agreement_wait=agreement_timeout)
         end_time = time.time()
         self.assertGreaterEqual(end_time - start_time, agreement_timeout)
@@ -1469,46 +1480,57 @@ def test_prepare_on_ignored_hosts(self):

         # the length of mock_calls will vary, but all should use the unignored
         # address
         for c in cluster.connection_factory.mock_calls:
-            self.assertEqual(call(DefaultEndPoint(unignored_address)), c)
+            # PYTHON-1287
+            #
+            # Cluster._prepare_all_queries() will call connection_factory _without_ the
+            # on_orphaned_stream_released arg introduced in commit
+            # 387150acc365b6cf1daaee58c62db13e4929099a. The reconnect handler for the
+            # downed node _will_ add this arg when it tries to rebuild its conn pool, and
+            # whether this occurs while running this test amounts to a race condition. So
+            # to cover this case we assert one of two call styles here... the key is that
+            # the _only_ address we should see is the unignored_address.
+            self.assertTrue( \
+                c == call(DefaultEndPoint(unignored_address)) or \
+                c == call(DefaultEndPoint(unignored_address), on_orphaned_stream_released=ANY))

         cluster.shutdown()

-@protocolv5
+@protocolv6
 class BetaProtocolTest(unittest.TestCase):

-    @protocolv5
+    @protocolv6
     def test_invalid_protocol_version_beta_option(self):
         """
-        Test cluster connection with protocol v5 and beta flag not set
+        Test cluster connection with protocol v6 and beta flag not set

         @since 3.7.0
-        @jira_ticket PYTHON-614
-        @expected_result client shouldn't connect with V5 and no beta flag set
+        @jira_ticket PYTHON-614, PYTHON-1232
+        @expected_result client shouldn't connect with V6 and no beta flag set

         @test_category connection
         """

-        cluster = TestCluster(protocol_version=cassandra.ProtocolVersion.V5, allow_beta_protocol_version=False)
+        cluster = TestCluster(protocol_version=cassandra.ProtocolVersion.V6, allow_beta_protocol_version=False)
         try:
             with self.assertRaises(NoHostAvailable):
                 cluster.connect()
         except Exception as e:
             self.fail("Unexpected error encountered {0}".format(e.message))

-    @protocolv5
+    @protocolv6
     def test_valid_protocol_version_beta_options_connect(self):
         """
-        Test cluster connection with protocol version 5 and beta flag set
+        Test cluster connection with protocol v6 and beta flag set

         @since 3.7.0
-        @jira_ticket PYTHON-614
-        @expected_result client should connect with protocol v6 and beta flag set.
+        @jira_ticket PYTHON-614, PYTHON-1232
+        @expected_result client should connect with protocol v6 and beta flag set.

         @test_category connection
         """
-        cluster = Cluster(protocol_version=cassandra.ProtocolVersion.V5, allow_beta_protocol_version=True)
+        cluster = Cluster(protocol_version=cassandra.ProtocolVersion.V6, allow_beta_protocol_version=True)
         session = cluster.connect()
-        self.assertEqual(cluster.protocol_version, cassandra.ProtocolVersion.V5)
+        self.assertEqual(cluster.protocol_version, cassandra.ProtocolVersion.V6)
         self.assertTrue(session.execute("select release_version from system.local")[0])
         cluster.shutdown()
diff --git a/tests/integration/standard/test_concurrent.py b/tests/integration/standard/test_concurrent.py
index 8bd65c7f6f..c935763bcb 100644
--- a/tests/integration/standard/test_concurrent.py
+++ b/tests/integration/standard/test_concurrent.py
@@ -1,10 +1,12 @@
-# Copyright DataStax, Inc.
+# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -20,16 +22,11 @@ from cassandra.cluster import ExecutionProfile, EXEC_PROFILE_DEFAULT from cassandra.concurrent import execute_concurrent, execute_concurrent_with_args, ExecutionResult from cassandra.policies import HostDistance -from cassandra.query import tuple_factory, SimpleStatement +from cassandra.query import dict_factory, tuple_factory, SimpleStatement from tests.integration import use_singledc, PROTOCOL_VERSION, TestCluster -from six import next - -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest log = logging.getLogger(__name__) @@ -38,13 +35,16 @@ def setup_module(): use_singledc() +EXEC_PROFILE_DICT = "dict" + class ClusterTests(unittest.TestCase): @classmethod def setUpClass(cls): cls.cluster = TestCluster( execution_profiles = { - EXEC_PROFILE_DEFAULT: ExecutionProfile(row_factory=tuple_factory) + EXEC_PROFILE_DEFAULT: ExecutionProfile(row_factory=tuple_factory), + EXEC_PROFILE_DICT: ExecutionProfile(row_factory=dict_factory) } ) if PROTOCOL_VERSION < 3: @@ -55,11 +55,11 @@ def setUpClass(cls): def tearDownClass(cls): cls.cluster.shutdown() - def execute_concurrent_helper(self, session, query, results_generator=False): + def execute_concurrent_helper(self, session, query, **kwargs): count = 0 while count < 100: try: - return execute_concurrent(session, query, results_generator=False) + return execute_concurrent(session, query, results_generator=False, **kwargs) except (ReadTimeout, WriteTimeout, OperationTimedOut, ReadFailure, WriteFailure): ex_type, ex, tb = sys.exc_info() log.warning("{0}: {1} Backtrace: {2}".format(ex_type.__name__, ex, traceback.extract_tb(tb))) @@ -68,11 +68,11 @@ def execute_concurrent_helper(self, session, query, results_generator=False): raise RuntimeError("Failed to execute query after 100 attempts: {0}".format(query)) - def execute_concurrent_args_helper(self, session, query, params, results_generator=False): + def execute_concurrent_args_helper(self, session, query, params, results_generator=False, **kwargs): count = 0 while count < 100: try: - return execute_concurrent_with_args(session, query, params, results_generator=results_generator) + return execute_concurrent_with_args(session, query, params, results_generator=results_generator, **kwargs) except (ReadTimeout, WriteTimeout, OperationTimedOut, ReadFailure, WriteFailure): ex_type, ex, tb = sys.exc_info() log.warning("{0}: {1} Backtrace: {2}".format(ex_type.__name__, ex, traceback.extract_tb(tb))) @@ -80,7 +80,7 @@ def execute_concurrent_args_helper(self, session, query, params, results_generat raise RuntimeError("Failed to execute query after 100 attempts: {0}".format(query)) - 
def test_execute_concurrent(self): + def execute_concurrent_base(self, test_fn, validate_fn, zip_args=True): for num_statements in (0, 1, 2, 7, 10, 99, 100, 101, 199, 200, 201): # write statement = SimpleStatement( @@ -89,7 +89,9 @@ def test_execute_concurrent(self): statements = cycle((statement, )) parameters = [(i, i) for i in range(num_statements)] - results = self.execute_concurrent_helper(self.session, list(zip(statements, parameters))) + results = \ + test_fn(self.session, list(zip(statements, parameters))) if zip_args else \ + test_fn(self.session, statement, parameters) self.assertEqual(num_statements, len(results)) for success, result in results: self.assertTrue(success) @@ -102,38 +104,43 @@ def test_execute_concurrent(self): statements = cycle((statement, )) parameters = [(i, ) for i in range(num_statements)] - results = self.execute_concurrent_helper(self.session, list(zip(statements, parameters))) + results = \ + test_fn(self.session, list(zip(statements, parameters))) if zip_args else \ + test_fn(self.session, statement, parameters) + validate_fn(num_statements, results) + + def execute_concurrent_validate_tuple(self, num_statements, results): self.assertEqual(num_statements, len(results)) self.assertEqual([(True, [(i,)]) for i in range(num_statements)], results) - def test_execute_concurrent_with_args(self): - for num_statements in (0, 1, 2, 7, 10, 99, 100, 101, 199, 200, 201): - statement = SimpleStatement( - "INSERT INTO test3rf.test (k, v) VALUES (%s, %s)", - consistency_level=ConsistencyLevel.QUORUM) - parameters = [(i, i) for i in range(num_statements)] - - results = self.execute_concurrent_args_helper(self.session, statement, parameters) + def execute_concurrent_validate_dict(self, num_statements, results): self.assertEqual(num_statements, len(results)) - for success, result in results: - self.assertTrue(success) - self.assertFalse(result) + self.assertEqual([(True, [{"v":i}]) for i in range(num_statements)], results) - # read - statement = SimpleStatement( - "SELECT v FROM test3rf.test WHERE k=%s", - consistency_level=ConsistencyLevel.QUORUM) - parameters = [(i, ) for i in range(num_statements)] + def test_execute_concurrent(self): + self.execute_concurrent_base(self.execute_concurrent_helper, \ + self.execute_concurrent_validate_tuple) - results = self.execute_concurrent_args_helper(self.session, statement, parameters) - self.assertEqual(num_statements, len(results)) - self.assertEqual([(True, [(i,)]) for i in range(num_statements)], results) + def test_execute_concurrent_with_args(self): + self.execute_concurrent_base(self.execute_concurrent_args_helper, \ + self.execute_concurrent_validate_tuple, \ + zip_args=False) + + def test_execute_concurrent_with_execution_profile(self): + def run_fn(*args, **kwargs): + return self.execute_concurrent_helper(*args, execution_profile=EXEC_PROFILE_DICT, **kwargs) + self.execute_concurrent_base(run_fn, self.execute_concurrent_validate_dict) + + def test_execute_concurrent_with_args_and_execution_profile(self): + def run_fn(*args, **kwargs): + return self.execute_concurrent_args_helper(*args, execution_profile=EXEC_PROFILE_DICT, **kwargs) + self.execute_concurrent_base(run_fn, self.execute_concurrent_validate_dict, zip_args=False) def test_execute_concurrent_with_args_generator(self): """ Test to validate that generator based results are surfaced correctly - Repeatedly inserts data into a a table and attempts to query it. It then validates that the + Repeatedly inserts data into a table and attempts to query it. 
It then validates that the results are returned in the order expected @since 2.7.0 @@ -207,7 +214,7 @@ def test_execute_concurrent_paged_result_generator(self): """ Test to validate that generator based results are surfaced correctly when paging is used - Inserts data into a a table and attempts to query it. It then validates that the + Inserts data into a table and attempts to query it. It then validates that the results are returned as expected (no order specified) @since 2.7.0 diff --git a/tests/integration/standard/test_connection.py b/tests/integration/standard/test_connection.py index aaa5a27dfd..e7177d8770 100644 --- a/tests/integration/standard/test_connection.py +++ b/tests/integration/standard/test_connection.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,27 +14,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from functools import partial -from mock import patch +from unittest.mock import patch import logging -from six.moves import range import sys import threading from threading import Thread, Event import time from unittest import SkipTest -from cassandra import ConsistencyLevel, OperationTimedOut +from cassandra import ConsistencyLevel, OperationTimedOut, DependencyException from cassandra.cluster import NoHostAvailable, ConnectionShutdown, ExecutionProfile, EXEC_PROFILE_DEFAULT -import cassandra.io.asyncorereactor -from cassandra.io.asyncorereactor import AsyncoreConnection from cassandra.protocol import QueryMessage -from cassandra.connection import Connection from cassandra.policies import HostFilterPolicy, RoundRobinPolicy, HostStateListener from cassandra.pool import HostConnectionPool @@ -40,10 +35,16 @@ from tests.integration import use_singledc, get_node, CASSANDRA_IP, local, \ requiresmallclockgranularity, greaterthancass20, TestCluster +try: + import cassandra.io.asyncorereactor + from cassandra.io.asyncorereactor import AsyncoreConnection +except DependencyException: + AsyncoreConnection = None + try: from cassandra.io.libevreactor import LibevConnection import cassandra.io.libevreactor -except ImportError: +except DependencyException: LibevConnection = None @@ -444,6 +445,8 @@ class AsyncoreConnectionTests(ConnectionTests, unittest.TestCase): def setUp(self): if is_monkey_patched(): raise unittest.SkipTest("Can't test asyncore with monkey patching") + if AsyncoreConnection is None: + raise unittest.SkipTest('Unable to import asyncore module') ConnectionTests.setUp(self) def clean_global_loop(self): diff --git a/tests/integration/standard/test_control_connection.py b/tests/integration/standard/test_control_connection.py index db7cff8506..9d579476d2 100644 --- a/tests/integration/standard/test_control_connection.py +++ b/tests/integration/standard/test_control_connection.py @@ -16,10 +16,7 @@ # from cassandra import InvalidRequest -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.protocol import ConfigurationException diff --git a/tests/integration/standard/test_custom_cluster.py b/tests/integration/standard/test_custom_cluster.py index 84e0737086..bb3f716984 100644 --- a/tests/integration/standard/test_custom_cluster.py +++ b/tests/integration/standard/test_custom_cluster.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -16,10 +18,7 @@ from tests.integration import use_singledc, get_cluster, remove_cluster, local, TestCluster from tests.util import wait_until, wait_until_not_raised -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest def setup_module(): diff --git a/tests/integration/standard/test_custom_payload.py b/tests/integration/standard/test_custom_payload.py index 9906a8243e..374bee9046 100644 --- a/tests/integration/standard/test_custom_payload.py +++ b/tests/integration/standard/test_custom_payload.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,12 +15,7 @@ # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest - -import six +import unittest from cassandra.query import (SimpleStatement, BatchStatement, BatchType) @@ -30,6 +27,8 @@ def setup_module(): #These test rely on the custom payload being returned but by default C* #ignores all the payloads. + + @local class CustomPayloadTests(unittest.TestCase): @@ -141,16 +140,16 @@ def validate_various_custom_payloads(self, statement): # Long key value pair key_value = "x" * 10 - custom_payload = {key_value: six.b(key_value)} + custom_payload = {key_value: key_value.encode()} self.execute_async_validate_custom_payload(statement=statement, custom_payload=custom_payload) # Max supported value key pairs according C* binary protocol v4 should be 65534 (unsigned short max value) for i in range(65534): - custom_payload[str(i)] = six.b('x') + custom_payload[str(i)] = b'x' self.execute_async_validate_custom_payload(statement=statement, custom_payload=custom_payload) # Add one custom payload to this is too many key value pairs and should fail - custom_payload[str(65535)] = six.b('x') + custom_payload[str(65535)] = b'x' with self.assertRaises(ValueError): self.execute_async_validate_custom_payload(statement=statement, custom_payload=custom_payload) diff --git a/tests/integration/standard/test_custom_protocol_handler.py b/tests/integration/standard/test_custom_protocol_handler.py index d5108ed47b..5c75684787 100644 --- a/tests/integration/standard/test_custom_protocol_handler.py +++ b/tests/integration/standard/test_custom_protocol_handler.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. 
+# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.protocol import ProtocolHandler, ResultMessage, QueryMessage, UUIDType, read_int from cassandra.query import tuple_factory, SimpleStatement @@ -25,13 +24,12 @@ from tests.integration import use_singledc, drop_keyspace_shutdown_cluster, \ greaterthanorequalcass30, execute_with_long_wait_retry, greaterthanorequaldse51, greaterthanorequalcass3_10, \ - greaterthanorequalcass31, TestCluster + TestCluster, greaterthanorequalcass40, requirecassandra from tests.integration.datatype_utils import update_datatypes, PRIMITIVE_DATATYPES from tests.integration.standard.utils import create_table_with_all_types, get_all_primitive_params -from six import binary_type import uuid -import mock +from unittest import mock def setup_module(): @@ -77,11 +75,11 @@ def test_custom_raw_uuid_row_results(self): uuid_type = result[0][0] self.assertEqual(type(uuid_type), uuid.UUID) - # use our custom protocol handlder + # use our custom protocol handler session.client_protocol_handler = CustomTestRawRowType result_set = session.execute("SELECT schema_version FROM system.local") raw_value = result_set[0][0] - self.assertTrue(isinstance(raw_value, binary_type)) + self.assertTrue(isinstance(raw_value, bytes)) self.assertEqual(len(raw_value), 16) # Ensure that we get normal uuid back when we re-connect @@ -124,7 +122,8 @@ def test_custom_raw_row_results_all_types(self): self.assertEqual(len(CustomResultMessageTracked.checked_rev_row_set), len(PRIMITIVE_DATATYPES)-1) cluster.shutdown() - @greaterthanorequalcass31 + @requirecassandra + @greaterthanorequalcass40 def test_protocol_divergence_v5_fail_by_continuous_paging(self): """ Test to validate that V5 and DSE_V1 diverge. 
ContinuousPagingOptions is not supported by V5 @@ -170,7 +169,8 @@ def test_protocol_divergence_v4_fail_by_flag_uses_int(self): self._protocol_divergence_fail_by_flag_uses_int(ProtocolVersion.V4, uses_int_query_flag=False, int_flag=True) - @greaterthanorequalcass3_10 + @requirecassandra + @greaterthanorequalcass40 def test_protocol_v5_uses_flag_int(self): """ Test to validate that the _PAGE_SIZE_FLAG is treated correctly using write_uint for V5 @@ -196,7 +196,8 @@ def test_protocol_dsev1_uses_flag_int(self): self._protocol_divergence_fail_by_flag_uses_int(ProtocolVersion.DSE_V1, uses_int_query_flag=True, int_flag=True) - @greaterthanorequalcass3_10 + @requirecassandra + @greaterthanorequalcass40 def test_protocol_divergence_v5_fail_by_flag_uses_int(self): """ Test to validate that the _PAGE_SIZE_FLAG is treated correctly using write_uint for V5 @@ -261,7 +262,7 @@ class CustomResultMessageRaw(ResultMessage): my_type_codes[0xc] = UUIDType type_codes = my_type_codes - def recv_results_rows(self, f, protocol_version, user_type_map, result_metadata): + def recv_results_rows(self, f, protocol_version, user_type_map, result_metadata, column_encryption_policy): self.recv_results_metadata(f, user_type_map) column_metadata = self.column_metadata or result_metadata rowcount = read_int(f) @@ -272,7 +273,7 @@ def recv_results_rows(self, f, protocol_version, user_type_map, result_metadata) class CustomTestRawRowType(ProtocolHandler): """ - This is the a custom protocol handler that will substitute the the + This is a custom protocol handler that will substitute the customResultMesageRowRaw Result message for our own implementation """ my_opcodes = ProtocolHandler.message_types_by_opcode.copy() @@ -282,7 +283,7 @@ class CustomTestRawRowType(ProtocolHandler): class CustomResultMessageTracked(ResultMessage): """ - This is a custom Result Message that is use to track what primitive types + This is a custom Result Message that is used to track what primitive types have been processed when it receives results """ my_type_codes = ResultMessage.type_codes.copy() @@ -290,7 +291,7 @@ class CustomResultMessageTracked(ResultMessage): type_codes = my_type_codes checked_rev_row_set = set() - def recv_results_rows(self, f, protocol_version, user_type_map, result_metadata): + def recv_results_rows(self, f, protocol_version, user_type_map, result_metadata, column_encryption_policy): self.recv_results_metadata(f, user_type_map) column_metadata = self.column_metadata or result_metadata rowcount = read_int(f) @@ -306,7 +307,7 @@ def recv_results_rows(self, f, protocol_version, user_type_map, result_metadata) class CustomProtocolHandlerResultMessageTracked(ProtocolHandler): """ - This is the a custom protocol handler that will substitute the the + This is a custom protocol handler that will substitute the CustomTestRawRowTypeTracked Result message for our own implementation """ my_opcodes = ProtocolHandler.message_types_by_opcode.copy() diff --git a/tests/integration/standard/test_cython_protocol_handlers.py b/tests/integration/standard/test_cython_protocol_handlers.py index 4e45553be2..83d39407c4 100644 --- a/tests/integration/standard/test_cython_protocol_handlers.py +++ b/tests/integration/standard/test_cython_protocol_handlers.py @@ -2,10 +2,7 @@ # Based on test_custom_protocol_handler.py -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest from itertools import count @@ -237,7 +234,7 @@ def test_null_types(self): begin_unset = max(s.execute('select primkey from %s' % 
(table,))[0]['primkey']) + 1 keys_null = range(begin_unset, begin_unset + 10) - # scatter some emptry rows in here + # scatter some empty rows in here insert = "insert into %s (primkey) values (%%s)" % (table,) execute_concurrent_with_args(s, insert, ((k,) for k in keys_null)) diff --git a/tests/integration/standard/test_dse.py b/tests/integration/standard/test_dse.py index 1b9b5bef84..0a339b6b3d 100644 --- a/tests/integration/standard/test_dse.py +++ b/tests/integration/standard/test_dse.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -21,10 +23,7 @@ from tests.integration import (execute_until_pass, execute_with_long_wait_retry, use_cluster, TestCluster) -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest CCM_IS_DSE = (os.environ.get('CCM_IS_DSE', None) == 'true') diff --git a/tests/integration/standard/test_metadata.py b/tests/integration/standard/test_metadata.py index bd556f357d..8f7ba04883 100644 --- a/tests/integration/standard/test_metadata.py +++ b/tests/integration/standard/test_metadata.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,20 +14,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from collections import defaultdict import difflib import logging -import six import sys import time import os from packaging.version import Version -from mock import Mock, patch +from unittest.mock import Mock, patch from cassandra import AlreadyExists, SignatureDescriptor, UserFunctionDescriptor, UserAggregateDescriptor @@ -42,7 +40,7 @@ get_supported_protocol_versions, greaterthancass20, greaterthancass21, assert_startswith, greaterthanorequalcass40, greaterthanorequaldse67, lessthancass40, - TestCluster, DSE_VERSION) + TestCluster, DSE_VERSION, HCD_VERSION) log = logging.getLogger(__name__) @@ -673,11 +671,11 @@ def test_refresh_table_metadata(self): """ test for synchronously refreshing table metadata - test_refresh_table_metatadata tests that table metadata is refreshed when calling test_refresh_table_metatadata(). + test_refresh_table_metadata tests that table metadata is refreshed when calling test_refresh_table_metadata(). It creates a second cluster object with schema_event_refresh_window=-1 such that schema refreshes are disabled for schema change push events. It then alters the table, adding a new column, using the first cluster object, and verifies that the table metadata has not changed in the second cluster object. Finally, it calls - test_refresh_table_metatadata() and verifies that the table metadata is updated in the second cluster object. + test_refresh_table_metadata() and verifies that the table metadata is updated in the second cluster object. @since 2.6.0 @jira_ticket PYTHON-291 @@ -707,10 +705,10 @@ def test_refresh_metadata_for_mv(self): test for synchronously refreshing materialized view metadata test_refresh_table_metadata_for_materialized_views tests that materialized view metadata is refreshed when calling - test_refresh_table_metatadata() with the materialized view name as the table. It creates a second cluster object + test_refresh_table_metadata() with the materialized view name as the table. It creates a second cluster object with schema_event_refresh_window=-1 such that schema refreshes are disabled for schema change push events. It then creates a new materialized view , using the first cluster object, and verifies that the materialized view - metadata has not changed in the second cluster object. Finally, it calls test_refresh_table_metatadata() with the + metadata has not changed in the second cluster object. Finally, it calls test_refresh_table_metadata() with the materialized view name as the table name, and verifies that the materialized view metadata is updated in the second cluster object. @@ -993,7 +991,7 @@ class Ext1(Ext0): update_v = s.prepare('UPDATE system_schema.views SET extensions=? WHERE keyspace_name=? 
AND view_name=?') # extensions registered, one present # -------------------------------------- - ext_map = {Ext0.name: six.b("THA VALUE")} + ext_map = {Ext0.name: b"THA VALUE"} [(s.execute(update_t, (ext_map, ks, t)), s.execute(update_v, (ext_map, ks, v))) for _ in self.cluster.metadata.all_hosts()] # we're manipulating metadata - do it on all hosts self.cluster.refresh_table_metadata(ks, t) @@ -1015,8 +1013,8 @@ class Ext1(Ext0): # extensions registered, one present # -------------------------------------- - ext_map = {Ext0.name: six.b("THA VALUE"), - Ext1.name: six.b("OTHA VALUE")} + ext_map = {Ext0.name: b"THA VALUE", + Ext1.name: b"OTHA VALUE"} [(s.execute(update_t, (ext_map, ks, t)), s.execute(update_v, (ext_map, ks, v))) for _ in self.cluster.metadata.all_hosts()] # we're manipulating metadata - do it on all hosts self.cluster.refresh_table_metadata(ks, t) @@ -1049,7 +1047,7 @@ def test_export_schema(self): cluster = TestCluster() cluster.connect() - self.assertIsInstance(cluster.metadata.export_schema_as_string(), six.string_types) + self.assertIsInstance(cluster.metadata.export_schema_as_string(), str) cluster.shutdown() def test_export_keyspace_schema(self): @@ -1062,8 +1060,8 @@ def test_export_keyspace_schema(self): for keyspace in cluster.metadata.keyspaces: keyspace_metadata = cluster.metadata.keyspaces[keyspace] - self.assertIsInstance(keyspace_metadata.export_as_string(), six.string_types) - self.assertIsInstance(keyspace_metadata.as_cql_query(), six.string_types) + self.assertIsInstance(keyspace_metadata.export_as_string(), str) + self.assertIsInstance(keyspace_metadata.as_cql_query(), str) cluster.shutdown() def assert_equal_diff(self, received, expected): @@ -1241,8 +1239,8 @@ def test_replicas(self): cluster.connect('test3rf') - self.assertNotEqual(list(cluster.metadata.get_replicas('test3rf', six.b('key'))), []) - host = list(cluster.metadata.get_replicas('test3rf', six.b('key')))[0] + self.assertNotEqual(list(cluster.metadata.get_replicas('test3rf', b'key')), []) + host = list(cluster.metadata.get_replicas('test3rf', b'key'))[0] self.assertEqual(host.datacenter, 'dc1') self.assertEqual(host.rack, 'r1') cluster.shutdown() @@ -1593,7 +1591,7 @@ def test_function_no_parameters(self): with self.VerifiedFunction(self, **kwargs) as vf: fn_meta = self.keyspace_function_meta[vf.signature] - self.assertRegexpMatches(fn_meta.as_cql_query(), "CREATE FUNCTION.*%s\(\) .*" % kwargs['name']) + self.assertRegex(fn_meta.as_cql_query(), "CREATE FUNCTION.*%s\(\) .*" % kwargs['name']) def test_functions_follow_keyspace_alter(self): """ @@ -1641,12 +1639,12 @@ def test_function_cql_called_on_null(self): kwargs['called_on_null_input'] = True with self.VerifiedFunction(self, **kwargs) as vf: fn_meta = self.keyspace_function_meta[vf.signature] - self.assertRegexpMatches(fn_meta.as_cql_query(), "CREATE FUNCTION.*\) CALLED ON NULL INPUT RETURNS .*") + self.assertRegex(fn_meta.as_cql_query(), "CREATE FUNCTION.*\) CALLED ON NULL INPUT RETURNS .*") kwargs['called_on_null_input'] = False with self.VerifiedFunction(self, **kwargs) as vf: fn_meta = self.keyspace_function_meta[vf.signature] - self.assertRegexpMatches(fn_meta.as_cql_query(), "CREATE FUNCTION.*\) RETURNS NULL ON NULL INPUT RETURNS .*") + self.assertRegex(fn_meta.as_cql_query(), "CREATE FUNCTION.*\) RETURNS NULL ON NULL INPUT RETURNS .*") class AggregateMetadata(FunctionTest): @@ -1755,9 +1753,9 @@ def test_init_cond(self): cql_init = encoder.cql_encode_all_types(init_cond) with self.VerifiedAggregate(self, 
**self.make_aggregate_kwargs('update_map', 'map', init_cond=cql_init)) as va: map_res = s.execute("SELECT %s(v) AS map_res FROM t" % va.function_kwargs['name'])[0].map_res - self.assertDictContainsSubset(expected_map_values, map_res) + self.assertLessEqual(expected_map_values.items(), map_res.items()) init_not_updated = dict((k, init_cond[k]) for k in set(init_cond) - expected_key_set) - self.assertDictContainsSubset(init_not_updated, map_res) + self.assertLessEqual(init_not_updated.items(), map_res.items()) c.shutdown() def test_aggregates_after_functions(self): @@ -2082,10 +2080,15 @@ def test_materialized_view_metadata_alter(self): @test_category metadata """ - self.assertIn("SizeTieredCompactionStrategy", self.cluster.metadata.keyspaces[self.keyspace_name].tables[self.function_table_name].views["mv1"].options["compaction"]["class"]) + compaction = self.cluster.metadata.keyspaces[self.keyspace_name].tables[self.function_table_name].views["mv1"].options["compaction"]["class"] + if HCD_VERSION: + self.assertIn("UnifiedCompactionStrategy", compaction) + else: + self.assertIn("SizeTieredCompactionStrategy", compaction) self.session.execute("ALTER MATERIALIZED VIEW {0}.mv1 WITH compaction = {{ 'class' : 'LeveledCompactionStrategy' }}".format(self.keyspace_name)) - self.assertIn("LeveledCompactionStrategy", self.cluster.metadata.keyspaces[self.keyspace_name].tables[self.function_table_name].views["mv1"].options["compaction"]["class"]) + compaction = self.cluster.metadata.keyspaces[self.keyspace_name].tables[self.function_table_name].views["mv1"].options["compaction"]["class"] + self.assertIn("LeveledCompactionStrategy", compaction) def test_materialized_view_metadata_drop(self): """ diff --git a/tests/integration/standard/test_metrics.py b/tests/integration/standard/test_metrics.py index 676a5340ef..c33ea26573 100644 --- a/tests/integration/standard/test_metrics.py +++ b/tests/integration/standard/test_metrics.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -17,10 +19,7 @@ from cassandra.connection import ConnectionShutdown from cassandra.policies import HostFilterPolicy, RoundRobinPolicy, FallthroughRetryPolicy -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.query import SimpleStatement from cassandra import ConsistencyLevel, WriteTimeout, Unavailable, ReadTimeout @@ -279,7 +278,7 @@ def test_duplicate_metrics_per_cluster(self): session2 = cluster2.connect(self.ks_name, wait_for_all_pools=True) session3 = cluster3.connect(self.ks_name, wait_for_all_pools=True) - # Basic validation that naming metrics doesn't impact their segration or accuracy + # Basic validation that naming metrics doesn't impact their segregation or accuracy for i in range(10): query = SimpleStatement("SELECT * FROM {0}.{0}".format(self.ks_name), consistency_level=ConsistencyLevel.ALL) session2.execute(query) @@ -373,7 +372,7 @@ def test_metrics_per_cluster(self): check to ensure that on_success and on_error methods are invoked appropriately. @since 3.7.0 @jira_ticket PYTHON-284 - @expected_result in_error, and on_success should be invoked apropriately + @expected_result in_error, and on_success should be invoked appropriately @test_category metrics """ @@ -384,7 +383,7 @@ def test_metrics_per_cluster(self): for _ in range(3): try: - self.session.execute("nonesense") + self.session.execute("nonsense") except SyntaxException: continue @@ -401,6 +400,6 @@ def test_metrics_per_cluster(self): RequestAnalyzer(self.session, throw_on_success=True) try: - self.session.execute("nonesense") + self.session.execute("nonsense") except SyntaxException: pass diff --git a/tests/integration/standard/test_policies.py b/tests/integration/standard/test_policies.py index 24facf42a0..bb69243212 100644 --- a/tests/integration/standard/test_policies.py +++ b/tests/integration/standard/test_policies.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
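# Editor's note -- a minimal sketch (not part of this diff) of the listener
# pattern the RequestAnalyzer used in test_metrics.py above is built on. It
# uses the driver's public Session.add_request_init_listener() hook; the class
# name and counters are illustrative only.
class CountingAnalyzer:
    def __init__(self, session):
        self.successes = 0
        self.errors = 0
        # invoked with every ResponseFuture before its request is sent
        session.add_request_init_listener(self._on_request)

    def _on_request(self, response_future):
        # callback fires on a successful response, errback on failure
        response_future.add_callbacks(self._on_success, self._on_error)

    def _on_success(self, _rows):
        self.successes += 1

    def _on_error(self, _exc):
        self.errors += 1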
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.cluster import ExecutionProfile, EXEC_PROFILE_DEFAULT from cassandra.policies import HostFilterPolicy, RoundRobinPolicy, SimpleConvictionPolicy, \ @@ -27,11 +26,9 @@ from concurrent.futures import wait as wait_futures - def setup_module(): use_singledc() - class HostFilterPolicyTests(unittest.TestCase): def test_predicate_changes(self): diff --git a/tests/integration/standard/test_prepared_statements.py b/tests/integration/standard/test_prepared_statements.py index 5c79f27346..429aa0efc7 100644 --- a/tests/integration/standard/test_prepared_statements.py +++ b/tests/integration/standard/test_prepared_statements.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,12 +15,12 @@ # limitations under the License. -from tests.integration import use_singledc, PROTOCOL_VERSION, TestCluster +from tests.integration import use_singledc, PROTOCOL_VERSION, TestCluster, CASSANDRA_VERSION + +import unittest + +from packaging.version import Version -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa from cassandra import InvalidRequest, DriverException from cassandra import ConsistencyLevel, ProtocolVersion @@ -241,7 +243,7 @@ def test_unset_values(self): @since 2.6.0 @jira_ticket PYTHON-317 - @expected_result UNSET_VALUE is implicitly added to bind parameters, and properly encoded, leving unset values unaffected. + @expected_result UNSET_VALUE is implicitly added to bind parameters, and properly encoded, leaving unset values unaffected. 
@test_category prepared_statements:binding """ @@ -395,6 +397,9 @@ def test_raise_error_on_prepared_statement_execution_dropped_table(self): with self.assertRaises(InvalidRequest): self.session.execute(prepared, [0]) + @unittest.skipIf((CASSANDRA_VERSION >= Version('3.11.12') and CASSANDRA_VERSION < Version('4.0')) or \ + CASSANDRA_VERSION >= Version('4.0.2'), + "Fixed server-side in Cassandra 3.11.12, 4.0.2") def test_fail_if_different_query_id_on_reprepare(self): """ PYTHON-1124 and CASSANDRA-15252 """ keyspace = "test_fail_if_different_query_id_on_reprepare" @@ -461,7 +466,7 @@ def test_prepared_id_is_update(self): @since 3.12 @jira_ticket PYTHON-808 - The query id from the prepared statment must have changed + The query id from the prepared statement must have changed """ prepared_statement = self.session.prepare("SELECT * from {} WHERE a = ?".format(self.table_name)) id_before = prepared_statement.result_metadata_id @@ -549,7 +554,7 @@ def test_not_reprepare_invalid_statements(self): def test_id_is_not_updated_conditional_v4(self): """ Test that verifies that the result_metadata and the - result_metadata_id are udpated correctly in conditional statements + result_metadata_id are updated correctly in conditional statements in protocol V4 @since 3.13 @@ -564,7 +569,7 @@ def test_id_is_not_updated_conditional_v4(self): def test_id_is_not_updated_conditional_v5(self): """ Test that verifies that the result_metadata and the - result_metadata_id are udpated correctly in conditional statements + result_metadata_id are updated correctly in conditional statements in protocol V5 @since 3.13 @jira_ticket PYTHON-847 @@ -578,7 +583,7 @@ def test_id_is_not_updated_conditional_v5(self): def test_id_is_not_updated_conditional_dsev1(self): """ Test that verifies that the result_metadata and the - result_metadata_id are udpated correctly in conditional statements + result_metadata_id are updated correctly in conditional statements in protocol DSE V1 @since 3.13 @@ -593,7 +598,7 @@ def test_id_is_not_updated_conditional_dsev1(self): def test_id_is_not_updated_conditional_dsev2(self): """ Test that verifies that the result_metadata and the - result_metadata_id are udpated correctly in conditional statements + result_metadata_id are updated correctly in conditional statements in protocol DSE V2 @since 3.13 diff --git a/tests/integration/standard/test_query.py b/tests/integration/standard/test_query.py index d6401a987e..3ede0ac326 100644 --- a/tests/integration/standard/test_query.py +++ b/tests/integration/standard/test_query.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +17,7 @@ from cassandra.concurrent import execute_concurrent from cassandra import DriverException -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import logging from cassandra import ProtocolVersion from cassandra import ConsistencyLevel, Unavailable, InvalidRequest, cluster @@ -28,7 +27,7 @@ from cassandra.policies import HostDistance, RoundRobinPolicy, WhiteListRoundRobinPolicy from tests.integration import use_singledc, PROTOCOL_VERSION, BasicSharedKeyspaceUnitTestCase, \ greaterthanprotocolv3, MockLoggingHandler, get_supported_protocol_versions, local, get_cluster, setup_keyspace, \ - USE_CASS_EXTERNAL, greaterthanorequalcass40, DSE_VERSION, TestCluster + USE_CASS_EXTERNAL, greaterthanorequalcass40, DSE_VERSION, TestCluster, requirecassandra from tests import notwindows from tests.integration import greaterthanorequalcass30, get_node @@ -36,8 +35,7 @@ import random import re -import mock -import six +from unittest import mock log = logging.getLogger(__name__) @@ -463,10 +461,10 @@ def make_query_plan(self, working_keyspace=None, query=None): try: host = [live_hosts[self.host_index_to_use]] except IndexError as e: - six.raise_from(IndexError( + raise IndexError( 'You specified an index larger than the number of hosts. Total hosts: {}. Index specified: {}'.format( len(live_hosts), self.host_index_to_use - )), e) + )) from e return host @@ -946,7 +944,7 @@ def test_no_connection_refused_on_timeout(self): exception_type = type(result).__name__ if exception_type == "NoHostAvailable": self.fail("PYTHON-91: Disconnected from Cassandra: %s" % result.message) - if exception_type in ["WriteTimeout", "WriteFailure", "ReadTimeout", "ReadFailure", "ErrorMessageSub"]: + if exception_type in ["WriteTimeout", "WriteFailure", "ReadTimeout", "ReadFailure", "ErrorMessageSub", "ErrorMessage"]: if type(result).__name__ in ["WriteTimeout", "WriteFailure"]: received_timeout = True continue @@ -1408,6 +1406,8 @@ def test_setting_keyspace(self): """ self._check_set_keyspace_in_statement(self.session) + @requirecassandra + @greaterthanorequalcass40 def test_setting_keyspace_and_session(self): """ Test we can still send the keyspace independently even the session diff --git a/tests/integration/standard/test_query_paging.py b/tests/integration/standard/test_query_paging.py index dac4ec5ce3..465ef8b601 100644 --- a/tests/integration/standard/test_query_paging.py +++ b/tests/integration/standard/test_query_paging.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -16,13 +18,9 @@ import logging log = logging.getLogger(__name__) -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from itertools import cycle, count -from six.moves import range from threading import Event from cassandra import ConsistencyLevel diff --git a/tests/integration/standard/test_routing.py b/tests/integration/standard/test_routing.py index e1dabba49a..d41e06df6b 100644 --- a/tests/integration/standard/test_routing.py +++ b/tests/integration/standard/test_routing.py @@ -1,20 +1,19 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from uuid import uuid1 diff --git a/tests/integration/standard/test_row_factories.py b/tests/integration/standard/test_row_factories.py index 93f25d9276..97f16ea106 100644 --- a/tests/integration/standard/test_row_factories.py +++ b/tests/integration/standard/test_row_factories.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +17,7 @@ from tests.integration import get_server_versions, use_singledc, \ BasicSharedKeyspaceUnitTestCaseWFunctionTable, BasicSharedKeyspaceUnitTestCase, execute_until_pass, TestCluster -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.cluster import ResultSet, ExecutionProfile, EXEC_PROFILE_DEFAULT from cassandra.query import tuple_factory, named_tuple_factory, dict_factory, ordered_dict_factory diff --git a/tests/integration/standard/test_single_interface.py b/tests/integration/standard/test_single_interface.py index 91451a52a0..6ff331060a 100644 --- a/tests/integration/standard/test_single_interface.py +++ b/tests/integration/standard/test_single_interface.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,12 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
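# Editor's note -- a hedged sketch (not part of this diff) showing how the row
# factories imported in test_row_factories.py above are typically selected via
# an execution profile; all calls here are public driver APIs.
from cassandra.cluster import Cluster, ExecutionProfile, EXEC_PROFILE_DEFAULT
from cassandra.query import dict_factory

profile = ExecutionProfile(row_factory=dict_factory)
cluster = Cluster(execution_profiles={EXEC_PROFILE_DEFAULT: profile})
session = cluster.connect()
# with dict_factory each row comes back as a dict keyed by column name
row = session.execute("SELECT release_version FROM system.local").one()
print(row["release_version"])
cluster.shutdown()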
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - -import six +import unittest from cassandra import ConsistencyLevel from cassandra.query import SimpleStatement @@ -25,7 +22,7 @@ from packaging.version import Version from tests.integration import use_singledc, PROTOCOL_VERSION, \ remove_cluster, greaterthanorequalcass40, notdse, \ - CASSANDRA_VERSION, DSE_VERSION, TestCluster + CASSANDRA_VERSION, DSE_VERSION, TestCluster, DEFAULT_SINGLE_INTERFACE_PORT def setup_module(): @@ -42,7 +39,7 @@ def teardown_module(): class SingleInterfaceTest(unittest.TestCase): def setUp(self): - self.cluster = TestCluster() + self.cluster = TestCluster(port=DEFAULT_SINGLE_INTERFACE_PORT) self.session = self.cluster.connect() def tearDown(self): @@ -57,7 +54,7 @@ def test_single_interface(self): broadcast_rpc_ports = [] broadcast_ports = [] self.assertEqual(len(hosts), 3) - for endpoint, host in six.iteritems(hosts): + for endpoint, host in hosts.items(): self.assertEqual(endpoint.address, host.broadcast_rpc_address) self.assertEqual(endpoint.port, host.broadcast_rpc_port) @@ -74,4 +71,4 @@ def test_single_interface(self): consistency_level=ConsistencyLevel.ALL)) for pool in self.session.get_pools(): - self.assertEquals(1, pool.get_state()['open_count']) + self.assertEqual(1, pool.get_state()['open_count']) diff --git a/tests/integration/standard/test_types.py b/tests/integration/standard/test_types.py index 0592b7d737..3a6de0d4b7 100644 --- a/tests/integration/standard/test_types.py +++ b/tests/integration/standard/test_types.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,15 +14,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
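# Editor's note -- for reference, the six shims removed throughout this diff
# map onto plain Python 3 as follows (a summary of the substitutions applied
# above and below, not new behavior):
#   six.b("abc")              ->  b"abc"
#   six.binary_type()         ->  bytes()
#   six.string_types          ->  str
#   six.iteritems(d)          ->  d.items()
#   six.moves.range           ->  range
#   six.raise_from(E(), e)    ->  raise E() from e
#   assertRegexpMatches       ->  assertRegex (unittest's Python 3 name)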
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest -from datetime import datetime +import ipaddress import math +import random +import string +import socket +import uuid + +from datetime import datetime, date, time, timedelta +from decimal import Decimal +from functools import partial + from packaging.version import Version -import six import cassandra from cassandra import InvalidRequest @@ -34,7 +41,7 @@ from tests.integration import use_singledc, execute_until_pass, notprotocolv1, \ BasicSharedKeyspaceUnitTestCase, greaterthancass21, lessthancass30, greaterthanorequaldse51, \ - DSE_VERSION, greaterthanorequalcass3_10, requiredse, TestCluster + DSE_VERSION, greaterthanorequalcass3_10, requiredse, TestCluster, greaterthanorequalcass50 from tests.integration.datatype_utils import update_datatypes, PRIMITIVE_DATATYPES, COLLECTION_TYPES, PRIMITIVE_DATATYPES_KEYS, \ get_sample, get_all_samples, get_collection_sample @@ -63,25 +70,7 @@ def test_can_insert_blob_type_as_string(self): params = ['key1', b'blobbyblob'] query = "INSERT INTO blobstring (a, b) VALUES (%s, %s)" - # In python2, with Cassandra > 2.0, we don't treat the 'byte str' type as a blob, so we'll encode it - # as a string literal and have the following failure. - if six.PY2 and self.cql_version >= (3, 1, 0): - # Blob values can't be specified using string notation in CQL 3.1.0 and - # above which is used by default in Cassandra 2.0. - if self.cass_version >= (2, 1, 0): - msg = r'.*Invalid STRING constant \(.*?\) for "b" of type blob.*' - else: - msg = r'.*Invalid STRING constant \(.*?\) for b of type blob.*' - self.assertRaisesRegexp(InvalidRequest, msg, s.execute, query, params) - return - - # In python2, with Cassandra < 2.0, we can manually encode the 'byte str' type as hex for insertion in a blob. - if six.PY2: - cass_params = [params[0], params[1].encode('hex')] - s.execute(query, cass_params) - # In python 3, the 'bytes' type is treated as a blob, so we can correctly encode it with hex notation. 
- else: - s.execute(query, params) + s.execute(query, params) results = s.execute("SELECT * FROM blobstring")[0] for expected, actual in zip(params, results): @@ -179,10 +168,9 @@ def test_can_insert_primitive_datatypes(self): # verify data result = s.execute("SELECT {0} FROM alltypes WHERE zz=%s".format(single_columns_string), (key,))[0][1] compare_value = data_sample - if six.PY3: - import ipaddress - if isinstance(data_sample, ipaddress.IPv4Address) or isinstance(data_sample, ipaddress.IPv6Address): - compare_value = str(data_sample) + + if isinstance(data_sample, ipaddress.IPv4Address) or isinstance(data_sample, ipaddress.IPv6Address): + compare_value = str(data_sample) self.assertEqual(result, compare_value) # try the same thing with a prepared statement @@ -1063,8 +1051,8 @@ def _daterange_round_trip(self, to_insert, expected=None): results = self.session.execute(prep_sel) dr = results[0].dr - # sometimes this is truncated in the assertEquals output on failure; - if isinstance(expected, six.string_types): + # sometimes this is truncated in the assertEqual output on failure; + if isinstance(expected, str): self.assertEqual(str(dr), expected) else: self.assertEqual(dr, expected or to_insert) @@ -1117,8 +1105,8 @@ def _daterange_round_trip(self, to_insert, expected=None): results= self.session.execute("SELECT * FROM tab WHERE dr = '{0}' ".format(to_insert)) dr = results[0].dr - # sometimes this is truncated in the assertEquals output on failure; - if isinstance(expected, six.string_types): + # sometimes this is truncated in the assertEqual output on failure; + if isinstance(expected, str): self.assertEqual(str(dr), expected) else: self.assertEqual(dr, expected or to_insert) @@ -1313,3 +1301,180 @@ def run_inserts_at_version(self, proto_ver): finally: session.cluster.shutdown() + +@greaterthanorequalcass50 +class TypeTestsVector(BasicSharedKeyspaceUnitTestCase): + + def _get_first_j(self, rs): + rows = rs.all() + self.assertEqual(len(rows), 1) + return rows[0].j + + def _get_row_simple(self, idx, table_name): + rs = self.session.execute("select j from {0}.{1} where i = {2}".format(self.keyspace_name, table_name, idx)) + return self._get_first_j(rs) + + def _get_row_prepared(self, idx, table_name): + cql = "select j from {0}.{1} where i = ?".format(self.keyspace_name, table_name) + ps = self.session.prepare(cql) + rs = self.session.execute(ps, [idx]) + return self._get_first_j(rs) + + def _round_trip_test(self, subtype, subtype_fn, test_fn, use_positional_parameters=True): + + table_name = subtype.replace("<","A").replace(">", "B").replace(",", "C") + "isH" + + def random_subtype_vector(): + return [subtype_fn() for _ in range(3)] + + ddl = """CREATE TABLE {0}.{1} ( + i int PRIMARY KEY, + j vector<{2}, 3>)""".format(self.keyspace_name, table_name, subtype) + self.session.execute(ddl) + + if use_positional_parameters: + cql = "insert into {0}.{1} (i,j) values (%s,%s)".format(self.keyspace_name, table_name) + expected1 = random_subtype_vector() + data1 = {1:random_subtype_vector(), 2:expected1, 3:random_subtype_vector()} + for k,v in data1.items(): + # Attempt a set of inserts using the driver's support for positional params + self.session.execute(cql, (k,v)) + + cql = "insert into {0}.{1} (i,j) values (?,?)".format(self.keyspace_name, table_name) + expected2 = random_subtype_vector() + ps = self.session.prepare(cql) + data2 = {4:random_subtype_vector(), 5:expected2, 6:random_subtype_vector()} + for k,v in data2.items(): + # Add some additional rows via prepared statements + 
self.session.execute(ps, [k,v])
+
+        # Use prepared queries to gather data from the rows we added via simple queries and vice versa
+        if use_positional_parameters:
+            observed1 = self._get_row_prepared(2, table_name)
+            for idx in range(0, 3):
+                test_fn(observed1[idx], expected1[idx])
+
+        observed2 = self._get_row_simple(5, table_name)
+        for idx in range(0, 3):
+            test_fn(observed2[idx], expected2[idx])
+
+    def test_round_trip_integers(self):
+        self._round_trip_test("int", partial(random.randint, 0, (2 ** 31) - 1), self.assertEqual)
+        self._round_trip_test("bigint", partial(random.randint, 0, (2 ** 63) - 1), self.assertEqual)
+        self._round_trip_test("smallint", partial(random.randint, 0, (2 ** 15) - 1), self.assertEqual)
+        self._round_trip_test("tinyint", partial(random.randint, 0, (2 ** 7) - 1), self.assertEqual)
+        self._round_trip_test("varint", partial(random.randint, 0, 2 ** 63), self.assertEqual)
+
+    def test_round_trip_floating_point(self):
+        _almost_equal_test_fn = partial(self.assertAlmostEqual, places=5)
+        def _random_decimal():
+            return Decimal(random.uniform(0.0, 100.0))
+
+        # Max value here isn't really connected to max value for floating point nums in IEEE 754... it's used here
+        # mainly as a convenient upper bound
+        self._round_trip_test("float", partial(random.uniform, 0.0, 100.0), _almost_equal_test_fn)
+        self._round_trip_test("double", partial(random.uniform, 0.0, 100.0), _almost_equal_test_fn)
+        self._round_trip_test("decimal", _random_decimal, _almost_equal_test_fn)
+
+    def test_round_trip_text(self):
+        def _random_string():
+            return ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(24))
+
+        self._round_trip_test("ascii", _random_string, self.assertEqual)
+        self._round_trip_test("text", _random_string, self.assertEqual)
+
+    def test_round_trip_date_and_time(self):
+        _almost_equal_test_fn = partial(self.assertAlmostEqual, delta=timedelta(seconds=1))
+        def _random_datetime():
+            return datetime.today() - timedelta(hours=random.randint(0,18), days=random.randint(1,1000))
+        def _random_date():
+            return _random_datetime().date()
+        def _random_time():
+            return _random_datetime().time()
+
+        self._round_trip_test("date", _random_date, self.assertEqual)
+        self._round_trip_test("time", _random_time, self.assertEqual)
+        self._round_trip_test("timestamp", _random_datetime, _almost_equal_test_fn)
+
+    def test_round_trip_uuid(self):
+        self._round_trip_test("uuid", uuid.uuid1, self.assertEqual)
+        self._round_trip_test("timeuuid", uuid.uuid1, self.assertEqual)
+
+    def test_round_trip_miscellany(self):
+        def _random_bytes():
+            return random.getrandbits(32).to_bytes(4,'big')
+        def _random_boolean():
+            return random.choice([True, False])
+        def _random_duration():
+            return Duration(random.randint(0,11), random.randint(0,11), random.randint(0,10000))
+        def _random_inet():
+            return socket.inet_ntoa(_random_bytes())
+
+        self._round_trip_test("boolean", _random_boolean, self.assertEqual)
+        self._round_trip_test("duration", _random_duration, self.assertEqual)
+        self._round_trip_test("inet", _random_inet, self.assertEqual)
+        self._round_trip_test("blob", _random_bytes, self.assertEqual)
+
+    def test_round_trip_collections(self):
+        def _random_seq():
+            return [random.randint(0,100000) for _ in range(8)]
+        def _random_set():
+            return set(_random_seq())
+        def _random_map():
+            return {k:v for (k,v) in zip(_random_seq(), _random_seq())}
+
+        # Goal here is to test collections of both fixed and variable size subtypes
+        self._round_trip_test("list<int>", _random_seq, self.assertEqual)
+        self._round_trip_test("list<varint>", _random_seq, self.assertEqual)
+        self._round_trip_test("set<int>", _random_set, self.assertEqual)
+        self._round_trip_test("set<varint>", _random_set, self.assertEqual)
+        self._round_trip_test("map<int,int>", _random_map, self.assertEqual)
+        self._round_trip_test("map<int,varint>", _random_map, self.assertEqual)
+        self._round_trip_test("map<varint,int>", _random_map, self.assertEqual)
+        self._round_trip_test("map<varint,varint>", _random_map, self.assertEqual)
+
+    def test_round_trip_vector_of_vectors(self):
+        def _random_vector():
+            return [random.randint(0,100000) for _ in range(2)]
+
+        self._round_trip_test("vector<int, 2>", _random_vector, self.assertEqual)
+        self._round_trip_test("vector<varint, 2>", _random_vector, self.assertEqual)
+
+    def test_round_trip_tuples(self):
+        def _random_tuple():
+            return (random.randint(0,100000),random.randint(0,100000))
+
+        # Unfortunately we can't use positional parameters when inserting tuples because the driver will try to encode
+        # them as lists before sending them to the server... and that confuses the parsing logic.
+        self._round_trip_test("tuple<int,int>", _random_tuple, self.assertEqual, use_positional_parameters=False)
+        self._round_trip_test("tuple<int,varint>", _random_tuple, self.assertEqual, use_positional_parameters=False)
+        self._round_trip_test("tuple<varint,int>", _random_tuple, self.assertEqual, use_positional_parameters=False)
+        self._round_trip_test("tuple<varint,varint>", _random_tuple, self.assertEqual, use_positional_parameters=False)
+
+    def test_round_trip_udts(self):
+        def _udt_equal_test_fn(udt1, udt2):
+            self.assertEqual(udt1.a, udt2.a)
+            self.assertEqual(udt1.b, udt2.b)
+
+        self.session.execute("create type {}.fixed_type (a int, b int)".format(self.keyspace_name))
+        self.session.execute("create type {}.mixed_type_one (a int, b varint)".format(self.keyspace_name))
+        self.session.execute("create type {}.mixed_type_two (a varint, b int)".format(self.keyspace_name))
+        self.session.execute("create type {}.var_type (a varint, b varint)".format(self.keyspace_name))
+
+        class GeneralUDT:
+            def __init__(self, a, b):
+                self.a = a
+                self.b = b
+
+        self.cluster.register_user_type(self.keyspace_name,'fixed_type', GeneralUDT)
+        self.cluster.register_user_type(self.keyspace_name,'mixed_type_one', GeneralUDT)
+        self.cluster.register_user_type(self.keyspace_name,'mixed_type_two', GeneralUDT)
+        self.cluster.register_user_type(self.keyspace_name,'var_type', GeneralUDT)
+
+        def _random_udt():
+            return GeneralUDT(random.randint(0,100000),random.randint(0,100000))
+
+        self._round_trip_test("fixed_type", _random_udt, _udt_equal_test_fn)
+        self._round_trip_test("mixed_type_one", _random_udt, _udt_equal_test_fn)
+        self._round_trip_test("mixed_type_two", _random_udt, _udt_equal_test_fn)
+        self._round_trip_test("var_type", _random_udt, _udt_equal_test_fn)
diff --git a/tests/integration/standard/test_udts.py b/tests/integration/standard/test_udts.py
index 6d9676f25e..9c3e560b76 100644
--- a/tests/integration/standard/test_udts.py
+++ b/tests/integration/standard/test_udts.py
@@ -1,10 +1,12 @@
-# Copyright DataStax, Inc.
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.
You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,14 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from collections import namedtuple from functools import partial -import six from cassandra import InvalidRequest from cassandra.cluster import UserTypeDoesNotExist, ExecutionProfile, EXEC_PROFILE_DEFAULT @@ -294,9 +292,9 @@ def test_can_insert_udts_with_nulls(self): self.assertEqual((None, None, None, None), s.execute(select)[0].b) # also test empty strings - s.execute(insert, [User('', None, None, six.binary_type())]) + s.execute(insert, [User('', None, None, bytes())]) results = s.execute("SELECT b FROM mytable WHERE a=0") - self.assertEqual(('', None, None, six.binary_type()), results[0].b) + self.assertEqual(('', None, None, bytes()), results[0].b) c.shutdown() @@ -721,7 +719,7 @@ def test_type_alteration(self): s.execute("INSERT INTO %s (k, v) VALUES (0, {v0 : 3, v1 : 0xdeadbeef})" % (self.table_name,)) val = s.execute('SELECT v FROM %s' % self.table_name)[0][0] self.assertEqual(val['v0'], 3) - self.assertEqual(val['v1'], six.b('\xde\xad\xbe\xef')) + self.assertEqual(val['v1'], b'\xde\xad\xbe\xef') @lessthancass30 def test_alter_udt(self): diff --git a/tests/integration/upgrade/__init__.py b/tests/integration/upgrade/__init__.py index d2b9076bc2..5dfb4fecf8 100644 --- a/tests/integration/upgrade/__init__.py +++ b/tests/integration/upgrade/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -27,10 +29,7 @@ import time import logging -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest def setup_module(): diff --git a/tests/integration/upgrade/test_upgrade.py b/tests/integration/upgrade/test_upgrade.py index 31df55c02c..837d8232cb 100644 --- a/tests/integration/upgrade/test_upgrade.py +++ b/tests/integration/upgrade/test_upgrade.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -20,10 +22,7 @@ from cassandra.policies import ConstantSpeculativeExecutionPolicy from tests.integration.upgrade import UpgradeBase, UpgradeBaseAuth, UpgradePath, upgrade_paths -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest # Previous Cassandra upgrade diff --git a/tests/integration/util.py b/tests/integration/util.py index 6215449d1f..efdf258b2b 100644 --- a/tests/integration/util.py +++ b/tests/integration/util.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,6 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
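# Editor's note -- a worked example (illustrative values, not part of this
# diff) of the stream-id accounting that the updated
# assert_quiescent_pool_state() below verifies: ids are allocated contiguously
# from 0, so when the pool is quiescent the free ids plus the orphaned ids
# must cover every id the connection has ever handed out.
req_ids = [0, 2, 4]      # ids returned to the connection's free list
orphan_ids = [1, 3]      # ids still held by timed-out ("orphaned") requests
highest_request_id = 4   # highest id ever allocated on this connection
assert highest_request_id == len(req_ids) + len(orphan_ids) - 1
assert highest_request_id == max(req_ids + orphan_ids)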
+from itertools import chain + from tests.integration import PROTOCOL_VERSION import time @@ -38,14 +42,18 @@ def assert_quiescent_pool_state(test_case, cluster, wait=None): for state in pool_states: test_case.assertFalse(state['shutdown']) test_case.assertGreater(state['open_count'], 0) - test_case.assertTrue(all((i == 0 for i in state['in_flights']))) + no_in_flight = all((i == 0 for i in state['in_flights'])) + orphans_and_inflights = zip(state['orphan_requests'], state['in_flights']) + all_orphaned = all((len(orphans) == inflight for (orphans, inflight) in orphans_and_inflights)) + test_case.assertTrue(no_in_flight or all_orphaned) for holder in cluster.get_connection_holders(): for connection in holder.get_connections(): # all ids are unique req_ids = connection.request_ids + orphan_ids = connection.orphaned_request_ids test_case.assertEqual(len(req_ids), len(set(req_ids))) - test_case.assertEqual(connection.highest_request_id, len(req_ids) - 1) - test_case.assertEqual(connection.highest_request_id, max(req_ids)) + test_case.assertEqual(connection.highest_request_id, len(req_ids) + len(orphan_ids) - 1) + test_case.assertEqual(connection.highest_request_id, max(chain(req_ids, orphan_ids))) if PROTOCOL_VERSION < 3: test_case.assertEqual(connection.highest_request_id, connection.max_request_id) diff --git a/tests/stress_tests/test_load.py b/tests/stress_tests/test_load.py index a9771147ce..30c384f098 100644 --- a/tests/stress_tests/test_load.py +++ b/tests/stress_tests/test_load.py @@ -1,20 +1,19 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import gc diff --git a/tests/stress_tests/test_multi_inserts.py b/tests/stress_tests/test_multi_inserts.py index 65bbe2a4e4..3e32e233f1 100644 --- a/tests/stress_tests/test_multi_inserts.py +++ b/tests/stress_tests/test_multi_inserts.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest import os from cassandra.cluster import Cluster diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index 386372eb4a..588a655d98 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/unit/advanced/__init__.py b/tests/unit/advanced/__init__.py index 2c9ca172f8..635f0d9e60 100644 --- a/tests/unit/advanced/__init__.py +++ b/tests/unit/advanced/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/unit/advanced/cloud/test_cloud.py b/tests/unit/advanced/cloud/test_cloud.py index ab18f0af72..04cbf883f3 100644 --- a/tests/unit/advanced/cloud/test_cloud.py +++ b/tests/unit/advanced/cloud/test_cloud.py @@ -9,18 +9,12 @@ import tempfile import os import shutil -import six - -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest +from unittest.mock import patch from cassandra import DriverException from cassandra.datastax import cloud -from mock import patch - from tests import notwindows class CloudTests(unittest.TestCase): @@ -99,8 +93,7 @@ def clean_tmp_dir(): } # The directory is not writtable.. 
we expect a permission error - exc = PermissionError if six.PY3 else OSError - with self.assertRaises(exc): + with self.assertRaises(PermissionError): cloud.get_cloud_config(config) # With use_default_tempdir, we expect an connection refused diff --git a/tests/unit/advanced/test_auth.py b/tests/unit/advanced/test_auth.py index bb411afe2b..6457810a6f 100644 --- a/tests/unit/advanced/test_auth.py +++ b/tests/unit/advanced/test_auth.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +17,7 @@ import os from puresasl import QOP -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.auth import DSEGSSAPIAuthProvider diff --git a/tests/unit/advanced/test_execution_profile.py b/tests/unit/advanced/test_execution_profile.py index 8592f56a44..143a391f72 100644 --- a/tests/unit/advanced/test_execution_profile.py +++ b/tests/unit/advanced/test_execution_profile.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.cluster import GraphExecutionProfile, GraphAnalyticsExecutionProfile from cassandra.graph import GraphOptions diff --git a/tests/unit/advanced/test_geometry.py b/tests/unit/advanced/test_geometry.py index 4fa2644ff2..0e5dc8f93f 100644 --- a/tests/unit/advanced/test_geometry.py +++ b/tests/unit/advanced/test_geometry.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import struct import math diff --git a/tests/unit/advanced/test_graph.py b/tests/unit/advanced/test_graph.py index f25a229f42..456addb769 100644 --- a/tests/unit/advanced/test_graph.py +++ b/tests/unit/advanced/test_graph.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +17,7 @@ import warnings import json -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - -import six +import unittest from cassandra import ConsistencyLevel from cassandra.policies import RetryPolicy @@ -262,7 +259,7 @@ def test_init_unknown_kwargs(self): with warnings.catch_warnings(record=True) as w: GraphOptions(unknown_param=42) self.assertEqual(len(w), 1) - self.assertRegexpMatches(str(w[0].message), r"^Unknown keyword.*GraphOptions.*") + self.assertRegex(str(w[0].message), r"^Unknown keyword.*GraphOptions.*") def test_update(self): opts = GraphOptions(**self.api_params) @@ -281,7 +278,7 @@ def test_get_options(self): other = GraphOptions(**kwargs) options = base.get_options_map(other) updated = self.opt_mapping['graph_name'] - self.assertEqual(options[updated], six.b('unit_test')) + self.assertEqual(options[updated], b'unit_test') for name in (n for n in self.opt_mapping.values() if n != updated): self.assertEqual(options[name], base._graph_options[name]) @@ -291,22 +288,22 @@ def test_get_options(self): def test_set_attr(self): expected = 'test@@@@' opts = GraphOptions(graph_name=expected) - self.assertEqual(opts.graph_name, six.b(expected)) + self.assertEqual(opts.graph_name, expected.encode()) expected = 'somethingelse####' opts.graph_name = expected - self.assertEqual(opts.graph_name, six.b(expected)) + self.assertEqual(opts.graph_name, expected.encode()) # will update options with set value another = GraphOptions() 
self.assertIsNone(another.graph_name) another.update(opts) - self.assertEqual(another.graph_name, six.b(expected)) + self.assertEqual(another.graph_name, expected.encode()) opts.graph_name = None self.assertIsNone(opts.graph_name) # will not update another with its set-->unset value another.update(opts) - self.assertEqual(another.graph_name, six.b(expected)) # remains unset + self.assertEqual(another.graph_name, expected.encode()) # remains unset opt_map = another.get_options_map(opts) self.assertEqual(opt_map, another._graph_options) @@ -321,7 +318,7 @@ def _verify_api_params(self, opts, api_params): self.assertEqual(len(opts._graph_options), len(api_params)) for name, value in api_params.items(): try: - value = six.b(value) + value = value.encode() except: pass # already bytes self.assertEqual(getattr(opts, name), value) @@ -338,8 +335,8 @@ def test_consistency_levels(self): # mapping from base opt_map = opts.get_options_map() - self.assertEqual(opt_map['graph-read-consistency'], six.b(ConsistencyLevel.value_to_name[read_cl])) - self.assertEqual(opt_map['graph-write-consistency'], six.b(ConsistencyLevel.value_to_name[write_cl])) + self.assertEqual(opt_map['graph-read-consistency'], ConsistencyLevel.value_to_name[read_cl].encode()) + self.assertEqual(opt_map['graph-write-consistency'], ConsistencyLevel.value_to_name[write_cl].encode()) # empty by default new_opts = GraphOptions() @@ -349,8 +346,8 @@ def test_consistency_levels(self): # set from other opt_map = new_opts.get_options_map(opts) - self.assertEqual(opt_map['graph-read-consistency'], six.b(ConsistencyLevel.value_to_name[read_cl])) - self.assertEqual(opt_map['graph-write-consistency'], six.b(ConsistencyLevel.value_to_name[write_cl])) + self.assertEqual(opt_map['graph-read-consistency'], ConsistencyLevel.value_to_name[read_cl].encode()) + self.assertEqual(opt_map['graph-write-consistency'], ConsistencyLevel.value_to_name[write_cl].encode()) def test_graph_source_convenience_attributes(self): opts = GraphOptions() diff --git a/tests/unit/advanced/test_insights.py b/tests/unit/advanced/test_insights.py index 2cc170e485..e6be6fc3d1 100644 --- a/tests/unit/advanced/test_insights.py +++ b/tests/unit/advanced/test_insights.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,14 +15,11 @@ # limitations under the License. 
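# Editor's note -- the GraphOptions assertions above rely on str.encode()
# defaulting to UTF-8, which matches what six.b() produced for these ASCII
# option values; an illustrative check (assumes cassandra.ConsistencyLevel
# is imported as in the test module above):
assert "unit_test".encode() == b"unit_test"
assert ConsistencyLevel.value_to_name[ConsistencyLevel.ONE].encode() == b"ONE"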
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import logging -from mock import sentinel import sys +from unittest.mock import sentinel from cassandra import ConsistencyLevel from cassandra.cluster import ( diff --git a/tests/unit/advanced/test_metadata.py b/tests/unit/advanced/test_metadata.py index addd514169..052ad3f465 100644 --- a/tests/unit/advanced/test_metadata.py +++ b/tests/unit/advanced/test_metadata.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.metadata import ( KeyspaceMetadata, TableMetadataDSE68, diff --git a/tests/unit/advanced/test_policies.py b/tests/unit/advanced/test_policies.py index 79e7410799..406263f42b 100644 --- a/tests/unit/advanced/test_policies.py +++ b/tests/unit/advanced/test_policies.py @@ -1,22 +1,20 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
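# Editor's note -- the mock migration in the import hunks below is purely a
# sourcing change: the stdlib module exposes the same names these tests use.
from unittest.mock import Mock, patch, sentinel  # previously: from mock import Mock, patch, sentinel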
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - -from mock import Mock +import unittest +from unittest.mock import Mock from cassandra.pool import Host from cassandra.policies import RoundRobinPolicy diff --git a/tests/unit/column_encryption/test_policies.py b/tests/unit/column_encryption/test_policies.py new file mode 100644 index 0000000000..f78701aa2f --- /dev/null +++ b/tests/unit/column_encryption/test_policies.py @@ -0,0 +1,171 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import unittest + +from cassandra.policies import ColDesc +from cassandra.column_encryption.policies import AES256ColumnEncryptionPolicy, \ + AES256_BLOCK_SIZE_BYTES, AES256_KEY_SIZE_BYTES + +class AES256ColumnEncryptionPolicyTest(unittest.TestCase): + + def _random_block(self): + return os.urandom(AES256_BLOCK_SIZE_BYTES) + + def _random_key(self): + return os.urandom(AES256_KEY_SIZE_BYTES) + + def _test_round_trip(self, bytes): + coldesc = ColDesc('ks1','table1','col1') + policy = AES256ColumnEncryptionPolicy() + policy.add_column(coldesc, self._random_key(), "blob") + encrypted_bytes = policy.encrypt(coldesc, bytes) + self.assertEqual(bytes, policy.decrypt(coldesc, encrypted_bytes)) + + def test_no_padding_necessary(self): + self._test_round_trip(self._random_block()) + + def test_some_padding_required(self): + for byte_size in range(1,AES256_BLOCK_SIZE_BYTES - 1): + bytes = os.urandom(byte_size) + self._test_round_trip(bytes) + for byte_size in range(AES256_BLOCK_SIZE_BYTES + 1,(2 * AES256_BLOCK_SIZE_BYTES) - 1): + bytes = os.urandom(byte_size) + self._test_round_trip(bytes) + + def test_add_column_invalid_key_size_raises(self): + coldesc = ColDesc('ks1','table1','col1') + policy = AES256ColumnEncryptionPolicy() + for key_size in range(1,AES256_KEY_SIZE_BYTES - 1): + with self.assertRaises(ValueError): + policy.add_column(coldesc, os.urandom(key_size), "blob") + for key_size in range(AES256_KEY_SIZE_BYTES + 1,(2 * AES256_KEY_SIZE_BYTES) - 1): + with self.assertRaises(ValueError): + policy.add_column(coldesc, os.urandom(key_size), "blob") + + def test_add_column_invalid_iv_size_raises(self): + def test_iv_size(iv_size): + policy = AES256ColumnEncryptionPolicy(iv = os.urandom(iv_size)) + policy.add_column(coldesc, os.urandom(AES256_KEY_SIZE_BYTES), "blob") + policy.encrypt(coldesc, os.urandom(128)) + + coldesc = ColDesc('ks1','table1','col1') + for iv_size in range(1,AES256_BLOCK_SIZE_BYTES - 1): + with self.assertRaises(ValueError): + test_iv_size(iv_size) + for iv_size in range(AES256_BLOCK_SIZE_BYTES + 1,(2 * AES256_BLOCK_SIZE_BYTES) - 1): + with self.assertRaises(ValueError): + test_iv_size(iv_size) + + # Finally, confirm that the expected IV size has no issue + test_iv_size(AES256_BLOCK_SIZE_BYTES) + + 
def test_add_column_null_coldesc_raises(self): + with self.assertRaises(ValueError): + policy = AES256ColumnEncryptionPolicy() + policy.add_column(None, self._random_block(), "blob") + + def test_add_column_null_key_raises(self): + with self.assertRaises(ValueError): + policy = AES256ColumnEncryptionPolicy() + coldesc = ColDesc('ks1','table1','col1') + policy.add_column(coldesc, None, "blob") + + def test_add_column_null_type_raises(self): + with self.assertRaises(ValueError): + policy = AES256ColumnEncryptionPolicy() + coldesc = ColDesc('ks1','table1','col1') + policy.add_column(coldesc, self._random_block(), None) + + def test_add_column_unknown_type_raises(self): + with self.assertRaises(ValueError): + policy = AES256ColumnEncryptionPolicy() + coldesc = ColDesc('ks1','table1','col1') + policy.add_column(coldesc, self._random_block(), "foobar") + + def test_encode_and_encrypt_null_coldesc_raises(self): + with self.assertRaises(ValueError): + policy = AES256ColumnEncryptionPolicy() + coldesc = ColDesc('ks1','table1','col1') + policy.add_column(coldesc, self._random_key(), "blob") + policy.encode_and_encrypt(None, self._random_block()) + + def test_encode_and_encrypt_null_obj_raises(self): + with self.assertRaises(ValueError): + policy = AES256ColumnEncryptionPolicy() + coldesc = ColDesc('ks1','table1','col1') + policy.add_column(coldesc, self._random_key(), "blob") + policy.encode_and_encrypt(coldesc, None) + + def test_encode_and_encrypt_unknown_coldesc_raises(self): + with self.assertRaises(ValueError): + policy = AES256ColumnEncryptionPolicy() + coldesc = ColDesc('ks1','table1','col1') + policy.add_column(coldesc, self._random_key(), "blob") + policy.encode_and_encrypt(ColDesc('ks2','table2','col2'), self._random_block()) + + def test_contains_column(self): + coldesc = ColDesc('ks1','table1','col1') + policy = AES256ColumnEncryptionPolicy() + policy.add_column(coldesc, self._random_key(), "blob") + self.assertTrue(policy.contains_column(coldesc)) + self.assertFalse(policy.contains_column(ColDesc('ks2','table1','col1'))) + self.assertFalse(policy.contains_column(ColDesc('ks1','table2','col1'))) + self.assertFalse(policy.contains_column(ColDesc('ks1','table1','col2'))) + self.assertFalse(policy.contains_column(ColDesc('ks2','table2','col2'))) + + def test_encrypt_unknown_column(self): + with self.assertRaises(ValueError): + policy = AES256ColumnEncryptionPolicy() + coldesc = ColDesc('ks1','table1','col1') + policy.add_column(coldesc, self._random_key(), "blob") + policy.encrypt(ColDesc('ks2','table2','col2'), self._random_block()) + + def test_decrypt_unknown_column(self): + policy = AES256ColumnEncryptionPolicy() + coldesc = ColDesc('ks1','table1','col1') + policy.add_column(coldesc, self._random_key(), "blob") + encrypted_bytes = policy.encrypt(coldesc, self._random_block()) + with self.assertRaises(ValueError): + policy.decrypt(ColDesc('ks2','table2','col2'), encrypted_bytes) + + def test_cache_info(self): + # Exclude any interference from tests above + AES256ColumnEncryptionPolicy._build_cipher.cache_clear() + + coldesc1 = ColDesc('ks1','table1','col1') + coldesc2 = ColDesc('ks2','table2','col2') + coldesc3 = ColDesc('ks3','table3','col3') + policy = AES256ColumnEncryptionPolicy() + for coldesc in [coldesc1, coldesc2, coldesc3]: + policy.add_column(coldesc, self._random_key(), "blob") + + # First run for this coldesc should be a miss, everything else should be a cache hit + for _ in range(10): + policy.encrypt(coldesc1, self._random_block()) + cache_info = policy.cache_info() + 
self.assertEqual(cache_info.hits, 9) + self.assertEqual(cache_info.misses, 1) + self.assertEqual(cache_info.maxsize, 128) + + # Important note: we're measuring the size of the cache of ciphers, NOT stored + # keys. We won't have a cipher here until we actually encrypt something + self.assertEqual(cache_info.currsize, 1) + policy.encrypt(coldesc2, self._random_block()) + self.assertEqual(policy.cache_info().currsize, 2) + policy.encrypt(coldesc3, self._random_block()) + self.assertEqual(policy.cache_info().currsize, 3) diff --git a/tests/unit/cqlengine/__init__.py b/tests/unit/cqlengine/__init__.py index 386372eb4a..588a655d98 100644 --- a/tests/unit/cqlengine/__init__.py +++ b/tests/unit/cqlengine/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/unit/cqlengine/test_columns.py b/tests/unit/cqlengine/test_columns.py index bcb174a8c0..4d264df07c 100644 --- a/tests/unit/cqlengine/test_columns.py +++ b/tests/unit/cqlengine/test_columns.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.cqlengine.columns import Column diff --git a/tests/unit/cqlengine/test_connection.py b/tests/unit/cqlengine/test_connection.py index 9f8e500c6b..dd7586aff0 100644 --- a/tests/unit/cqlengine/test_connection.py +++ b/tests/unit/cqlengine/test_connection.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,17 +14,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest +from unittest.mock import Mock from cassandra.cluster import _ConfigMode from cassandra.cqlengine import connection from cassandra.query import dict_factory -from mock import Mock - class ConnectionTest(unittest.TestCase): @@ -53,12 +51,12 @@ def test_get_session_fails_without_existing_connection(self): """ Users can't get the default session without having a default connection set. """ - with self.assertRaisesRegexp(connection.CQLEngineException, self.no_registered_connection_msg): + with self.assertRaisesRegex(connection.CQLEngineException, self.no_registered_connection_msg): connection.get_session(connection=None) def test_get_cluster_fails_without_existing_connection(self): """ Users can't get the default cluster without having a default connection set. """ - with self.assertRaisesRegexp(connection.CQLEngineException, self.no_registered_connection_msg): + with self.assertRaisesRegex(connection.CQLEngineException, self.no_registered_connection_msg): connection.get_cluster(connection=None) diff --git a/tests/unit/cqlengine/test_udt.py b/tests/unit/cqlengine/test_udt.py index ebe1139fd0..de87bf3833 100644 --- a/tests/unit/cqlengine/test_udt.py +++ b/tests/unit/cqlengine/test_udt.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.cqlengine import columns from cassandra.cqlengine.models import Model diff --git a/tests/unit/cython/__init__.py b/tests/unit/cython/__init__.py index 386372eb4a..588a655d98 100644 --- a/tests/unit/cython/__init__.py +++ b/tests/unit/cython/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. 
+# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/unit/cython/bytesio_testhelper.pyx b/tests/unit/cython/bytesio_testhelper.pyx index 7ba91bc4c0..dcb8c4a4de 100644 --- a/tests/unit/cython/bytesio_testhelper.pyx +++ b/tests/unit/cython/bytesio_testhelper.pyx @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/unit/cython/test_bytesio.py b/tests/unit/cython/test_bytesio.py index a156fc1272..08ca284ff3 100644 --- a/tests/unit/cython/test_bytesio.py +++ b/tests/unit/cython/test_bytesio.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +17,7 @@ from tests.unit.cython.utils import cyimport, cythontest bytesio_testhelper = cyimport('tests.unit.cython.bytesio_testhelper') -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest class BytesIOTest(unittest.TestCase): diff --git a/tests/unit/cython/test_types.py b/tests/unit/cython/test_types.py index a0d2138c6d..4ae18639f6 100644 --- a/tests/unit/cython/test_types.py +++ b/tests/unit/cython/test_types.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +17,7 @@ from tests.unit.cython.utils import cyimport, cythontest types_testhelper = cyimport('tests.unit.cython.types_testhelper') -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest class TypesTest(unittest.TestCase): diff --git a/tests/unit/cython/test_utils.py b/tests/unit/cython/test_utils.py index dc8745e471..e43ae343f4 100644 --- a/tests/unit/cython/test_utils.py +++ b/tests/unit/cython/test_utils.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +17,7 @@ from tests.unit.cython.utils import cyimport, cythontest utils_testhelper = cyimport('tests.unit.cython.utils_testhelper') -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest class UtilsTest(unittest.TestCase): @@ -26,4 +25,4 @@ class UtilsTest(unittest.TestCase): @cythontest def test_datetime_from_timestamp(self): - utils_testhelper.test_datetime_from_timestamp(self.assertEqual) \ No newline at end of file + utils_testhelper.test_datetime_from_timestamp(self.assertEqual) diff --git a/tests/unit/cython/types_testhelper.pyx b/tests/unit/cython/types_testhelper.pyx index 55fd310837..a9252df7ee 100644 --- a/tests/unit/cython/types_testhelper.pyx +++ b/tests/unit/cython/types_testhelper.pyx @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -38,7 +40,7 @@ def test_datetype(assert_equal): cdef BytesIOReader reader cdef Buffer buf - dt = datetime.datetime.utcfromtimestamp(timestamp) + dt = datetime.datetime.fromtimestamp(timestamp, tz=datetime.timezone.utc) bytes = io.BytesIO() write_value(bytes, DateType.serialize(dt, 0)) @@ -52,7 +54,7 @@ def test_datetype(assert_equal): # deserialize # epoc expected = 0 - assert_equal(deserialize(expected), datetime.datetime.utcfromtimestamp(expected)) + assert_equal(deserialize(expected), datetime.datetime.fromtimestamp(expected, tz=datetime.timezone.utc).replace(tzinfo=None)) # beyond 32b expected = 2 ** 33 diff --git a/tests/unit/cython/utils.py b/tests/unit/cython/utils.py index 7f8be22ce0..de348afffa 100644 --- a/tests/unit/cython/utils.py +++ b/tests/unit/cython/utils.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
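The datetime change in types_testhelper.pyx above anticipates the Python 3.12 deprecation of utcfromtimestamp; the replacement returns a timezone-aware value, which is why the deserialize comparison strips tzinfo. A standalone equivalence check (illustrative only):

import datetime

aware = datetime.datetime.fromtimestamp(0, tz=datetime.timezone.utc)
# Same wall-clock value the deprecated utcfromtimestamp(0) produced, once tzinfo is dropped:
assert aware.replace(tzinfo=None) == datetime.datetime(1970, 1, 1)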
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -18,10 +20,7 @@ except ImportError: VERIFY_CYTHON = False -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest def cyimport(import_path): """ diff --git a/tests/unit/cython/utils_testhelper.pyx b/tests/unit/cython/utils_testhelper.pyx index fe67691aa8..8a8294d9c7 100644 --- a/tests/unit/cython/utils_testhelper.pyx +++ b/tests/unit/cython/utils_testhelper.pyx @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/unit/io/__init__.py b/tests/unit/io/__init__.py index 386372eb4a..588a655d98 100644 --- a/tests/unit/io/__init__.py +++ b/tests/unit/io/__init__.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/unit/io/eventlet_utils.py b/tests/unit/io/eventlet_utils.py index 785856be20..ef3e633ac7 100644 --- a/tests/unit/io/eventlet_utils.py +++ b/tests/unit/io/eventlet_utils.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
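Stepping back to the column-encryption suite added earlier: the public surface it exercises is small. A minimal round-trip sketch using the same calls the tests make (assumes the driver is installed with its cryptography dependency; the key here is random purely for illustration):

import os
from cassandra.policies import ColDesc
from cassandra.column_encryption.policies import (
    AES256ColumnEncryptionPolicy, AES256_KEY_SIZE_BYTES)

coldesc = ColDesc('ks1', 'table1', 'col1')
policy = AES256ColumnEncryptionPolicy()
# Keys must be exactly AES256_KEY_SIZE_BYTES (32 bytes); other sizes raise ValueError.
policy.add_column(coldesc, os.urandom(AES256_KEY_SIZE_BYTES), "blob")
ciphertext = policy.encrypt(coldesc, b'cell value')
assert policy.decrypt(coldesc, ciphertext) == b'cell value'

As the cache_info test spells out, the policy caches built ciphers rather than stored keys, so currsize grows only on the first encrypt call per column.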
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/unit/io/gevent_utils.py b/tests/unit/io/gevent_utils.py index a341fd9385..b458d13170 100644 --- a/tests/unit/io/gevent_utils.py +++ b/tests/unit/io/gevent_utils.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/tests/unit/io/test_asyncioreactor.py b/tests/unit/io/test_asyncioreactor.py index aa00a32943..65708d41dc 100644 --- a/tests/unit/io/test_asyncioreactor.py +++ b/tests/unit/io/test_asyncioreactor.py @@ -10,7 +10,7 @@ from tests import is_monkey_patched, connection_class from tests.unit.io.utils import TimerCallback, TimerTestMixin -from mock import patch +from unittest.mock import patch import unittest import time diff --git a/tests/unit/io/test_asyncorereactor.py b/tests/unit/io/test_asyncorereactor.py index 4e0e540327..b37df83bf6 100644 --- a/tests/unit/io/test_asyncorereactor.py +++ b/tests/unit/io/test_asyncorereactor.py @@ -1,25 +1,32 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
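Two gating patterns appear in the asyncorereactor hunk below: the module import is wrapped so a missing backend surfaces as the driver's DependencyException rather than a bare ImportError, and the whole suite is skipped on interpreters where the stdlib no longer ships asyncore (removed in Python 3.12). Condensed sketch of the skip gate (assumes the packaging dependency the patch itself uses):

import platform
import unittest
from packaging.version import Version

# asyncore left the standard library in Python 3.12, so gate rather than fail:
has_asyncore = Version(platform.python_version()) < Version("3.12.0")

@unittest.skipUnless(has_asyncore, "asyncore has been removed in Python 3.12")
class ExampleAsyncoreGateTest(unittest.TestCase):
    def test_placeholder(self):
        pass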
+import platform +import socket +import unittest + +from unittest.mock import patch +from packaging.version import Version + +from cassandra import DependencyException try: - import unittest2 as unittest -except ImportError: - import unittest # noqa + import cassandra.io.asyncorereactor as asyncorereactor + from cassandra.io.asyncorereactor import AsyncoreConnection +except DependencyException: + AsyncoreConnection = None -from mock import patch -import socket -import cassandra.io.asyncorereactor as asyncorereactor -from cassandra.io.asyncorereactor import AsyncoreConnection from tests import is_monkey_patched from tests.unit.io.utils import ReactorTestMixin, TimerTestMixin, noop_if_monkey_patched @@ -57,7 +64,8 @@ def tearDownClass(cls): except: pass - +has_asyncore = Version(platform.python_version()) < Version("3.12.0") +@unittest.skipUnless(has_asyncore, "asyncore has been removed in Python 3.12") class AsyncoreConnectionTest(ReactorTestMixin, AsyncorePatcher): connection_class = AsyncoreConnection @@ -68,6 +76,7 @@ def setUp(self): raise unittest.SkipTest("Can't test asyncore with monkey patching") +@unittest.skipUnless(has_asyncore, "asyncore has been removed in Python 3.12") class TestAsyncoreTimer(TimerTestMixin, AsyncorePatcher): connection_class = AsyncoreConnection diff --git a/tests/unit/io/test_eventletreactor.py b/tests/unit/io/test_eventletreactor.py index ce828cd6d8..8228884a4a 100644 --- a/tests/unit/io/test_eventletreactor.py +++ b/tests/unit/io/test_eventletreactor.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,16 +15,13 @@ # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from tests.unit.io.utils import TimerTestMixin from tests import notpypy, EVENT_LOOP_MANAGER from eventlet import monkey_patch -from mock import patch +from unittest.mock import patch try: from cassandra.io.eventletreactor import EventletConnection diff --git a/tests/unit/io/test_geventreactor.py b/tests/unit/io/test_geventreactor.py index ec64ce34c1..9bf0c7895f 100644 --- a/tests/unit/io/test_geventreactor.py +++ b/tests/unit/io/test_geventreactor.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest +from unittest.mock import patch from tests.unit.io.utils import TimerTestMixin @@ -26,8 +26,6 @@ except ImportError: GeventConnection = None # noqa -from mock import patch - skip_condition = GeventConnection is None or EVENT_LOOP_MANAGER != "gevent" @unittest.skipIf(skip_condition, "Skipping the gevent tests because it's not installed") diff --git a/tests/unit/io/test_libevreactor.py b/tests/unit/io/test_libevreactor.py index a02458edc8..a4050c79c1 100644 --- a/tests/unit/io/test_libevreactor.py +++ b/tests/unit/io/test_libevreactor.py @@ -1,35 +1,34 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest -from mock import patch, Mock +from unittest.mock import patch, Mock import weakref import socket -from tests import is_monkey_patched -from tests.unit.io.utils import ReactorTestMixin, TimerTestMixin, noop_if_monkey_patched - - +from cassandra import DependencyException try: from cassandra.io.libevreactor import _cleanup as libev__cleanup from cassandra.io.libevreactor import LibevConnection -except ImportError: +except DependencyException: LibevConnection = None # noqa +from tests import is_monkey_patched +from tests.unit.io.utils import ReactorTestMixin, TimerTestMixin, noop_if_monkey_patched + class LibevConnectionTest(ReactorTestMixin, unittest.TestCase): diff --git a/tests/unit/io/test_twistedreactor.py b/tests/unit/io/test_twistedreactor.py index f3a2f05328..67c4d8eaf3 100644 --- a/tests/unit/io/test_twistedreactor.py +++ b/tests/unit/io/test_twistedreactor.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,11 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest -from mock import Mock, patch +import unittest +from unittest.mock import Mock, patch from cassandra.connection import DefaultEndPoint @@ -69,7 +68,9 @@ def setUp(self): self.tr.protocol = self.obj_ut def tearDown(self): - pass + loop = twistedreactor.TwistedConnection._loop + if not loop._reactor_stopped(): + loop._cleanup() def test_makeConnection(self): """ @@ -148,12 +149,12 @@ def test_handle_read__incomplete(self): # incomplete header self.obj_ut._iobuf.write(b'\x84\x00\x00\x00\x00') self.obj_ut.handle_read() - self.assertEqual(self.obj_ut._iobuf.getvalue(), b'\x84\x00\x00\x00\x00') + self.assertEqual(self.obj_ut._io_buffer.cql_frame_buffer.getvalue(), b'\x84\x00\x00\x00\x00') # full header, but incomplete body self.obj_ut._iobuf.write(b'\x00\x00\x00\x15') self.obj_ut.handle_read() - self.assertEqual(self.obj_ut._iobuf.getvalue(), + self.assertEqual(self.obj_ut._io_buffer.cql_frame_buffer.getvalue(), b'\x84\x00\x00\x00\x00\x00\x00\x00\x15') self.assertEqual(self.obj_ut._current_frame.end_pos, 30) @@ -174,7 +175,7 @@ def test_handle_read__fullmessage(self): self.obj_ut._iobuf.write( b'\x84\x01\x00\x02\x03\x00\x00\x00\x15' + body + extra) self.obj_ut.handle_read() - self.assertEqual(self.obj_ut._iobuf.getvalue(), extra) + self.assertEqual(self.obj_ut._io_buffer.cql_frame_buffer.getvalue(), extra) self.obj_ut.process_msg.assert_called_with( _Frame(version=4, flags=1, stream=2, opcode=3, body_offset=9, end_pos=9 + len(body)), body) diff --git a/tests/unit/io/utils.py b/tests/unit/io/utils.py index 168a5e8b76..d4483d08c7 100644 --- a/tests/unit/io/utils.py +++ b/tests/unit/io/utils.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
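The buffer assertions rewritten in the twisted tests above (and in the io utils below) track the driver's internal refactor from a flat _iobuf to a composite buffer object: as these tests exercise it, raw bytes read off the socket and the CQL frame currently being assembled are inspected separately. A reading aid only, with a Mock standing in for a connection under test:

from io import BytesIO
from unittest.mock import Mock

conn = Mock()
conn._io_buffer.io_buffer = BytesIO(b'\x84\x00\x00\x00\x00')  # raw socket-side bytes
conn._io_buffer.cql_frame_buffer = BytesIO(b'')               # partial CQL frame staging area
assert conn._io_buffer.io_buffer.getvalue().startswith(b'\x84')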
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -26,9 +28,8 @@ import random from functools import wraps from itertools import cycle -import six -from six import binary_type, BytesIO -from mock import Mock +from io import BytesIO +from unittest.mock import Mock import errno import logging @@ -37,10 +38,7 @@ from socket import error as socket_error import ssl -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import time @@ -185,7 +183,7 @@ def test_timer_cancellation(self): timer = self.create_timer(timeout, callback.invoke) timer.cancel() # Release context allow for timer thread to run. - time.sleep(.2) + time.sleep(timeout * 2) timer_manager = self._timers # Assert that the cancellation was honored self.assertFalse(timer_manager._queue) @@ -205,7 +203,7 @@ def set_socket(self, connection, obj): return setattr(connection, self.socket_attr_name, obj) def make_header_prefix(self, message_class, version=2, stream_id=0): - return binary_type().join(map(uint8_pack, [ + return bytes().join(map(uint8_pack, [ 0xff & (HEADER_DIRECTION_TO_CLIENT | version), 0, # flags (compression) stream_id, @@ -233,7 +231,7 @@ def make_error_body(self, code, msg): write_string(buf, msg) return buf.getvalue() - def make_msg(self, header, body=binary_type()): + def make_msg(self, header, body=bytes()): return header + uint32_pack(len(body)) + body def test_successful_connection(self): @@ -292,7 +290,7 @@ def recv_side_effect(*args): c.process_io_buffer = Mock() def chunk(size): - return six.b('a') * size + return b'a' * size buf_size = c.in_buffer_size @@ -309,14 +307,14 @@ def chunk(size): for message, expected_size in messages: message_chunks = message - c._iobuf = io.BytesIO() + c._io_buffer._io_buffer = io.BytesIO() c.process_io_buffer.reset_mock() c.handle_read(*self.null_handle_function_args) - c._iobuf.seek(0, os.SEEK_END) + c._io_buffer.io_buffer.seek(0, os.SEEK_END) # Ensure the message size is the good one and that the # message has been processed if it is non-empty - self.assertEqual(c._iobuf.tell(), expected_size) + self.assertEqual(c._io_buffer.io_buffer.tell(), expected_size) if expected_size == 0: c.process_io_buffer.assert_not_called() else: @@ -435,11 +433,11 @@ def test_partial_header_read(self): self.get_socket(c).recv.return_value = message[0:1] c.handle_read(*self.null_handle_function_args) - self.assertEqual(c._iobuf.getvalue(), message[0:1]) + self.assertEqual(c._io_buffer.cql_frame_buffer.getvalue(), message[0:1]) self.get_socket(c).recv.return_value = message[1:] c.handle_read(*self.null_handle_function_args) - self.assertEqual(six.binary_type(), c._iobuf.getvalue()) + self.assertEqual(bytes(), c._io_buffer.io_buffer.getvalue()) # let it write out a StartupMessage c.handle_write(*self.null_handle_function_args) @@ -461,12 +459,12 @@ def test_partial_message_read(self): # read in the first nine bytes self.get_socket(c).recv.return_value = message[:9] c.handle_read(*self.null_handle_function_args) - self.assertEqual(c._iobuf.getvalue(), message[:9]) + self.assertEqual(c._io_buffer.cql_frame_buffer.getvalue(), message[:9]) # ... 
then read in the rest self.get_socket(c).recv.return_value = message[9:] c.handle_read(*self.null_handle_function_args) - self.assertEqual(six.binary_type(), c._iobuf.getvalue()) + self.assertEqual(bytes(), c._io_buffer.io_buffer.getvalue()) # let it write out a StartupMessage c.handle_write(*self.null_handle_function_args) @@ -501,8 +499,8 @@ def test_mixed_message_and_buffer_sizes(self): for i in range(1, 15): c.process_io_buffer.reset_mock() - c._iobuf = io.BytesIO() - message = io.BytesIO(six.b('a') * (2**i)) + c._io_buffer._io_buffer = io.BytesIO() + message = io.BytesIO(b'a' * (2**i)) def recv_side_effect(*args): if random.randint(1,10) % 3 == 0: @@ -511,7 +509,7 @@ def recv_side_effect(*args): self.get_socket(c).recv.side_effect = recv_side_effect c.handle_read(*self.null_handle_function_args) - if c._iobuf.tell(): + if c._io_buffer.io_buffer.tell(): c.process_io_buffer.assert_called_once() else: c.process_io_buffer.assert_not_called() diff --git a/tests/unit/test_auth.py b/tests/unit/test_auth.py index 7b4196f831..49607d4e48 100644 --- a/tests/unit/test_auth.py +++ b/tests/unit/test_auth.py @@ -1,11 +1,13 @@ # -*- coding: utf-8 -*- -# # Copyright DataStax, Inc. +# # Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,13 +15,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -import six from cassandra.auth import PlainTextAuthenticator -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest class TestPlainTextAuthenticator(unittest.TestCase): @@ -27,6 +25,6 @@ class TestPlainTextAuthenticator(unittest.TestCase): def test_evaluate_challenge_with_unicode_data(self): authenticator = PlainTextAuthenticator("johnӁ", "doeӁ") self.assertEqual( - authenticator.evaluate_challenge(six.ensure_binary('PLAIN-START')), - six.ensure_binary("\x00johnӁ\x00doeӁ") + authenticator.evaluate_challenge(b'PLAIN-START'), + "\x00johnӁ\x00doeӁ".encode('utf-8') ) diff --git a/tests/unit/test_cluster.py b/tests/unit/test_cluster.py index 249c0a17cc..69a65855a0 100644 --- a/tests/unit/test_cluster.py +++ b/tests/unit/test_cluster.py @@ -1,30 +1,31 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import logging -import six +import socket +import uuid -from mock import patch, Mock +from unittest.mock import patch, Mock from cassandra import ConsistencyLevel, DriverException, Timeout, Unavailable, RequestExecutionException, ReadTimeout, WriteTimeout, CoordinationFailure, ReadFailure, WriteFailure, FunctionFailure, AlreadyExists,\ InvalidRequest, Unauthorized, AuthenticationFailed, OperationTimedOut, UnsupportedOperation, RequestValidationException, ConfigurationException, ProtocolVersion from cassandra.cluster import _Scheduler, Session, Cluster, default_lbp_factory, \ ExecutionProfile, _ConfigMode, EXEC_PROFILE_DEFAULT +from cassandra.connection import SniEndPoint, SniEndPointFactory from cassandra.pool import Host from cassandra.policies import HostDistance, RetryPolicy, RoundRobinPolicy, DowngradingConsistencyRetryPolicy, SimpleConvictionPolicy from cassandra.query import SimpleStatement, named_tuple_factory, tuple_factory @@ -34,6 +35,7 @@ log = logging.getLogger(__name__) + class ExceptionTypeTest(unittest.TestCase): def test_exception_types(self): @@ -88,12 +90,19 @@ def test_exception_types(self): self.assertTrue(issubclass(UnsupportedOperation, DriverException)) +class MockOrderedPolicy(RoundRobinPolicy): + all_hosts = set() + + def make_query_plan(self, working_keyspace=None, query=None): + return sorted(self.all_hosts, key=lambda x: x.endpoint.ssl_options['server_hostname']) + class ClusterTest(unittest.TestCase): def test_tuple_for_contact_points(self): cluster = Cluster(contact_points=[('localhost', 9045), ('127.0.0.2', 9046), '127.0.0.3'], port=9999) + localhost_addr = set([addr[0] for addr in [t for (_,_,_,_,t) in socket.getaddrinfo("localhost",80)]]) for cp in cluster.endpoints_resolved: - if cp.address in ('::1', '127.0.0.1'): + if cp.address in localhost_addr: self.assertEqual(cp.port, 9045) elif cp.address == '127.0.0.2': self.assertEqual(cp.port, 9046) @@ -121,6 +130,26 @@ def test_requests_in_flight_threshold(self): for n in (0, mn, 128): self.assertRaises(ValueError, c.set_max_requests_per_connection, d, n) + # Validate that at least the default LBP can create a query plan with end points that resolve + # to different addresses initially. This may not be exactly how things play out in practice + # (the control connection will muck with this even if nothing else does) but it should be + # a pretty good approximation. 
+ def test_query_plan_for_sni_contains_unique_addresses(self): + node_cnt = 5 + def _mocked_proxy_dns_resolution(self): + return [(socket.AF_UNIX, socket.SOCK_STREAM, 0, None, ('127.0.0.%s' % (i,), 9042)) for i in range(node_cnt)] + + c = Cluster() + lbp = c.load_balancing_policy + lbp.local_dc = "dc1" + factory = SniEndPointFactory("proxy.foo.bar", 9042) + for host in (Host(factory.create({"host_id": uuid.uuid4().hex, "dc": "dc1"}), SimpleConvictionPolicy) for _ in range(node_cnt)): + lbp.on_up(host) + with patch.object(SniEndPoint, '_resolve_proxy_addresses', _mocked_proxy_dns_resolution): + addrs = [host.endpoint.resolve() for host in lbp.make_query_plan()] + # single SNI endpoint should be resolved to multiple unique IP addresses + self.assertEqual(len(addrs), len(set(addrs))) + class SchedulerTest(unittest.TestCase): # TODO: this suite could be expanded; for now just adding a test covering a ticket @@ -209,6 +238,8 @@ def test_protocol_downgrade_test(self): lower = ProtocolVersion.get_lower_supported(ProtocolVersion.DSE_V2) self.assertEqual(ProtocolVersion.DSE_V1, lower) lower = ProtocolVersion.get_lower_supported(ProtocolVersion.DSE_V1) + self.assertEqual(ProtocolVersion.V5,lower) + lower = ProtocolVersion.get_lower_supported(ProtocolVersion.V5) self.assertEqual(ProtocolVersion.V4,lower) lower = ProtocolVersion.get_lower_supported(ProtocolVersion.V4) self.assertEqual(ProtocolVersion.V3,lower) @@ -287,7 +318,7 @@ def test_default_profile(self): rf = session.execute_async("query", execution_profile='non-default') self._verify_response_future_profile(rf, non_default_profile) - for name, ep in six.iteritems(cluster.profile_manager.profiles): + for name, ep in cluster.profile_manager.profiles.items(): self.assertEqual(ep, session.get_execution_profile(name)) # invalid ep diff --git a/tests/unit/test_concurrent.py b/tests/unit/test_concurrent.py index cc6c12cdaa..18e8381185 100644 --- a/tests/unit/test_concurrent.py +++ b/tests/unit/test_concurrent.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,21 +15,18 @@ # limitations under the License. 
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from itertools import cycle -from mock import Mock +from unittest.mock import Mock import time import threading -from six.moves.queue import PriorityQueue +from queue import PriorityQueue import sys import platform from cassandra.cluster import Cluster, Session -from cassandra.concurrent import execute_concurrent, execute_concurrent_with_args +from cassandra.concurrent import execute_concurrent, execute_concurrent_with_args, execute_concurrent_async from cassandra.pool import Host from cassandra.policies import SimpleConvictionPolicy from tests.unit.utils import mock_session_pools @@ -43,7 +42,7 @@ class MockResponseResponseFuture(): _col_names = None _col_types = None - # a list pending callbacks, these will be prioritized in reverse or normal orderd + # a list of pending callbacks; these will be prioritized in reverse or normal order pending_callbacks = PriorityQueue() def __init__(self, reverse): @@ -182,7 +181,7 @@ def insert_and_validate_list_results(self, reverse, slowdown): This utility method will execute submit various statements for execution using the ConcurrentExecutorListResults, then invoke a separate thread to execute the callback associated with the futures registered for those statements. The parameters will toggle various timing, and ordering changes. - Finally it will validate that the results were returned in the order they were submitted + Finally, it will validate that the results were returned in the order they were submitted :param reverse: Execute the callbacks in the opposite order that they were submitted :param slowdown: Cause intermittent queries to perform slowly """ @@ -206,7 +205,7 @@ def insert_and_validate_list_generator(self, reverse, slowdown): This utility method will execute submit various statements for execution using the ConcurrentExecutorGenResults, then invoke a separate thread to execute the callback associated with the futures registered for those statements. The parameters will toggle various timing, and ordering changes. - Finally it will validate that the results were returned in the order they were submitted + Finally, it will validate that the results were returned in the order they were submitted :param reverse: Execute the callbacks in the opposite order that they were submitted :param slowdown: Cause intermittent queries to perform slowly """ @@ -235,13 +234,65 @@ def validate_result_ordering(self, results): self.assertTrue(success) current_time_added = list(result)[0] - #Windows clock granularity makes this equal most of the times + # Windows clock granularity makes this equal most of the time if "Windows" in platform.system(): self.assertLessEqual(last_time_added, current_time_added) else: self.assertLess(last_time_added, current_time_added) last_time_added = current_time_added + def insert_and_validate_list_async(self, reverse, slowdown): + """ + This utility method will submit various statements for execution using execute_concurrent_async, + then invoke a separate thread to execute the callback associated with the futures registered + for those statements. The parameters will toggle various timing, and ordering changes.
+ Finally, it will validate that the results were returned in the order they were submitted + :param reverse: Execute the callbacks in the opposite order that they were submitted + :param slowdown: Cause intermittent queries to perform slowly + """ + our_handler = MockResponseResponseFuture(reverse=reverse) + mock_session = Mock() + statements_and_params = zip(cycle(["INSERT INTO test3rf.test (k, v) VALUES (%s, 0)"]), + [(i, ) for i in range(100)]) + mock_session.execute_async.return_value = our_handler + + t = TimedCallableInvoker(our_handler, slowdown=slowdown) + t.start() + try: + future = execute_concurrent_async(mock_session, statements_and_params) + results = future.result() + self.validate_result_ordering(results) + finally: + t.stop() + + def test_results_ordering_async_forward(self): + """ + This tests the ordering of our execute_concurrent_async function + when queries complete in the order they were executed. + """ + self.insert_and_validate_list_async(False, False) + + def test_results_ordering_async_reverse(self): + """ + This tests the ordering of our execute_concurrent_async function + when queries complete in the reverse order they were executed. + """ + self.insert_and_validate_list_async(True, False) + + def test_results_ordering_async_forward_slowdown(self): + """ + This tests the ordering of our execute_concurrent_async function + when queries complete in the order they were executed, with slow queries mixed in. + """ + self.insert_and_validate_list_async(False, True) + + def test_results_ordering_async_reverse_slowdown(self): + """ + This tests the ordering of our execute_concurrent_async function + when queries complete in the reverse order they were executed, with slow queries mixed in. + """ + self.insert_and_validate_list_async(True, True) + @mock_session_pools def test_recursion_limited(self): """ diff --git a/tests/unit/test_connection.py b/tests/unit/test_connection.py index 68577a396e..3bca654c55 100644 --- a/tests/unit/test_connection.py +++ b/tests/unit/test_connection.py @@ -1,26 +1,23 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
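The new ordering tests above pin down the execute_concurrent_async contract: it returns a future whose result() yields per-statement results in submission order, even when individual queries complete out of order. Typical non-test usage might look like this (session and table names are assumed for illustration):

from cassandra.concurrent import execute_concurrent_async

def load_rows(session):
    # `session` is assumed to be an already-connected cassandra.cluster.Session
    statements_and_params = [
        ("INSERT INTO test3rf.test (k, v) VALUES (%s, 0)", (i,)) for i in range(100)
    ]
    future = execute_concurrent_async(session, statements_and_params)
    return future.result()  # blocks until all statements finish; submission order preserved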
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - -from mock import Mock, ANY, call, patch -import six -from six import BytesIO +import unittest +from io import BytesIO import time from threading import Lock +from unittest.mock import Mock, ANY, call, patch from cassandra import OperationTimedOut from cassandra.cluster import Cluster @@ -44,14 +41,14 @@ def make_connection(self): def make_header_prefix(self, message_class, version=Connection.protocol_version, stream_id=0): if Connection.protocol_version < 3: - return six.binary_type().join(map(uint8_pack, [ + return bytes().join(map(uint8_pack, [ 0xff & (HEADER_DIRECTION_TO_CLIENT | version), 0, # flags (compression) stream_id, message_class.opcode # opcode ])) else: - return six.binary_type().join(map(uint8_pack, [ + return bytes().join(map(uint8_pack, [ 0xff & (HEADER_DIRECTION_TO_CLIENT | version), 0, # flags (compression) 0, # MSB for v3+ stream @@ -100,7 +97,7 @@ def test_bad_protocol_version(self, *args): header = self.make_header_prefix(SupportedMessage, version=0x7f) options = self.make_options_body() message = self.make_msg(header, options) - c._iobuf = BytesIO() + c._iobuf._io_buffer = BytesIO() c._iobuf.write(message) c.process_io_buffer() @@ -117,7 +114,7 @@ def test_negative_body_length(self, *args): # read in a SupportedMessage response header = self.make_header_prefix(SupportedMessage) message = header + int32_pack(-13) - c._iobuf = BytesIO() + c._iobuf._io_buffer = BytesIO() c._iobuf.write(message) c.process_io_buffer() @@ -395,7 +392,7 @@ def send_msg(msg, req_id, msg_callback): connection.defunct.assert_has_calls([call(ANY)] * get_holders.call_count) exc = connection.defunct.call_args_list[0][0][0] self.assertIsInstance(exc, ConnectionException) - self.assertRegexpMatches(exc.args[0], r'^Received unexpected response to OptionsMessage.*') + self.assertRegex(exc.args[0], r'^Received unexpected response to OptionsMessage.*') holder.return_connection.assert_has_calls( [call(connection)] * get_holders.call_count) diff --git a/tests/unit/test_control_connection.py b/tests/unit/test_control_connection.py index 3e75a0af27..618bb42b1f 100644 --- a/tests/unit/test_control_connection.py +++ b/tests/unit/test_control_connection.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,15 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
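As a sketch of the v3+ frame-header layout that make_header_prefix above assembles byte by byte: the version byte carries the direction bit, and the stream id widens to 16 bits from protocol v3 on. The constants below are assumptions mirroring the test's usage; cassandra.protocol defines the canonical values.

from cassandra.marshal import uint8_pack

HEADER_DIRECTION_TO_CLIENT = 0x80  # assumed; the real constant lives in cassandra.protocol
version, stream_id, opcode = 3, 0, 0x06  # 0x06 = SUPPORTED opcode (assumed)

header = bytes().join(map(uint8_pack, [
    0xff & (HEADER_DIRECTION_TO_CLIENT | version),
    0,          # flags (compression)
    0,          # MSB of the 16-bit v3+ stream id
    stream_id,  # LSB
    opcode,
]))
assert len(header) == 5  # the 4-byte body length follows separately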
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - -import six +import unittest from concurrent.futures import ThreadPoolExecutor -from mock import Mock, ANY, call +from unittest.mock import Mock, ANY, call from cassandra import OperationTimedOut, SchemaTargetType, SchemaChangeType from cassandra.protocol import ResultMessage, RESULT_KIND_ROWS @@ -51,7 +48,7 @@ def __init__(self): def get_host(self, endpoint_or_address, port=None): if not isinstance(endpoint_or_address, EndPoint): - for host in six.itervalues(self.hosts): + for host in self.hosts.values(): if (host.address == endpoint_or_address and (port is None or host.broadcast_rpc_port is None or host.broadcast_rpc_port == port)): return host @@ -127,15 +124,15 @@ def __init__(self): ] self.peer_results = [ - ["rpc_address", "peer", "schema_version", "data_center", "rack", "tokens"], - [["192.168.1.1", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"]], - ["192.168.1.2", "10.0.0.2", "a", "dc1", "rack1", ["2", "102", "202"]]] + ["rpc_address", "peer", "schema_version", "data_center", "rack", "tokens", "host_id"], + [["192.168.1.1", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"], "uuid1"], + ["192.168.1.2", "10.0.0.2", "a", "dc1", "rack1", ["2", "102", "202"], "uuid2"]] ] self.peer_results_v2 = [ - ["native_address", "native_port", "peer", "peer_port", "schema_version", "data_center", "rack", "tokens"], - [["192.168.1.1", 9042, "10.0.0.1", 7042, "a", "dc1", "rack1", ["1", "101", "201"]], - ["192.168.1.2", 9042, "10.0.0.2", 7040, "a", "dc1", "rack1", ["2", "102", "202"]]] + ["native_address", "native_port", "peer", "peer_port", "schema_version", "data_center", "rack", "tokens", "host_id"], + [["192.168.1.1", 9042, "10.0.0.1", 7042, "a", "dc1", "rack1", ["1", "101", "201"], "uuid1"], + ["192.168.1.2", 9042, "10.0.0.2", 7040, "a", "dc1", "rack1", ["2", "102", "202"], "uuid2"]] ] self.wait_for_responses = Mock(return_value=_node_meta_results(self.local_results, self.peer_results)) @@ -155,18 +152,18 @@ def sleep(self, amount): class ControlConnectionTest(unittest.TestCase): _matching_schema_preloaded_results = _node_meta_results( - local_results=(["schema_version", "cluster_name", "data_center", "rack", "partitioner", "release_version", "tokens"], - [["a", "foocluster", "dc1", "rack1", "Murmur3Partitioner", "2.2.0", ["0", "100", "200"]]]), - peer_results=(["rpc_address", "peer", "schema_version", "data_center", "rack", "tokens"], - [["192.168.1.1", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"]], - ["192.168.1.2", "10.0.0.2", "a", "dc1", "rack1", ["2", "102", "202"]]])) + local_results=(["schema_version", "cluster_name", "data_center", "rack", "partitioner", "release_version", "tokens", "host_id"], + [["a", "foocluster", "dc1", "rack1", "Murmur3Partitioner", "2.2.0", ["0", "100", "200"], "uuid1"]]), + peer_results=(["rpc_address", "peer", "schema_version", "data_center", "rack", "tokens", "host_id"], + [["192.168.1.1", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"], "uuid2"], + ["192.168.1.2", "10.0.0.2", "a", "dc1", "rack1", ["2", "102", "202"], "uuid3"]])) _nonmatching_schema_preloaded_results = _node_meta_results( - local_results=(["schema_version", "cluster_name", "data_center", "rack", "partitioner", "release_version", "tokens"], - [["a", "foocluster", "dc1", "rack1", "Murmur3Partitioner", "2.2.0", ["0", "100", "200"]]]), - peer_results=(["rpc_address", "peer", "schema_version", "data_center", "rack", "tokens"], - [["192.168.1.1", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", 
"201"]], - ["192.168.1.2", "10.0.0.2", "b", "dc1", "rack1", ["2", "102", "202"]]])) + local_results=(["schema_version", "cluster_name", "data_center", "rack", "partitioner", "release_version", "tokens", "host_id"], + [["a", "foocluster", "dc1", "rack1", "Murmur3Partitioner", "2.2.0", ["0", "100", "200"], "uuid1"]]), + peer_results=(["rpc_address", "peer", "schema_version", "data_center", "rack", "tokens", "host_id"], + [["192.168.1.1", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"], "uuid2"], + ["192.168.1.2", "10.0.0.2", "b", "dc1", "rack1", ["2", "102", "202"], "uuid3"]])) def setUp(self): self.cluster = MockCluster() @@ -275,6 +272,40 @@ def test_refresh_nodes_and_tokens(self): self.assertEqual(self.connection.wait_for_responses.call_count, 1) + def test_refresh_nodes_and_tokens_with_invalid_peers(self): + def refresh_and_validate_added_hosts(): + self.connection.wait_for_responses = Mock(return_value=_node_meta_results( + self.connection.local_results, self.connection.peer_results)) + self.control_connection.refresh_node_list_and_token_map() + self.assertEqual(1, len(self.cluster.added_hosts)) # only one valid peer found + + # peersV1 + del self.connection.peer_results[:] + self.connection.peer_results.extend([ + ["rpc_address", "peer", "schema_version", "data_center", "rack", "tokens", "host_id"], + [["192.168.1.3", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"], 'uuid5'], + # all others are invalid + [None, None, "a", "dc1", "rack1", ["1", "101", "201"], 'uuid1'], + ["192.168.1.7", "10.0.0.1", "a", None, "rack1", ["1", "101", "201"], 'uuid2'], + ["192.168.1.6", "10.0.0.1", "a", "dc1", None, ["1", "101", "201"], 'uuid3'], + ["192.168.1.5", "10.0.0.1", "a", "dc1", "rack1", None, 'uuid4'], + ["192.168.1.4", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"], None]]]) + refresh_and_validate_added_hosts() + + # peersV2 + del self.cluster.added_hosts[:] + del self.connection.peer_results[:] + self.connection.peer_results.extend([ + ["native_address", "native_port", "peer", "peer_port", "schema_version", "data_center", "rack", "tokens", "host_id"], + [["192.168.1.4", 9042, "10.0.0.1", 7042, "a", "dc1", "rack1", ["1", "101", "201"], "uuid1"], + # all others are invalid + [None, 9042, None, 7040, "a", "dc1", "rack1", ["2", "102", "202"], "uuid2"], + ["192.168.1.5", 9042, "10.0.0.2", 7040, "a", None, "rack1", ["2", "102", "202"], "uuid2"], + ["192.168.1.5", 9042, "10.0.0.2", 7040, "a", "dc1", None, ["2", "102", "202"], "uuid2"], + ["192.168.1.5", 9042, "10.0.0.2", 7040, "a", "dc1", "rack1", None, "uuid2"], + ["192.168.1.5", 9042, "10.0.0.2", 7040, "a", "dc1", "rack1", ["2", "102", "202"], None]]]) + refresh_and_validate_added_hosts() + def test_refresh_nodes_and_tokens_uses_preloaded_results_if_given(self): """ refresh_nodes_and_tokens uses preloaded results if given for shared table queries @@ -311,7 +342,7 @@ def test_refresh_nodes_and_tokens_no_partitioner(self): def test_refresh_nodes_and_tokens_add_host(self): self.connection.peer_results[1].append( - ["192.168.1.3", "10.0.0.3", "a", "dc1", "rack1", ["3", "103", "203"]] + ["192.168.1.3", "10.0.0.3", "a", "dc1", "rack1", ["3", "103", "203"], "uuid3"] ) self.cluster.scheduler.schedule = lambda delay, f, *args, **kwargs: f(*args, **kwargs) self.control_connection.refresh_node_list_and_token_map() @@ -319,6 +350,7 @@ def test_refresh_nodes_and_tokens_add_host(self): self.assertEqual(self.cluster.added_hosts[0].address, "192.168.1.3") self.assertEqual(self.cluster.added_hosts[0].datacenter, "dc1") 
self.assertEqual(self.cluster.added_hosts[0].rack, "rack1") + self.assertEqual(self.cluster.added_hosts[0].host_id, "uuid3") def test_refresh_nodes_and_tokens_remove_host(self): del self.connection.peer_results[1][1] @@ -482,7 +514,7 @@ def test_refresh_nodes_and_tokens_add_host_detects_port(self): del self.connection.peer_results[:] self.connection.peer_results.extend(self.connection.peer_results_v2) self.connection.peer_results[1].append( - ["192.168.1.3", 555, "10.0.0.3", 666, "a", "dc1", "rack1", ["3", "103", "203"]] + ["192.168.1.3", 555, "10.0.0.3", 666, "a", "dc1", "rack1", ["3", "103", "203"], "uuid3"] ) self.connection.wait_for_responses = Mock(return_value=_node_meta_results( self.connection.local_results, self.connection.peer_results)) @@ -494,7 +526,7 @@ def test_refresh_nodes_and_tokens_add_host_detects_port(self): self.assertEqual(self.cluster.added_hosts[0].broadcast_rpc_address, "192.168.1.3") self.assertEqual(self.cluster.added_hosts[0].broadcast_rpc_port, 555) self.assertEqual(self.cluster.added_hosts[0].broadcast_address, "10.0.0.3") - self.assertEquals(self.cluster.added_hosts[0].broadcast_port, 666) + self.assertEqual(self.cluster.added_hosts[0].broadcast_port, 666) self.assertEqual(self.cluster.added_hosts[0].datacenter, "dc1") self.assertEqual(self.cluster.added_hosts[0].rack, "rack1") @@ -502,7 +534,7 @@ def test_refresh_nodes_and_tokens_add_host_detects_invalid_port(self): del self.connection.peer_results[:] self.connection.peer_results.extend(self.connection.peer_results_v2) self.connection.peer_results[1].append( - ["192.168.1.3", -1, "10.0.0.3", 0, "a", "dc1", "rack1", ["3", "103", "203"]] + ["192.168.1.3", -1, "10.0.0.3", 0, "a", "dc1", "rack1", ["3", "103", "203"], "uuid3"] ) self.connection.wait_for_responses = Mock(return_value=_node_meta_results( self.connection.local_results, self.connection.peer_results)) @@ -514,7 +546,7 @@ def test_refresh_nodes_and_tokens_add_host_detects_invalid_port(self): self.assertEqual(self.cluster.added_hosts[0].broadcast_rpc_address, "192.168.1.3") self.assertEqual(self.cluster.added_hosts[0].broadcast_rpc_port, None) self.assertEqual(self.cluster.added_hosts[0].broadcast_address, "10.0.0.3") - self.assertEquals(self.cluster.added_hosts[0].broadcast_port, None) + self.assertEqual(self.cluster.added_hosts[0].broadcast_port, None) self.assertEqual(self.cluster.added_hosts[0].datacenter, "dc1") self.assertEqual(self.cluster.added_hosts[0].rack, "rack1") diff --git a/tests/unit/test_endpoints.py b/tests/unit/test_endpoints.py index 2452e267ba..4352afb9a5 100644 --- a/tests/unit/test_endpoints.py +++ b/tests/unit/test_endpoints.py @@ -6,16 +6,13 @@ # You may obtain a copy of the License at # # http://www.datastax.com/terms/datastax-dse-driver-license-terms -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import itertools from cassandra.connection import DefaultEndPoint, SniEndPoint, SniEndPointFactory -from mock import patch +from unittest.mock import patch def socket_getaddrinfo(*args): @@ -68,3 +65,15 @@ def test_endpoint_resolve(self): for i in range(10): (address, _) = endpoint.resolve() self.assertEqual(address, next(it)) + + def test_sni_resolution_start_index(self): + factory = SniEndPointFactory("proxy.datastax.com", 9999) + initial_index = factory._init_index + + endpoint1 = factory.create_from_sni('sni1') + self.assertEqual(factory._init_index, initial_index + 1) + self.assertEqual(endpoint1._index, factory._init_index) + + endpoint2 = 
factory.create_from_sni('sni2') + self.assertEqual(factory._init_index, initial_index + 2) + self.assertEqual(endpoint2._index, factory._init_index) diff --git a/tests/unit/test_exception.py b/tests/unit/test_exception.py index 3a082f7363..4758970d9c 100644 --- a/tests/unit/test_exception.py +++ b/tests/unit/test_exception.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest +import unittest from cassandra import Unavailable, Timeout, ConsistencyLevel import re diff --git a/tests/unit/test_host_connection_pool.py b/tests/unit/test_host_connection_pool.py index e62488b400..d8b5ca976e 100644 --- a/tests/unit/test_host_connection_pool.py +++ b/tests/unit/test_host_connection_pool.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,13 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
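What test_sni_resolution_start_index above pins down, in sketch form: each endpoint created by the factory starts resolution at a different offset, so initial connections rotate across the proxy's resolved addresses instead of all targeting the first one. _init_index and _index are private driver state, shown only for illustration.

from cassandra.connection import SniEndPointFactory

factory = SniEndPointFactory("proxy.datastax.com", 9999)
e1 = factory.create_from_sni('sni1')
e2 = factory.create_from_sni('sni2')
# Staggered private start indexes spread DNS round-robin load.
assert e1._index != e2._index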
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - -from mock import Mock, NonCallableMagicMock +import unittest from threading import Thread, Event, Lock +from unittest.mock import Mock, NonCallableMagicMock from cassandra.cluster import Session from cassandra.connection import Connection @@ -26,8 +24,8 @@ from cassandra.pool import Host, NoConnectionsAvailable from cassandra.policies import HostDistance, SimpleConvictionPolicy - class _PoolTests(unittest.TestCase): + __test__ = False PoolImpl = None uses_single_connection = None @@ -45,7 +43,7 @@ def test_borrow_and_return(self): session.cluster.connection_factory.return_value = conn pool = self.PoolImpl(host, HostDistance.LOCAL, session) - session.cluster.connection_factory.assert_called_once_with(host.endpoint) + session.cluster.connection_factory.assert_called_once_with(host.endpoint, on_orphaned_stream_released=pool.on_orphaned_stream_released) c, request_id = pool.borrow_connection(timeout=0.01) self.assertIs(c, conn) @@ -64,7 +62,7 @@ def test_failed_wait_for_connection(self): session.cluster.connection_factory.return_value = conn pool = self.PoolImpl(host, HostDistance.LOCAL, session) - session.cluster.connection_factory.assert_called_once_with(host.endpoint) + session.cluster.connection_factory.assert_called_once_with(host.endpoint, on_orphaned_stream_released=pool.on_orphaned_stream_released) pool.borrow_connection(timeout=0.01) self.assertEqual(1, conn.in_flight) @@ -82,7 +80,7 @@ def test_successful_wait_for_connection(self): session.cluster.connection_factory.return_value = conn pool = self.PoolImpl(host, HostDistance.LOCAL, session) - session.cluster.connection_factory.assert_called_once_with(host.endpoint) + session.cluster.connection_factory.assert_called_once_with(host.endpoint, on_orphaned_stream_released=pool.on_orphaned_stream_released) pool.borrow_connection(timeout=0.01) self.assertEqual(1, conn.in_flight) @@ -110,7 +108,7 @@ def test_spawn_when_at_max(self): session.cluster.get_max_connections_per_host.return_value = 2 pool = self.PoolImpl(host, HostDistance.LOCAL, session) - session.cluster.connection_factory.assert_called_once_with(host.endpoint) + session.cluster.connection_factory.assert_called_once_with(host.endpoint, on_orphaned_stream_released=pool.on_orphaned_stream_released) pool.borrow_connection(timeout=0.01) self.assertEqual(1, conn.in_flight) @@ -133,7 +131,7 @@ def test_return_defunct_connection(self): session.cluster.connection_factory.return_value = conn pool = self.PoolImpl(host, HostDistance.LOCAL, session) - session.cluster.connection_factory.assert_called_once_with(host.endpoint) + session.cluster.connection_factory.assert_called_once_with(host.endpoint, on_orphaned_stream_released=pool.on_orphaned_stream_released) pool.borrow_connection(timeout=0.01) conn.is_defunct = True @@ -148,11 +146,12 @@ def test_return_defunct_connection_on_down_host(self): host = Mock(spec=Host, address='ip1') session = self.make_session() conn = NonCallableMagicMock(spec=Connection, in_flight=0, is_defunct=False, is_closed=False, - max_request_id=100, signaled_error=False) + max_request_id=100, signaled_error=False, + orphaned_threshold_reached=False) session.cluster.connection_factory.return_value = conn pool = self.PoolImpl(host, HostDistance.LOCAL, session) - session.cluster.connection_factory.assert_called_once_with(host.endpoint) + session.cluster.connection_factory.assert_called_once_with(host.endpoint, on_orphaned_stream_released=pool.on_orphaned_stream_released) 
pool.borrow_connection(timeout=0.01) conn.is_defunct = True @@ -169,11 +168,11 @@ def test_return_closed_connection(self): host = Mock(spec=Host, address='ip1') session = self.make_session() conn = NonCallableMagicMock(spec=Connection, in_flight=0, is_defunct=False, is_closed=True, max_request_id=100, - signaled_error=False) + signaled_error=False, orphaned_threshold_reached=False) session.cluster.connection_factory.return_value = conn pool = self.PoolImpl(host, HostDistance.LOCAL, session) - session.cluster.connection_factory.assert_called_once_with(host.endpoint) + session.cluster.connection_factory.assert_called_once_with(host.endpoint, on_orphaned_stream_released=pool.on_orphaned_stream_released) pool.borrow_connection(timeout=0.01) conn.is_closed = True @@ -208,6 +207,7 @@ def test_host_equality(self): class HostConnectionPoolTests(_PoolTests): + __test__ = True PoolImpl = HostConnectionPool uses_single_connection = False @@ -256,6 +256,7 @@ def get_conn(): class HostConnectionTests(_PoolTests): + __test__ = True PoolImpl = HostConnection uses_single_connection = True diff --git a/tests/unit/test_marshalling.py b/tests/unit/test_marshalling.py index c2363e0adc..9b44bb5ac2 100644 --- a/tests/unit/test_marshalling.py +++ b/tests/unit/test_marshalling.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +17,7 @@ from cassandra import ProtocolVersion -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import platform from datetime import datetime, date diff --git a/tests/unit/test_metadata.py b/tests/unit/test_metadata.py index b2143f8c20..76e47a4331 100644 --- a/tests/unit/test_metadata.py +++ b/tests/unit/test_metadata.py @@ -1,26 +1,24 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest  # noqa
+import unittest
 from binascii import unhexlify
 import logging
-from mock import Mock
+from unittest.mock import Mock
 import os
-import six
 import timeit

 import cassandra
@@ -488,11 +486,11 @@ def test_murmur3_c(self):
             raise unittest.SkipTest('The cmurmur3 extension is not available')

     def _verify_hash(self, fn):
-        self.assertEqual(fn(six.b('123')), -7468325962851647638)
+        self.assertEqual(fn(b'123'), -7468325962851647638)
         self.assertEqual(fn(b'\x00\xff\x10\xfa\x99' * 10), 5837342703291459765)
         self.assertEqual(fn(b'\xfe' * 8), -8927430733708461935)
         self.assertEqual(fn(b'\x10' * 8), 1446172840243228796)
-        self.assertEqual(fn(six.b(str(cassandra.metadata.MAX_LONG))), 7162290910810015547)
+        self.assertEqual(fn(str(cassandra.metadata.MAX_LONG).encode()), 7162290910810015547)


 class MD5TokensTest(unittest.TestCase):
@@ -507,28 +505,28 @@ def test_md5_tokens(self):

 class BytesTokensTest(unittest.TestCase):
     def test_bytes_tokens(self):
-        bytes_token = BytesToken(unhexlify(six.b('01')))
-        self.assertEqual(bytes_token.value, six.b('\x01'))
+        bytes_token = BytesToken(unhexlify(b'01'))
+        self.assertEqual(bytes_token.value, b'\x01')
         self.assertEqual(str(bytes_token), "<BytesToken: %s>" % bytes_token.value)
         self.assertEqual(bytes_token.hash_fn('123'), '123')
         self.assertEqual(bytes_token.hash_fn(123), 123)
         self.assertEqual(bytes_token.hash_fn(str(cassandra.metadata.MAX_LONG)), str(cassandra.metadata.MAX_LONG))

     def test_from_string(self):
-        from_unicode = BytesToken.from_string(six.text_type('0123456789abcdef'))
-        from_bin = BytesToken.from_string(six.b('0123456789abcdef'))
+        from_unicode = BytesToken.from_string('0123456789abcdef')
+        from_bin = BytesToken.from_string(b'0123456789abcdef')
         self.assertEqual(from_unicode, from_bin)
-        self.assertIsInstance(from_unicode.value, six.binary_type)
-        self.assertIsInstance(from_bin.value, six.binary_type)
+        self.assertIsInstance(from_unicode.value, bytes)
+        self.assertIsInstance(from_bin.value, bytes)

     def test_comparison(self):
-        tok = BytesToken.from_string(six.text_type('0123456789abcdef'))
+        tok = BytesToken.from_string('0123456789abcdef')
         token_high_order = uint16_unpack(tok.value[0:2])
         self.assertLess(BytesToken(uint16_pack(token_high_order - 1)), tok)
         self.assertGreater(BytesToken(uint16_pack(token_high_order + 1)), tok)

     def test_comparison_unicode(self):
-        value = six.b('\'_-()"\xc2\xac')
+        value = b'\'_-()"\xc2\xac'
         t0 = BytesToken(value)
         t1 = BytesToken.from_string('00')
         self.assertGreater(t0, t1)
@@ -645,7 +643,7 @@ class UnicodeIdentifiersTests(unittest.TestCase):
     Looking for encoding errors like PYTHON-447
     """

-    name = six.text_type(b'\'_-()"\xc2\xac'.decode('utf-8'))
+    name = b'\'_-()"\xc2\xac'.decode('utf-8')

     def test_keyspace_name(self):
         km = KeyspaceMetadata(self.name, False, 'SimpleStrategy', {'replication_factor': 1})
diff --git a/tests/unit/test_orderedmap.py b/tests/unit/test_orderedmap.py
index f2baab40f0..a26994dd7b 100644
--- a/tests/unit/test_orderedmap.py
+++ 
b/tests/unit/test_orderedmap.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,14 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.util import OrderedMap, OrderedMapSerializedKey from cassandra.cqltypes import EMPTY, UTF8Type, lookup_casstype -import six class OrderedMapTest(unittest.TestCase): def test_init(self): @@ -121,11 +119,11 @@ def test_iter(self): itr = iter(om) self.assertEqual(sum([1 for _ in itr]), len(keys)) - self.assertRaises(StopIteration, six.next, itr) + self.assertRaises(StopIteration, next, itr) self.assertEqual(list(iter(om)), keys) - self.assertEqual(list(six.iteritems(om)), items) - self.assertEqual(list(six.itervalues(om)), values) + self.assertEqual(list(om.items()), items) + self.assertEqual(list(om.values()), values) def test_len(self): self.assertEqual(len(OrderedMap()), 0) diff --git a/tests/unit/test_parameter_binding.py b/tests/unit/test_parameter_binding.py index 228f3f4432..fd44728c25 100644 --- a/tests/unit/test_parameter_binding.py +++ b/tests/unit/test_parameter_binding.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
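The six.next/six.iteritems/six.itervalues calls dropped above reduce to plain Python 3 built-ins; a quick sketch with cassandra.util.OrderedMap showing the equivalences the updated test_iter relies on.

from cassandra.util import OrderedMap

om = OrderedMap(zip(['one', 'two', 'three'], [1, 2, 3]))
assert list(om) == ['one', 'two', 'three']  # keys, in insertion order
assert list(om.items()) == [('one', 1), ('two', 2), ('three', 3)]
assert list(om.values()) == [1, 2, 3]
assert next(iter(om)) == 'one'  # builtin next() replaces six.next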
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.encoder import Encoder from cassandra.protocol import ColumnMetadata @@ -24,9 +23,6 @@ from cassandra.cqltypes import Int32Type from cassandra.util import OrderedDict -from six.moves import xrange -import six - class ParamBindingTest(unittest.TestCase): @@ -43,7 +39,7 @@ def test_sequence_param(self): self.assertEqual(result, "(1, 'a', 2.0)") def test_generator_param(self): - result = bind_params("%s", ((i for i in xrange(3)),), Encoder()) + result = bind_params("%s", ((i for i in range(3)),), Encoder()) self.assertEqual(result, "[0, 1, 2]") def test_none_param(self): @@ -152,7 +148,7 @@ def test_missing_value(self): def test_extra_value(self): self.bound.bind({'rk0': 0, 'rk1': 0, 'ck0': 0, 'v0': 0, 'should_not_be_here': 123}) # okay to have extra keys in dict - self.assertEqual(self.bound.values, [six.b('\x00') * 4] * 4) # four encoded zeros + self.assertEqual(self.bound.values, [b'\x00' * 4] * 4) # four encoded zeros self.assertRaises(ValueError, self.bound.bind, (0, 0, 0, 0, 123)) def test_values_none(self): diff --git a/tests/unit/test_policies.py b/tests/unit/test_policies.py index 5c0c11281b..792268cd7f 100644 --- a/tests/unit/test_policies.py +++ b/tests/unit/test_policies.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,22 +14,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
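A sketch of the generator binding exercised by test_generator_param above: a generator passed as a single statement parameter is drained and encoded as a CQL list literal. The import location of bind_params (cassandra.query) is assumed from the full test module.

from cassandra.encoder import Encoder
from cassandra.query import bind_params  # assumed import location

assert bind_params("%s", ((i for i in range(3)),), Encoder()) == "[0, 1, 2]"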
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from itertools import islice, cycle -from mock import Mock, patch, call +from unittest.mock import Mock, patch, call from random import randint -import six -from six.moves._thread import LockType +from _thread import LockType import sys import struct from threading import Thread from cassandra import ConsistencyLevel from cassandra.cluster import Cluster +from cassandra.connection import DefaultEndPoint from cassandra.metadata import Metadata from cassandra.policies import (RoundRobinPolicy, WhiteListRoundRobinPolicy, DCAwareRoundRobinPolicy, TokenAwarePolicy, SimpleConvictionPolicy, @@ -37,11 +36,8 @@ LoadBalancingPolicy, ConvictionPolicy, ReconnectionPolicy, FallthroughRetryPolicy, IdentityTranslator, EC2MultiRegionTranslator, HostFilterPolicy) from cassandra.pool import Host -from cassandra.connection import DefaultEndPoint from cassandra.query import Statement -from six.moves import xrange - class LoadBalancingPolicyTest(unittest.TestCase): def test_non_implemented(self): @@ -78,7 +74,7 @@ def test_multiple_query_plans(self): hosts = [0, 1, 2, 3] policy = RoundRobinPolicy() policy.populate(None, hosts) - for i in xrange(20): + for i in range(20): qplan = list(policy.make_query_plan()) self.assertEqual(sorted(qplan), hosts) @@ -124,17 +120,17 @@ def test_thread_safety_during_modification(self): def check_query_plan(): try: - for i in xrange(100): + for i in range(100): list(policy.make_query_plan()) except Exception as exc: errors.append(exc) def host_up(): - for i in xrange(1000): + for i in range(1000): policy.on_up(randint(0, 99)) def host_down(): - for i in xrange(1000): + for i in range(1000): policy.on_down(randint(0, 99)) threads = [] @@ -145,7 +141,7 @@ def host_down(): # make the GIL switch after every instruction, maximizing # the chance of race conditions - check = six.PY2 or '__pypy__' in sys.builtin_module_names + check = '__pypy__' in sys.builtin_module_names if check: original_interval = sys.getcheckinterval() else: @@ -208,7 +204,7 @@ def test_with_remotes(self): local_hosts = set(h for h in hosts if h.datacenter == "dc1") remote_hosts = set(h for h in hosts if h.datacenter != "dc1") - # allow all of the remote hosts to be used + # allow all the remote hosts to be used policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=2) policy.populate(Mock(), hosts) qplan = list(policy.make_query_plan()) @@ -309,7 +305,7 @@ def test_modification_during_generation(self): policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=3) policy.populate(Mock(), hosts) - # The general concept here is to change thee internal state of the + # The general concept here is to change the internal state of the # policy during plan generation. In this case we use a grey-box # approach that changes specific things during known phases of the # generator. 
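The thread-safety test above keeps the legacy sys.getcheckinterval() dance only for PyPy; on CPython 3 the equivalent race-amplifying setup uses the switch-interval API, sketched here under that assumption.

import sys

original = sys.getswitchinterval()
sys.setswitchinterval(1e-6)  # request a GIL handoff as often as possible
try:
    pass  # hammer policy.make_query_plan() from several threads here
finally:
    sys.setswitchinterval(original)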
@@ -1202,7 +1198,7 @@ def test_write_timeout(self): query=None, consistency=ONE, write_type=write_type, required_responses=1, received_responses=2, retry_num=0) self.assertEqual(retry, RetryPolicy.IGNORE) - # retrhow if we can't be sure we have a replica + # rethrow if we can't be sure we have a replica retry, consistency = policy.on_write_timeout( query=None, consistency=ONE, write_type=write_type, required_responses=1, received_responses=0, retry_num=0) @@ -1298,10 +1294,13 @@ def test_init_kwargs(self): )) def test_immutable_predicate(self): - expected_message_regex = "can't set attribute" + if sys.version_info >= (3, 11): + expected_message_regex = "has no setter" + else: + expected_message_regex = "can't set attribute" hfp = HostFilterPolicy(child_policy=Mock(name='child_policy'), predicate=Mock(name='predicate')) - with self.assertRaisesRegexp(AttributeError, expected_message_regex): + with self.assertRaisesRegex(AttributeError, expected_message_regex): hfp.predicate = object() @@ -1499,4 +1498,3 @@ def test_create_whitelist(self): # Only the filtered replicas should be allowed self.assertEqual(set(query_plan), {Host(DefaultEndPoint("127.0.0.1"), SimpleConvictionPolicy), Host(DefaultEndPoint("127.0.0.4"), SimpleConvictionPolicy)}) - diff --git a/tests/unit/test_protocol.py b/tests/unit/test_protocol.py index b43b21eeff..08516eba9e 100644 --- a/tests/unit/test_protocol.py +++ b/tests/unit/test_protocol.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,12 +14,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest -from mock import Mock +from unittest.mock import Mock from cassandra import ProtocolVersion, UnsupportedOperation from cassandra.protocol import ( @@ -98,7 +97,7 @@ def _check_calls(self, io, expected): def test_continuous_paging(self): """ - Test to check continuous paging throws an Exception if it's not supported and the correct valuesa + Test to check continuous paging throws an Exception if it's not supported and the correct values are written to the buffer if the option is enabled. 
@since DSE 2.0b3 GRAPH 1.0b1 @@ -175,7 +174,7 @@ def test_keyspace_flag_raises_before_v5(self): keyspace_message = QueryMessage('a', consistency_level=3, keyspace='ks') io = Mock(name='io') - with self.assertRaisesRegexp(UnsupportedOperation, 'Keyspaces.*set'): + with self.assertRaisesRegex(UnsupportedOperation, 'Keyspaces.*set'): keyspace_message.send_body(io, protocol_version=4) io.assert_not_called() diff --git a/tests/unit/test_query.py b/tests/unit/test_query.py index 7c2bfc0d14..2e87da389b 100644 --- a/tests/unit/test_query.py +++ b/tests/unit/test_query.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,12 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - -import six +import unittest from cassandra.query import BatchStatement, SimpleStatement @@ -28,7 +25,7 @@ class BatchStatementTest(unittest.TestCase): def test_clear(self): keyspace = 'keyspace' routing_key = 'routing_key' - custom_payload = {'key': six.b('value')} + custom_payload = {'key': b'value'} ss = SimpleStatement('whatever', keyspace=keyspace, routing_key=routing_key, custom_payload=custom_payload) diff --git a/tests/unit/test_response_future.py b/tests/unit/test_response_future.py index 98d2156079..f9d32780de 100644 --- a/tests/unit/test_response_future.py +++ b/tests/unit/test_response_future.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,12 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
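In sketch form, the guard that test_keyspace_flag_raises_before_v5 above verifies: setting a per-query keyspace is a protocol v5+ feature, and send_body refuses to serialize it for older versions before anything is written. Arguments mirror the test's; this is an illustration, not a standalone recipe.

from unittest.mock import Mock
from cassandra import UnsupportedOperation
from cassandra.protocol import QueryMessage

msg = QueryMessage('a', consistency_level=3, keyspace='ks')
io = Mock(name='io')
try:
    msg.send_body(io, protocol_version=4)
    raise AssertionError("expected UnsupportedOperation")
except UnsupportedOperation:
    io.assert_not_called()  # the guard fired before any bytes were written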
-try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest -from mock import Mock, MagicMock, ANY +from collections import deque +from threading import RLock +from unittest.mock import Mock, MagicMock, ANY from cassandra import ConsistencyLevel, Unavailable, SchemaTargetType, SchemaChangeType, OperationTimedOut from cassandra.cluster import Session, ResponseFuture, NoHostAvailable, ProtocolVersion @@ -159,7 +160,7 @@ def test_heartbeat_defunct_deadlock(self): # Simulate ResponseFuture timing out rf._on_timeout() - self.assertRaisesRegexp(OperationTimedOut, "Connection defunct by heartbeat", rf.result) + self.assertRaisesRegex(OperationTimedOut, "Connection defunct by heartbeat", rf.result) def test_read_timeout_error_message(self): session = self.make_session() @@ -604,3 +605,29 @@ def test_repeat_orig_query_after_succesful_reprepare(self): rf._query = Mock(return_value=True) rf._execute_after_prepare('host', None, None, response) rf._query.assert_called_once_with('host') + + def test_timeout_does_not_release_stream_id(self): + """ + Make sure that stream ID is not reused immediately after client-side + timeout. Otherwise, a new request could reuse the stream ID and would + risk getting a response for the old, timed out query. + """ + session = self.make_basic_session() + session.cluster._default_load_balancing_policy.make_query_plan.return_value = [Mock(endpoint='ip1'), Mock(endpoint='ip2')] + pool = self.make_pool() + session._pools.get.return_value = pool + connection = Mock(spec=Connection, lock=RLock(), _requests={}, request_ids=deque(), + orphaned_request_ids=set(), orphaned_threshold=256) + pool.borrow_connection.return_value = (connection, 1) + + rf = self.make_response_future(session) + rf.send_request() + + connection._requests[1] = (connection._handle_options_response, ProtocolHandler.decode_message, []) + + rf._on_timeout() + pool.return_connection.assert_called_once_with(connection, stream_was_orphaned=True) + self.assertRaisesRegex(OperationTimedOut, "Client request timeout", rf.result) + + assert len(connection.request_ids) == 0, \ + "Request IDs should be empty but it's not: {}".format(connection.request_ids) diff --git a/tests/unit/test_resultset.py b/tests/unit/test_resultset.py index 1af3e849b6..340169d198 100644 --- a/tests/unit/test_resultset.py +++ b/tests/unit/test_resultset.py @@ -1,26 +1,24 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from cassandra.query import named_tuple_factory, dict_factory, tuple_factory - -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa -from mock import Mock, PropertyMock, patch +import unittest +from unittest.mock import Mock, PropertyMock, patch from cassandra.cluster import ResultSet +from cassandra.query import named_tuple_factory, dict_factory, tuple_factory class ResultSetTests(unittest.TestCase): @@ -41,6 +39,19 @@ def test_iter_paged(self): type(response_future).has_more_pages = PropertyMock(side_effect=(True, True, False)) # after init to avoid side effects being consumed by init self.assertListEqual(list(itr), expected) + def test_iter_paged_with_empty_pages(self): + expected = list(range(10)) + response_future = Mock(has_more_pages=True, _continuous_paging_session=None) + response_future.result.side_effect = [ + ResultSet(Mock(), []), + ResultSet(Mock(), [0, 1, 2, 3, 4]), + ResultSet(Mock(), []), + ResultSet(Mock(), [5, 6, 7, 8, 9]), + ] + rs = ResultSet(response_future, []) + itr = iter(rs) + self.assertListEqual(list(itr), expected) + def test_list_non_paged(self): # list access on RS for backwards-compatibility expected = list(range(10)) @@ -111,7 +122,7 @@ def test_index_list_mode(self): # index access before iteration causes list to be materialized self.assertEqual(rs[0], expected[0]) - # resusable iteration + # reusable iteration self.assertListEqual(list(rs), expected) self.assertListEqual(list(rs), expected) @@ -126,7 +137,7 @@ def test_index_list_mode(self): # index access before iteration causes list to be materialized self.assertEqual(rs[0], expected[0]) self.assertEqual(rs[9], expected[9]) - # resusable iteration + # reusable iteration self.assertListEqual(list(rs), expected) self.assertListEqual(list(rs), expected) diff --git a/tests/unit/test_row_factories.py b/tests/unit/test_row_factories.py index 13049ba034..0055497a54 100644 --- a/tests/unit/test_row_factories.py +++ b/tests/unit/test_row_factories.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -20,10 +22,7 @@ import sys -try: - from unittest import TestCase -except ImportError: - from unittest2 import TestCase +from unittest import TestCase log = logging.getLogger(__name__) diff --git a/tests/unit/test_segment.py b/tests/unit/test_segment.py new file mode 100644 index 0000000000..e94bcf9809 --- /dev/null +++ b/tests/unit/test_segment.py @@ -0,0 +1,216 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +from io import BytesIO + +from cassandra import DriverException +from cassandra.segment import Segment, CrcException +from cassandra.connection import segment_codec_no_compression, segment_codec_lz4 + + +def to_bits(b): + return '{:08b}'.format(b) + + +class SegmentCodecTest(unittest.TestCase): + + small_msg = b'b' * 50 + max_msg = b'b' * Segment.MAX_PAYLOAD_LENGTH + large_msg = b'b' * (Segment.MAX_PAYLOAD_LENGTH + 1) + + @staticmethod + def _header_to_bits(data): + # unpack a header to bits + # data should be the little endian bytes sequence + if len(data) > 6: # compressed + data = data[:5] + bits = ''.join([to_bits(b) for b in reversed(data)]) + # return the compressed payload length, the uncompressed payload length, + # the self-contained flag and the padding as bits + return bits[23:40] + bits[6:23] + bits[5:6] + bits[:5] + else: # uncompressed + data = data[:3] + bits = ''.join([to_bits(b) for b in reversed(data)]) + # return the payload length, the self-contained flag and + # the padding as bits + return bits[7:24] + bits[6:7] + bits[:6] + + def test_encode_uncompressed_header(self): + buffer = BytesIO() + segment_codec_no_compression.encode_header(buffer, len(self.small_msg), -1, True) + self.assertEqual(buffer.tell(), 6) + self.assertEqual( + self._header_to_bits(buffer.getvalue()), + "00000000000110010" + "1" + "000000") + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_encode_compressed_header(self): + buffer = BytesIO() + compressed_length = len(segment_codec_lz4.compress(self.small_msg)) + segment_codec_lz4.encode_header(buffer, compressed_length, len(self.small_msg), True) + + self.assertEqual(buffer.tell(), 8) + self.assertEqual( + self._header_to_bits(buffer.getvalue()), + "{:017b}".format(compressed_length) + "00000000000110010" + "1" + "00000") + + def test_encode_uncompressed_header_with_max_payload(self): + buffer = BytesIO() + segment_codec_no_compression.encode_header(buffer, len(self.max_msg), -1, True) + self.assertEqual(buffer.tell(), 6) + self.assertEqual( + self._header_to_bits(buffer.getvalue()), + "11111111111111111" + "1" + "000000") + + def test_encode_header_fails_if_payload_too_big(self): + buffer = BytesIO() + for codec in [c for c in [segment_codec_no_compression, segment_codec_lz4] if c is not None]: + with self.assertRaises(DriverException): + codec.encode_header(buffer, len(self.large_msg), -1, False) + + def test_encode_uncompressed_header_not_self_contained_msg(self): + buffer = BytesIO() + # simulate the first chunk with the max size + segment_codec_no_compression.encode_header(buffer, len(self.max_msg), -1, False) + self.assertEqual(buffer.tell(), 6) + self.assertEqual( + self._header_to_bits(buffer.getvalue()), + ("11111111111111111" + "0" # not self-contained + "000000")) + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_encode_compressed_header_with_max_payload(self): + buffer = 
BytesIO() + compressed_length = len(segment_codec_lz4.compress(self.max_msg)) + segment_codec_lz4.encode_header(buffer, compressed_length, len(self.max_msg), True) + self.assertEqual(buffer.tell(), 8) + self.assertEqual( + self._header_to_bits(buffer.getvalue()), + "{:017b}".format(compressed_length) + "11111111111111111" + "1" + "00000") + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_encode_compressed_header_not_self_contained_msg(self): + buffer = BytesIO() + # simulate the first chunk with the max size + compressed_length = len(segment_codec_lz4.compress(self.max_msg)) + segment_codec_lz4.encode_header(buffer, compressed_length, len(self.max_msg), False) + self.assertEqual(buffer.tell(), 8) + self.assertEqual( + self._header_to_bits(buffer.getvalue()), + ("{:017b}".format(compressed_length) + + "11111111111111111" + "0" # not self-contained + "00000")) + + def test_decode_uncompressed_header(self): + buffer = BytesIO() + segment_codec_no_compression.encode_header(buffer, len(self.small_msg), -1, True) + buffer.seek(0) + header = segment_codec_no_compression.decode_header(buffer) + self.assertEqual(header.uncompressed_payload_length, -1) + self.assertEqual(header.payload_length, len(self.small_msg)) + self.assertEqual(header.is_self_contained, True) + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_decode_compressed_header(self): + buffer = BytesIO() + compressed_length = len(segment_codec_lz4.compress(self.small_msg)) + segment_codec_lz4.encode_header(buffer, compressed_length, len(self.small_msg), True) + buffer.seek(0) + header = segment_codec_lz4.decode_header(buffer) + self.assertEqual(header.uncompressed_payload_length, len(self.small_msg)) + self.assertEqual(header.payload_length, compressed_length) + self.assertEqual(header.is_self_contained, True) + + def test_decode_header_fails_if_corrupted(self): + buffer = BytesIO() + segment_codec_no_compression.encode_header(buffer, len(self.small_msg), -1, True) + # corrupt one byte + buffer.seek(buffer.tell()-1) + buffer.write(b'0') + buffer.seek(0) + + with self.assertRaises(CrcException): + segment_codec_no_compression.decode_header(buffer) + + def test_decode_uncompressed_self_contained_segment(self): + buffer = BytesIO() + segment_codec_no_compression.encode(buffer, self.small_msg) + + buffer.seek(0) + header = segment_codec_no_compression.decode_header(buffer) + segment = segment_codec_no_compression.decode(buffer, header) + + self.assertEqual(header.is_self_contained, True) + self.assertEqual(header.uncompressed_payload_length, -1) + self.assertEqual(header.payload_length, len(self.small_msg)) + self.assertEqual(segment.payload, self.small_msg) + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_decode_compressed_self_contained_segment(self): + buffer = BytesIO() + segment_codec_lz4.encode(buffer, self.small_msg) + + buffer.seek(0) + header = segment_codec_lz4.decode_header(buffer) + segment = segment_codec_lz4.decode(buffer, header) + + self.assertEqual(header.is_self_contained, True) + self.assertEqual(header.uncompressed_payload_length, len(self.small_msg)) + self.assertGreater(header.uncompressed_payload_length, header.payload_length) + self.assertEqual(segment.payload, self.small_msg) + + def test_decode_multi_segments(self): + buffer = BytesIO() + segment_codec_no_compression.encode(buffer, self.large_msg) + + buffer.seek(0) + # We should have 2 segments to read + headers = [] + segments = [] + 
headers.append(segment_codec_no_compression.decode_header(buffer)) + segments.append(segment_codec_no_compression.decode(buffer, headers[0])) + headers.append(segment_codec_no_compression.decode_header(buffer)) + segments.append(segment_codec_no_compression.decode(buffer, headers[1])) + + self.assertTrue(all([h.is_self_contained is False for h in headers])) + decoded_msg = segments[0].payload + segments[1].payload + self.assertEqual(decoded_msg, self.large_msg) + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_decode_fails_if_corrupted(self): + buffer = BytesIO() + segment_codec_lz4.encode(buffer, self.small_msg) + buffer.seek(buffer.tell()-1) + buffer.write(b'0') + buffer.seek(0) + header = segment_codec_lz4.decode_header(buffer) + with self.assertRaises(CrcException): + segment_codec_lz4.decode(buffer, header) + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_decode_tiny_msg_not_compressed(self): + buffer = BytesIO() + segment_codec_lz4.encode(buffer, b'b') + buffer.seek(0) + header = segment_codec_lz4.decode_header(buffer) + segment = segment_codec_lz4.decode(buffer, header) + self.assertEqual(header.uncompressed_payload_length, 0) + self.assertEqual(header.payload_length, 1) + self.assertEqual(segment.payload, b'b') diff --git a/tests/unit/test_sortedset.py b/tests/unit/test_sortedset.py index 3845c2c31c..875485f824 100644 --- a/tests/unit/test_sortedset.py +++ b/tests/unit/test_sortedset.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra.util import sortedset from cassandra.cqltypes import EMPTY diff --git a/tests/unit/test_time_util.py b/tests/unit/test_time_util.py index 7025f151d6..be5c984907 100644 --- a/tests/unit/test_time_util.py +++ b/tests/unit/test_time_util.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,10 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest from cassandra import marshal from cassandra import util diff --git a/tests/unit/test_timestamps.py b/tests/unit/test_timestamps.py index 8903fbc99b..676cb6442a 100644 --- a/tests/unit/test_timestamps.py +++ b/tests/unit/test_timestamps.py @@ -1,10 +1,12 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,12 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa - -import mock +import unittest +from unittest import mock from cassandra import timestamps from threading import Thread, Lock @@ -108,10 +106,7 @@ def assertLastCallArgRegex(self, call, pattern): last_warn_args, last_warn_kwargs = call self.assertEqual(len(last_warn_args), 1) self.assertEqual(len(last_warn_kwargs), 0) - self.assertRegexpMatches( - last_warn_args[0], - pattern, - ) + self.assertRegex(last_warn_args[0], pattern) def test_basic_log_content(self): """ diff --git a/tests/unit/test_types.py b/tests/unit/test_types.py index 562fd2c899..ba01538b2a 100644 --- a/tests/unit/test_types.py +++ b/tests/unit/test_types.py @@ -1,28 +1,26 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. -try: - import unittest2 as unittest -except ImportError: - import unittest # noqa +import unittest import datetime import tempfile import time +import uuid from binascii import unhexlify -import six - import cassandra from cassandra import util from cassandra.cqltypes import ( @@ -30,7 +28,8 @@ EmptyValue, LongType, SetType, UTF8Type, cql_typename, int8_pack, int64_pack, lookup_casstype, lookup_casstype_simple, parse_casstype_args, - int32_pack, Int32Type, ListType, MapType + int32_pack, Int32Type, ListType, MapType, VectorType, + FloatType ) from cassandra.encoder import cql_quote from cassandra.pool import Host @@ -169,7 +168,7 @@ def __init__(self, subtypes, names): @classmethod def apply_parameters(cls, subtypes, names): - return cls(subtypes, [unhexlify(six.b(name)) if name is not None else name for name in names]) + return cls(subtypes, [unhexlify(name.encode()) if name is not None else name for name in names]) class BarType(FooType): typename = 'org.apache.cassandra.db.marshal.BarType' @@ -193,12 +192,28 @@ class BarType(FooType): self.assertEqual(UTF8Type, ctype.subtypes[2]) self.assertEqual([b'city', None, b'zip'], ctype.names) + def test_parse_casstype_vector(self): + ctype = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.FloatType, 3)") + self.assertTrue(issubclass(ctype, VectorType)) + self.assertEqual(3, ctype.vector_size) + self.assertEqual(FloatType, ctype.subtype) + + def test_parse_casstype_vector_of_vectors(self): + inner_type = "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.FloatType, 4)" + ctype = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(%s, 3)" % (inner_type)) + self.assertTrue(issubclass(ctype, VectorType)) + self.assertEqual(3, ctype.vector_size) + sub_ctype = ctype.subtype + self.assertTrue(issubclass(sub_ctype, VectorType)) + self.assertEqual(4, sub_ctype.vector_size) + self.assertEqual(FloatType, sub_ctype.subtype) + def test_empty_value(self): self.assertEqual(str(EmptyValue()), 'EMPTY') def test_datetype(self): now_time_seconds = time.time() - now_datetime = datetime.datetime.utcfromtimestamp(now_time_seconds) + now_datetime = datetime.datetime.fromtimestamp(now_time_seconds, tz=datetime.timezone.utc) # Cassandra timestamps in millis now_timestamp = now_time_seconds * 1e3 @@ -209,23 +224,23 @@ def test_datetype(self): # deserialize # epoc expected = 0 - self.assertEqual(DateType.deserialize(int64_pack(1000 * expected), 0), datetime.datetime.utcfromtimestamp(expected)) + self.assertEqual(DateType.deserialize(int64_pack(1000 * expected), 0), datetime.datetime.fromtimestamp(expected, tz=datetime.timezone.utc).replace(tzinfo=None)) # beyond 32b expected = 2 ** 33 - self.assertEqual(DateType.deserialize(int64_pack(1000 * expected), 0), datetime.datetime(2242, 3, 16, 12, 56, 32)) + self.assertEqual(DateType.deserialize(int64_pack(1000 * expected), 0), datetime.datetime(2242, 3, 16, 12, 56, 32, tzinfo=datetime.timezone.utc).replace(tzinfo=None)) # less than epoc (PYTHON-119) expected = -770172256 - self.assertEqual(DateType.deserialize(int64_pack(1000 * expected), 0), datetime.datetime(1945, 8, 5, 23, 15, 44)) + self.assertEqual(DateType.deserialize(int64_pack(1000 * expected), 0), datetime.datetime(1945, 8, 5, 23, 15, 44, tzinfo=datetime.timezone.utc).replace(tzinfo=None)) # work around rounding difference among Python versions (PYTHON-230) expected = 1424817268.274 
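+        # (fromtimestamp(..., tz=timezone.utc).replace(tzinfo=None) is the forward-compatible
+        # spelling of the deprecated utcfromtimestamp(); DateType.deserialize() still returns
+        # naive UTC datetimes, so tzinfo is dropped after the conversion.)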
- self.assertEqual(DateType.deserialize(int64_pack(int(1000 * expected)), 0), datetime.datetime(2015, 2, 24, 22, 34, 28, 274000)) + self.assertEqual(DateType.deserialize(int64_pack(int(1000 * expected)), 0), datetime.datetime(2015, 2, 24, 22, 34, 28, 274000, tzinfo=datetime.timezone.utc).replace(tzinfo=None)) # Large date overflow (PYTHON-452) expected = 2177403010.123 - self.assertEqual(DateType.deserialize(int64_pack(int(1000 * expected)), 0), datetime.datetime(2038, 12, 31, 10, 10, 10, 123000)) + self.assertEqual(DateType.deserialize(int64_pack(int(1000 * expected)), 0), datetime.datetime(2038, 12, 31, 10, 10, 10, 123000, tzinfo=datetime.timezone.utc).replace(tzinfo=None)) def test_collection_null_support(self): """ @@ -307,6 +322,203 @@ def test_cql_quote(self): self.assertEqual(cql_quote(0), '0') +class VectorTests(unittest.TestCase): + def _normalize_set(self, val): + if isinstance(val, set) or isinstance(val, util.SortedSet): + return frozenset([self._normalize_set(v) for v in val]) + return val + + def _round_trip_compare_fn(self, first, second): + if isinstance(first, float): + self.assertAlmostEqual(first, second, places=5) + elif isinstance(first, list): + self.assertEqual(len(first), len(second)) + for (felem, selem) in zip(first, second): + self._round_trip_compare_fn(felem, selem) + elif isinstance(first, set) or isinstance(first, frozenset): + self.assertEqual(len(first), len(second)) + first_norm = self._normalize_set(first) + second_norm = self._normalize_set(second) + self.assertEqual(first_norm, second_norm) + elif isinstance(first, dict): + for ((fk,fv), (sk,sv)) in zip(first.items(), second.items()): + self._round_trip_compare_fn(fk, sk) + self._round_trip_compare_fn(fv, sv) + else: + self.assertEqual(first,second) + + def _round_trip_test(self, data, ctype_str): + ctype = parse_casstype_args(ctype_str) + data_bytes = ctype.serialize(data, 0) + serialized_size = ctype.subtype.serial_size() + if serialized_size: + self.assertEqual(serialized_size * len(data), len(data_bytes)) + result = ctype.deserialize(data_bytes, 0) + self.assertEqual(len(data), len(result)) + for idx in range(0,len(data)): + self._round_trip_compare_fn(data[idx], result[idx]) + + def test_round_trip_basic_types_with_fixed_serialized_size(self): + self._round_trip_test([True, False, False, True], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.BooleanType, 4)") + self._round_trip_test([3.4, 2.9, 41.6, 12.0], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.FloatType, 4)") + self._round_trip_test([3.4, 2.9, 41.6, 12.0], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.DoubleType, 4)") + self._round_trip_test([3, 2, 41, 12], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.LongType, 4)") + self._round_trip_test([3, 2, 41, 12], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.Int32Type, 4)") + self._round_trip_test([uuid.uuid1(), uuid.uuid1(), uuid.uuid1(), uuid.uuid1()], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.TimeUUIDType, 4)") + self._round_trip_test([3, 2, 41, 12], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.ShortType, 4)") + + def test_round_trip_basic_types_without_fixed_serialized_size(self): + # Varints + self._round_trip_test([3, 2, 41, 12], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.IntegerType, 4)") + # ASCII text + 
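+        # (AsciiType has no fixed serial_size(), so the codec stores each element with its
+        # own length prefix rather than relying on a constant element width)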
self._round_trip_test(["abc", "def", "ghi", "jkl"], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.AsciiType, 4)") + # UTF8 text + self._round_trip_test(["abc", "def", "ghi", "jkl"], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.UTF8Type, 4)") + # Time is something of a weird one. By rights, it should be a fixed size type but C* code marks it as variable + # size. We're forced to follow the C* code base (since that's who'll be providing the data we're parsing) so + # we match what they're doing. + self._round_trip_test([datetime.time(1,1,1), datetime.time(2,2,2), datetime.time(3,3,3)], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.TimeType, 3)") + # Duration (contains varints) + self._round_trip_test([util.Duration(1,1,1), util.Duration(2,2,2), util.Duration(3,3,3)], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.DurationType, 3)") + + def test_round_trip_collection_types(self): + # List (subtype of fixed size) + self._round_trip_test([[1, 2, 3, 4], [5, 6], [7, 8, 9, 10], [11, 12]], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.ListType \ + (org.apache.cassandra.db.marshal.Int32Type), 4)") + # Set (subtype of fixed size) + self._round_trip_test([set([1, 2, 3, 4]), set([5, 6]), set([7, 8, 9, 10]), set([11, 12])], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.SetType \ + (org.apache.cassandra.db.marshal.Int32Type), 4)") + # Map (subtype of fixed size) + self._round_trip_test([{1:1.2}, {2:3.4}, {3:5.6}, {4:7.8}], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.MapType \ + (org.apache.cassandra.db.marshal.Int32Type,org.apache.cassandra.db.marshal.FloatType), 4)") + # List (subtype without fixed size) + self._round_trip_test([["one","two"], ["three","four"], ["five","six"], ["seven","eight"]], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.ListType \ + (org.apache.cassandra.db.marshal.AsciiType), 4)") + # Set (subtype without fixed size) + self._round_trip_test([set(["one","two"]), set(["three","four"]), set(["five","six"]), set(["seven","eight"])], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.SetType \ + (org.apache.cassandra.db.marshal.AsciiType), 4)") + # Map (subtype without fixed size) + self._round_trip_test([{1:"one"}, {2:"two"}, {3:"three"}, {4:"four"}], \ + "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.MapType \ + (org.apache.cassandra.db.marshal.IntegerType,org.apache.cassandra.db.marshal.AsciiType), 4)") + # List of lists (subtype without fixed size) + data = [[["one","two"],["three"]], [["four"],["five"]], [["six","seven","eight"]], [["nine"]]] + ctype = "org.apache.cassandra.db.marshal.VectorType\ + (org.apache.cassandra.db.marshal.ListType\ + (org.apache.cassandra.db.marshal.ListType\ + (org.apache.cassandra.db.marshal.AsciiType)), 4)" + self._round_trip_test(data, ctype) + # Set of sets (subtype without fixed size) + data = [set([frozenset(["one","two"]),frozenset(["three"])]),\ + set([frozenset(["four"]),frozenset(["five"])]),\ + set([frozenset(["six","seven","eight"])]), + set([frozenset(["nine"])])] + ctype = "org.apache.cassandra.db.marshal.VectorType\ + (org.apache.cassandra.db.marshal.SetType\ + (org.apache.cassandra.db.marshal.SetType\ + (org.apache.cassandra.db.marshal.AsciiType)), 4)" + self._round_trip_test(data, ctype) + # Map of maps (subtype 
without fixed size)
+        data = [{100:{1:"one",2:"two",3:"three"}},\
+                {200:{4:"four",5:"five"}},\
+                {300:{}},\
+                {400:{6:"six"}}]
+        ctype = "org.apache.cassandra.db.marshal.VectorType\
+                (org.apache.cassandra.db.marshal.MapType\
+                (org.apache.cassandra.db.marshal.Int32Type,\
+                org.apache.cassandra.db.marshal.MapType \
+                (org.apache.cassandra.db.marshal.IntegerType,org.apache.cassandra.db.marshal.AsciiType)), 4)"
+        self._round_trip_test(data, ctype)
+
+    def test_round_trip_vector_of_vectors(self):
+        # Subtypes of subtypes with a fixed size
+        self._round_trip_test([[1.2, 3.4], [5.6, 7.8], [9.10, 11.12], [13.14, 15.16]], \
+            "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.VectorType \
+            (org.apache.cassandra.db.marshal.FloatType,2), 4)")
+
+        # Subtypes of subtypes without a fixed size
+        self._round_trip_test([["one", "two"], ["three", "four"], ["five", "six"], ["seven", "eight"]], \
+            "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.VectorType \
+            (org.apache.cassandra.db.marshal.AsciiType,2), 4)")
+
+    # parse_casstype_args() is tested above... we're explicitly concerned about cql_parameterized_type() output here
+    def test_cql_parameterized_type(self):
+        # Base vector functionality
+        ctype = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.FloatType, 4)")
+        self.assertEqual(ctype.cql_parameterized_type(), "org.apache.cassandra.db.marshal.VectorType<org.apache.cassandra.db.marshal.FloatType, 4>")
+
+        # Test vector-of-vectors
+        inner_type = "org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.FloatType, 4)"
+        ctype = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(%s, 3)" % (inner_type))
+        inner_parsed_type = "org.apache.cassandra.db.marshal.VectorType<org.apache.cassandra.db.marshal.FloatType, 4>"
+        self.assertEqual(ctype.cql_parameterized_type(), "org.apache.cassandra.db.marshal.VectorType<%s, 3>" % (inner_parsed_type))
+
+    def test_serialization_fixed_size_too_small(self):
+        ctype = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.FloatType, 5)")
+        with self.assertRaisesRegex(ValueError, "Expected sequence of size 5 for vector of type float and dimension 5, observed sequence of length 4"):
+            ctype.serialize([1.2, 3.4, 5.6, 7.8], 0)
+
+    def test_serialization_fixed_size_too_big(self):
+        ctype = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.FloatType, 4)")
+        with self.assertRaisesRegex(ValueError, "Expected sequence of size 4 for vector of type float and dimension 4, observed sequence of length 5"):
+            ctype.serialize([1.2, 3.4, 5.6, 7.8, 9.10], 0)
+
+    def test_serialization_variable_size_too_small(self):
+        ctype = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.IntegerType, 5)")
+        with self.assertRaisesRegex(ValueError, "Expected sequence of size 5 for vector of type varint and dimension 5, observed sequence of length 4"):
+            ctype.serialize([1, 2, 3, 4], 0)
+
+    def test_serialization_variable_size_too_big(self):
+        ctype = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.IntegerType, 4)")
+        with self.assertRaisesRegex(ValueError, "Expected sequence of size 4 for vector of type varint and dimension 4, observed sequence of length 5"):
+            ctype.serialize([1, 2, 3, 4, 5], 0)
+
+    def test_deserialization_fixed_size_too_small(self):
+        ctype_four = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.FloatType, 4)")
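+        # four floats serialize to 16 bytes; the dimension-5 codec below expects 20 bytes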
ctype_four_bytes = ctype_four.serialize([1.2, 3.4, 5.6, 7.8], 0) + ctype_five = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.FloatType, 5)") + with self.assertRaisesRegex(ValueError, "Expected vector of type float and dimension 5 to have serialized size 20; observed serialized size of 16 instead"): + ctype_five.deserialize(ctype_four_bytes, 0) + + def test_deserialization_fixed_size_too_big(self): + ctype_five = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.FloatType, 5)") + ctype_five_bytes = ctype_five.serialize([1.2, 3.4, 5.6, 7.8, 9.10], 0) + ctype_four = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.FloatType, 4)") + with self.assertRaisesRegex(ValueError, "Expected vector of type float and dimension 4 to have serialized size 16; observed serialized size of 20 instead"): + ctype_four.deserialize(ctype_five_bytes, 0) + + def test_deserialization_variable_size_too_small(self): + ctype_four = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.IntegerType, 4)") + ctype_four_bytes = ctype_four.serialize([1, 2, 3, 4], 0) + ctype_five = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.IntegerType, 5)") + with self.assertRaisesRegex(ValueError, "Error reading additional data during vector deserialization after successfully adding 4 elements"): + ctype_five.deserialize(ctype_four_bytes, 0) + + def test_deserialization_variable_size_too_big(self): + ctype_five = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.IntegerType, 5)") + ctype_five_bytes = ctype_five.serialize([1, 2, 3, 4, 5], 0) + ctype_four = parse_casstype_args("org.apache.cassandra.db.marshal.VectorType(org.apache.cassandra.db.marshal.IntegerType, 4)") + with self.assertRaisesRegex(ValueError, "Additional bytes remaining after vector deserialization completed"): + ctype_four.deserialize(ctype_five_bytes, 0) + + ZERO = datetime.timedelta(0) @@ -539,8 +751,8 @@ class no_bounds_object(object): self.assertRaises(ValueError, DateRangeType.serialize, no_bounds_object, 5) def test_serialized_value_round_trip(self): - vals = [six.b('\x01\x00\x00\x01%\xe9a\xf9\xd1\x06\x00\x00\x01v\xbb>o\xff\x00'), - six.b('\x01\x00\x00\x00\xdcm\x03-\xd1\x06\x00\x00\x01v\xbb>o\xff\x00')] + vals = [b'\x01\x00\x00\x01%\xe9a\xf9\xd1\x06\x00\x00\x01v\xbb>o\xff\x00', + b'\x01\x00\x00\x00\xdcm\x03-\xd1\x06\x00\x00\x01v\xbb>o\xff\x00'] for serialized in vals: self.assertEqual( serialized, @@ -704,7 +916,7 @@ def test_deserialize_date_range_day(self): 999, lambda original_value, i: original_value + i * 900 * 50 * 60 * 24) - @unittest.skip("This is currently failig, see PYTHON-912") + @unittest.skip("This is currently failing, see PYTHON-912") def test_deserialize_date_range_month(self): """ Test rounding from DateRange for months @@ -721,7 +933,7 @@ def get_upper_bound(seconds): but with the microseconds set to 999999, seconds to 59, minutes to 59, hours to 23 and days 28, 29, 30 or 31 depending on the month. The way to do this is to add one month and leave the date at YEAR-MONTH-01 00:00:00 000000. - Then substract one millisecond. + Then subtract one millisecond. 
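+            For example, a timestamp in 2015-06 rounds up to 2015-07-01 00:00:00.000000
+            minus one millisecond, i.e. 2015-06-30 23:59:59.999000.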
""" dt = datetime.datetime.fromtimestamp(seconds / 1000.0, tz=utc_timezone) dt = dt + datetime.timedelta(days=32) @@ -748,7 +960,7 @@ def get_upper_bound(seconds): but with the microseconds set to 999999, seconds to 59, minutes to 59, hours to 23 days 28, 29, 30 or 31 depending on the month and months to 12. The way to do this is to add one year and leave the date at YEAR-01-01 00:00:00 000000. - Then substract one millisecond. + Then subtract one millisecond. """ dt = datetime.datetime.fromtimestamp(seconds / 1000.0, tz=utc_timezone) dt = dt + datetime.timedelta(days=370) @@ -770,14 +982,14 @@ def _deserialize_date_range(self, truncate_kwargs, precision, lower_value upper_value which are given as a value that represents seconds since the epoch. We want to make sure the lower_value is correctly rounded down and the upper value is correctly rounded up. In the case of rounding down we verify that the rounded down value - has the appropriate fields set to the minimum they could possible have. That is + has the appropriate fields set to the minimum they could possibly have. That is 1 for months, 1 for days, 0 for hours, 0 for minutes, 0 for seconds, 0 for microseconds. We use the generic function truncate_date which depends on truncate_kwargs for this In the case of rounding up we verify that the rounded up value has the appropriate fields set - to the maximum they could possible have. This is calculated by round_up_truncated_upper_value + to the maximum they could possibly have. This is calculated by round_up_truncated_upper_value which input is the truncated value from before. It is passed as an argument as the way - of calculating this is is different for every precision. + of calculating this is different for every precision. :param truncate_kwargs: determine what values to truncate in truncate_date :param precision: :class:`~util.DateRangePrecision` diff --git a/tests/unit/test_util_types.py b/tests/unit/test_util_types.py index b7dc837249..779d416923 100644 --- a/tests/unit/test_util_types.py +++ b/tests/unit/test_util_types.py @@ -1,20 +1,19 @@ -# Copyright DataStax, Inc. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest  # noqa
+import unittest
 
 import datetime
 
@@ -54,7 +53,7 @@ def test_limits(self):
         max_builtin = Date(datetime.date(9999, 12, 31))
         self.assertEqual(Date(min_builtin.days_from_epoch), min_builtin)
         self.assertEqual(Date(max_builtin.days_from_epoch), max_builtin)
-        # just proving we can construct with on offset outside buildin range
+        # just proving we can construct with an offset outside builtin range
         self.assertEqual(Date(min_builtin.days_from_epoch - 1).days_from_epoch,
                          min_builtin.days_from_epoch - 1)
         self.assertEqual(Date(max_builtin.days_from_epoch + 1).days_from_epoch,
@@ -194,7 +193,7 @@ def test_equality(self):
         second = Duration(1000, 10000, 2345345)
         self.assertEqual(first, second)
 
-        first = Duration(12, 0 , 100)
+        first = Duration(12, 0, 100)
         second = Duration(nanoseconds=100, months=12)
         self.assertEqual(first, second)
 
diff --git a/tests/unit/utils.py b/tests/unit/utils.py
index 3c9cc34e22..fc3ce4b481 100644
--- a/tests/unit/utils.py
+++ b/tests/unit/utils.py
@@ -1,10 +1,12 @@
-# Copyright DataStax, Inc.
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
 #
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,10 +14,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from concurrent.futures import Future
 from functools import wraps
+from unittest.mock import patch
+
+from concurrent.futures import Future
 
 from cassandra.cluster import Session
-from mock import patch
 
 def mock_session_pools(f):
diff --git a/tests/util.py b/tests/util.py
index 5c7ac2416f..d44d6c91c8 100644
--- a/tests/util.py
+++ b/tests/util.py
@@ -1,10 +1,12 @@
-# Copyright DataStax, Inc.
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
 #
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -40,7 +42,7 @@ def wait_until_not_raised(condition, delay, max_attempts):
     doesn't raise an exception and the amount of attempts < maxAttempts.
:param condition: a function :param delay: the delay in second - :param max_attempts: the maximum number of attemps. So the timeout + :param max_attempts: the maximum number of attempts. So the timeout of this function will be delay*max_attempts """ def wrapped_condition(): diff --git a/tox.ini b/tox.ini index fd50a6c1d6..e77835f0da 100644 --- a/tox.ini +++ b/tox.ini @@ -1,17 +1,18 @@ [tox] -envlist = py{27,34,35,36,37,38},pypy +envlist = py{39,310,311,312,313},pypy [base] -deps = nose - mock<=1.0.1 - six +deps = pytest packaging - cython + cython>=3.0 eventlet - twisted <15.5.0 + gevent + twisted[tls] pure-sasl kerberos futurist + lz4 + cryptography>=42.0 [testenv] deps = {[base]deps} @@ -20,28 +21,26 @@ setenv = LIBEV_EMBED=0 CARES_EMBED=0 LC_ALL=en_US.UTF-8 changedir = {envtmpdir} -commands = nosetests --verbosity=2 --no-path-adjustment {toxinidir}/tests/unit/ +commands = pytest -v {toxinidir}/tests/unit/ [testenv:gevent_loop] deps = {[base]deps} - gevent>=1.4,<1.5 setenv = LIBEV_EMBED=0 CARES_EMBED=0 EVENT_LOOP_MANAGER=gevent changedir = {envtmpdir} commands = - nosetests --verbosity=2 --no-path-adjustment {toxinidir}/tests/unit/io/test_geventreactor.py + pytest -v {toxinidir}/tests/unit/io/test_geventreactor.py [testenv:eventlet_loop] deps = {[base]deps} - gevent>=1.4,<1.5 setenv = LIBEV_EMBED=0 CARES_EMBED=0 EVENT_LOOP_MANAGER=eventlet changedir = {envtmpdir} commands = - nosetests --verbosity=2 --no-path-adjustment {toxinidir}/tests/unit/io/test_eventletreactor.py + pytest -v {toxinidir}/tests/unit/io/test_eventletreactor.py
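
A quick way to sanity-check the new VectorType codec outside the test suite is a round
trip through the same cqltypes entry points the tests above use. This is a minimal
sketch, not part of the patch: it assumes the driver from this branch is installed, and
it only uses parse_casstype_args()/serialize()/deserialize() as VectorTests does (the
trailing 0 is the protocol-version argument the tests pass).

    from cassandra.cqltypes import parse_casstype_args

    # a vector<float, 3>; FloatType has a fixed 4-byte serialized size
    ctype = parse_casstype_args(
        "org.apache.cassandra.db.marshal.VectorType("
        "org.apache.cassandra.db.marshal.FloatType, 3)")

    data = [1.0, 2.5, -3.25]
    blob = ctype.serialize(data, 0)   # 3 elements x 4 bytes = 12 bytes
    assert len(blob) == 12
    out = ctype.deserialize(blob, 0)
    assert all(abs(a - b) < 1e-6 for a, b in zip(data, out))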