diff -Nru python-cassandra-driver-3.24.0/build.yaml.bak python-cassandra-driver-3.25.0/build.yaml.bak --- python-cassandra-driver-3.24.0/build.yaml.bak 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/build.yaml.bak 2021-03-18 14:48:29.000000000 +0000 @@ -8,7 +8,7 @@ EVENT_LOOP_MANAGER='libev' matrix: exclude: - - python: [3.4, 3.6, 3.7, 3.8] + - python: [3.6, 3.7, 3.8] - cassandra: ['2.1', '3.0', '4.0', 'test-dse'] commit_long_test: @@ -20,7 +20,7 @@ EVENT_LOOP_MANAGER='libev' matrix: exclude: - - python: [3.4, 3.6, 3.7, 3.8] + - python: [3.6, 3.7, 3.8] - cassandra: ['2.1', '3.0', 'test-dse'] commit_branches: @@ -33,7 +33,7 @@ EXCLUDE_LONG=1 matrix: exclude: - - python: [3.4, 3.6, 3.7, 3.8] + - python: [3.6, 3.7, 3.8] - cassandra: ['2.1', '3.0', 'test-dse'] commit_branches_dev: @@ -46,7 +46,7 @@ EXCLUDE_LONG=1 matrix: exclude: - - python: [2.7, 3.4, 3.7, 3.6, 3.8] + - python: [2.7, 3.7, 3.6, 3.8] - cassandra: ['2.0', '2.1', '2.2', '3.0', '4.0', 'test-dse', 'dse-4.8', 'dse-5.0', 'dse-6.0', 'dse-6.8'] release_test: @@ -77,9 +77,6 @@ env_vars: | EVENT_LOOP_MANAGER='gevent' JUST_EVENT_LOOP=1 - matrix: - exclude: - - python: [3.4] weekly_eventlet: schedule: 0 18 * * 6 @@ -89,9 +86,6 @@ env_vars: | EVENT_LOOP_MANAGER='eventlet' JUST_EVENT_LOOP=1 - matrix: - exclude: - - python: [3.4] weekly_asyncio: schedule: 0 22 * * 6 @@ -113,9 +107,6 @@ env_vars: | EVENT_LOOP_MANAGER='asyncore' JUST_EVENT_LOOP=1 - matrix: - exclude: - - python: [3.4] weekly_twister: schedule: 0 14 * * 7 @@ -125,9 +116,6 @@ env_vars: | EVENT_LOOP_MANAGER='twisted' JUST_EVENT_LOOP=1 - matrix: - exclude: - - python: [3.4] upgrade_tests: schedule: adhoc @@ -138,12 +126,11 @@ JUST_UPGRADE=True matrix: exclude: - - python: [3.4, 3.6, 3.7, 3.8] + - python: [3.6, 3.7, 3.8] - cassandra: ['2.0', '2.1', '2.2', '3.0', '4.0', 'test-dse'] python: - 2.7 - - 3.4 - 3.5 - 3.6 - 3.7 @@ -186,8 +173,6 @@ pip install 
git+ssh://git@github.com/riptano/ccm-private.git@cassandra-7544-native-ports-with-dse-fix - # Remove this pyyaml installation when removing Python 3.4 support - pip install PyYAML==5.2 #pip install $HOME/ccm if [ -n "$CCM_IS_DSE" ]; then diff -Nru python-cassandra-driver-3.24.0/cassandra/cluster.py python-cassandra-driver-3.25.0/cassandra/cluster.py --- python-cassandra-driver-3.24.0/cassandra/cluster.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/cassandra/cluster.py 2021-03-18 14:48:29.000000000 +0000 @@ -63,7 +63,7 @@ BatchMessage, RESULT_KIND_PREPARED, RESULT_KIND_SET_KEYSPACE, RESULT_KIND_ROWS, RESULT_KIND_SCHEMA_CHANGE, ProtocolHandler, - RESULT_KIND_VOID) + RESULT_KIND_VOID, ProtocolException) from cassandra.metadata import Metadata, protect_name, murmur3, _NodeInfo from cassandra.policies import (TokenAwarePolicy, DCAwareRoundRobinPolicy, SimpleConvictionPolicy, ExponentialReconnectionPolicy, HostDistance, @@ -785,7 +785,7 @@ By default, a ``ca_certs`` value should be supplied (the value should be a string pointing to the location of the CA certs file), and you probably - want to specify ``ssl_version`` as ``ssl.PROTOCOL_TLSv1`` to match + want to specify ``ssl_version`` as ``ssl.PROTOCOL_TLS`` to match Cassandra's default protocol. .. versionchanged:: 3.3.0 @@ -1570,7 +1570,7 @@ If :attr:`~.Cluster.protocol_version` is set to 3 or higher, this is not supported (there is always one connection per host, unless the host is remote and :attr:`connect_to_remote_hosts` is :const:`False`) - and using this will result in an :exc:`~.UnsupporteOperation`. + and using this will result in an :exc:`~.UnsupportedOperation`. 
""" if self.protocol_version >= 3: raise UnsupportedOperation( @@ -1603,7 +1603,7 @@ If :attr:`~.Cluster.protocol_version` is set to 3 or higher, this is not supported (there is always one connection per host, unless the host is remote and :attr:`connect_to_remote_hosts` is :const:`False`) - and using this will result in an :exc:`~.UnsupporteOperation`. + and using this will result in an :exc:`~.UnsupportedOperation`. """ if self.protocol_version >= 3: raise UnsupportedOperation( @@ -3548,6 +3548,14 @@ break except ProtocolVersionUnsupported as e: self._cluster.protocol_downgrade(host.endpoint, e.startup_version) + except ProtocolException as e: + # protocol v5 is out of beta in C* >=4.0-beta5 and is now the default driver + # protocol version. If the protocol version was not explicitly specified, + # and that the server raises a beta protocol error, we should downgrade. + if not self._cluster._protocol_version_explicit and e.is_beta_protocol_error: + self._cluster.protocol_downgrade(host.endpoint, self._cluster.protocol_version) + else: + raise log.debug("[control connection] Established new connection %r, " "registering watchers and refreshing schema and topology", @@ -3788,12 +3796,14 @@ # any new nodes, so we need this additional check. (See PYTHON-90) should_rebuild_token_map = force_token_rebuild or self._cluster.metadata.partitioner is None for row in peers_result: + if not self._is_valid_peer(row): + log.warning( + "Found an invalid row for peer (%s). Ignoring host." % + _NodeInfo.get_broadcast_rpc_address(row)) + continue + endpoint = self._cluster.endpoint_factory.create(row) - tokens = row.get("tokens", None) - if 'tokens' in row and not tokens: # it was selected, but empty - log.warning("Excluding host (%s) with no tokens in system.peers table of %s." % (endpoint, connection.endpoint)) - continue if endpoint in found_hosts: log.warning("Found multiple hosts with the same endpoint (%s). 
Excluding peer %s", endpoint, row.get("peer")) continue @@ -3820,6 +3830,7 @@ host.dse_workload = row.get("workload") host.dse_workloads = row.get("workloads") + tokens = row.get("tokens", None) if partitioner and tokens and self._token_meta_enabled: token_map[host] = tokens @@ -3834,6 +3845,12 @@ log.debug("[control connection] Rebuilding token map due to topology changes") self._cluster.metadata.rebuild_token_map(partitioner, token_map) + @staticmethod + def _is_valid_peer(row): + return bool(_NodeInfo.get_broadcast_rpc_address(row) and row.get("host_id") and + row.get("data_center") and row.get("rack") and + ('tokens' not in row or row.get('tokens'))) + def _update_location_info(self, host, datacenter, rack): if host.datacenter == datacenter and host.rack == rack: return False diff -Nru python-cassandra-driver-3.24.0/cassandra/connection.py python-cassandra-driver-3.25.0/cassandra/connection.py --- python-cassandra-driver-3.24.0/cassandra/connection.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/cassandra/connection.py 2021-03-18 14:48:29.000000000 +0000 @@ -27,6 +27,8 @@ from threading import Thread, Event, RLock, Condition import time import ssl +import weakref + if 'gevent.monkey' in sys.modules: from gevent.queue import Queue, Empty @@ -42,11 +44,15 @@ AuthResponseMessage, AuthChallengeMessage, AuthSuccessMessage, ProtocolException, RegisterMessage, ReviseRequestMessage) +from cassandra.segment import SegmentCodec, CrcException from cassandra.util import OrderedDict log = logging.getLogger(__name__) +segment_codec_no_compression = SegmentCodec() +segment_codec_lz4 = None + # We use an ordered dictionary and specifically add lz4 before # snappy so that lz4 will be preferred. Changing the order of this # will change the compression preferences for the driver. 
@@ -88,6 +94,7 @@ return lz4_block.decompress(byts[3::-1] + byts[4:]) locally_supported_compressions['lz4'] = (lz4_compress, lz4_decompress) + segment_codec_lz4 = SegmentCodec(lz4_compress, lz4_decompress) try: import snappy @@ -426,6 +433,10 @@ pass +class CrcMismatchException(ConnectionException): + pass + + class ContinuousPagingState(object): """ A class for specifying continuous paging state, only supported starting with DSE_V2. @@ -601,6 +612,60 @@ int_from_buf_item = ord +class _ConnectionIOBuffer(object): + """ + Abstraction class to ease the use of the different connection io buffers. With + protocol V5 and checksumming, the data is read, validated and copied to another + cql frame buffer. + """ + _io_buffer = None + _cql_frame_buffer = None + _connection = None + _segment_consumed = False + + def __init__(self, connection): + self._io_buffer = io.BytesIO() + self._connection = weakref.proxy(connection) + + @property + def io_buffer(self): + return self._io_buffer + + @property + def cql_frame_buffer(self): + return self._cql_frame_buffer if self.is_checksumming_enabled else \ + self._io_buffer + + def set_checksumming_buffer(self): + self.reset_io_buffer() + self._cql_frame_buffer = io.BytesIO() + + @property + def is_checksumming_enabled(self): + return self._connection._is_checksumming_enabled + + @property + def has_consumed_segment(self): + return self._segment_consumed; + + def readable_io_bytes(self): + return self.io_buffer.tell() + + def readable_cql_frame_bytes(self): + return self.cql_frame_buffer.tell() + + def reset_io_buffer(self): + self._io_buffer = io.BytesIO(self._io_buffer.read()) + self._io_buffer.seek(0, 2) # 2 == SEEK_END + + def reset_cql_frame_buffer(self): + if self.is_checksumming_enabled: + self._cql_frame_buffer = io.BytesIO(self._cql_frame_buffer.read()) + self._cql_frame_buffer.seek(0, 2) # 2 == SEEK_END + else: + self.reset_io_buffer() + + class Connection(object): CALLBACK_ERR_THREAD_THRESHOLD = 100 @@ -656,7 +721,6 @@ 
allow_beta_protocol_version = False - _iobuf = None _current_frame = None _socket = None @@ -667,6 +731,13 @@ _check_hostname = False _product_type = None + _is_checksumming_enabled = False + + @property + def _iobuf(self): + # backward compatibility, to avoid any change in the reactors + return self._io_buffer.io_buffer + def __init__(self, host='127.0.0.1', port=9042, authenticator=None, ssl_options=None, sockopts=None, compression=True, cql_version=None, protocol_version=ProtocolVersion.MAX_SUPPORTED, is_control_connection=False, @@ -690,7 +761,7 @@ self.no_compact = no_compact self._push_watchers = defaultdict(set) self._requests = {} - self._iobuf = io.BytesIO() + self._io_buffer = _ConnectionIOBuffer(self) self._continuous_paging_sessions = {} self._socket_writable = True @@ -831,6 +902,16 @@ for args in self.sockopts: self._socket.setsockopt(*args) + def _enable_compression(self): + if self._compressor: + self.compressor = self._compressor + + def _enable_checksumming(self): + self._io_buffer.set_checksumming_buffer() + self._is_checksumming_enabled = True + self._segment_codec = segment_codec_lz4 if self.compressor else segment_codec_no_compression + log.debug("Enabling protocol checksumming on connection (%s).", id(self)) + def close(self): raise NotImplementedError() @@ -933,7 +1014,14 @@ # queue the decoder function with the request # this allows us to inject custom functions per request to encode, decode messages self._requests[request_id] = (cb, decoder, result_metadata) - msg = encoder(msg, request_id, self.protocol_version, compressor=self.compressor, allow_beta_protocol_version=self.allow_beta_protocol_version) + msg = encoder(msg, request_id, self.protocol_version, compressor=self.compressor, + allow_beta_protocol_version=self.allow_beta_protocol_version) + + if self._is_checksumming_enabled: + buffer = io.BytesIO() + self._segment_codec.encode(buffer, msg) + msg = buffer.getvalue() + self.push(msg) return len(msg) @@ -1012,7 +1100,7 @@ 
@defunct_on_error def _read_frame_header(self): - buf = self._iobuf.getvalue() + buf = self._io_buffer.cql_frame_buffer.getvalue() pos = len(buf) if pos: version = int_from_buf_item(buf[0]) & PROTOCOL_VERSION_MASK @@ -1028,29 +1116,62 @@ self._current_frame = _Frame(version, flags, stream, op, header_size, body_len + header_size) return pos - def _reset_frame(self): - self._iobuf = io.BytesIO(self._iobuf.read()) - self._iobuf.seek(0, 2) # io.SEEK_END == 2 (constant not present in 2.6) - self._current_frame = None + @defunct_on_error + def _process_segment_buffer(self): + readable_bytes = self._io_buffer.readable_io_bytes() + if readable_bytes >= self._segment_codec.header_length_with_crc: + try: + self._io_buffer.io_buffer.seek(0) + segment_header = self._segment_codec.decode_header(self._io_buffer.io_buffer) + + if readable_bytes >= segment_header.segment_length: + segment = self._segment_codec.decode(self._iobuf, segment_header) + self._io_buffer._segment_consumed = True + self._io_buffer.cql_frame_buffer.write(segment.payload) + else: + # not enough data to read the segment. reset the buffer pointer at the + # beginning to not lose what we previously read (header). 
+ self._io_buffer._segment_consumed = False + self._io_buffer.io_buffer.seek(0) + except CrcException as exc: + # re-raise an exception that inherits from ConnectionException + raise CrcMismatchException(str(exc), self.endpoint) + else: + self._io_buffer._segment_consumed = False def process_io_buffer(self): while True: + if self._is_checksumming_enabled and self._io_buffer.readable_io_bytes(): + self._process_segment_buffer() + self._io_buffer.reset_io_buffer() + + if self._is_checksumming_enabled and not self._io_buffer.has_consumed_segment: + # We couldn't read an entire segment from the io buffer, so return + # control to allow more bytes to be read off the wire + return + if not self._current_frame: pos = self._read_frame_header() else: - pos = self._iobuf.tell() + pos = self._io_buffer.readable_cql_frame_bytes() if not self._current_frame or pos < self._current_frame.end_pos: + if self._is_checksumming_enabled and self._io_buffer.readable_io_bytes(): + # We have a multi-segments message and we need to read more + # data to complete the current cql frame + continue + # we don't have a complete header yet or we # already saw a header, but we don't have a # complete message yet return else: frame = self._current_frame - self._iobuf.seek(frame.body_offset) - msg = self._iobuf.read(frame.end_pos - frame.body_offset) + self._io_buffer.cql_frame_buffer.seek(frame.body_offset) + msg = self._io_buffer.cql_frame_buffer.read(frame.end_pos - frame.body_offset) self.process_msg(frame, msg) - self._reset_frame() + self._io_buffer.reset_cql_frame_buffer() + self._current_frame = None @defunct_on_error def process_msg(self, header, body): @@ -1185,11 +1306,19 @@ compression_type = k break - # set the decompressor here, but set the compressor only after - # a successful Ready message - self._compression_type = compression_type - self._compressor, self.decompressor = \ - locally_supported_compressions[compression_type] + # If snappy compression is selected with 
v5+checksumming, the connection + # will fail with OTO. Only lz4 is supported + if (compression_type == 'snappy' and + ProtocolVersion.has_checksumming_support(self.protocol_version)): + log.debug("Snappy compression is not supported with protocol version %s and " + "checksumming. Consider installing lz4. Disabling compression.", self.protocol_version) + compression_type = None + else: + # set the decompressor here, but set the compressor only after + # a successful Ready message + self._compression_type = compression_type + self._compressor, self.decompressor = \ + locally_supported_compressions[compression_type] self._send_startup_message(compression_type, no_compact=self.no_compact) @@ -1210,6 +1339,7 @@ def _handle_startup_response(self, startup_response, did_authenticate=False): if self.is_defunct: return + if isinstance(startup_response, ReadyMessage): if self.authenticator: log.warning("An authentication challenge was not sent, " @@ -1218,8 +1348,11 @@ self.authenticator.__class__.__name__) log.debug("Got ReadyMessage on new connection (%s) from %s", id(self), self.endpoint) - if self._compressor: - self.compressor = self._compressor + self._enable_compression() + + if ProtocolVersion.has_checksumming_support(self.protocol_version): + self._enable_checksumming() + self.connected_event.set() elif isinstance(startup_response, AuthenticateMessage): log.debug("Got AuthenticateMessage on new connection (%s) from %s: %s", @@ -1231,6 +1364,10 @@ "if DSE authentication is configured with transitional mode" % (self.host,)) raise AuthenticationFailed('Remote end requires authentication') + self._enable_compression() + if ProtocolVersion.has_checksumming_support(self.protocol_version): + self._enable_checksumming() + if isinstance(self.authenticator, dict): log.debug("Sending credentials-based auth response on %s", self) cm = CredentialsMessage(creds=self.authenticator) diff -Nru python-cassandra-driver-3.24.0/cassandra/cqlengine/connection.py 
python-cassandra-driver-3.25.0/cassandra/cqlengine/connection.py --- python-cassandra-driver-3.24.0/cassandra/cqlengine/connection.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/cassandra/cqlengine/connection.py 2021-03-18 14:48:29.000000000 +0000 @@ -98,7 +98,13 @@ if self.lazy_connect: return - self.cluster = Cluster(self.hosts, **self.cluster_options) + if 'cloud' in self.cluster_options: + if self.hosts: + log.warning("Ignoring hosts %s because a cloud config was provided.", self.hosts) + self.cluster = Cluster(**self.cluster_options) + else: + self.cluster = Cluster(self.hosts, **self.cluster_options) + try: self.session = self.cluster.connect() log.debug(format_log_context("connection initialized with internally created session", connection=self.name)) @@ -301,6 +307,8 @@ log.debug("cqlengine default connection initialized with %s", s) +# TODO next major: if a cloud config is specified in kwargs, hosts will be ignored. +# This function should be refactored to reflect this change. PYTHON-1265 def setup( hosts, default_keyspace, diff -Nru python-cassandra-driver-3.24.0/cassandra/__init__.py python-cassandra-driver-3.25.0/cassandra/__init__.py --- python-cassandra-driver-3.24.0/cassandra/__init__.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/cassandra/__init__.py 2021-03-18 14:48:29.000000000 +0000 @@ -22,7 +22,7 @@ logging.getLogger('cassandra').addHandler(NullHandler()) -__version_info__ = (3, 24, 0) +__version_info__ = (3, 25, 0) __version__ = '.'.join(map(str, __version_info__)) @@ -161,7 +161,12 @@ V5 = 5 """ - v5, in beta from 3.x+ + v5, in beta from 3.x+. 
Finalised in 4.0-beta5 + """ + + V6 = 6 + """ + v6, in beta from 4.0-beta5 """ DSE_V1 = 0x41 @@ -174,12 +179,12 @@ DSE private protocol v2, supported in DSE 6.0+ """ - SUPPORTED_VERSIONS = (DSE_V2, DSE_V1, V5, V4, V3, V2, V1) + SUPPORTED_VERSIONS = (DSE_V2, DSE_V1, V6, V5, V4, V3, V2, V1) """ A tuple of all supported protocol versions """ - BETA_VERSIONS = (V5,) + BETA_VERSIONS = (V6,) """ A tuple of all beta protocol versions """ @@ -235,6 +240,10 @@ def has_continuous_paging_next_pages(cls, version): return version >= cls.DSE_V2 + @classmethod + def has_checksumming_support(cls, version): + return cls.V5 <= version < cls.DSE_V1 + class WriteType(object): """ diff -Nru python-cassandra-driver-3.24.0/cassandra/io/asyncorereactor.py python-cassandra-driver-3.25.0/cassandra/io/asyncorereactor.py --- python-cassandra-driver-3.24.0/cassandra/io/asyncorereactor.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/cassandra/io/asyncorereactor.py 2021-03-18 14:48:29.000000000 +0000 @@ -36,20 +36,7 @@ from cassandra.connection import Connection, ConnectionShutdown, NONBLOCKING, Timer, TimerManager -# TODO: Remove when Python 2 is removed -class LogWrapper(object): - """ PYTHON-1228. If our logger has disappeared, there's nothing we can do, so just execute nothing """ - def __init__(self): - self._log = logging.getLogger(__name__) - - def __getattr__(self, name): - try: - return getattr(self._log, name) - except: - return lambda *args, **kwargs: None - - -log = LogWrapper() +log = logging.getLogger(__name__) _dispatcher_map = {} @@ -262,7 +249,13 @@ self._loop_dispatcher.loop(self.timer_resolution) self._timers.service_timeouts() except Exception: - log.debug("Asyncore event loop stopped unexepectedly", exc_info=True) + try: + log.debug("Asyncore event loop stopped unexpectedly", exc_info=True) + except Exception: + # TODO: Remove when Python 2 support is removed + # PYTHON-1266. 
If our logger has disappeared, there's nothing we + # can do, so just log nothing. + pass break self._started = False diff -Nru python-cassandra-driver-3.24.0/cassandra/marshal.py python-cassandra-driver-3.25.0/cassandra/marshal.py --- python-cassandra-driver-3.24.0/cassandra/marshal.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/cassandra/marshal.py 2021-03-18 14:48:29.000000000 +0000 @@ -28,6 +28,7 @@ int8_pack, int8_unpack = _make_packer('>b') uint64_pack, uint64_unpack = _make_packer('>Q') uint32_pack, uint32_unpack = _make_packer('>I') +uint32_le_pack, uint32_le_unpack = _make_packer('<I') uint16_pack, uint16_unpack = _make_packer('>H') uint8_pack, uint8_unpack = _make_packer('>B') float_pack, float_unpack = _make_packer('>f') diff -Nru python-cassandra-driver-3.24.0/cassandra/metadata.py python-cassandra-driver-3.25.0/cassandra/metadata.py --- python-cassandra-driver-3.24.0/cassandra/metadata.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/cassandra/metadata.py 2021-03-18 14:48:29.000000000 +0000 @@ -52,18 +52,18 @@ 'counter', 'create', 'custom', 'date', 'decimal', 'default', 'delete', 'desc', 'describe', 'deterministic', 'distinct', 'double', 'drop', 'entries', 'execute', 'exists', 'filtering', 'finalfunc', 'float', 'from', 'frozen', 'full', 'function', 'functions', 'grant', 'if', 'in', 'index', 'inet', 'infinity', 'initcond', 'input', 'insert', 'int', 'into', 'is', 'json', - 'key', 'keys', 'keyspace', 'keyspaces', 'language', 'limit', 'list', 'login', 'map', 'materialized', 'modify', 'monotonic', 'nan', 'nologin', - 'norecursive', 'nosuperuser', 'not', 'null', 'of', 'on', 'options', 'or', 'order', 'password', 'permission', + 'key', 'keys', 'keyspace', 'keyspaces', 'language', 'limit', 'list', 'login', 'map', 'materialized', 'mbean', 'mbeans', 'modify', 'monotonic', + 'nan', 'nologin', 'norecursive', 'nosuperuser', 'not', 'null', 'of', 'on', 'options', 'or', 'order', 'password', 'permission', 'permissions', 'primary', 'rename', 'replace', 'returns',
'revoke', 'role', 'roles', 'schema', 'select', 'set', 'sfunc', 'smallint', 'static', 'storage', 'stype', 'superuser', 'table', 'text', 'time', 'timestamp', 'timeuuid', - 'tinyint', 'to', 'token', 'trigger', 'truncate', 'ttl', 'tuple', 'type', 'unlogged', 'update', 'use', 'user', + 'tinyint', 'to', 'token', 'trigger', 'truncate', 'ttl', 'tuple', 'type', 'unlogged', 'unset', 'update', 'use', 'user', 'users', 'using', 'uuid', 'values', 'varchar', 'varint', 'view', 'where', 'with', 'writetime', # DSE specifics "node", "nodes", "plan", "active", "application", "applications", "java", "executor", "executors", "std_out", "std_err", "renew", "delegation", "no", "redact", "token", "lowercasestring", "cluster", "authentication", "schemes", "scheme", "internal", "ldap", "kerberos", "remote", "object", "method", "call", "calls", "search", "schema", "config", "rows", - "columns", "profiles", "commit", "reload", "unset", "rebuild", "field", "workpool", "any", "submission", "indices", + "columns", "profiles", "commit", "reload", "rebuild", "field", "workpool", "any", "submission", "indices", "restrict", "unrestrict" )) """ diff -Nru python-cassandra-driver-3.24.0/cassandra/protocol.py python-cassandra-driver-3.25.0/cassandra/protocol.py --- python-cassandra-driver-3.24.0/cassandra/protocol.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/cassandra/protocol.py 2021-03-18 14:48:29.000000000 +0000 @@ -31,7 +31,7 @@ UserAggregateDescriptor, SchemaTargetType) from cassandra.marshal import (int32_pack, int32_unpack, uint16_pack, uint16_unpack, uint8_pack, int8_unpack, uint64_pack, header_pack, - v3_header_pack, uint32_pack) + v3_header_pack, uint32_pack, uint32_le_unpack, uint32_le_pack) from cassandra.cqltypes import (AsciiType, BytesType, BooleanType, CounterColumnType, DateType, DecimalType, DoubleType, FloatType, Int32Type, @@ -180,6 +180,10 @@ summary = 'Protocol error' error_code = 0x000A + @property + def is_beta_protocol_error(self): + return 'USE_BETA 
flag is unset' in str(self) + class BadCredentials(ErrorMessageSub): summary = 'Bad credentials' @@ -1115,7 +1119,9 @@ msg.send_body(body, protocol_version) body = body.getvalue() - if compressor and len(body) > 0: + # With checksumming, the compression is done at the segment frame encoding + if (not ProtocolVersion.has_checksumming_support(protocol_version) + and compressor and len(body) > 0): body = compressor(body) flags |= COMPRESSED_FLAG @@ -1155,7 +1161,8 @@ :param decompressor: optional decompression function to inflate the body :return: a message decoded from the body and frame attributes """ - if flags & COMPRESSED_FLAG: + if (not ProtocolVersion.has_checksumming_support(protocol_version) and + flags & COMPRESSED_FLAG): if decompressor is None: raise RuntimeError("No de-compressor available for compressed frame!") body = decompressor(body) @@ -1271,6 +1278,33 @@ return int32_unpack(f.read(4)) +def read_uint_le(f, size=4): + """ + Read a sequence of little endian bytes and return an unsigned integer. + """ + + if size == 4: + value = uint32_le_unpack(f.read(4)) + else: + value = 0 + for i in range(size): + value |= (read_byte(f) & 0xFF) << 8 * i + + return value + + +def write_uint_le(f, i, size=4): + """ + Write an unsigned integer on a sequence of little endian bytes. + """ + if size == 4: + f.write(uint32_le_pack(i)) + else: + for j in range(size): + shift = j * 8 + write_byte(f, i >> shift & 0xFF) + + def write_int(f, i): f.write(int32_pack(i)) diff -Nru python-cassandra-driver-3.24.0/cassandra/segment.py python-cassandra-driver-3.25.0/cassandra/segment.py --- python-cassandra-driver-3.24.0/cassandra/segment.py 1970-01-01 00:00:00.000000000 +0000 +++ python-cassandra-driver-3.25.0/cassandra/segment.py 2021-03-18 14:48:29.000000000 +0000 @@ -0,0 +1,224 @@ +# Copyright DataStax, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import zlib +import six + +from cassandra import DriverException +from cassandra.marshal import int32_pack +from cassandra.protocol import write_uint_le, read_uint_le + +CRC24_INIT = 0x875060 +CRC24_POLY = 0x1974F0B +CRC24_LENGTH = 3 +CRC32_LENGTH = 4 +CRC32_INITIAL = zlib.crc32(b"\xfa\x2d\x55\xca") + + +class CrcException(Exception): + """ + CRC mismatch error. + + TODO: here to avoid import cycles with cassandra.connection. In the next + major, the exceptions should be declared in a separated exceptions.py + file. + """ + pass + + +def compute_crc24(data, length): + crc = CRC24_INIT + + for _ in range(length): + crc ^= (data & 0xff) << 16 + data >>= 8 + + for i in range(8): + crc <<= 1 + if crc & 0x1000000 != 0: + crc ^= CRC24_POLY + + return crc + + +def compute_crc32(data, value): + crc32 = zlib.crc32(data, value) + if six.PY2: + crc32 &= 0xffffffff + + return crc32 + + +class SegmentHeader(object): + + payload_length = None + uncompressed_payload_length = None + is_self_contained = None + + def __init__(self, payload_length, uncompressed_payload_length, is_self_contained): + self.payload_length = payload_length + self.uncompressed_payload_length = uncompressed_payload_length + self.is_self_contained = is_self_contained + + @property + def segment_length(self): + """ + Return the total length of the segment, including the CRC. 
+ """ + hl = SegmentCodec.UNCOMPRESSED_HEADER_LENGTH if self.uncompressed_payload_length < 1 \ + else SegmentCodec.COMPRESSED_HEADER_LENGTH + return hl + CRC24_LENGTH + self.payload_length + CRC32_LENGTH + + +class Segment(object): + + MAX_PAYLOAD_LENGTH = 128 * 1024 - 1 + + payload = None + is_self_contained = None + + def __init__(self, payload, is_self_contained): + self.payload = payload + self.is_self_contained = is_self_contained + + +class SegmentCodec(object): + + COMPRESSED_HEADER_LENGTH = 5 + UNCOMPRESSED_HEADER_LENGTH = 3 + FLAG_OFFSET = 17 + + compressor = None + decompressor = None + + def __init__(self, compressor=None, decompressor=None): + self.compressor = compressor + self.decompressor = decompressor + + @property + def header_length(self): + return self.COMPRESSED_HEADER_LENGTH if self.compression \ + else self.UNCOMPRESSED_HEADER_LENGTH + + @property + def header_length_with_crc(self): + return (self.COMPRESSED_HEADER_LENGTH if self.compression + else self.UNCOMPRESSED_HEADER_LENGTH) + CRC24_LENGTH + + @property + def compression(self): + return self.compressor and self.decompressor + + def compress(self, data): + # the uncompressed length is already encoded in the header, so + # we remove it here + return self.compressor(data)[4:] + + def decompress(self, encoded_data, uncompressed_length): + return self.decompressor(int32_pack(uncompressed_length) + encoded_data) + + def encode_header(self, buffer, payload_length, uncompressed_length, is_self_contained): + if payload_length > Segment.MAX_PAYLOAD_LENGTH: + raise DriverException('Payload length exceed Segment.MAX_PAYLOAD_LENGTH') + + header_data = payload_length + + flag_offset = self.FLAG_OFFSET + if self.compression: + header_data |= uncompressed_length << flag_offset + flag_offset += 17 + + if is_self_contained: + header_data |= 1 << flag_offset + + write_uint_le(buffer, header_data, size=self.header_length) + header_crc = compute_crc24(header_data, self.header_length) + write_uint_le(buffer, 
header_crc, size=CRC24_LENGTH) + + def _encode_segment(self, buffer, payload, is_self_contained): + """ + Encode a message to a single segment. + """ + uncompressed_payload = payload + uncompressed_payload_length = len(payload) + + if self.compression: + compressed_payload = self.compress(uncompressed_payload) + if len(compressed_payload) >= uncompressed_payload_length: + encoded_payload = uncompressed_payload + uncompressed_payload_length = 0 + else: + encoded_payload = compressed_payload + else: + encoded_payload = uncompressed_payload + + payload_length = len(encoded_payload) + self.encode_header(buffer, payload_length, uncompressed_payload_length, is_self_contained) + payload_crc = compute_crc32(encoded_payload, CRC32_INITIAL) + buffer.write(encoded_payload) + write_uint_le(buffer, payload_crc) + + def encode(self, buffer, msg): + """ + Encode a message to one of more segments. + """ + msg_length = len(msg) + + if msg_length > Segment.MAX_PAYLOAD_LENGTH: + payloads = [] + for i in range(0, msg_length, Segment.MAX_PAYLOAD_LENGTH): + payloads.append(msg[i:i + Segment.MAX_PAYLOAD_LENGTH]) + else: + payloads = [msg] + + is_self_contained = len(payloads) == 1 + for payload in payloads: + self._encode_segment(buffer, payload, is_self_contained) + + def decode_header(self, buffer): + header_data = read_uint_le(buffer, self.header_length) + + expected_header_crc = read_uint_le(buffer, CRC24_LENGTH) + actual_header_crc = compute_crc24(header_data, self.header_length) + if actual_header_crc != expected_header_crc: + raise CrcException('CRC mismatch on header {:x}. 
Received {:x}", computed {:x}.'.format( + header_data, expected_header_crc, actual_header_crc)) + + payload_length = header_data & Segment.MAX_PAYLOAD_LENGTH + header_data >>= 17 + + if self.compression: + uncompressed_payload_length = header_data & Segment.MAX_PAYLOAD_LENGTH + header_data >>= 17 + else: + uncompressed_payload_length = -1 + + is_self_contained = (header_data & 1) == 1 + + return SegmentHeader(payload_length, uncompressed_payload_length, is_self_contained) + + def decode(self, buffer, header): + encoded_payload = buffer.read(header.payload_length) + expected_payload_crc = read_uint_le(buffer) + + actual_payload_crc = compute_crc32(encoded_payload, CRC32_INITIAL) + if actual_payload_crc != expected_payload_crc: + raise CrcException('CRC mismatch on payload. Received {:x}", computed {:x}.'.format( + expected_payload_crc, actual_payload_crc)) + + payload = encoded_payload + if self.compression and header.uncompressed_payload_length > 0: + payload = self.decompress(encoded_payload, header.uncompressed_payload_length) + + return Segment(payload, header.is_self_contained) diff -Nru python-cassandra-driver-3.24.0/cassandra/util.py python-cassandra-driver-3.25.0/cassandra/util.py --- python-cassandra-driver-3.24.0/cassandra/util.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/cassandra/util.py 2021-03-18 14:48:29.000000000 +0000 @@ -1541,8 +1541,11 @@ """ months = 0 + "" days = 0 + "" nanoseconds = 0 + "" def __init__(self, months=0, days=0, nanoseconds=0): self.months = months diff -Nru python-cassandra-driver-3.24.0/CHANGELOG.rst python-cassandra-driver-3.25.0/CHANGELOG.rst --- python-cassandra-driver-3.24.0/CHANGELOG.rst 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/CHANGELOG.rst 2021-03-18 14:48:29.000000000 +0000 @@ -1,3 +1,23 @@ +3.25.0 +====== +March 18, 2021 + +Features +-------- +* Ensure the driver can connect when invalid peer hosts are in system.peers (PYTHON-1260) +* Implement protocol v5 
checksumming (PYTHON-1258) +* Fix the default cqlengine connection mechanism to work with Astra (PYTHON-1265) + +Bug Fixes +--------- +* Asyncore race condition cause logging exception on shutdown (PYTHON-1266) +* Update list of reserved keywords (PYTHON-1269) + +Others +------ +* Drop Python 3.4 support (PYTHON-1220) +* Update security documentation and examples to use PROTOCOL_TLS (PYTHON-1264) + 3.24.0 ====== June 18, 2020 diff -Nru python-cassandra-driver-3.24.0/debian/changelog python-cassandra-driver-3.25.0/debian/changelog --- python-cassandra-driver-3.24.0/debian/changelog 2022-03-16 23:02:52.000000000 +0000 +++ python-cassandra-driver-3.25.0/debian/changelog 2022-05-13 20:37:40.000000000 +0000 @@ -1,14 +1,22 @@ -python-cassandra-driver (3.24.0-1build3) jammy; urgency=medium +python-cassandra-driver (3.25.0-1) unstable; urgency=medium - * No-change rebuild with Python 3.10 only + [ Nicolas Dandrimont ] + * New upstream release - -- Graham Inggs Wed, 16 Mar 2022 23:02:52 +0000 + [ Emmanuel Arias] + * d/control: Bump Standards-Version to 4.6.1 (from 4.5.1; no further + changes). + * d/control: Remove trivial autopkgtest-pkg-python. + * d/copyright: Update Copyright for year for Debian files. + * d/upstream/metadata: Fix Name key in file. Use Python-Cassandra-Files + instead of TileDB. + * d/patches/0004-Fix-typos-in-upstream-source.patch: fix typos in upstream + source. + * d/tests: Run autopkgtest for this package. + * d/patches/0005-Skip-test_multi_timer_validation-flaky-test.patch: Skip + flaky test (Closes: #1000612). -python-cassandra-driver (3.24.0-1build2) jammy; urgency=medium - - * No-change rebuild to add python3.10. 
- - -- Matthias Klose Sun, 17 Oct 2021 13:33:22 +0200 + -- Emmanuel Arias Fri, 13 May 2022 17:37:40 -0300 python-cassandra-driver (3.24.0-1) unstable; urgency=medium diff -Nru python-cassandra-driver-3.24.0/debian/control python-cassandra-driver-3.25.0/debian/control --- python-cassandra-driver-3.24.0/debian/control 2021-02-05 16:27:33.000000000 +0000 +++ python-cassandra-driver-3.25.0/debian/control 2022-05-13 20:37:40.000000000 +0000 @@ -22,11 +22,10 @@ python3-twisted , python3-tz , python3-yaml , -Standards-Version: 4.5.1 +Standards-Version: 4.6.1 Homepage: https://github.com/datastax/python-driver Vcs-Git: https://salsa.debian.org/python-team/packages/python-cassandra-driver.git Vcs-Browser: https://salsa.debian.org/python-team/packages/python-cassandra-driver -Testsuite: autopkgtest-pkg-python Rules-Requires-Root: no Package: python3-cassandra diff -Nru python-cassandra-driver-3.24.0/debian/copyright python-cassandra-driver-3.25.0/debian/copyright --- python-cassandra-driver-3.24.0/debian/copyright 2021-02-05 16:27:33.000000000 +0000 +++ python-cassandra-driver-3.25.0/debian/copyright 2022-05-13 20:37:40.000000000 +0000 @@ -7,7 +7,7 @@ License: Apache-2 Files: debian/* -Copyright:2018-2021 Emmanuel Arias +Copyright:2018-2022 Emmanuel Arias 2015-2018 Sandro Tosi License: Apache-2 @@ -41,7 +41,7 @@ Copyright (c) 2011 Patrick Hensley (Python wrapper, packaging) Copyright 2013-2016 DataStax (Minor modifications to match Cassandra's MM3 hashes) License: expat -Note: The majority of this code was taken from the python-smhasher library, +Comment: The majority of this code was taken from the python-smhasher library, which can be found here: https://github.com/phensley/python-smhasher License: expat diff -Nru python-cassandra-driver-3.24.0/debian/patches/0004-Fix-typos-in-upstream-source.patch python-cassandra-driver-3.25.0/debian/patches/0004-Fix-typos-in-upstream-source.patch --- python-cassandra-driver-3.24.0/debian/patches/0004-Fix-typos-in-upstream-source.patch 
1970-01-01 00:00:00.000000000 +0000 +++ python-cassandra-driver-3.25.0/debian/patches/0004-Fix-typos-in-upstream-source.patch 2022-05-13 20:37:40.000000000 +0000 @@ -0,0 +1,36 @@ +From: Emmanuel Arias +Date: Fri, 13 May 2022 19:44:22 -0300 +Subject: Fix typos in upstream source +Forwarded: https://github.com/datastax/python-driver/pull/1126 +Author: Emmanuel Arias +--- + cassandra/cluster.py | 2 +- + cassandra/util.py | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/cassandra/cluster.py b/cassandra/cluster.py +index 7e101af..9cf7f07 100644 +--- a/cassandra/cluster.py ++++ b/cassandra/cluster.py +@@ -2391,7 +2391,7 @@ class Session(object): + *Deprecated:* use execution profiles instead + """ + warn("Setting the consistency level at the session level will be removed in 4.0. Consider using " +- "execution profiles and setting the desired consitency level to the EXEC_PROFILE_DEFAULT profile." ++ "execution profiles and setting the desired consistency level to the EXEC_PROFILE_DEFAULT profile." + , DeprecationWarning) + self._validate_set_legacy_config('default_consistency_level', cl) + +diff --git a/cassandra/util.py b/cassandra/util.py +index f896ff4..dd5c58b 100644 +--- a/cassandra/util.py ++++ b/cassandra/util.py +@@ -797,7 +797,7 @@ class OrderedMap(Mapping): + ''' + An ordered map that accepts non-hashable types for keys. It also maintains the + insertion order of items, behaving as OrderedDict in that regard. 
These maps +- are constructed and read just as normal mapping types, exept that they may ++ are constructed and read just as normal mapping types, except that they may + contain arbitrary collections and other non-hashable items as keys:: + + >>> od = OrderedMap([({'one': 1, 'two': 2}, 'value'), diff -Nru python-cassandra-driver-3.24.0/debian/patches/0005-Skip-test_multi_timer_validation-flaky-test.patch python-cassandra-driver-3.25.0/debian/patches/0005-Skip-test_multi_timer_validation-flaky-test.patch --- python-cassandra-driver-3.24.0/debian/patches/0005-Skip-test_multi_timer_validation-flaky-test.patch 1970-01-01 00:00:00.000000000 +0000 +++ python-cassandra-driver-3.25.0/debian/patches/0005-Skip-test_multi_timer_validation-flaky-test.patch 2022-05-13 20:37:40.000000000 +0000 @@ -0,0 +1,22 @@ +From: Emmanuel Arias +Date: Thu, 26 May 2022 15:24:07 -0300 +Subject: Skip test_multi_timer_validation flaky test + +Bug: https://datastax-oss.atlassian.net/browse/PYTHON-1301 +Bug-Debian: https://bugs.debian.org/1000612 +--- + tests/unit/io/utils.py | 1 + + 1 file changed, 1 insertion(+) + +diff --git a/tests/unit/io/utils.py b/tests/unit/io/utils.py +index 848513f..deee650 100644 +--- a/tests/unit/io/utils.py ++++ b/tests/unit/io/utils.py +@@ -163,6 +163,7 @@ class TimerTestMixin(object): + def tearDown(self): + self.connection.close() + ++ @unittest.skip("Skip flaky test") + def test_multi_timer_validation(self): + """ + Verify that timer timeouts are honored appropriately diff -Nru python-cassandra-driver-3.24.0/debian/patches/series python-cassandra-driver-3.25.0/debian/patches/series --- python-cassandra-driver-3.24.0/debian/patches/series 2021-02-05 16:27:33.000000000 +0000 +++ python-cassandra-driver-3.25.0/debian/patches/series 2022-05-13 20:37:40.000000000 +0000 @@ -1,3 +1,5 @@ Make_unittests_loadable.patch Disable_GitHub_links_in_doc.patch 0003-Skip-tests-that-fails-on-i386-arch.patch +0004-Fix-typos-in-upstream-source.patch 
+0005-Skip-test_multi_timer_validation-flaky-test.patch diff -Nru python-cassandra-driver-3.24.0/debian/tests/control python-cassandra-driver-3.25.0/debian/tests/control --- python-cassandra-driver-3.24.0/debian/tests/control 1970-01-01 00:00:00.000000000 +0000 +++ python-cassandra-driver-3.25.0/debian/tests/control 2022-05-13 20:37:40.000000000 +0000 @@ -0,0 +1,14 @@ +Tests: testsuite +Depends: cython3, + python3-all, + python3-eventlet, + python3-mock, + python3-nose, + python3-pure-sasl, + python3-pytest, + python3-sure, + python3-twisted, + python3-tz, + python3-yaml, + @, +Restrictions: allow-stderr, needs-root diff -Nru python-cassandra-driver-3.24.0/debian/tests/testsuite python-cassandra-driver-3.25.0/debian/tests/testsuite --- python-cassandra-driver-3.24.0/debian/tests/testsuite 1970-01-01 00:00:00.000000000 +0000 +++ python-cassandra-driver-3.25.0/debian/tests/testsuite 2022-05-13 20:37:40.000000000 +0000 @@ -0,0 +1,19 @@ +#!/bin/sh +set -efu + +pys="$(py3versions -s 2> /dev/null)" + + +cp -a tests "$AUTOPKGTEST_TMP" +cd "$AUTOPKGTEST_TMP" + +# Remove cython tests for now +rm -R -f tests/unit/cython/ + +# Remove test that use internet connection +rm -R -f tests/unit/advanced/cloud/ + +for py in $pys; do + echo "=== $py ===" + $py -m nose -v tests 2>&1 +done diff -Nru python-cassandra-driver-3.24.0/debian/upstream/metadata python-cassandra-driver-3.25.0/debian/upstream/metadata --- python-cassandra-driver-3.24.0/debian/upstream/metadata 2021-02-05 16:27:33.000000000 +0000 +++ python-cassandra-driver-3.25.0/debian/upstream/metadata 2022-05-13 20:37:40.000000000 +0000 @@ -1,5 +1,5 @@ +Name: Python-Cassandra-Driver Bug-Database: https://github.com/datastax/python-driver/issues Bug-Submit: https://github.com/datastax/python-driver/issues/new -Name: TileDB Repository: https://github.com/datastax/python-driver.git Repository-Browse: https://github.com/datastax/python-driver diff -Nru python-cassandra-driver-3.24.0/docs/cloud.rst 
python-cassandra-driver-3.25.0/docs/cloud.rst --- python-cassandra-driver-3.24.0/docs/cloud.rst 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/docs/cloud.rst 2021-03-18 14:48:29.000000000 +0000 @@ -54,3 +54,36 @@ Event loops ^^^^^^^^^^^ Evenlet isn't yet supported for python 3.7+ due to an `issue in Eventlet `_. + + +CqlEngine +========= + +When using the object mapper, you can configure cqlengine with :func:`~.cqlengine.connection.set_session`: + +.. code:: python + + from cassandra.cqlengine import connection + ... + + c = Cluster(cloud={'secure_connect_bundle':'/path/to/secure-connect-test.zip'}, + auth_provider=PlainTextAuthProvider('user', 'pass')) + s = c.connect('myastrakeyspace') + connection.set_session(s) + ... + +If you are using some third-party libraries (flask, django, etc.), you might not be able to change the +configuration mechanism. For this reason, the `hosts` argument of the default +:func:`~.cqlengine.connection.setup` function will be ignored if a `cloud` config is provided: + +.. code:: python + + from cassandra.cqlengine import connection + ... + + connection.setup( + None, # or anything else + "myastrakeyspace", cloud={ + 'secure_connect_bundle':'/path/to/secure-connect-test.zip' + }, + auth_provider=PlainTextAuthProvider('user', 'pass')) diff -Nru python-cassandra-driver-3.24.0/docs/getting_started.rst python-cassandra-driver-3.25.0/docs/getting_started.rst --- python-cassandra-driver-3.24.0/docs/getting_started.rst 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/docs/getting_started.rst 2021-03-18 14:48:29.000000000 +0000 @@ -3,16 +3,42 @@ First, make sure you have the driver properly :doc:`installed `. -Connecting to Cassandra +Connecting to a Cluster ----------------------- Before we can start executing any queries against a Cassandra cluster we need to setup an instance of :class:`~.Cluster`. 
As the name suggests, you will typically have one instance of :class:`~.Cluster` for each Cassandra cluster you want to interact with. -The simplest way to create a :class:`~.Cluster` is like this: First, make sure you have the Cassandra driver properly :doc:`installed `. +Connecting to Astra ++++++++++++++++++++ + +If you are a DataStax `Astra `_ user, +here is how to connect to your cluster: + +1. Download the secure connect bundle from your Astra account. +2. Connect to your cluster with + +.. code-block:: python + + from cassandra.cluster import Cluster + from cassandra.auth import PlainTextAuthProvider + + cloud_config = { + 'secure_connect_bundle': '/path/to/secure-connect-dbname.zip' + } + auth_provider = PlainTextAuthProvider(username='user', password='pass') + cluster = Cluster(cloud=cloud_config, auth_provider=auth_provider) + session = cluster.connect() + +See `Astra `_ and :doc:`cloud` for more details. + +Connecting to Cassandra ++++++++++++++++++++++++ +The simplest way to create a :class:`~.Cluster` is like this: + .. code-block:: python from cassandra.cluster import Cluster @@ -52,6 +78,8 @@ cluster = Cluster() session = cluster.connect() +Session Keyspace +---------------- The :meth:`~.Cluster.connect()` method takes an optional ``keyspace`` argument which sets the default keyspace for all queries made through that :class:`~.Session`: @@ -60,7 +88,6 @@ cluster = Cluster() session = cluster.connect('mykeyspace') - You can always change a Session's keyspace using :meth:`~.Session.set_keyspace` or by executing a ``USE `` query: @@ -70,6 +97,8 @@ # or you can do this instead session.execute('USE users') +Execution Profiles +------------------ Profiles are passed in by ``execution_profiles`` dict. 
In this case we can construct the base ``ExecutionProfile`` passing all attributes: diff -Nru python-cassandra-driver-3.24.0/docs/index.rst python-cassandra-driver-3.25.0/docs/index.rst --- python-cassandra-driver-3.24.0/docs/index.rst 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/docs/index.rst 2021-03-18 14:48:29.000000000 +0000 @@ -4,7 +4,7 @@ This driver works exclusively with the Cassandra Query Language v3 (CQL3) and Cassandra's native protocol. Cassandra 2.1+ is supported, including DSE 4.7+. -The driver supports Python 2.7, 3.4, 3.5, 3.6, 3.7 and 3.8. +The driver supports Python 2.7, 3.5, 3.6, 3.7 and 3.8. This driver is open source under the `Apache v2 License `_. diff -Nru python-cassandra-driver-3.24.0/docs/installation.rst python-cassandra-driver-3.25.0/docs/installation.rst --- python-cassandra-driver-3.24.0/docs/installation.rst 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/docs/installation.rst 2021-03-18 14:48:29.000000000 +0000 @@ -3,7 +3,7 @@ Supported Platforms ------------------- -Python 2.7, 3.4, 3.5, 3.6, 3.7 and 3.8 are supported. Both CPython (the standard Python +Python 2.7, 3.5, 3.6, 3.7 and 3.8 are supported. Both CPython (the standard Python implementation) and `PyPy `_ are supported and tested. Linux, OSX, and Windows are supported. diff -Nru python-cassandra-driver-3.24.0/docs/security.rst python-cassandra-driver-3.25.0/docs/security.rst --- python-cassandra-driver-3.24.0/docs/security.rst 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/docs/security.rst 2021-03-18 14:48:29.000000000 +0000 @@ -119,9 +119,9 @@ .. code-block:: python from cassandra.cluster import Cluster, Session - from ssl import SSLContext, PROTOCOL_TLSv1 + from ssl import SSLContext, PROTOCOL_TLS - ssl_context = SSLContext(PROTOCOL_TLSv1) + ssl_context = SSLContext(PROTOCOL_TLS) cluster = Cluster(['127.0.0.1'], ssl_context=ssl_context) session = cluster.connect() @@ -147,9 +147,9 @@ .. 
code-block:: python from cassandra.cluster import Cluster, Session - from ssl import SSLContext, PROTOCOL_TLSv1, CERT_REQUIRED + from ssl import SSLContext, PROTOCOL_TLS, CERT_REQUIRED - ssl_context = SSLContext(PROTOCOL_TLSv1) + ssl_context = SSLContext(PROTOCOL_TLS) ssl_context.load_verify_locations('/path/to/rootca.crt') ssl_context.verify_mode = CERT_REQUIRED @@ -161,9 +161,9 @@ .. code-block:: python from cassandra.cluster import Cluster, Session - from ssl import SSLContext, PROTOCOL_TLSv1, CERT_REQUIRED + from ssl import SSLContext, PROTOCOL_TLS, CERT_REQUIRED - ssl_context = SSLContext(PROTOCOL_TLSv1) + ssl_context = SSLContext(PROTOCOL_TLS) ssl_context.load_verify_locations('/path/to/rootca.crt') ssl_context.verify_mode = CERT_REQUIRED ssl_context.check_hostname = True @@ -228,9 +228,9 @@ .. code-block:: python from cassandra.cluster import Cluster, Session - from ssl import SSLContext, PROTOCOL_TLSv1 + from ssl import SSLContext, PROTOCOL_TLS - ssl_context = SSLContext(PROTOCOL_TLSv1) + ssl_context = SSLContext(PROTOCOL_TLS) ssl_context.load_cert_chain( certfile='/path/to/client.crt_signed', keyfile='/path/to/client.key') @@ -251,9 +251,9 @@ .. 
code-block:: python from cassandra.cluster import Cluster, Session - from ssl import SSLContext, PROTOCOL_TLSv1, CERT_REQUIRED + from ssl import SSLContext, PROTOCOL_TLS, CERT_REQUIRED - ssl_context = SSLContext(PROTOCOL_TLSv1) + ssl_context = SSLContext(PROTOCOL_TLS) ssl_context.load_verify_locations('/path/to/rootca.crt') ssl_context.verify_mode = CERT_REQUIRED ssl_context.load_cert_chain( @@ -275,7 +275,7 @@ from cassandra.cluster import Cluster from cassandra.io.twistedreactor import TwistedConnection - ssl_context = SSL.Context(SSL.TLSv1_METHOD) + ssl_context = SSL.Context(SSL.TLSv1_2_METHOD) ssl_context.set_verify(SSL.VERIFY_PEER, callback=lambda _1, _2, _3, _4, ok: ok) ssl_context.use_certificate_file('/path/to/client.crt_signed') ssl_context.use_privatekey_file('/path/to/client.key') @@ -303,7 +303,7 @@ By default, a ``ca_certs`` value should be supplied (the value should be a string pointing to the location of the CA certs file), and you probably -want to specify ``ssl_version`` as ``ssl.PROTOCOL_TLSv1`` to match +want to specify ``ssl_version`` as ``ssl.PROTOCOL_TLS`` to match Cassandra's default protocol. For example: @@ -311,11 +311,11 @@ .. 
code-block:: python from cassandra.cluster import Cluster - from ssl import PROTOCOL_TLSv1, CERT_REQUIRED + from ssl import PROTOCOL_TLS, CERT_REQUIRED ssl_opts = { 'ca_certs': '/path/to/my/ca.certs', - 'ssl_version': PROTOCOL_TLSv1, + 'ssl_version': PROTOCOL_TLS, 'cert_reqs': CERT_REQUIRED # Certificates are required and validated } cluster = Cluster(ssl_options=ssl_opts) diff -Nru python-cassandra-driver-3.24.0/docs.yaml python-cassandra-driver-3.25.0/docs.yaml --- python-cassandra-driver-3.24.0/docs.yaml 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/docs.yaml 2021-03-18 14:48:29.000000000 +0000 @@ -22,8 +22,10 @@ # build extensions like libev CASS_DRIVER_NO_CYTHON=1 python setup.py build_ext --inplace --force versions: + - name: '3.25' + ref: a83c36a5 - name: '3.24' - ref: e0b7e73c + ref: 21cac12b - name: '3.23' ref: a40a2af7 - name: '3.22' diff -Nru python-cassandra-driver-3.24.0/Jenkinsfile python-cassandra-driver-3.25.0/Jenkinsfile --- python-cassandra-driver-3.24.0/Jenkinsfile 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/Jenkinsfile 2021-03-18 14:48:29.000000000 +0000 @@ -1,39 +1,172 @@ #!groovy +/* -def initializeEnvironment() { - env.DRIVER_DISPLAY_NAME = 'Cassandra Python Driver' - env.DRIVER_METRIC_TYPE = 'oss' +There are multiple combinations to test the python driver. + +Test Profiles: + + Full: Execute all unit and integration tests, including long tests. + Standard: Execute unit and integration tests. + Smoke Tests: Execute a small subset of tests. + EVENT_LOOP: Execute a small subset of tests selected to test EVENT_LOOPs. + +Matrix Types: + + Full: All server versions, python runtimes tested with and without Cython. + Develop: Smaller matrix for dev purpose. + Cassandra: All cassandra server versions. + Dse: All dse server versions. 
+ +Parameters: + + EVENT_LOOP: 'LIBEV' (Default), 'GEVENT', 'EVENTLET', 'ASYNCIO', 'ASYNCORE', 'TWISTED' + CYTHON: Default, 'True', 'False' + +*/ + +@Library('dsdrivers-pipeline-lib@develop') +import com.datastax.jenkins.drivers.python.Slack + +slack = new Slack() + +// Define our predefined matrices +matrices = [ + "FULL": [ + "SERVER": ['2.1', '2.2', '3.0', '3.11', '4.0', 'dse-5.0', 'dse-5.1', 'dse-6.0', 'dse-6.7', 'dse-6.8'], + "RUNTIME": ['2.7.18', '3.5.9', '3.6.10', '3.7.7', '3.8.3'], + "CYTHON": ["True", "False"] + ], + "DEVELOP": [ + "SERVER": ['2.1', '3.11', 'dse-6.8'], + "RUNTIME": ['2.7.18', '3.6.10'], + "CYTHON": ["True", "False"] + ], + "CASSANDRA": [ + "SERVER": ['2.1', '2.2', '3.0', '3.11', '4.0'], + "RUNTIME": ['2.7.18', '3.5.9', '3.6.10', '3.7.7', '3.8.3'], + "CYTHON": ["True", "False"] + ], + "DSE": [ + "SERVER": ['dse-5.0', 'dse-5.1', 'dse-6.0', 'dse-6.7', 'dse-6.8'], + "RUNTIME": ['2.7.18', '3.5.9', '3.6.10', '3.7.7', '3.8.3'], + "CYTHON": ["True", "False"] + ] +] + +def getBuildContext() { + /* + Based on schedule, parameters and branch name, configure the build context and env vars. 
+ */ + + def driver_display_name = 'Cassandra Python Driver' if (env.GIT_URL.contains('riptano/python-driver')) { - env.DRIVER_DISPLAY_NAME = 'private ' + env.DRIVER_DISPLAY_NAME - env.DRIVER_METRIC_TYPE = 'oss-private' + driver_display_name = 'private ' + driver_display_name } else if (env.GIT_URL.contains('python-dse-driver')) { - env.DRIVER_DISPLAY_NAME = 'DSE Python Driver' - env.DRIVER_METRIC_TYPE = 'dse' + driver_display_name = 'DSE Python Driver' } - env.GIT_SHA = "${env.GIT_COMMIT.take(7)}" - env.GITHUB_PROJECT_URL = "https://${GIT_URL.replaceFirst(/(git@|http:\/\/|https:\/\/)/, '').replace(':', '/').replace('.git', '')}" - env.GITHUB_BRANCH_URL = "${GITHUB_PROJECT_URL}/tree/${env.BRANCH_NAME}" - env.GITHUB_COMMIT_URL = "${GITHUB_PROJECT_URL}/commit/${env.GIT_COMMIT}" + def git_sha = "${env.GIT_COMMIT.take(7)}" + def github_project_url = "https://${GIT_URL.replaceFirst(/(git@|http:\/\/|https:\/\/)/, '').replace(':', '/').replace('.git', '')}" + def github_branch_url = "${github_project_url}/tree/${env.BRANCH_NAME}" + def github_commit_url = "${github_project_url}/commit/${env.GIT_COMMIT}" - sh label: 'Assign Python global environment', script: '''#!/bin/bash -lex - pyenv global ${PYTHON_VERSION} - ''' + def profile = "${params.PROFILE}" + def EVENT_LOOP = "${params.EVENT_LOOP.toLowerCase()}" + matrixType = "FULL" + developBranchPattern = ~"((dev|long)-)?python-.*" - sh label: 'Install socat; required for unix socket tests', script: '''#!/bin/bash -lex - sudo apt-get install socat - ''' + if (developBranchPattern.matcher(env.BRANCH_NAME).matches()) { + matrixType = "DEVELOP" + if (env.BRANCH_NAME.contains("long")) { + profile = "FULL" + } + } + + // Check if parameters were set explicitly + if (params.MATRIX != "DEFAULT") { + matrixType = params.MATRIX + } + + matrix = matrices[matrixType].clone() + if (params.CYTHON != "DEFAULT") { + matrix["CYTHON"] = [params.CYTHON] + } + + if (params.SERVER_VERSION != "DEFAULT") { + matrix["SERVER"] = 
[params.SERVER_VERSION] + } + + if (params.PYTHON_VERSION != "DEFAULT") { + matrix["RUNTIME"] = [params.PYTHON_VERSION] + } + + if (params.CI_SCHEDULE == "WEEKNIGHTS") { + matrix["SERVER"] = params.CI_SCHEDULE_SERVER_VERSION.split(' ') + matrix["RUNTIME"] = params.CI_SCHEDULE_PYTHON_VERSION.split(' ') + } + + context = [ + vars: [ + "PROFILE=${profile}", + "EVENT_LOOP=${EVENT_LOOP}", + "DRIVER_DISPLAY_NAME=${driver_display_name}", "GIT_SHA=${git_sha}", "GITHUB_PROJECT_URL=${github_project_url}", + "GITHUB_BRANCH_URL=${github_branch_url}", "GITHUB_COMMIT_URL=${github_commit_url}" + ], + matrix: matrix + ] + + return context +} + +def buildAndTest(context) { + initializeEnvironment() + installDriverAndCompileExtensions() + + try { + executeTests() + } finally { + junit testResults: '*_results.xml' + } +} + +def getMatrixBuilds(buildContext) { + def tasks = [:] + matrix = buildContext.matrix + + matrix["SERVER"].each { serverVersion -> + matrix["RUNTIME"].each { runtimeVersion -> + matrix["CYTHON"].each { cythonFlag -> + def taskVars = [ + "CASSANDRA_VERSION=${serverVersion}", + "PYTHON_VERSION=${runtimeVersion}", + "CYTHON_ENABLED=${cythonFlag}" + ] + def cythonDesc = cythonFlag == "True" ? 
", Cython": "" + tasks["${serverVersion}, py${runtimeVersion}${cythonDesc}"] = { + node("${OS_VERSION}") { + checkout scm + + withEnv(taskVars) { + buildAndTest(context) + } + } + } + } + } + } + return tasks +} - sh label: 'Install the latest setuptools', script: '''#!/bin/bash -lex +def initializeEnvironment() { + sh label: 'Initialize the environment', script: '''#!/bin/bash -lex + pyenv global ${PYTHON_VERSION} + sudo apt-get install socat pip install --upgrade pip pip install -U setuptools - ''' - - sh label: 'Install CCM', script: '''#!/bin/bash -lex pip install ${HOME}/ccm ''' - // Determine if server version is Apache Cassandra� or DataStax Enterprise + // Determine if server version is Apache CassandraⓇ or DataStax Enterprise if (env.CASSANDRA_VERSION.split('-')[0] == 'dse') { sh label: 'Install DataStax Enterprise requirements', script: '''#!/bin/bash -lex pip install -r test-datastax-requirements.txt @@ -46,7 +179,6 @@ sh label: 'Uninstall the geomet dependency since it is not required for Cassandra', script: '''#!/bin/bash -lex pip uninstall -y geomet ''' - } sh label: 'Install unit test modules', script: '''#!/bin/bash -lex @@ -71,6 +203,7 @@ python --version pip --version + pip freeze printenv | sort ''' } @@ -95,9 +228,9 @@ . 
${HOME}/environment.txt set +o allexport - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_results.xml tests/unit/ || true - EVENT_LOOP_MANAGER=eventlet VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_eventlet_results.xml tests/unit/io/test_eventletreactor.py || true - EVENT_LOOP_MANAGER=gevent VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_gevent_results.xml tests/unit/io/test_geventreactor.py || true + EVENT_LOOP=${EVENT_LOOP} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_results.xml tests/unit/ || true + EVENT_LOOP=eventlet VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_eventlet_results.xml tests/unit/io/test_eventletreactor.py || true + EVENT_LOOP=gevent VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_gevent_results.xml tests/unit/io/test_geventreactor.py || true ''' sh label: 'Execute Simulacron integration tests', script: '''#!/bin/bash -lex @@ -107,13 +240,13 @@ set +o allexport SIMULACRON_JAR="${HOME}/simulacron.jar" - SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} 
MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_results.xml tests/integration/simulacron/ || true + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP=${EVENT_LOOP} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_results.xml tests/integration/simulacron/ || true # Run backpressure tests separately to avoid memory issue - SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_1_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_paused_connections || true - SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_2_results.xml 
tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_queued_requests_timeout || true - SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_3_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_cluster_busy || true - SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_4_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_node_busy || true + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP=${EVENT_LOOP} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_1_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_paused_connections || true + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP=${EVENT_LOOP} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" 
DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_2_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_queued_requests_timeout || true + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP=${EVENT_LOOP} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_3_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_cluster_busy || true + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP=${EVENT_LOOP} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_4_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_node_busy || true ''' sh label: 'Execute CQL engine integration tests', script: '''#!/bin/bash -lex @@ -122,7 +255,7 @@ . 
${HOME}/environment.txt set +o allexport - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=cqle_results.xml tests/integration/cqlengine/ || true + EVENT_LOOP=${EVENT_LOOP} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=cqle_results.xml tests/integration/cqlengine/ || true ''' sh label: 'Execute Apache CassandraⓇ integration tests', script: '''#!/bin/bash -lex @@ -131,7 +264,7 @@ . ${HOME}/environment.txt set +o allexport - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml tests/integration/standard/ || true + EVENT_LOOP=${EVENT_LOOP} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml tests/integration/standard/ || true ''' if (env.CASSANDRA_VERSION.split('-')[0] == 'dse' && env.CASSANDRA_VERSION.split('-')[1] != '4.8') { @@ -141,7 +274,7 @@ . 
${HOME}/environment.txt set +o allexport - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} DSE_VERSION=${DSE_VERSION} ADS_HOME="${HOME}/" VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=dse_results.xml tests/integration/advanced/ || true + EVENT_LOOP=${EVENT_LOOP} CASSANDRA_DIR=${CCM_INSTALL_DIR} DSE_VERSION=${DSE_VERSION} ADS_HOME="${HOME}/" VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=dse_results.xml tests/integration/advanced/ || true ''' } @@ -151,17 +284,17 @@ . ${HOME}/environment.txt set +o allexport - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CLOUD_PROXY_PATH="${HOME}/proxy/" CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=advanced_results.xml tests/integration/cloud/ || true + EVENT_LOOP=${EVENT_LOOP} CLOUD_PROXY_PATH="${HOME}/proxy/" CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=advanced_results.xml tests/integration/cloud/ || true ''' - if (env.EXECUTE_LONG_TESTS == 'True') { + if (env.PROFILE == 'FULL') { sh label: 'Execute long running integration tests', script: '''#!/bin/bash -lex # Load CCM environment variable set -o allexport . 
${HOME}/environment.txt set +o allexport - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --exclude-dir=tests/integration/long/upgrade --with-ignore-docstrings --with-xunit --xunit-file=long_results.xml tests/integration/long/ || true + EVENT_LOOP=${EVENT_LOOP} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --exclude-dir=tests/integration/long/upgrade --with-ignore-docstrings --with-xunit --xunit-file=long_results.xml tests/integration/long/ || true ''' } } @@ -173,7 +306,7 @@ . ${HOME}/environment.txt set +o allexport - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} DSE_VERSION=${DSE_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml tests/integration/standard/test_dse.py || true + EVENT_LOOP=${EVENT_LOOP} CCM_ARGS="${CCM_ARGS}" CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} DSE_VERSION=${DSE_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml tests/integration/standard/test_dse.py || true ''' } @@ -194,69 +327,34 @@ "tests/integration/simulacron/test_endpoint.py" "tests/integration/long/test_ssl.py" ) - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} 
CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml ${EVENT_LOOP_TESTS[@]} || true - ''' -} - -def executeUpgradeTests() { - sh label: 'Execute profile upgrade integration tests', script: '''#!/bin/bash -lex - # Load CCM environment variable - set -o allexport - . ${HOME}/environment.txt - set +o allexport - - EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=upgrade_results.xml tests/integration/upgrade || true + EVENT_LOOP=${EVENT_LOOP} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml ${EVENT_LOOP_TESTS[@]} || true ''' } def executeTests() { - switch(params.PROFILE) { + switch(env.PROFILE) { case 'DSE-SMOKE-TEST': executeDseSmokeTests() break - case 'EVENT-LOOP': + case 'EVENT_LOOP': executeEventLoopTests() break - case 'UPGRADE': - executeUpgradeTests() - break default: executeStandardTests() break } } -def notifySlack(status = 'started') { - // Set the global pipeline scoped environment (this is above each matrix) - env.BUILD_STATED_SLACK_NOTIFIED = 'true' - - def buildType = 'Commit' - if (params.CI_SCHEDULE != 'DO-NOT-CHANGE-THIS-SELECTION') { - buildType = "${params.CI_SCHEDULE.toLowerCase().capitalize()}" - } - - def color = 'good' // Green - if (status.equalsIgnoreCase('aborted')) { - color = '808080' // Grey - } else if 
(status.equalsIgnoreCase('unstable')) { - color = 'warning' // Orange - } else if (status.equalsIgnoreCase('failed')) { - color = 'danger' // Red - } - def message = """Build ${status} for ${env.DRIVER_DISPLAY_NAME} [${buildType}] -<${env.GITHUB_BRANCH_URL}|${env.BRANCH_NAME}> - <${env.RUN_DISPLAY_URL}|#${env.BUILD_NUMBER}> - <${env.GITHUB_COMMIT_URL}|${env.GIT_SHA}>""" - if (params.CI_SCHEDULE != 'DO-NOT-CHANGE-THIS-SELECTION') { - message += " - ${params.CI_SCHEDULE_PYTHON_VERSION} - ${params.EVENT_LOOP_MANAGER}" - } - if (!status.equalsIgnoreCase('Started')) { - message += """ -${status} after ${currentBuild.durationString - ' and counting'}""" +// TODO move this in the shared lib +def getDriverMetricType() { + metric_type = 'oss' + if (env.GIT_URL.contains('riptano/python-driver')) { + metric_type = 'oss-private' + } else if (env.GIT_URL.contains('python-dse-driver')) { + metric_type = 'dse' } - - slackSend color: "${color}", - channel: "#python-driver-dev-bots", - message: "${message}" + return metric_type } def submitCIMetrics(buildType) { @@ -264,7 +362,8 @@ long durationSec = durationMs / 1000 long nowSec = (currentBuild.startTimeInMillis + durationMs) / 1000 def branchNameNoPeriods = env.BRANCH_NAME.replaceAll('\\.', '_') - def durationMetric = "okr.ci.python.${env.DRIVER_METRIC_TYPE}.${buildType}.${branchNameNoPeriods} ${durationSec} ${nowSec}" + metric_type = getDriverMetricType() + def durationMetric = "okr.ci.python.${metric_type}.${buildType}.${branchNameNoPeriods} ${durationSec} ${nowSec}" timeout(time: 1, unit: 'MINUTES') { withCredentials([string(credentialsId: 'lab-grafana-address', variable: 'LAB_GRAFANA_ADDRESS'), @@ -278,108 +377,24 @@ } } -def describePerCommitStage() { - script { - def type = 'standard' - def serverDescription = 'current Apache CassandaraⓇ and supported DataStax Enterprise versions' - if (env.BRANCH_NAME ==~ /long-python.*/) { - type = 'long' - } else if (env.BRANCH_NAME ==~ /dev-python.*/) { - type = 'dev' - } - - 
currentBuild.displayName = "Per-Commit (${env.EVENT_LOOP_MANAGER} | ${type.capitalize()})" - currentBuild.description = "Per-Commit build and ${type} testing of ${serverDescription} against Python v2.7.18 and v3.5.9 using ${env.EVENT_LOOP_MANAGER} event loop manager" - } - - sh label: 'Describe the python environment', script: '''#!/bin/bash -lex - python -V - pip freeze - ''' -} - -def describeScheduledTestingStage() { - script { - def type = params.CI_SCHEDULE.toLowerCase().capitalize() - def displayName = "${type} schedule (${env.EVENT_LOOP_MANAGER}" - if (env.CYTHON_ENABLED == 'True') { - displayName += " | Cython" - } - if (params.PROFILE != 'NONE') { - displayName += " | ${params.PROFILE}" - } - displayName += ")" - currentBuild.displayName = displayName - - def serverVersionDescription = "${params.CI_SCHEDULE_SERVER_VERSION.replaceAll(' ', ', ')} server version(s) in the matrix" - def pythonVersionDescription = "${params.CI_SCHEDULE_PYTHON_VERSION.replaceAll(' ', ', ')} Python version(s) in the matrix" - def description = "${type} scheduled testing using ${env.EVENT_LOOP_MANAGER} event loop manager" - if (env.CYTHON_ENABLED == 'True') { - description += ", with Cython enabled" - } - if (params.PROFILE != 'NONE') { - description += ", ${params.PROFILE} profile" - } - description += ", ${serverVersionDescription}, and ${pythonVersionDescription}" - currentBuild.description = description - } -} - -def describeAdhocTestingStage() { +def describeBuild(buildContext) { script { - def serverType = params.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION.split('-')[0] - def serverDisplayName = 'Apache CassandaraⓇ' - def serverVersion = " v${serverType}" - if (serverType == 'ALL') { - serverDisplayName = "all ${serverDisplayName} and DataStax Enterprise server versions" - serverVersion = '' - } else { - try { - serverVersion = " v${env.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION.split('-')[1]}" - } catch (e) { - ;; // no-op - } - if (serverType == 'dse') { - 
serverDisplayName = 'DataStax Enterprise' - } - } - def displayName = "${params.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION} for v${params.ADHOC_BUILD_AND_EXECUTE_TESTS_PYTHON_VERSION} (${env.EVENT_LOOP_MANAGER}" - if (env.CYTHON_ENABLED == 'True') { - displayName += " | Cython" - } - if (params.PROFILE != 'NONE') { - displayName += " | ${params.PROFILE}" - } - displayName += ")" - currentBuild.displayName = displayName - - def description = "Testing ${serverDisplayName} ${serverVersion} using ${env.EVENT_LOOP_MANAGER} against Python ${params.ADHOC_BUILD_AND_EXECUTE_TESTS_PYTHON_VERSION}" - if (env.CYTHON_ENABLED == 'True') { - description += ", with Cython" - } - if (params.PROFILE == 'NONE') { - if (params.EXECUTE_LONG_TESTS) { - description += ", with" - } else { - description += ", without" - } - description += " long tests executed" - } else { - description += ", ${params.PROFILE} profile" - } - currentBuild.description = description + def runtimes = buildContext.matrix["RUNTIME"] + def serverVersions = buildContext.matrix["SERVER"] + def numBuilds = runtimes.size() * serverVersions.size() * buildContext.matrix["CYTHON"].size() + currentBuild.displayName = "${env.PROFILE} (${env.EVENT_LOOP} | ${numBuilds} builds)" + currentBuild.description = "${env.PROFILE} build testing servers (${serverVersions.join(', ')}) against Python (${runtimes.join(', ')}) using ${env.EVENT_LOOP} event loop manager" } } -def branchPatternCron = ~"(master)" -def riptanoPatternCron = ~"(riptano)" +def scheduleTriggerJobName = "drivers/python/oss/master/disabled" pipeline { agent none // Global pipeline timeout options { - timeout(time: 10, unit: 'HOURS') + timeout(time: 10, unit: 'HOURS') // TODO timeout should be per build buildDiscarder(logRotator(artifactNumToKeepStr: '10', // Keep only the last 10 artifacts numToKeepStr: '50')) // Keep only the last 50 build records } @@ -406,12 +421,73 @@ ''') choice( - name: 'ADHOC_BUILD_AND_EXECUTE_TESTS_PYTHON_VERSION', - choices: ['2.7.18', 
'3.4.10', '3.5.9', '3.6.10', '3.7.7', '3.8.3'], - description: 'Python version to use for adhoc BUILD-AND-EXECUTE-TESTS ONLY!') + name: 'PROFILE', + choices: ['STANDARD', 'FULL', 'DSE-SMOKE-TEST', 'EVENT_LOOP'], + description: '''

Profile to utilize for scheduled or adhoc builds

+ + + + + + + + + + + + + + + + + + + + + + + +
ChoiceDescription
STANDARDExecute the standard tests for the driver
FULLExecute all tests for the driver, including long tests.
DSE-SMOKE-TESTExecute only the DataStax Enterprise smoke tests
EVENT_LOOPExecute only the event loop tests for the specified event loop manager (see: EVENT_LOOP)
''') + choice( + name: 'MATRIX', + choices: ['DEFAULT', 'FULL', 'DEVELOP', 'CASSANDRA', 'DSE'], + description: '''

The matrix for the build.

+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
ChoiceDescription
DEFAULTDefault to the build context.
FULLAll server versions, python runtimes tested with and without Cython.
DEVELOPSmaller matrix for dev purpose.
CASSANDRAAll cassandra server versions.
DSEAll dse server versions.
''') + choice( + name: 'PYTHON_VERSION', + choices: ['DEFAULT', '2.7.18', '3.5.9', '3.6.10', '3.7.7', '3.8.3'], + description: 'Python runtime version. Default to the build context.') choice( - name: 'ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION', - choices: ['2.1', // Legacy Apache CassandraⓇ + name: 'SERVER_VERSION', + choices: ['DEFAULT', + '2.1', // Legacy Apache CassandraⓇ '2.2', // Legacy Apache CassandraⓇ '3.0', // Previous Apache CassandraⓇ '3.11', // Current Apache CassandraⓇ @@ -421,7 +497,7 @@ 'dse-6.0', // Previous DataStax Enterprise 'dse-6.7', // Previous DataStax Enterprise 'dse-6.8', // Current DataStax Enterprise - 'ALL'], + ], description: '''Apache CassandraⓇ and DataStax Enterprise server version to use for adhoc BUILD-AND-EXECUTE-TESTS ONLY! @@ -430,9 +506,13 @@ + + + + - + @@ -440,15 +520,15 @@ - + - + - + @@ -471,16 +551,32 @@
Choice Description
DEFAULTDefault to the build context.
2.1Apache CassandaraⓇ; v2.1.xApache CassandraⓇ; v2.1.x
2.2
3.0Apache CassandaraⓇ v3.0.xApache CassandraⓇ v3.0.x
3.11Apache CassandaraⓇ v3.11.xApache CassandraⓇ v3.11.x
4.0Apache CassandaraⓇ v4.x (CURRENTLY UNDER DEVELOPMENT)Apache CassandraⓇ v4.x (CURRENTLY UNDER DEVELOPMENT)
dse-5.0DataStax Enterprise v6.8.x (CURRENTLY UNDER DEVELOPMENT)
''') - booleanParam( + choice( name: 'CYTHON', - defaultValue: false, - description: 'Flag to determine if Cython should be enabled for scheduled or adhoc builds') - booleanParam( - name: 'EXECUTE_LONG_TESTS', - defaultValue: false, - description: 'Flag to determine if long integration tests should be executed for scheduled or adhoc builds') + choices: ['DEFAULT', 'True', 'False'], + description: '''

Flag to determine if Cython should be enabled

+ + + + + + + + + + + + + + + + + + + +
ChoiceDescription
DefaultDefault to the build context.
TrueEnable Cython
FalseDisable Cython
''') choice( - name: 'EVENT_LOOP_MANAGER', + name: 'EVENT_LOOP', choices: ['LIBEV', 'GEVENT', 'EVENTLET', 'ASYNCIO', 'ASYNCORE', 'TWISTED'], description: '''

Event loop manager to utilize for scheduled or adhoc builds

@@ -516,34 +612,6 @@
''') choice( - name: 'PROFILE', - choices: ['NONE', 'DSE-SMOKE-TEST', 'EVENT-LOOP', 'UPGRADE'], - description: '''

Profile to utilize for scheduled or adhoc builds

- - - - - - - - - - - - - - - - - - - - - - - -
ChoiceDescription
NONEExecute the standard tests for the driver
DSE-SMOKE-TESTExecute only the DataStax Enterprise smoke tests
EVENT-LOOPExecute only the event loop tests for the specified event loop manager (see: EVENT_LOOP_MANAGER)
UPGRADEExecute only the upgrade tests
''') - choice( name: 'CI_SCHEDULE', choices: ['DO-NOT-CHANGE-THIS-SELECTION', 'WEEKNIGHTS', 'WEEKENDS'], description: 'CI testing schedule to execute periodically scheduled builds and tests of the driver (DO NOT CHANGE THIS SELECTION)') @@ -558,316 +626,50 @@ } triggers { - parameterizedCron((branchPatternCron.matcher(env.BRANCH_NAME).matches() && !riptanoPatternCron.matcher(GIT_URL).find()) ? """ + parameterizedCron((scheduleTriggerJobName == env.JOB_NAME) ? """ # Every weeknight (Monday - Friday) around 4:00 AM # These schedules will run with and without Cython enabled for Python v2.7.18 and v3.5.9 - H 4 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.2 3.11 dse-5.1 dse-6.0 dse-6.7 - H 4 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.2 3.11 dse-5.1 dse-6.0 dse-6.7 - - # Every Saturday around 12:00, 4:00 and 8:00 PM - # These schedules are for weekly libev event manager runs with and without Cython for most of the Python versions (excludes v3.5.9.x) - H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 - H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.4.10;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 - H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 - H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 - H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 - # These schedules are for weekly gevent event manager event loop only runs with and without Cython for most of the Python 
versions (excludes v3.4.10.x) - H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - # These schedules are for weekly eventlet event manager event loop only runs with and without Cython for most of the Python versions (excludes v3.4.10.x) - H 20 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 20 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 20 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 20 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 20 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 
dse-6.0 dse-6.7 - - # Every Sunday around 12:00 and 4:00 AM - # These schedules are for weekly asyncore event manager event loop only runs with and without Cython for most of the Python versions (excludes v3.4.10.x) - H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - # These schedules are for weekly twisted event manager event loop only runs with and without Cython for most of the Python versions (excludes v3.4.10.x) - H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 - H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 
dse-6.0 dse-6.7 - H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 4 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;EVENT_LOOP=LIBEV;CI_SCHEDULE_PYTHON_VERSION=2.7.18 3.5.9;CI_SCHEDULE_SERVER_VERSION=2.2 3.11 dse-5.1 dse-6.0 dse-6.7 """ : "") } environment { OS_VERSION = 'ubuntu/bionic64/python-driver' - CYTHON_ENABLED = "${params.CYTHON ? 'True' : 'False'}" - EVENT_LOOP_MANAGER = "${params.EVENT_LOOP_MANAGER.toLowerCase()}" - EXECUTE_LONG_TESTS = "${params.EXECUTE_LONG_TESTS ? 'True' : 'False'}" CCM_ENVIRONMENT_SHELL = '/usr/local/bin/ccm_environment.sh' CCM_MAX_HEAP_SIZE = '1536M' } stages { - stage ('Per-Commit') { - options { - timeout(time: 2, unit: 'HOURS') + stage ('Build and Test') { + agent { + // // If I removed this agent block, GIT_URL and GIT_COMMIT aren't set. + // // However, this trigger an additional checkout + label "master" } when { beforeAgent true - branch pattern: '((dev|long)-)?python-.*', comparator: 'REGEXP' allOf { - expression { params.ADHOC_BUILD_TYPE == 'BUILD' } - expression { params.CI_SCHEDULE == 'DO-NOT-CHANGE-THIS-SELECTION' } not { buildingTag() } } } - matrix { - axes { - axis { - name 'CASSANDRA_VERSION' - values '3.11', // Current Apache Cassandra - 'dse-6.8' // Current DataStax Enterprise - } - axis { - name 'PYTHON_VERSION' - values '2.7.18', '3.5.9' - } - axis { - name 'CYTHON_ENABLED' - values 'False' - } - } + steps { + script { + context = getBuildContext() + withEnv(context.vars) { + describeBuild(context) + slack.notifyChannel() - agent { - label "${OS_VERSION}" - } + // build and test all builds + parallel getMatrixBuilds(context) - stages { - stage('Initialize-Environment') { - steps { - initializeEnvironment() - script { - if (env.BUILD_STATED_SLACK_NOTIFIED != 'true') { - notifySlack() - } - } - } - } - stage('Describe-Build') { - steps { - describePerCommitStage() - } - } - 
stage('Install-Driver-And-Compile-Extensions') { - steps { - installDriverAndCompileExtensions() - } - } - stage('Execute-Tests') { - steps { - - script { - if (env.BRANCH_NAME ==~ /long-python.*/) { - withEnv(["EXECUTE_LONG_TESTS=True"]) { - executeTests() - } - } - else { - executeTests() - } - } - } - post { - always { - junit testResults: '*_results.xml' - } - } - } - } - } - post { - always { - node('master') { + // send the metrics submitCIMetrics('commit') + slack.notifyChannel(currentBuild.currentResult) } } - aborted { - notifySlack('aborted') - } - success { - notifySlack('completed') - } - unstable { - notifySlack('unstable') - } - failure { - notifySlack('FAILED') - } - } - } - - stage ('Scheduled-Testing') { - when { - beforeAgent true - allOf { - expression { params.ADHOC_BUILD_TYPE == 'BUILD' } - expression { params.CI_SCHEDULE != 'DO-NOT-CHANGE-THIS-SELECTION' } - not { buildingTag() } - } - } - matrix { - axes { - axis { - name 'CASSANDRA_VERSION' - values '2.1', // Legacy Apache Cassandra - '2.2', // Legacy Apache Cassandra - '3.0', // Previous Apache Cassandra - '3.11', // Current Apache Cassandra - 'dse-5.1', // Legacy DataStax Enterprise - 'dse-6.0', // Previous DataStax Enterprise - 'dse-6.7' // Current DataStax Enterprise - } - axis { - name 'CYTHON_ENABLED' - values 'True', 'False' - } - } - when { - beforeAgent true - allOf { - expression { return params.CI_SCHEDULE_SERVER_VERSION.split(' ').any { it =~ /(ALL|${env.CASSANDRA_VERSION})/ } } - } - } - - environment { - PYTHON_VERSION = "${params.CI_SCHEDULE_PYTHON_VERSION}" - } - agent { - label "${OS_VERSION}" - } - - stages { - stage('Initialize-Environment') { - steps { - initializeEnvironment() - script { - if (env.BUILD_STATED_SLACK_NOTIFIED != 'true') { - notifySlack() - } - } - } - } - stage('Describe-Build') { - steps { - describeScheduledTestingStage() - } - } - stage('Install-Driver-And-Compile-Extensions') { - steps { - installDriverAndCompileExtensions() - } - } - 
stage('Execute-Tests') { - steps { - executeTests() - } - post { - always { - junit testResults: '*_results.xml' - } - } - } - } - } - post { - aborted { - notifySlack('aborted') - } - success { - notifySlack('completed') - } - unstable { - notifySlack('unstable') - } - failure { - notifySlack('FAILED') - } } } - - stage('Adhoc-Testing') { - when { - beforeAgent true - allOf { - expression { params.ADHOC_BUILD_TYPE == 'BUILD-AND-EXECUTE-TESTS' } - not { buildingTag() } - } - } - - environment { - CYTHON_ENABLED = "${params.CYTHON ? 'True' : 'False'}" - PYTHON_VERSION = "${params.ADHOC_BUILD_AND_EXECUTE_TESTS_PYTHON_VERSION}" - } - - matrix { - axes { - axis { - name 'CASSANDRA_VERSION' - values '2.1', // Legacy Apache Cassandra - '2.2', // Legacy Apache Cassandra - '3.0', // Previous Apache Cassandra - '3.11', // Current Apache Cassandra - '4.0', // Development Apache Cassandra - 'dse-5.0', // Long Term Support DataStax Enterprise - 'dse-5.1', // Legacy DataStax Enterprise - 'dse-6.0', // Previous DataStax Enterprise - 'dse-6.7', // Current DataStax Enterprise - 'dse-6.8' // Development DataStax Enterprise - } - } - when { - beforeAgent true - allOf { - expression { params.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION ==~ /(ALL|${env.CASSANDRA_VERSION})/ } - } - } - - agent { - label "${OS_VERSION}" - } - - stages { - stage('Describe-Build') { - steps { - describeAdhocTestingStage() - } - } - stage('Initialize-Environment') { - steps { - initializeEnvironment() - } - } - stage('Install-Driver-And-Compile-Extensions') { - steps { - installDriverAndCompileExtensions() - } - } - stage('Execute-Tests') { - steps { - executeTests() - } - post { - always { - junit testResults: '*_results.xml' - } - } - } - } - } - } } } diff -Nru python-cassandra-driver-3.24.0/Jenkinsfile.bak python-cassandra-driver-3.25.0/Jenkinsfile.bak --- python-cassandra-driver-3.24.0/Jenkinsfile.bak 1970-01-01 00:00:00.000000000 +0000 +++ python-cassandra-driver-3.25.0/Jenkinsfile.bak 2021-03-18 
14:48:29.000000000 +0000 @@ -0,0 +1,873 @@ +#!groovy + +def initializeEnvironment() { + env.DRIVER_DISPLAY_NAME = 'Cassandra Python Driver' + env.DRIVER_METRIC_TYPE = 'oss' + if (env.GIT_URL.contains('riptano/python-driver')) { + env.DRIVER_DISPLAY_NAME = 'private ' + env.DRIVER_DISPLAY_NAME + env.DRIVER_METRIC_TYPE = 'oss-private' + } else if (env.GIT_URL.contains('python-dse-driver')) { + env.DRIVER_DISPLAY_NAME = 'DSE Python Driver' + env.DRIVER_METRIC_TYPE = 'dse' + } + + env.GIT_SHA = "${env.GIT_COMMIT.take(7)}" + env.GITHUB_PROJECT_URL = "https://${GIT_URL.replaceFirst(/(git@|http:\/\/|https:\/\/)/, '').replace(':', '/').replace('.git', '')}" + env.GITHUB_BRANCH_URL = "${GITHUB_PROJECT_URL}/tree/${env.BRANCH_NAME}" + env.GITHUB_COMMIT_URL = "${GITHUB_PROJECT_URL}/commit/${env.GIT_COMMIT}" + + sh label: 'Assign Python global environment', script: '''#!/bin/bash -lex + pyenv global ${PYTHON_VERSION} + ''' + + sh label: 'Install socat; required for unix socket tests', script: '''#!/bin/bash -lex + sudo apt-get install socat + ''' + + sh label: 'Install the latest setuptools', script: '''#!/bin/bash -lex + pip install --upgrade pip + pip install -U setuptools + ''' + + sh label: 'Install CCM', script: '''#!/bin/bash -lex + pip install ${HOME}/ccm + ''' + + // Determine if server version is Apache Cassandra� or DataStax Enterprise + if (env.CASSANDRA_VERSION.split('-')[0] == 'dse') { + sh label: 'Install DataStax Enterprise requirements', script: '''#!/bin/bash -lex + pip install -r test-datastax-requirements.txt + ''' + } else { + sh label: 'Install Apache CassandraⓇ requirements', script: '''#!/bin/bash -lex + pip install -r test-requirements.txt + ''' + + sh label: 'Uninstall the geomet dependency since it is not required for Cassandra', script: '''#!/bin/bash -lex + pip uninstall -y geomet + ''' + + } + + sh label: 'Install unit test modules', script: '''#!/bin/bash -lex + pip install nose-ignore-docstring nose-exclude service_identity + ''' + + if 
(env.CYTHON_ENABLED == 'True') { + sh label: 'Install cython modules', script: '''#!/bin/bash -lex + pip install cython numpy + ''' + } + + sh label: 'Download Apache CassandraⓇ or DataStax Enterprise', script: '''#!/bin/bash -lex + . ${CCM_ENVIRONMENT_SHELL} ${CASSANDRA_VERSION} + ''' + + sh label: 'Display Python and environment information', script: '''#!/bin/bash -le + # Load CCM environment variables + set -o allexport + . ${HOME}/environment.txt + set +o allexport + + python --version + pip --version + printenv | sort + ''' +} + +def installDriverAndCompileExtensions() { + if (env.CYTHON_ENABLED == 'True') { + sh label: 'Install the driver and compile with C extensions with Cython', script: '''#!/bin/bash -lex + python setup.py build_ext --inplace + ''' + } else { + sh label: 'Install the driver and compile with C extensions without Cython', script: '''#!/bin/bash -lex + python setup.py build_ext --inplace --no-cython + ''' + } +} + +def executeStandardTests() { + + sh label: 'Execute unit tests', script: '''#!/bin/bash -lex + # Load CCM environment variables + set -o allexport + . 
${HOME}/environment.txt + set +o allexport + + EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_results.xml tests/unit/ || true + EVENT_LOOP_MANAGER=eventlet VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_eventlet_results.xml tests/unit/io/test_eventletreactor.py || true + EVENT_LOOP_MANAGER=gevent VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=unit_gevent_results.xml tests/unit/io/test_geventreactor.py || true + ''' + + sh label: 'Execute Simulacron integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variables + set -o allexport + . ${HOME}/environment.txt + set +o allexport + + SIMULACRON_JAR="${HOME}/simulacron.jar" + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_results.xml tests/integration/simulacron/ || true + + # Run backpressure tests separately to avoid memory issue + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s 
%(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_1_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_paused_connections || true + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_2_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_queued_requests_timeout || true + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_3_results.xml tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_cluster_busy || true + SIMULACRON_JAR=${SIMULACRON_JAR} EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --exclude test_backpressure.py --xunit-file=simulacron_backpressure_4_results.xml 
tests/integration/simulacron/test_backpressure.py:TCPBackpressureTests.test_node_busy || true + ''' + + sh label: 'Execute CQL engine integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variables + set -o allexport + . ${HOME}/environment.txt + set +o allexport + + EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=cqle_results.xml tests/integration/cqlengine/ || true + ''' + + sh label: 'Execute Apache CassandraⓇ integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variables + set -o allexport + . ${HOME}/environment.txt + set +o allexport + + EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml tests/integration/standard/ || true + ''' + + if (env.CASSANDRA_VERSION.split('-')[0] == 'dse' && env.CASSANDRA_VERSION.split('-')[1] != '4.8') { + sh label: 'Execute DataStax Enterprise integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variable + set -o allexport + . 
${HOME}/environment.txt + set +o allexport + + EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CASSANDRA_DIR=${CCM_INSTALL_DIR} DSE_VERSION=${DSE_VERSION} ADS_HOME="${HOME}/" VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=dse_results.xml tests/integration/advanced/ || true + ''' + } + + sh label: 'Execute DataStax Constellation integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variable + set -o allexport + . ${HOME}/environment.txt + set +o allexport + + EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CLOUD_PROXY_PATH="${HOME}/proxy/" CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=advanced_results.xml tests/integration/cloud/ || true + ''' + + if (env.EXECUTE_LONG_TESTS == 'True') { + sh label: 'Execute long running integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variable + set -o allexport + . ${HOME}/environment.txt + set +o allexport + + EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --exclude-dir=tests/integration/long/upgrade --with-ignore-docstrings --with-xunit --xunit-file=long_results.xml tests/integration/long/ || true + ''' + } +} + +def executeDseSmokeTests() { + sh label: 'Execute profile DataStax Enterprise smoke test integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variable + set -o allexport + . 
${HOME}/environment.txt + set +o allexport + + EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} DSE_VERSION=${DSE_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml tests/integration/standard/test_dse.py || true + ''' +} + +def executeEventLoopTests() { + sh label: 'Execute profile event loop manager integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variable + set -o allexport + . ${HOME}/environment.txt + set +o allexport + + EVENT_LOOP_TESTS=( + "tests/integration/standard/test_cluster.py" + "tests/integration/standard/test_concurrent.py" + "tests/integration/standard/test_connection.py" + "tests/integration/standard/test_control_connection.py" + "tests/integration/standard/test_metrics.py" + "tests/integration/standard/test_query.py" + "tests/integration/simulacron/test_endpoint.py" + "tests/integration/long/test_ssl.py" + ) + EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} CCM_ARGS="${CCM_ARGS}" DSE_VERSION=${DSE_VERSION} CASSANDRA_VERSION=${CCM_CASSANDRA_VERSION} MAPPED_CASSANDRA_VERSION=${MAPPED_CASSANDRA_VERSION} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=standard_results.xml ${EVENT_LOOP_TESTS[@]} || true + ''' +} + +def executeUpgradeTests() { + sh label: 'Execute profile upgrade integration tests', script: '''#!/bin/bash -lex + # Load CCM environment variable + set -o allexport + . 
${HOME}/environment.txt + set +o allexport + + EVENT_LOOP_MANAGER=${EVENT_LOOP_MANAGER} VERIFY_CYTHON=${CYTHON_ENABLED} nosetests -s -v --logging-format="[%(levelname)s] %(asctime)s %(thread)d: %(message)s" --with-ignore-docstrings --with-xunit --xunit-file=upgrade_results.xml tests/integration/upgrade || true + ''' +} + +def executeTests() { + switch(params.PROFILE) { + case 'DSE-SMOKE-TEST': + executeDseSmokeTests() + break + case 'EVENT-LOOP': + executeEventLoopTests() + break + case 'UPGRADE': + executeUpgradeTests() + break + default: + executeStandardTests() + break + } +} + +def notifySlack(status = 'started') { + // Set the global pipeline scoped environment (this is above each matrix) + env.BUILD_STATED_SLACK_NOTIFIED = 'true' + + def buildType = 'Commit' + if (params.CI_SCHEDULE != 'DO-NOT-CHANGE-THIS-SELECTION') { + buildType = "${params.CI_SCHEDULE.toLowerCase().capitalize()}" + } + + def color = 'good' // Green + if (status.equalsIgnoreCase('aborted')) { + color = '808080' // Grey + } else if (status.equalsIgnoreCase('unstable')) { + color = 'warning' // Orange + } else if (status.equalsIgnoreCase('failed')) { + color = 'danger' // Red + } + + def message = """Build ${status} for ${env.DRIVER_DISPLAY_NAME} [${buildType}] +<${env.GITHUB_BRANCH_URL}|${env.BRANCH_NAME}> - <${env.RUN_DISPLAY_URL}|#${env.BUILD_NUMBER}> - <${env.GITHUB_COMMIT_URL}|${env.GIT_SHA}>""" + if (params.CI_SCHEDULE != 'DO-NOT-CHANGE-THIS-SELECTION') { + message += " - ${params.CI_SCHEDULE_PYTHON_VERSION} - ${params.EVENT_LOOP_MANAGER}" + } + if (!status.equalsIgnoreCase('Started')) { + message += """ +${status} after ${currentBuild.durationString - ' and counting'}""" + } + + slackSend color: "${color}", + channel: "#python-driver-dev-bots", + message: "${message}" +} + +def submitCIMetrics(buildType) { + long durationMs = currentBuild.duration + long durationSec = durationMs / 1000 + long nowSec = (currentBuild.startTimeInMillis + durationMs) / 1000 + def branchNameNoPeriods = 
env.BRANCH_NAME.replaceAll('\\.', '_') + def durationMetric = "okr.ci.python.${env.DRIVER_METRIC_TYPE}.${buildType}.${branchNameNoPeriods} ${durationSec} ${nowSec}" + + timeout(time: 1, unit: 'MINUTES') { + withCredentials([string(credentialsId: 'lab-grafana-address', variable: 'LAB_GRAFANA_ADDRESS'), + string(credentialsId: 'lab-grafana-port', variable: 'LAB_GRAFANA_PORT')]) { + withEnv(["DURATION_METRIC=${durationMetric}"]) { + sh label: 'Send runtime metrics to labgrafana', script: '''#!/bin/bash -lex + echo "${DURATION_METRIC}" | nc -q 5 ${LAB_GRAFANA_ADDRESS} ${LAB_GRAFANA_PORT} + ''' + } + } + } +} + +def describePerCommitStage() { + script { + def type = 'standard' + def serverDescription = 'current Apache CassandaraⓇ and supported DataStax Enterprise versions' + if (env.BRANCH_NAME ==~ /long-python.*/) { + type = 'long' + } else if (env.BRANCH_NAME ==~ /dev-python.*/) { + type = 'dev' + } + + currentBuild.displayName = "Per-Commit (${env.EVENT_LOOP_MANAGER} | ${type.capitalize()})" + currentBuild.description = "Per-Commit build and ${type} testing of ${serverDescription} against Python v2.7.18 and v3.5.9 using ${env.EVENT_LOOP_MANAGER} event loop manager" + } + + sh label: 'Describe the python environment', script: '''#!/bin/bash -lex + python -V + pip freeze + ''' +} + +def describeScheduledTestingStage() { + script { + def type = params.CI_SCHEDULE.toLowerCase().capitalize() + def displayName = "${type} schedule (${env.EVENT_LOOP_MANAGER}" + if (env.CYTHON_ENABLED == 'True') { + displayName += " | Cython" + } + if (params.PROFILE != 'NONE') { + displayName += " | ${params.PROFILE}" + } + displayName += ")" + currentBuild.displayName = displayName + + def serverVersionDescription = "${params.CI_SCHEDULE_SERVER_VERSION.replaceAll(' ', ', ')} server version(s) in the matrix" + def pythonVersionDescription = "${params.CI_SCHEDULE_PYTHON_VERSION.replaceAll(' ', ', ')} Python version(s) in the matrix" + def description = "${type} scheduled testing using 
${env.EVENT_LOOP_MANAGER} event loop manager" + if (env.CYTHON_ENABLED == 'True') { + description += ", with Cython enabled" + } + if (params.PROFILE != 'NONE') { + description += ", ${params.PROFILE} profile" + } + description += ", ${serverVersionDescription}, and ${pythonVersionDescription}" + currentBuild.description = description + } +} + +def describeAdhocTestingStage() { + script { + def serverType = params.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION.split('-')[0] + def serverDisplayName = 'Apache CassandaraⓇ' + def serverVersion = " v${serverType}" + if (serverType == 'ALL') { + serverDisplayName = "all ${serverDisplayName} and DataStax Enterprise server versions" + serverVersion = '' + } else { + try { + serverVersion = " v${env.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION.split('-')[1]}" + } catch (e) { + ;; // no-op + } + if (serverType == 'dse') { + serverDisplayName = 'DataStax Enterprise' + } + } + def displayName = "${params.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION} for v${params.ADHOC_BUILD_AND_EXECUTE_TESTS_PYTHON_VERSION} (${env.EVENT_LOOP_MANAGER}" + if (env.CYTHON_ENABLED == 'True') { + displayName += " | Cython" + } + if (params.PROFILE != 'NONE') { + displayName += " | ${params.PROFILE}" + } + displayName += ")" + currentBuild.displayName = displayName + + def description = "Testing ${serverDisplayName} ${serverVersion} using ${env.EVENT_LOOP_MANAGER} against Python ${params.ADHOC_BUILD_AND_EXECUTE_TESTS_PYTHON_VERSION}" + if (env.CYTHON_ENABLED == 'True') { + description += ", with Cython" + } + if (params.PROFILE == 'NONE') { + if (params.EXECUTE_LONG_TESTS) { + description += ", with" + } else { + description += ", without" + } + description += " long tests executed" + } else { + description += ", ${params.PROFILE} profile" + } + currentBuild.description = description + } +} + +def branchPatternCron = ~"(master)" +def riptanoPatternCron = ~"(riptano)" + +pipeline { + agent none + + // Global pipeline timeout + options { + timeout(time: 
10, unit: 'HOURS') + buildDiscarder(logRotator(artifactNumToKeepStr: '10', // Keep only the last 10 artifacts + numToKeepStr: '50')) // Keep only the last 50 build records + } + + parameters { + choice( + name: 'ADHOC_BUILD_TYPE', + choices: ['BUILD', 'BUILD-AND-EXECUTE-TESTS'], + description: '''

Perform an adhoc build operation

+ + + + + + + + + + + + + + + +
ChoiceDescription
BUILDPerforms a Per-Commit build
BUILD-AND-EXECUTE-TESTSPerforms a build and executes the integration and unit tests
''') + choice( + name: 'ADHOC_BUILD_AND_EXECUTE_TESTS_PYTHON_VERSION', + choices: ['2.7.18', '3.4.10', '3.5.9', '3.6.10', '3.7.7', '3.8.3'], + description: 'Python version to use for adhoc BUILD-AND-EXECUTE-TESTS ONLY!') + choice( + name: 'ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION', + choices: ['2.1', // Legacy Apache CassandraⓇ + '2.2', // Legacy Apache CassandraⓇ + '3.0', // Previous Apache CassandraⓇ + '3.11', // Current Apache CassandraⓇ + '4.0', // Development Apache CassandraⓇ + 'dse-5.0', // Long Term Support DataStax Enterprise + 'dse-5.1', // Legacy DataStax Enterprise + 'dse-6.0', // Previous DataStax Enterprise + 'dse-6.7', // Previous DataStax Enterprise + 'dse-6.8', // Current DataStax Enterprise + 'ALL'], + description: '''Apache CassandraⓇ and DataStax Enterprise server version to use for adhoc BUILD-AND-EXECUTE-TESTS ONLY! + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ChoiceDescription
2.1Apache CassandraⓇ; v2.1.x
2.2Apache CassandraⓇ; v2.2.x
3.0Apache CassandraⓇ v3.0.x
3.11Apache CassandraⓇ v3.11.x
4.0Apache CassandraⓇ v4.x (CURRENTLY UNDER DEVELOPMENT)
dse-5.0DataStax Enterprise v5.0.x (Long Term Support)
dse-5.1DataStax Enterprise v5.1.x
dse-6.0DataStax Enterprise v6.0.x
dse-6.7DataStax Enterprise v6.7.x
dse-6.8DataStax Enterprise v6.8.x (CURRENTLY UNDER DEVELOPMENT)
''') + booleanParam( + name: 'CYTHON', + defaultValue: false, + description: 'Flag to determine if Cython should be enabled for scheduled or adhoc builds') + booleanParam( + name: 'EXECUTE_LONG_TESTS', + defaultValue: false, + description: 'Flag to determine if long integration tests should be executed for scheduled or adhoc builds') + choice( + name: 'EVENT_LOOP_MANAGER', + choices: ['LIBEV', 'GEVENT', 'EVENTLET', 'ASYNCIO', 'ASYNCORE', 'TWISTED'], + description: '''

Event loop manager to utilize for scheduled or adhoc builds

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ChoiceDescription
LIBEVA full-featured and high-performance event loop that is loosely modeled after libevent, but without its limitations and bugs
GEVENTA co-routine-based Python networking library that uses greenlet to provide a high-level synchronous API on top of the libev or libuv event loop
EVENTLETA concurrent networking library for Python that allows you to change how you run your code, not how you write it
ASYNCIOA library to write concurrent code using the async/await syntax
ASYNCOREA module provides the basic infrastructure for writing asynchronous socket service clients and servers
TWISTEDAn event-driven networking engine written in Python and licensed under the open source MIT license
''') + choice( + name: 'PROFILE', + choices: ['NONE', 'DSE-SMOKE-TEST', 'EVENT-LOOP', 'UPGRADE'], + description: '''

Profile to utilize for scheduled or adhoc builds

+ + + + + + + + + + + + + + + + + + + + + + + +
ChoiceDescription
NONEExecute the standard tests for the driver
DSE-SMOKE-TESTExecute only the DataStax Enterprise smoke tests
EVENT-LOOPExecute only the event loop tests for the specified event loop manager (see: EVENT_LOOP_MANAGER)
UPGRADEExecute only the upgrade tests
''') + choice( + name: 'CI_SCHEDULE', + choices: ['DO-NOT-CHANGE-THIS-SELECTION', 'WEEKNIGHTS', 'WEEKENDS'], + description: 'CI testing schedule to execute periodically scheduled builds and tests of the driver (DO NOT CHANGE THIS SELECTION)') + string( + name: 'CI_SCHEDULE_PYTHON_VERSION', + defaultValue: 'DO-NOT-CHANGE-THIS-SELECTION', + description: 'CI testing python version to utilize for scheduled test runs of the driver (DO NOT CHANGE THIS SELECTION)') + string( + name: 'CI_SCHEDULE_SERVER_VERSION', + defaultValue: 'DO-NOT-CHANGE-THIS-SELECTION', + description: 'CI testing server version to utilize for scheduled test runs of the driver (DO NOT CHANGE THIS SELECTION)') + } + + triggers { + parameterizedCron((branchPatternCron.matcher(env.BRANCH_NAME).matches() && !riptanoPatternCron.matcher(GIT_URL).find()) ? """ + # Every weeknight (Monday - Friday) around 4:00 AM + # These schedules will run with and without Cython enabled for Python v2.7.18 and v3.5.9 + H 4 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.2 3.11 dse-5.1 dse-6.0 dse-6.7 + H 4 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.2 3.11 dse-5.1 dse-6.0 dse-6.7 + + # Every Saturday around 12:00, 4:00 and 8:00 PM + # These schedules are for weekly libev event manager runs with and without Cython for most of the Python versions (excludes v3.5.9.x) + H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 + H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.4.10;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 + H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 + H 12 * * 6 
%CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 + H 12 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=LIBEV;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 3.0 dse-5.1 dse-6.0 dse-6.7 + # These schedules are for weekly gevent event manager event loop only runs with and without Cython for most of the Python versions (excludes v3.4.10.x) + H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 16 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=GEVENT;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + # These schedules are for weekly eventlet event manager event loop only runs with and without Cython for most of the Python versions (excludes v3.4.10.x) + H 20 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 20 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 20 * * 6 
%CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 20 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 20 * * 6 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=EVENTLET;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + + # Every Sunday around 12:00 and 4:00 AM + # These schedules are for weekly asyncore event manager event loop only runs with and without Cython for most of the Python versions (excludes v3.4.10.x) + H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 0 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=ASYNCORE;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + # These schedules are for weekly twisted event manager event loop only runs with and without Cython for most of the Python versions (excludes v3.4.10.x) + H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=2.7.18;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 4 * * 7 
%CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.5.9;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.6.10;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.7.7;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + H 4 * * 7 %CI_SCHEDULE=WEEKENDS;EVENT_LOOP_MANAGER=TWISTED;PROFILE=EVENT-LOOP;CI_SCHEDULE_PYTHON_VERSION=3.8.3;CI_SCHEDULE_SERVER_VERSION=2.1 2.2 3.0 3.11 dse-5.1 dse-6.0 dse-6.7 + """ : "") + } + + environment { + OS_VERSION = 'ubuntu/bionic64/python-driver' + CYTHON_ENABLED = "${params.CYTHON ? 'True' : 'False'}" + EVENT_LOOP_MANAGER = "${params.EVENT_LOOP_MANAGER.toLowerCase()}" + EXECUTE_LONG_TESTS = "${params.EXECUTE_LONG_TESTS ? 'True' : 'False'}" + CCM_ENVIRONMENT_SHELL = '/usr/local/bin/ccm_environment.sh' + CCM_MAX_HEAP_SIZE = '1536M' + } + + stages { + stage ('Per-Commit') { + options { + timeout(time: 2, unit: 'HOURS') + } + when { + beforeAgent true + branch pattern: '((dev|long)-)?python-.*', comparator: 'REGEXP' + allOf { + expression { params.ADHOC_BUILD_TYPE == 'BUILD' } + expression { params.CI_SCHEDULE == 'DO-NOT-CHANGE-THIS-SELECTION' } + not { buildingTag() } + } + } + + matrix { + axes { + axis { + name 'CASSANDRA_VERSION' + values '3.11', // Current Apache Cassandra + 'dse-6.8' // Current DataStax Enterprise + } + axis { + name 'PYTHON_VERSION' + values '2.7.18', '3.5.9' + } + axis { + name 'CYTHON_ENABLED' + values 'False' + } + } + + agent { + label "${OS_VERSION}" + } + + stages { + stage('Initialize-Environment') { + steps { + initializeEnvironment() + script { + if (env.BUILD_STATED_SLACK_NOTIFIED != 'true') { + notifySlack() + } + } + } + } + stage('Describe-Build') { + steps { + describePerCommitStage() + } + } + 
stage('Install-Driver-And-Compile-Extensions') { + steps { + installDriverAndCompileExtensions() + } + } + stage('Execute-Tests') { + steps { + + script { + if (env.BRANCH_NAME ==~ /long-python.*/) { + withEnv(["EXECUTE_LONG_TESTS=True"]) { + executeTests() + } + } + else { + executeTests() + } + } + } + post { + always { + junit testResults: '*_results.xml' + } + } + } + } + } + post { + always { + node('master') { + submitCIMetrics('commit') + } + } + aborted { + notifySlack('aborted') + } + success { + notifySlack('completed') + } + unstable { + notifySlack('unstable') + } + failure { + notifySlack('FAILED') + } + } + } + + stage ('Scheduled-Testing') { + when { + beforeAgent true + allOf { + expression { params.ADHOC_BUILD_TYPE == 'BUILD' } + expression { params.CI_SCHEDULE != 'DO-NOT-CHANGE-THIS-SELECTION' } + not { buildingTag() } + } + } + matrix { + axes { + axis { + name 'CASSANDRA_VERSION' + values '2.1', // Legacy Apache Cassandra + '2.2', // Legacy Apache Cassandra + '3.0', // Previous Apache Cassandra + '3.11', // Current Apache Cassandra + 'dse-5.1', // Legacy DataStax Enterprise + 'dse-6.0', // Previous DataStax Enterprise + 'dse-6.7' // Current DataStax Enterprise + } + axis { + name 'CYTHON_ENABLED' + values 'True', 'False' + } + } + when { + beforeAgent true + allOf { + expression { return params.CI_SCHEDULE_SERVER_VERSION.split(' ').any { it =~ /(ALL|${env.CASSANDRA_VERSION})/ } } + } + } + + environment { + PYTHON_VERSION = "${params.CI_SCHEDULE_PYTHON_VERSION}" + } + agent { + label "${OS_VERSION}" + } + + stages { + stage('Initialize-Environment') { + steps { + initializeEnvironment() + script { + if (env.BUILD_STATED_SLACK_NOTIFIED != 'true') { + notifySlack() + } + } + } + } + stage('Describe-Build') { + steps { + describeScheduledTestingStage() + } + } + stage('Install-Driver-And-Compile-Extensions') { + steps { + installDriverAndCompileExtensions() + } + } + stage('Execute-Tests') { + steps { + executeTests() + } + post { + always { + 
junit testResults: '*_results.xml' + } + } + } + } + } + post { + aborted { + notifySlack('aborted') + } + success { + notifySlack('completed') + } + unstable { + notifySlack('unstable') + } + failure { + notifySlack('FAILED') + } + } + } + + + stage('Adhoc-Testing') { + when { + beforeAgent true + allOf { + expression { params.ADHOC_BUILD_TYPE == 'BUILD-AND-EXECUTE-TESTS' } + not { buildingTag() } + } + } + + environment { + CYTHON_ENABLED = "${params.CYTHON ? 'True' : 'False'}" + PYTHON_VERSION = "${params.ADHOC_BUILD_AND_EXECUTE_TESTS_PYTHON_VERSION}" + } + + matrix { + axes { + axis { + name 'CASSANDRA_VERSION' + values '2.1', // Legacy Apache Cassandra + '2.2', // Legacy Apache Cassandra + '3.0', // Previous Apache Cassandra + '3.11', // Current Apache Cassandra + '4.0', // Development Apache Cassandra + 'dse-5.0', // Long Term Support DataStax Enterprise + 'dse-5.1', // Legacy DataStax Enterprise + 'dse-6.0', // Previous DataStax Enterprise + 'dse-6.7', // Current DataStax Enterprise + 'dse-6.8' // Development DataStax Enterprise + } + } + when { + beforeAgent true + allOf { + expression { params.ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION ==~ /(ALL|${env.CASSANDRA_VERSION})/ } + } + } + + agent { + label "${OS_VERSION}" + } + + stages { + stage('Describe-Build') { + steps { + describeAdhocTestingStage() + } + } + stage('Initialize-Environment') { + steps { + initializeEnvironment() + } + } + stage('Install-Driver-And-Compile-Extensions') { + steps { + installDriverAndCompileExtensions() + } + } + stage('Execute-Tests') { + steps { + executeTests() + } + post { + always { + junit testResults: '*_results.xml' + } + } + } + } + } + } + } +} diff -Nru python-cassandra-driver-3.24.0/README-dev.rst python-cassandra-driver-3.25.0/README-dev.rst --- python-cassandra-driver-3.24.0/README-dev.rst 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/README-dev.rst 2021-03-18 14:48:29.000000000 +0000 @@ -176,7 +176,7 @@ Testing Multiple Python 
Versions -------------------------------- -If you want to test all of python 2.7, 3.4, 3.5, 3.6, 3.7, and pypy, use tox (this is what +If you want to test all of python 2.7, 3.5, 3.6, 3.7, and pypy, use tox (this is what TravisCI runs):: tox diff -Nru python-cassandra-driver-3.24.0/README.rst python-cassandra-driver-3.25.0/README.rst --- python-cassandra-driver-3.24.0/README.rst 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/README.rst 2021-03-18 14:48:29.000000000 +0000 @@ -7,7 +7,7 @@ A modern, `feature-rich `_ and highly-tunable Python client library for Apache Cassandra (2.1+) and DataStax Enterprise (4.7+) using exclusively Cassandra's binary protocol and Cassandra Query Language v3. -The driver supports Python 2.7, 3.4, 3.5, 3.6, 3.7 and 3.8. +The driver supports Python 2.7, 3.5, 3.6, 3.7 and 3.8. **Note:** DataStax products do not support big-endian systems. diff -Nru python-cassandra-driver-3.24.0/setup.py python-cassandra-driver-3.25.0/setup.py --- python-cassandra-driver-3.24.0/setup.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/setup.py 2021-03-18 14:48:29.000000000 +0000 @@ -443,7 +443,6 @@ 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', diff -Nru python-cassandra-driver-3.24.0/test-requirements.txt python-cassandra-driver-3.25.0/test-requirements.txt --- python-cassandra-driver-3.24.0/test-requirements.txt 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/test-requirements.txt 2021-03-18 14:48:29.000000000 +0000 @@ -15,5 +15,5 @@ packaging backports.ssl_match_hostname; python_version < '2.7.9' futurist; python_version >= '3.7' -asynctest; python_version > '3.4' +asynctest; python_version >= '3.5' ipaddress; python_version < '3.3.0' diff -Nru 
python-cassandra-driver-3.24.0/tests/integration/cloud/test_cloud.py python-cassandra-driver-3.25.0/tests/integration/cloud/test_cloud.py --- python-cassandra-driver-3.24.0/tests/integration/cloud/test_cloud.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/integration/cloud/test_cloud.py 2021-03-18 14:48:29.000000000 +0000 @@ -13,6 +13,10 @@ # limitations under the License from cassandra.datastax.cloud import parse_metadata_info from cassandra.query import SimpleStatement +from cassandra.cqlengine import connection +from cassandra.cqlengine.management import sync_table, create_keyspace_simple +from cassandra.cqlengine.models import Model +from cassandra.cqlengine import columns try: import unittest2 as unittest @@ -20,7 +24,7 @@ import unittest # noqa import six -from ssl import SSLContext, PROTOCOL_TLSv1 +from ssl import SSLContext, PROTOCOL_TLS from cassandra import DriverException, ConsistencyLevel, InvalidRequest from cassandra.cluster import NoHostAvailable, ExecutionProfile, Cluster, _execution_profile_to_string @@ -30,7 +34,7 @@ from mock import patch -from tests.integration import requirescloudproxy, TestCluster +from tests.integration import requirescloudproxy from tests.util import wait_until_not_raised from tests.integration.cloud import CloudProxyCluster, CLOUD_PROXY_SERVER @@ -88,7 +92,7 @@ def test_error_overriding_ssl_context(self): with self.assertRaises(ValueError) as cm: - self.connect(self.creds, ssl_context=SSLContext(PROTOCOL_TLSv1)) + self.connect(self.creds, ssl_context=SSLContext(PROTOCOL_TLS)) self.assertIn('cannot be specified with a cloud configuration', str(cm.exception)) @@ -143,7 +147,7 @@ wait_until_not_raised( lambda: self.assertEqual(len(self.hosts_up()), 3), 0.02, 250) - mocked_resolve.assert_called_once() + mocked_resolve.assert_called() def test_metadata_unreachable(self): with self.assertRaises(DriverException) as cm: @@ -234,3 +238,14 @@ self.session.execute(statement) except InvalidRequest: 
self.fail("InvalidRequest was incorrectly raised for write query at LOCAL QUORUM!") + + def test_cqlengine_can_connect(self): + class TestModel(Model): + id = columns.Integer(primary_key=True) + val = columns.Text() + + connection.setup(None, "test", cloud={'secure_connect_bundle': self.creds}) + create_keyspace_simple('test', 1) + sync_table(TestModel) + TestModel.objects.create(id=42, value='test') + self.assertEqual(len(TestModel.objects.all()), 1) diff -Nru python-cassandra-driver-3.24.0/tests/integration/__init__.py python-cassandra-driver-3.25.0/tests/integration/__init__.py --- python-cassandra-driver-3.24.0/tests/integration/__init__.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/integration/__init__.py 2021-03-18 14:48:29.000000000 +0000 @@ -207,8 +207,6 @@ if DSE_VERSION: return ProtocolVersion.DSE_V2 else: - global ALLOW_BETA_PROTOCOL - ALLOW_BETA_PROTOCOL = True return ProtocolVersion.V5 if CASSANDRA_VERSION >= Version('3.10'): if DSE_VERSION: @@ -234,9 +232,12 @@ 3.X -> 4, 3 3.10(C*) -> 5(beta),4,3 3.10(DSE) -> DSE_V1,4,3 - 4.0(C*) -> 5(beta),4,3 + 4.0(C*) -> 6(beta),5,4,3 4.0(DSE) -> DSE_v2, DSE_V1,4,3 ` """ + if CASSANDRA_VERSION >= Version('4.0-beta5'): + if not DSE_VERSION: + return (3, 4, 5, 6) if CASSANDRA_VERSION >= Version('4.0-a'): if DSE_VERSION: return (3, 4, ProtocolVersion.DSE_V1, ProtocolVersion.DSE_V2) @@ -316,7 +317,7 @@ notprotocolv1 = unittest.skipUnless(PROTOCOL_VERSION > 1, 'Protocol v1 not supported') lessthenprotocolv4 = unittest.skipUnless(PROTOCOL_VERSION < 4, 'Protocol versions 4 or greater not supported') greaterthanprotocolv3 = unittest.skipUnless(PROTOCOL_VERSION >= 4, 'Protocol versions less than 4 are not supported') -protocolv5 = unittest.skipUnless(5 in get_supported_protocol_versions(), 'Protocol versions less than 5 are not supported') +protocolv6 = unittest.skipUnless(6 in get_supported_protocol_versions(), 'Protocol versions less than 6 are not supported') greaterthancass20 = 
unittest.skipUnless(CASSANDRA_VERSION >= Version('2.1'), 'Cassandra version 2.1 or greater required') greaterthancass21 = unittest.skipUnless(CASSANDRA_VERSION >= Version('2.2'), 'Cassandra version 2.2 or greater required') greaterthanorequalcass30 = unittest.skipUnless(CASSANDRA_VERSION >= Version('3.0'), 'Cassandra version 3.0 or greater required') Binary files /tmp/tmp2txy8omi/oBpM12ta4U/python-cassandra-driver-3.24.0/tests/integration/long/ssl/127.0.0.1.keystore and /tmp/tmp2txy8omi/KkSqgU1RNZ/python-cassandra-driver-3.25.0/tests/integration/long/ssl/127.0.0.1.keystore differ Binary files /tmp/tmp2txy8omi/oBpM12ta4U/python-cassandra-driver-3.24.0/tests/integration/long/ssl/cassandra.truststore and /tmp/tmp2txy8omi/KkSqgU1RNZ/python-cassandra-driver-3.25.0/tests/integration/long/ssl/cassandra.truststore differ diff -Nru python-cassandra-driver-3.24.0/tests/integration/long/ssl/client.crt_signed python-cassandra-driver-3.25.0/tests/integration/long/ssl/client.crt_signed --- python-cassandra-driver-3.24.0/tests/integration/long/ssl/client.crt_signed 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/integration/long/ssl/client.crt_signed 2021-03-18 14:48:29.000000000 +0000 @@ -1,19 +1,19 @@ -----BEGIN CERTIFICATE----- -MIIDDjCCAfYCFAdgzL+DD0fzl77VnEr2V4axiX0qMA0GCSqGSIb3DQEBCwUAMEIx +MIIDDjCCAfYCFAG4WryLorTXxNtrkEJ56zUg/XdDMA0GCSqGSIb3DQEBCwUAMEIx CzAJBgNVBAYTAlVTMREwDwYDVQQKDAhkYXRhc3RheDEPMA0GA1UECwwGZmllbGRz -MQ8wDQYDVQQDDAZyb290Q2EwHhcNMjAwMTEwMjExMTM3WhcNMjEwMTA5MjExMTM3 +MQ8wDQYDVQQDDAZyb290Q2EwHhcNMjEwMzE3MTcwNTE4WhcNMjIwMzE3MTcwNTE4 WjBFMQswCQYDVQQGEwJVUzERMA8GA1UECgwIZGF0YXN0YXgxDzANBgNVBAsMBmZp ZWxkczESMBAGA1UEAwwJMTI3LjAuMC4xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAvGQ6G4o1PYcDiRuZnWhSzq2Zh0i/73JPyUAauPzXFRjZmOQdUaro -inkyoSpJkARxL6SIGbUeKq6rD/2Std4b6T1lj5GOmmqetUtNf1tiXvgu0NFUuWh7 -KJllojuFvUgiWRN3oMEoYgN+EqIBS+wYx5Nh59UW2v+1D/zD3ckn3kHXX2c+fNjJ -UqYUbUBElpb7N9j6SouakW3UzTonvl1kxMO3UjiWyy03IVRS8zQo/zpHHOqsqYhr 
-G4jCWB+7JRHi8qxiA3sjA6mUPatgF46dJKlEXEP5OSsESC20CDhmOzFQFA4/PIc9 -srG9MSaZlENyhF/ZTW4CJeH9QmCmFnv4ewIDAQABMA0GCSqGSIb3DQEBCwUAA4IB -AQA5LFiDq/+2SpfmL6US0x93E4LNCut7qa7eQhqEdmnKqshO3HwmNmYzLWhpifHL -OFz3Tzz7JxuIyNNI2XPQvLcyncymw69PdIbaYiNhuwvV7DtZUuxY/z0dlDPNDOqU -NJCS6/ny2KcCK5d36b+ppvLB8DHdyQAn++7mqqvB1vyePtkKx801qC0VFvP/NVKf -R5gFWBwFX+wNz66oFhikPSxG96SaddE5XIpRWY2richacgn2/f+Z04P6AN7DyAWA -lc94hdua7X+AV5lKM7VMWcgSJq/xhGZb0GsJv9+yZwTHWnv4qOtT5FVGGIVXrl97 -yjeRAMOqzv8+w1nGGZ1Kv1Ym +MIIBCgKCAQEAnrpE3g8pbQn2tVVidX2Ww1rh/6YIH6EGW9hXMO/F506ReMruv+Al +ilc7B2sPpGRDKXupy23IcpfMIe9+Lm74/yu7pW51rJ/r2jMqg+tViFa/GQxSQLKd +AxDAvwJaAM41kro0DKmcm4RwfYAltupwc6pC7AfBtT08PBuDK7WfaNnFbhGAWkHv +MbULNWAKbPWqITHbUEvLgS/uPj+/W4SHk5GaYk0Y2mU3aWypeDOBqEfKTi2W0ix1 +O7SpOHyfA0hvXS9IilF/HWURvr9u13mnvJNe8W+uqWqlQMdyFsbPCIhbVwVwGYQp +yoyBrgz6y5SPwSyugAb2F8Yk3UpvqH30yQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB +AQB5XV+3NS5UpwpTXTYsadLL8XcdGsfITMs4MSv0N3oir++TUzTc3cOd2T6YVdEc +ypw5CKTYnFTK9oF2PZXeV+aLIjdvK4AukQurB8EdXq4Hu7y1b61OaGRqiKTVsIne +LwxCXpc42jqMFt4mMXpmU/hSCjRSvoumTcL1aHUzaPlSIasD2JDyLurO64gxQypi +wbD9gliPJ60pdhY0m9NfF5F2PdqBuJXrhF1VuxYx1/cfo/c1A4UK2slhsZCDls7/ +HbM8ri5Z74M1EtCGFcTNYvm0xlfF5arisGQSKhTw+06LnpUlQi5a8NRNBLeAmem/ +cuICJJbnSzjmq9skkp8i/ejH -----END CERTIFICATE----- diff -Nru python-cassandra-driver-3.24.0/tests/integration/long/ssl/client_encrypted.key python-cassandra-driver-3.25.0/tests/integration/long/ssl/client_encrypted.key --- python-cassandra-driver-3.24.0/tests/integration/long/ssl/client_encrypted.key 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/integration/long/ssl/client_encrypted.key 2021-03-18 14:48:29.000000000 +0000 @@ -1,30 +1,30 @@ -----BEGIN RSA PRIVATE KEY----- Proc-Type: 4,ENCRYPTED -DEK-Info: AES-256-CBC,12FC332B1EC2742035449FF59F9C5F71 +DEK-Info: AES-256-CBC,7288A409E846EBE2DE421B77598DAF98 -mjFChSmk+AX00EmSz22A+kv8X5XwtW8awkrcud2cH1tUopGv5B7PL6dfprrNp4fj -T9nmncH65b+GovBClEqOy6I+Tzm/WJ6aOqQnkfL48QT5KW6bM/Gzm8JSJkXSEfDZ 
-Cck1FvMG4ituL50Rvlw5XpFjSyFt6VEZi/s6taSLPiJmuw8cWeEkMCs3I4nZee0k -lDvsiDRcqV/+Uk7jM4umepa7fBMumI8fydtscpFxJBspBVIb4UMx3i2lFWs7NeXf -EN92S1fhq3uWBDIlQMtAAoV2qf3cPSg7+du/iLB6/Sc7mxGyQ6aVWoIyxB+dwVVX -oqmeBgyNV8xS94Qcw2aNcqyyFsU0cIitafIzBuJO9+G2/FB+UWTJwPzbp/Kk24OV -wZ1wExFcYo4UOe6FT5LAihhfkGFdpXba8XviEgQK18o8lQL1SitwGSrlBNQsohJt -3Ug9aHvTGvECBdjj7NwNP0EDs7IxMVSsOHPAJyDLdWzHe0eLqRYCWNQlWGn7LKIP -dsqJqpMJkh3G3m7Lkkb0Z4YaUoFRi5DyCB2vkBcgmvbV2QdNHwc6EWk4OnFTq4vE -kWrMGa7lQkxnXV12NUNaUoHbsX7waeO5d81IUHbfa6r3/N046QGDrkLAFnsghfiS -I76y8QklqVIp5/5t4hg96n55lSqxbDAikHMrTI5niHUajqU74xkoCgh/SpwwwndT -QwdRH8jAMLyBf4xf5PQMPJwiNltX7QE6XB9sD80f9SJdf3xHF9L3H++ANPDzhHJC -ntGd/JmsMv+G/u+VLPTF9hB83UEMuTi1OL2Fm7V41yR6ZROYDUXyf8hhUxKAGasu -Hn1Q41mVb1nFE2NaD65+6MD2UtqBBXaD8raTGzcauxgkNTg4JroMM3vc+PSanSUp -ZSN28MYp4XlGl5dDhpoY782V09YIweLKAqC/4NZK9X0r563BLj5pektuqNlNG9N3 -E+X9F4fP4tjOYGeVDhZ1sYBYsUfHrkbngacGpnD+eis+ReSaBE7T5/jfxnbW9AW7 -Cj4qRlUYvyYznvHxCrQ8Su6IleW49z0lD8jLmxUPZkzMqEX8tYe8dxz5AaG2Egb9 -bhwheQiiih1LTv9ZuLMGyARQcOZsPqsi1SRMzaln5f/PNlh4RCP+JbjvHxy5IMAN -g5ti9ejF2D7A1YOxlyqv0xvN5z4OEkXngg3HCT/FFgJWsppEG6nbpgpDlnC/xBiJ -3VgtwmU/CsJ11lkCZihl2xMxiazA0WoKMNs8N4qxjr5YEnyBwRqFWjpMgXUyjcPH -H+QBAkUL11qbr8O0Sj2cn/urVZNCQMMDpYT2VDbJK6vrj2ZyEsH3XeeQr2ohevwp -4GVyIDgzvP/+B67k65Pvj6iUXI2kXqZhhURBPKDo00XCFSz+9guDJX7HeRII1D1k -VzW3aly1SvrEUgI30FpC1L47LY/NksVybSD1kxNMMdb1g/yVJCzGeMgMyiyreaxT -+UXJ5/sZIQ+FZqzh24EShM/JmUC9epO0nl8nee8FAd9kY9kkDCjTXf3KzYTiyKN8 -ma2xnLwWWK1bqj282/dj6D/QlXYgePb90Duq5X19HOTe6wouNnr6fx8ZH/k/tAGQ +ahiUSf+k9POIEUJb5BGbQ6knk4+FtTz+e+6fouqVc4Lq+RXR9f0pFBi9eDEkFNiN +AcUjLkxh+3TmihTZJprqXSbQ3jacwbnwDOFgtZE3PxoA1heHxADaKCNr+Ph0lC/T +3cIzsoIZ6slk+3n6ERieZRdmvoMH1SY8nXKT5+bLMR4RIjw1y7h26MRhjQS+lXaX +Asd5EOGROCIgefeEBGHAbrlg0FoHy7slqVBxuZphTHKtyK/VK4fRLt6doUzBu5GJ +T2jdrqJCWr5PRn3bAqMemJWxDhZLX4DyNDQPn8riZ8jMbwPOVUSnF8B8re1tNkQ0 +CsH77sYIIjmPdizCdvj91+jH6o7MRCZPvky+PHG/9G5WsPiw5W1i/nrPemT1XJyy +oPRc/fMFfbHmW3HCGqgv2/6Wg+17un/a6UyzXsbNdhDZLCVqtAQ7PSv83z5oUazT 
+djzFHgxSqRknUY0lOUvP8Rni67MG+Rcksj9HgszhLoC0be64IX0Ey5oc5+pBYrf9 +FVEPsuyyu4aDSRYYATC2E1V/EQRwcvpKEZNFTbqMpQhjrWtlBM/GgQnQBeQdLAGX +yefDSzkH31y5gcdgHLElriWwbHHbcaAmf3e15W94YHgTytJBsQ9A19SmtmgUmo4h +jaFoUooM5mFA8hc/snSe2PdkEefkzS72g8qxa//61LTJAAkVk43dYjoqQ34wq6WR +OB4nn/W2xlfv/ClZJTWf8YvQTrQptJY5VQq/TTEcrXy67Uc0wRHXZK2rTjKeyRj9 +65SkyyXhMopWEl2vX25ReITVfdJ0FgjqI/ugYSf25iOfJtsk+jgrtrswZ+8F2eMq +iAQ+0JSiYmlot2Pn1QCalLjtTz8zeMfXPyo5fbKNMdp52U1cPYld90kUGHZfjqju +GmY/aHa6N8lZGxj8SC/JM36GawaGKe4S/F5BetYJOpaEzkpowqlTC8Syv529rm46 +vvgf+EJL8gRvdtnIEe/qtzbtel299VhaBpuOcApfTDSxRHZmvkCpdHo9I3KgOZB9 +Cqu9Bz+FiJmTk8rGQwmI8EYj38jneEoqA+fN7tUkzxCGacg+x6ke4nOcJzgBhd94 +8DvGclrcAwBY1mlNYRceFJKFXhwLZTKBojZlS8Q9863EAH3DOBLeP85V3YvBD/MK +O+kzPoxN/jPVNho7y4gL7skcqe/IXePzPxBcZrHJjoU7mGVDcVcouRj16XSezMbB +5Pft0/gGiItRJ2+v9DlPjzDfjTuRdS78muaZ4nNqX6B+JmyPJtkb2CdiHz6B21RO +3hjGrffM1nhmYBegyjTVc88IxzYg0T8CZLq1FYxuTZmwyahA520IpwsbfwXxLVMU +5rmou5dj1pVlvoP3l+ivPqugeY3k7UjZ33m5H9p009JR40dybr1S2RbI8Gqhe953 +0bedA4DWvPakODXgYu43al92uR/tyjazeB5t7Iu8uB5Xcm3/Mqoofe9xtdQSCWa0 +jKKvXzSpL1MM2C0bRyYHIkVR65K7Zmi/BzvTaPECo1+Uv+EwqRZRyBzUZKPP8LMq +jTCOBmYaK8+0dTRk8MEzrPW2ihVVJYVMmFyTZKW0iK7kOMKZRkhDCaNSUlPEty7j -----END RSA PRIVATE KEY----- diff -Nru python-cassandra-driver-3.24.0/tests/integration/long/ssl/client.key python-cassandra-driver-3.25.0/tests/integration/long/ssl/client.key --- python-cassandra-driver-3.24.0/tests/integration/long/ssl/client.key 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/integration/long/ssl/client.key 2021-03-18 14:48:29.000000000 +0000 @@ -1,28 +1,28 @@ -----BEGIN PRIVATE KEY----- -MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC8ZDobijU9hwOJ -G5mdaFLOrZmHSL/vck/JQBq4/NcVGNmY5B1RquiKeTKhKkmQBHEvpIgZtR4qrqsP -/ZK13hvpPWWPkY6aap61S01/W2Je+C7Q0VS5aHsomWWiO4W9SCJZE3egwShiA34S -ogFL7BjHk2Hn1Rba/7UP/MPdySfeQddfZz582MlSphRtQESWlvs32PpKi5qRbdTN -Oie+XWTEw7dSOJbLLTchVFLzNCj/Okcc6qypiGsbiMJYH7slEeLyrGIDeyMDqZQ9 
-q2AXjp0kqURcQ/k5KwRILbQIOGY7MVAUDj88hz2ysb0xJpmUQ3KEX9lNbgIl4f1C -YKYWe/h7AgMBAAECggEASoGdFY04dyfxdwUqYL2Emma/5GgaOJnOAjrPFsAwVBCq -5jO5gLYGF9XM9z5hL4sCNKRuizQ9RQYlc0KHBlRcV4dHplsbuehW8j5g3PCIXCTt -ZvqS9mzi4HCiaGIAB5cCtpXjZvldfj4BW18lAiDSwAOC4gw9aMlek38U+571nIlh -gs2rtCuWR6usJbJdFfJ4ouIPX1F4Gz5s/lYhG5jTGUAnKyE1NgqPj+PY+ZfH0qNX -AVJVNn9Ze3hT3JdEjTYFNo5c1YHui9Krlh5EMabVWgqmbI2WS8sZsRcMAluDB5lY -OaCKLFYwOIG4nGBU9TE2s7fxl3qCkBGNDhuD4n80iQKBgQDrZ233CVfSLgTyKKuV -i2niqIk1fAgLIFefHxsI1wlApLBCc58dAGhAvmHRdYSw8iBinbLXGnS9yPZvjNik -kLyWPznq1p94CZTD1KsLz5qmRlReKHJEhdajlR5LnisULvqiDc7/lk18W2fD0KeD -UnN5hUjjOeaQRjvOTghTie+1PwKBgQDM39Agd/INPr6WsYSqYPEeE9fZu27p0YUQ -4Y8zHVENeg1jUMDXgHo3weZl5n3sFMYeuYm6d9nE0k/Mjr2hL/zlnqQBG5XtJ5oW -2r7Sh1pKCxV+oYgsdFctkJKSi5Q8RyK4YrUQA55EZqDewfZlmmWW3q/fahls49Ny -uTJESV/BxQKBgDiIh10rjj64tJFfeQ2aBJzdcs44ckoRw1lAhCKUWfF/W6Ep2U2C -uobJ8f32ph5El8h3LOsBvIWTjLNvdNvYsqG2n3cpgfS3AFYjbcyRWAeUnlBakE6q -gciZWEQ6wQfA3IosnMi+1O8HmJzrMD+WforxmnaPgjKl21kJXnCJkNrLAoGAdlG3 -6Fh9UUrwVSVWgfOUrRM2sMd/yce4OsSZqCKBQfBANSBZDtxjOTphbm5MQQDKXso2 -kZtQCEyRy4iQWbvWKWKSQxWEY79gTVytofaLnYGDO2vcshfKlUUOcVXtGVbX5XcU -LJh6WfSPabbJL2qYyyX2mmezIWD+KB5uumNJyyUCgYEA6afM7Gzv/jvm8Vrqx+Gj -gDFvO0ZxkxOCtA1Qd42NYu9rgn0pMIJyukxvEXePxUu164ehE7VRN0HhofXMGbMG -R8aVqbl0w7jSN6M97vo6Xn+sRwFcgMfjo3uKgdXbdxyKnzPGxOTYUuy7Q5B0teiw -kz4fRgPkfSQ+nfVrHAgX3WA= +MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCeukTeDyltCfa1 +VWJ1fZbDWuH/pggfoQZb2Fcw78XnTpF4yu6/4CWKVzsHaw+kZEMpe6nLbchyl8wh +734ubvj/K7ulbnWsn+vaMyqD61WIVr8ZDFJAsp0DEMC/AloAzjWSujQMqZybhHB9 +gCW26nBzqkLsB8G1PTw8G4MrtZ9o2cVuEYBaQe8xtQs1YAps9aohMdtQS8uBL+4+ +P79bhIeTkZpiTRjaZTdpbKl4M4GoR8pOLZbSLHU7tKk4fJ8DSG9dL0iKUX8dZRG+ +v27Xeae8k17xb66paqVAx3IWxs8IiFtXBXAZhCnKjIGuDPrLlI/BLK6ABvYXxiTd +Sm+offTJAgMBAAECggEAN+VysRx3wy1aEvuRo7xpZjxQD/5BKBpFqfxioBogAFfb +xMT6FNnzfmc/o1ohdQvV1vr0jW4Iw8oPGfhD4Eg2KW4WM6jVicf7f6i7FR+/zDZ4 +L3L2WFBOGLFCn0FNvrDfjt9Byx/DxcR69Mc3ANZIaYMQ9Bu7LH73AlfR9oeMLpjL 
++6g1qz2yz8Sm2CMCGXTyXtvUCgn2ld6nz8KlZ8FTUG9C9mAabuvV91Ko6rmTxuiv +YKvHSPnIjXRjuC+Ozjf1rYTOJ5LVMNNhlbIKBG/Nx5QzL7bA3XDtMD1BEI9pdHR+ +5HwA0tV2Ex67tBCJwlBAhYLxuPjfOj1R5KV8wriE3QKBgQDNvqOaGYiXwp9Rajoo +ltlOBPfnjshd9tPdc6tTUQR34vSbkHrg0HVJhvIP5LRbyx/M/8ACQxFkDRE4U7fJ +xVGDs8Pi0FqcqFTnm/AYQ5eZbJkPp9qe71aDOPanncrVNEGFeW26LaeLGbTLrOMM +6mTmsfGig0MKgml35IMrP+oPuwKBgQDFf56DdaFe08xSK9pDWuKxUuBIagGExQkQ +r9eYasBc336CXh3FWtpSlxl73dqtISh/HbKbv+OZfkVdbmkcTVGlWm/N/XvLqpPK +86kbKW6PY8FxIY/RxiZANf/JJ5gzPp6VQMJeSy+oepeWj11mTLcT02plvIMM0Jmg +Z5B9Hw37SwKBgDR/59lDmLI47FRnCc4fp/WbmPKSYZhwimFgyZ/p9XzuAcLMXD6P +ks4fTBc4IbmmnEfAHuu013QzTWiVHDm1SvaTYXG3/tcosPmkteBLJxz0NB5lk4io +w+eaGn5s6jv7KJj5gkFWswDwn0y1of5CtVqUn3b7jZjZ7DW2rq3TklNPAoGAIzaW +56+AfyzaQEhrWRkKVD2HmcG01Zxf+mav1RArjiOXJd1sB3UkehdQxuIOjFHeK5P6 +9YQoK4T1DyyRdydeCFJwntS0TuLyCPyaySoA+XX61pX6U5e12DsIiTATFgfzNH9g +aHmVXL/G6WRUbdn9xn4qeUs8Pnuu+IeenoB7+LMCgYBBnig9nTp81U+SGsNl2D3J +WUz4z+XzEfKU1nq2s4KNjIPB2T1ne+1x3Uso2hagtEHeuEbZoRY4dtCahAvYwrPM +8wtDFQXWmvFyN3X0Js65GZ++knuseQ1tdlbc/4C+k4u26tVe2GcwhKTjn08++L2E +UB3pLXbssswH271OjD+QkQ== -----END PRIVATE KEY----- diff -Nru python-cassandra-driver-3.24.0/tests/integration/long/ssl/rootCa.crt python-cassandra-driver-3.25.0/tests/integration/long/ssl/rootCa.crt --- python-cassandra-driver-3.24.0/tests/integration/long/ssl/rootCa.crt 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/integration/long/ssl/rootCa.crt 2021-03-18 14:48:29.000000000 +0000 @@ -1,19 +1,19 @@ -----BEGIN CERTIFICATE----- -MIIDCzCCAfMCFG+iEODhRSw0SUMut7vX0hPTLCe1MA0GCSqGSIb3DQEBCwUAMEIx +MIIDCzCCAfMCFCoTNYhIQpOXMBnAq8Bw72qfKwGLMA0GCSqGSIb3DQEBCwUAMEIx CzAJBgNVBAYTAlVTMREwDwYDVQQKDAhkYXRhc3RheDEPMA0GA1UECwwGZmllbGRz -MQ8wDQYDVQQDDAZyb290Q2EwHhcNMjAwMTEwMjExMTM1WhcNMzAwMTA3MjExMTM1 +MQ8wDQYDVQQDDAZyb290Q2EwHhcNMjEwMzE3MTcwNTE2WhcNMzEwMzE1MTcwNTE2 WjBCMQswCQYDVQQGEwJVUzERMA8GA1UECgwIZGF0YXN0YXgxDzANBgNVBAsMBmZp ZWxkczEPMA0GA1UEAwwGcm9vdENhMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB 
-CgKCAQEAvLrr+qvLvUHZe4Py2QvqlAh/SMxscWsPKfCvJLYS4SX4tDxyn/Xgn1+M -9XMlhtnpJHSdL6bzVm3PMDJmPe+8zKa8s/vsbKCOakIi9GmjxGHMdRfN3NsubRvZ -sr6rX4VFxu6ATmZU9q1vNlOBSoFntOSTYTQH9svrQxsM3dNnDOXr3eFUAvNaNp/l -/YFcjD0LBSVYBUvLf+1RvIDDXS8nTCHZLaKbCevGBMMy8tWYgDD4CD8q9gQ3xail -yc7ES94NiZ6OB+a9GbUce3308NqVmWpDhXBzXshKy5TaH4VfthkJlnnYATjsYI/1 -XkLWVCSgwI2Yl7fHmttJXvmA/zggbQIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCX -05lSh/MBzbxg6zEjQdyN3Mm/TNiZv+wIDDddYKVA8psX0gLiCgnpC0B4BK+ualYk -AQwJvgQQzbdxy/7RC/a0OEvXg4syjLs3vRq4fgCXebzfbpxhqykCO7czyS4WPZp3 -8AEeqjV0iZHE2WN1+kfWT6Dwc8rjFxWyLB4qgtKxpeW8sNdpSussNpD+IoKpIzzh -VBNRzb+g1tc87zDnvy7GfEjLRke57sZ82QN/bGZakdVgfppg1mbnrrbsOPEMSTMU -iCQo2JP64HS7oGmYUp5KQFge0fGqou0Ww/rkVp2AtR/tNKw4XwZybTQgwsdvz54S -KEmDs3I5S2S5QO5hRDG+ +CgKCAQEApFoQtNu0+XQuMBPle4WAJYIMR74HL15uk9ToKBqMEXL7ah3r23xTTeGr +NyUXicM6Owiup7DK27F4vni+MYKAn7L4uZ99mW0ATYNXBDLFB+wwy1JBk4Dw5+eZ +q9lz1TGK7uBvTOXCllOA2qxRqtMTl2aPy5OuciWQe794abwFqs5+1l9GEuzJGsp1 +P9L4yljbmijC8RmvDFAeUZoKRdKXw2G5kUOHqK9Aej5gLxIK920PezpgLxm0V/PD +ZAlwlsW0vT79RgZCF/vtKcKSLtFTHgPBNPPbkZmOdE7s/6KoAkORBV/9CIsKeTC3 +Y/YeYQ2+G0gxiq1RcMavPw8f58POTQIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQA1 +MXBlk6u2oVBM+4SyYc2nsaHyerM+omUEysAUNFJq6S6i0pu32ULcusDfrnrIQoyR +xPJ/GSYqZkIDX0s9LvPVD6A6bnugR+Z6VfEniLkG1+TkFC+JMCblgJyaF/EbuayU +3iJX+uj7ikTySjMSDvXxOHik2i0aOh90B/351+sFnSPQrFDQ0XqxeG8s0d7EiLTV +wWJmsYglSeTo1vF3ilVRwjmHO9sX6cmQhRvRNmiQrdWaM3gLS5F6yoQ2UQQ3YdFp +quhYuNwy0Ip6ZpORHYtzkCKSanz/oUh17QWvi7aaJyqD5G5hWZgn3R4RCutoOHRS +TEJ+xzhY768rpsrrNUou -----END CERTIFICATE----- diff -Nru python-cassandra-driver-3.24.0/tests/integration/long/test_ssl.py python-cassandra-driver-3.25.0/tests/integration/long/test_ssl.py --- python-cassandra-driver-3.24.0/tests/integration/long/test_ssl.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/integration/long/test_ssl.py 2021-03-18 14:48:29.000000000 +0000 @@ -54,11 +54,11 @@ USES_PYOPENSSL = "twisted" in EVENT_LOOP_MANAGER or "eventlet" in EVENT_LOOP_MANAGER if "twisted" in 
EVENT_LOOP_MANAGER: import OpenSSL - ssl_version = OpenSSL.SSL.TLSv1_METHOD + ssl_version = OpenSSL.SSL.TLSv1_2_METHOD verify_certs = {'cert_reqs': SSL.VERIFY_PEER, 'check_hostname': True} else: - ssl_version = ssl.PROTOCOL_TLSv1 + ssl_version = ssl.PROTOCOL_TLS verify_certs = {'cert_reqs': ssl.CERT_REQUIRED, 'check_hostname': True} @@ -404,7 +404,7 @@ @test_category connection:ssl """ if USES_PYOPENSSL: - ssl_context = SSL.Context(SSL.TLSv1_METHOD) + ssl_context = SSL.Context(SSL.TLSv1_2_METHOD) ssl_context.load_verify_locations(CLIENT_CA_CERTS) else: ssl_context = ssl.SSLContext(ssl_version) @@ -428,7 +428,7 @@ ssl_options = {} if USES_PYOPENSSL: - ssl_context = SSL.Context(SSL.TLSv1_METHOD) + ssl_context = SSL.Context(SSL.TLSv1_2_METHOD) ssl_context.use_certificate_file(abs_driver_certfile) with open(abs_driver_keyfile) as keyfile: key = crypto.load_privatekey(crypto.FILETYPE_PEM, keyfile.read(), b'cassandra') @@ -449,7 +449,7 @@ """ ssl_options = {} if USES_PYOPENSSL: - ssl_context = SSL.Context(SSL.TLSv1_METHOD) + ssl_context = SSL.Context(SSL.TLSv1_2_METHOD) ssl_context.use_certificate_file(DRIVER_CERTFILE) with open(DRIVER_KEYFILE_ENCRYPTED) as keyfile: key = crypto.load_privatekey(crypto.FILETYPE_PEM, keyfile.read(), b'cassandra') @@ -472,7 +472,7 @@ def test_cannot_connect_ssl_context_with_invalid_hostname(self): ssl_options = {} if USES_PYOPENSSL: - ssl_context = SSL.Context(SSL.TLSv1_METHOD) + ssl_context = SSL.Context(SSL.TLSv1_2_METHOD) ssl_context.use_certificate_file(DRIVER_CERTFILE) with open(DRIVER_KEYFILE_ENCRYPTED) as keyfile: key = crypto.load_privatekey(crypto.FILETYPE_PEM, keyfile.read(), b"cassandra") diff -Nru python-cassandra-driver-3.24.0/tests/integration/simulacron/test_empty_column.py python-cassandra-driver-3.25.0/tests/integration/simulacron/test_empty_column.py --- python-cassandra-driver-3.24.0/tests/integration/simulacron/test_empty_column.py 2020-06-17 17:45:15.000000000 +0000 +++ 
python-cassandra-driver-3.25.0/tests/integration/simulacron/test_empty_column.py 2021-03-18 14:48:29.000000000 +0000 @@ -27,8 +27,8 @@ from cassandra.cqlengine.connection import set_session from cassandra.cqlengine.models import Model -from tests.integration import PROTOCOL_VERSION, requiressimulacron -from tests.integration.simulacron import SimulacronCluster +from tests.integration import requiressimulacron +from tests.integration.simulacron import PROTOCOL_VERSION, SimulacronCluster from tests.integration.simulacron.utils import PrimeQuery, prime_request diff -Nru python-cassandra-driver-3.24.0/tests/integration/simulacron/test_policies.py python-cassandra-driver-3.25.0/tests/integration/simulacron/test_policies.py --- python-cassandra-driver-3.24.0/tests/integration/simulacron/test_policies.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/integration/simulacron/test_policies.py 2021-03-18 14:48:29.000000000 +0000 @@ -184,7 +184,7 @@ spec = ExecutionProfile(load_balancing_policy=RoundRobinPolicy(), speculative_execution_policy=ConstantSpeculativeExecutionPolicy(0, number_of_requests)) - cluster = Cluster(compression=False) + cluster = Cluster(protocol_version=PROTOCOL_VERSION, compression=False) cluster.add_execution_profile("spec", spec) session = cluster.connect(wait_for_all_pools=True) self.addCleanup(cluster.shutdown) diff -Nru python-cassandra-driver-3.24.0/tests/integration/standard/test_cluster.py python-cassandra-driver-3.25.0/tests/integration/standard/test_cluster.py --- python-cassandra-driver-3.24.0/tests/integration/standard/test_cluster.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/integration/standard/test_cluster.py 2021-03-18 14:48:29.000000000 +0000 @@ -42,7 +42,7 @@ from tests import notwindows from tests.integration import use_singledc, get_server_versions, CASSANDRA_VERSION, \ execute_until_pass, execute_with_long_wait_retry, get_node, MockLoggingHandler, 
get_unsupported_lower_protocol, \ - get_unsupported_upper_protocol, protocolv5, local, CASSANDRA_IP, greaterthanorequalcass30, lessthanorequalcass40, \ + get_unsupported_upper_protocol, protocolv6, local, CASSANDRA_IP, greaterthanorequalcass30, lessthanorequalcass40, \ DSE_VERSION, TestCluster, PROTOCOL_VERSION from tests.integration.util import assert_quiescent_pool_state import sys @@ -261,6 +261,18 @@ elif DSE_VERSION and DSE_VERSION >= Version("5.1"): self.assertEqual(updated_protocol_version, cassandra.ProtocolVersion.DSE_V1) self.assertEqual(updated_cluster_version, cassandra.ProtocolVersion.DSE_V1) + elif CASSANDRA_VERSION >= Version('4.0-beta5'): + self.assertEqual(updated_protocol_version, cassandra.ProtocolVersion.V5) + self.assertEqual(updated_cluster_version, cassandra.ProtocolVersion.V5) + elif CASSANDRA_VERSION >= Version('4.0-a'): + self.assertEqual(updated_protocol_version, cassandra.ProtocolVersion.V4) + self.assertEqual(updated_cluster_version, cassandra.ProtocolVersion.V4) + elif CASSANDRA_VERSION >= Version('3.11'): + self.assertEqual(updated_protocol_version, cassandra.ProtocolVersion.V4) + self.assertEqual(updated_cluster_version, cassandra.ProtocolVersion.V4) + elif CASSANDRA_VERSION >= Version('3.0'): + self.assertEqual(updated_protocol_version, cassandra.ProtocolVersion.V4) + self.assertEqual(updated_cluster_version, cassandra.ProtocolVersion.V4) elif CASSANDRA_VERSION >= Version('2.2'): self.assertEqual(updated_protocol_version, 4) self.assertEqual(updated_cluster_version, 4) @@ -1473,42 +1485,42 @@ cluster.shutdown() -@protocolv5 +@protocolv6 class BetaProtocolTest(unittest.TestCase): - @protocolv5 + @protocolv6 def test_invalid_protocol_version_beta_option(self): """ - Test cluster connection with protocol v5 and beta flag not set + Test cluster connection with protocol v6 and beta flag not set @since 3.7.0 - @jira_ticket PYTHON-614 - @expected_result client shouldn't connect with V5 and no beta flag set + @jira_ticket PYTHON-614, 
PYTHON-1232 + @expected_result client shouldn't connect with V6 and no beta flag set @test_category connection """ - cluster = TestCluster(protocol_version=cassandra.ProtocolVersion.V5, allow_beta_protocol_version=False) + cluster = TestCluster(protocol_version=cassandra.ProtocolVersion.V6, allow_beta_protocol_version=False) try: with self.assertRaises(NoHostAvailable): cluster.connect() except Exception as e: self.fail("Unexpected error encountered {0}".format(e.message)) - @protocolv5 + @protocolv6 def test_valid_protocol_version_beta_options_connect(self): """ Test cluster connection with protocol version 5 and beta flag set @since 3.7.0 - @jira_ticket PYTHON-614 - @expected_result client should connect with protocol v5 and beta flag set. + @jira_ticket PYTHON-614, PYTHON-1232 + @expected_result client should connect with protocol v6 and beta flag set. @test_category connection """ - cluster = Cluster(protocol_version=cassandra.ProtocolVersion.V5, allow_beta_protocol_version=True) + cluster = Cluster(protocol_version=cassandra.ProtocolVersion.V6, allow_beta_protocol_version=True) session = cluster.connect() - self.assertEqual(cluster.protocol_version, cassandra.ProtocolVersion.V5) + self.assertEqual(cluster.protocol_version, cassandra.ProtocolVersion.V6) self.assertTrue(session.execute("select release_version from system.local")[0]) cluster.shutdown() diff -Nru python-cassandra-driver-3.24.0/tests/integration/standard/test_custom_protocol_handler.py python-cassandra-driver-3.25.0/tests/integration/standard/test_custom_protocol_handler.py --- python-cassandra-driver-3.24.0/tests/integration/standard/test_custom_protocol_handler.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/integration/standard/test_custom_protocol_handler.py 2021-03-18 14:48:29.000000000 +0000 @@ -25,7 +25,7 @@ from tests.integration import use_singledc, drop_keyspace_shutdown_cluster, \ greaterthanorequalcass30, execute_with_long_wait_retry, 
greaterthanorequaldse51, greaterthanorequalcass3_10, \ - greaterthanorequalcass31, TestCluster + TestCluster, greaterthanorequalcass40, requirecassandra from tests.integration.datatype_utils import update_datatypes, PRIMITIVE_DATATYPES from tests.integration.standard.utils import create_table_with_all_types, get_all_primitive_params from six import binary_type @@ -124,7 +124,8 @@ self.assertEqual(len(CustomResultMessageTracked.checked_rev_row_set), len(PRIMITIVE_DATATYPES)-1) cluster.shutdown() - @greaterthanorequalcass31 + @requirecassandra + @greaterthanorequalcass40 def test_protocol_divergence_v5_fail_by_continuous_paging(self): """ Test to validate that V5 and DSE_V1 diverge. ContinuousPagingOptions is not supported by V5 @@ -170,7 +171,8 @@ self._protocol_divergence_fail_by_flag_uses_int(ProtocolVersion.V4, uses_int_query_flag=False, int_flag=True) - @greaterthanorequalcass3_10 + @requirecassandra + @greaterthanorequalcass40 def test_protocol_v5_uses_flag_int(self): """ Test to validate that the _PAGE_SIZE_FLAG is treated correctly using write_uint for V5 @@ -196,7 +198,8 @@ self._protocol_divergence_fail_by_flag_uses_int(ProtocolVersion.DSE_V1, uses_int_query_flag=True, int_flag=True) - @greaterthanorequalcass3_10 + @requirecassandra + @greaterthanorequalcass40 def test_protocol_divergence_v5_fail_by_flag_uses_int(self): """ Test to validate that the _PAGE_SIZE_FLAG is treated correctly using write_uint for V5 diff -Nru python-cassandra-driver-3.24.0/tests/integration/standard/test_query.py python-cassandra-driver-3.25.0/tests/integration/standard/test_query.py --- python-cassandra-driver-3.24.0/tests/integration/standard/test_query.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/integration/standard/test_query.py 2021-03-18 14:48:29.000000000 +0000 @@ -28,7 +28,7 @@ from cassandra.policies import HostDistance, RoundRobinPolicy, WhiteListRoundRobinPolicy from tests.integration import use_singledc, PROTOCOL_VERSION, 
BasicSharedKeyspaceUnitTestCase, \ greaterthanprotocolv3, MockLoggingHandler, get_supported_protocol_versions, local, get_cluster, setup_keyspace, \ - USE_CASS_EXTERNAL, greaterthanorequalcass40, DSE_VERSION, TestCluster + USE_CASS_EXTERNAL, greaterthanorequalcass40, DSE_VERSION, TestCluster, requirecassandra from tests import notwindows from tests.integration import greaterthanorequalcass30, get_node @@ -1408,6 +1408,8 @@ """ self._check_set_keyspace_in_statement(self.session) + @requirecassandra + @greaterthanorequalcass40 def test_setting_keyspace_and_session(self): """ Test we can still send the keyspace independently even the session diff -Nru python-cassandra-driver-3.24.0/tests/unit/io/test_twistedreactor.py python-cassandra-driver-3.25.0/tests/unit/io/test_twistedreactor.py --- python-cassandra-driver-3.24.0/tests/unit/io/test_twistedreactor.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/unit/io/test_twistedreactor.py 2021-03-18 14:48:29.000000000 +0000 @@ -148,12 +148,12 @@ # incomplete header self.obj_ut._iobuf.write(b'\x84\x00\x00\x00\x00') self.obj_ut.handle_read() - self.assertEqual(self.obj_ut._iobuf.getvalue(), b'\x84\x00\x00\x00\x00') + self.assertEqual(self.obj_ut._io_buffer.cql_frame_buffer.getvalue(), b'\x84\x00\x00\x00\x00') # full header, but incomplete body self.obj_ut._iobuf.write(b'\x00\x00\x00\x15') self.obj_ut.handle_read() - self.assertEqual(self.obj_ut._iobuf.getvalue(), + self.assertEqual(self.obj_ut._io_buffer.cql_frame_buffer.getvalue(), b'\x84\x00\x00\x00\x00\x00\x00\x00\x15') self.assertEqual(self.obj_ut._current_frame.end_pos, 30) @@ -174,7 +174,7 @@ self.obj_ut._iobuf.write( b'\x84\x01\x00\x02\x03\x00\x00\x00\x15' + body + extra) self.obj_ut.handle_read() - self.assertEqual(self.obj_ut._iobuf.getvalue(), extra) + self.assertEqual(self.obj_ut._io_buffer.cql_frame_buffer.getvalue(), extra) self.obj_ut.process_msg.assert_called_with( _Frame(version=4, flags=1, stream=2, opcode=3, body_offset=9, 
end_pos=9 + len(body)), body) diff -Nru python-cassandra-driver-3.24.0/tests/unit/io/utils.py python-cassandra-driver-3.25.0/tests/unit/io/utils.py --- python-cassandra-driver-3.24.0/tests/unit/io/utils.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/unit/io/utils.py 2021-03-18 14:48:29.000000000 +0000 @@ -309,14 +309,14 @@ for message, expected_size in messages: message_chunks = message - c._iobuf = io.BytesIO() + c._io_buffer._io_buffer = io.BytesIO() c.process_io_buffer.reset_mock() c.handle_read(*self.null_handle_function_args) - c._iobuf.seek(0, os.SEEK_END) + c._io_buffer.io_buffer.seek(0, os.SEEK_END) # Ensure the message size is the good one and that the # message has been processed if it is non-empty - self.assertEqual(c._iobuf.tell(), expected_size) + self.assertEqual(c._io_buffer.io_buffer.tell(), expected_size) if expected_size == 0: c.process_io_buffer.assert_not_called() else: @@ -435,11 +435,11 @@ self.get_socket(c).recv.return_value = message[0:1] c.handle_read(*self.null_handle_function_args) - self.assertEqual(c._iobuf.getvalue(), message[0:1]) + self.assertEqual(c._io_buffer.cql_frame_buffer.getvalue(), message[0:1]) self.get_socket(c).recv.return_value = message[1:] c.handle_read(*self.null_handle_function_args) - self.assertEqual(six.binary_type(), c._iobuf.getvalue()) + self.assertEqual(six.binary_type(), c._io_buffer.io_buffer.getvalue()) # let it write out a StartupMessage c.handle_write(*self.null_handle_function_args) @@ -461,12 +461,12 @@ # read in the first nine bytes self.get_socket(c).recv.return_value = message[:9] c.handle_read(*self.null_handle_function_args) - self.assertEqual(c._iobuf.getvalue(), message[:9]) + self.assertEqual(c._io_buffer.cql_frame_buffer.getvalue(), message[:9]) # ... 
then read in the rest self.get_socket(c).recv.return_value = message[9:] c.handle_read(*self.null_handle_function_args) - self.assertEqual(six.binary_type(), c._iobuf.getvalue()) + self.assertEqual(six.binary_type(), c._io_buffer.io_buffer.getvalue()) # let it write out a StartupMessage c.handle_write(*self.null_handle_function_args) @@ -501,7 +501,7 @@ for i in range(1, 15): c.process_io_buffer.reset_mock() - c._iobuf = io.BytesIO() + c._io_buffer._io_buffer = io.BytesIO() message = io.BytesIO(six.b('a') * (2**i)) def recv_side_effect(*args): @@ -511,7 +511,7 @@ self.get_socket(c).recv.side_effect = recv_side_effect c.handle_read(*self.null_handle_function_args) - if c._iobuf.tell(): + if c._io_buffer.io_buffer.tell(): c.process_io_buffer.assert_called_once() else: c.process_io_buffer.assert_not_called() diff -Nru python-cassandra-driver-3.24.0/tests/unit/test_cluster.py python-cassandra-driver-3.25.0/tests/unit/test_cluster.py --- python-cassandra-driver-3.24.0/tests/unit/test_cluster.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/unit/test_cluster.py 2021-03-18 14:48:29.000000000 +0000 @@ -209,6 +209,8 @@ lower = ProtocolVersion.get_lower_supported(ProtocolVersion.DSE_V2) self.assertEqual(ProtocolVersion.DSE_V1, lower) lower = ProtocolVersion.get_lower_supported(ProtocolVersion.DSE_V1) + self.assertEqual(ProtocolVersion.V5,lower) + lower = ProtocolVersion.get_lower_supported(ProtocolVersion.V5) self.assertEqual(ProtocolVersion.V4,lower) lower = ProtocolVersion.get_lower_supported(ProtocolVersion.V4) self.assertEqual(ProtocolVersion.V3,lower) diff -Nru python-cassandra-driver-3.24.0/tests/unit/test_connection.py python-cassandra-driver-3.25.0/tests/unit/test_connection.py --- python-cassandra-driver-3.24.0/tests/unit/test_connection.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/unit/test_connection.py 2021-03-18 14:48:29.000000000 +0000 @@ -100,7 +100,7 @@ header = 
self.make_header_prefix(SupportedMessage, version=0x7f) options = self.make_options_body() message = self.make_msg(header, options) - c._iobuf = BytesIO() + c._iobuf._io_buffer = BytesIO() c._iobuf.write(message) c.process_io_buffer() @@ -117,7 +117,7 @@ # read in a SupportedMessage response header = self.make_header_prefix(SupportedMessage) message = header + int32_pack(-13) - c._iobuf = BytesIO() + c._iobuf._io_buffer = BytesIO() c._iobuf.write(message) c.process_io_buffer() diff -Nru python-cassandra-driver-3.24.0/tests/unit/test_control_connection.py python-cassandra-driver-3.25.0/tests/unit/test_control_connection.py --- python-cassandra-driver-3.24.0/tests/unit/test_control_connection.py 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/unit/test_control_connection.py 2021-03-18 14:48:29.000000000 +0000 @@ -127,15 +127,15 @@ ] self.peer_results = [ - ["rpc_address", "peer", "schema_version", "data_center", "rack", "tokens"], - [["192.168.1.1", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"]], - ["192.168.1.2", "10.0.0.2", "a", "dc1", "rack1", ["2", "102", "202"]]] + ["rpc_address", "peer", "schema_version", "data_center", "rack", "tokens", "host_id"], + [["192.168.1.1", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"], "uuid1"], + ["192.168.1.2", "10.0.0.2", "a", "dc1", "rack1", ["2", "102", "202"], "uuid2"]] ] self.peer_results_v2 = [ - ["native_address", "native_port", "peer", "peer_port", "schema_version", "data_center", "rack", "tokens"], - [["192.168.1.1", 9042, "10.0.0.1", 7042, "a", "dc1", "rack1", ["1", "101", "201"]], - ["192.168.1.2", 9042, "10.0.0.2", 7040, "a", "dc1", "rack1", ["2", "102", "202"]]] + ["native_address", "native_port", "peer", "peer_port", "schema_version", "data_center", "rack", "tokens", "host_id"], + [["192.168.1.1", 9042, "10.0.0.1", 7042, "a", "dc1", "rack1", ["1", "101", "201"], "uuid1"], + ["192.168.1.2", 9042, "10.0.0.2", 7040, "a", "dc1", "rack1", ["2", "102", "202"], "uuid2"]] ] 
self.wait_for_responses = Mock(return_value=_node_meta_results(self.local_results, self.peer_results)) @@ -155,18 +155,18 @@ class ControlConnectionTest(unittest.TestCase): _matching_schema_preloaded_results = _node_meta_results( - local_results=(["schema_version", "cluster_name", "data_center", "rack", "partitioner", "release_version", "tokens"], - [["a", "foocluster", "dc1", "rack1", "Murmur3Partitioner", "2.2.0", ["0", "100", "200"]]]), - peer_results=(["rpc_address", "peer", "schema_version", "data_center", "rack", "tokens"], - [["192.168.1.1", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"]], - ["192.168.1.2", "10.0.0.2", "a", "dc1", "rack1", ["2", "102", "202"]]])) + local_results=(["schema_version", "cluster_name", "data_center", "rack", "partitioner", "release_version", "tokens", "host_id"], + [["a", "foocluster", "dc1", "rack1", "Murmur3Partitioner", "2.2.0", ["0", "100", "200"], "uuid1"]]), + peer_results=(["rpc_address", "peer", "schema_version", "data_center", "rack", "tokens", "host_id"], + [["192.168.1.1", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"], "uuid2"], + ["192.168.1.2", "10.0.0.2", "a", "dc1", "rack1", ["2", "102", "202"], "uuid3"]])) _nonmatching_schema_preloaded_results = _node_meta_results( - local_results=(["schema_version", "cluster_name", "data_center", "rack", "partitioner", "release_version", "tokens"], - [["a", "foocluster", "dc1", "rack1", "Murmur3Partitioner", "2.2.0", ["0", "100", "200"]]]), - peer_results=(["rpc_address", "peer", "schema_version", "data_center", "rack", "tokens"], - [["192.168.1.1", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"]], - ["192.168.1.2", "10.0.0.2", "b", "dc1", "rack1", ["2", "102", "202"]]])) + local_results=(["schema_version", "cluster_name", "data_center", "rack", "partitioner", "release_version", "tokens", "host_id"], + [["a", "foocluster", "dc1", "rack1", "Murmur3Partitioner", "2.2.0", ["0", "100", "200"], "uuid1"]]), + peer_results=(["rpc_address", "peer", "schema_version", 
"data_center", "rack", "tokens", "host_id"], + [["192.168.1.1", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"], "uuid2"], + ["192.168.1.2", "10.0.0.2", "b", "dc1", "rack1", ["2", "102", "202"], "uuid3"]])) def setUp(self): self.cluster = MockCluster() @@ -275,6 +275,40 @@ self.assertEqual(self.connection.wait_for_responses.call_count, 1) + def test_refresh_nodes_and_tokens_with_invalid_peers(self): + def refresh_and_validate_added_hosts(): + self.connection.wait_for_responses = Mock(return_value=_node_meta_results( + self.connection.local_results, self.connection.peer_results)) + self.control_connection.refresh_node_list_and_token_map() + self.assertEqual(1, len(self.cluster.added_hosts)) # only one valid peer found + + # peersV1 + del self.connection.peer_results[:] + self.connection.peer_results.extend([ + ["rpc_address", "peer", "schema_version", "data_center", "rack", "tokens", "host_id"], + [["192.168.1.3", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"], 'uuid5'], + # all others are invalid + [None, None, "a", "dc1", "rack1", ["1", "101", "201"], 'uuid1'], + ["192.168.1.7", "10.0.0.1", "a", None, "rack1", ["1", "101", "201"], 'uuid2'], + ["192.168.1.6", "10.0.0.1", "a", "dc1", None, ["1", "101", "201"], 'uuid3'], + ["192.168.1.5", "10.0.0.1", "a", "dc1", "rack1", None, 'uuid4'], + ["192.168.1.4", "10.0.0.1", "a", "dc1", "rack1", ["1", "101", "201"], None]]]) + refresh_and_validate_added_hosts() + + # peersV2 + del self.cluster.added_hosts[:] + del self.connection.peer_results[:] + self.connection.peer_results.extend([ + ["native_address", "native_port", "peer", "peer_port", "schema_version", "data_center", "rack", "tokens", "host_id"], + [["192.168.1.4", 9042, "10.0.0.1", 7042, "a", "dc1", "rack1", ["1", "101", "201"], "uuid1"], + # all others are invalid + [None, 9042, None, 7040, "a", "dc1", "rack1", ["2", "102", "202"], "uuid2"], + ["192.168.1.5", 9042, "10.0.0.2", 7040, "a", None, "rack1", ["2", "102", "202"], "uuid2"], + ["192.168.1.5", 9042, 
"10.0.0.2", 7040, "a", "dc1", None, ["2", "102", "202"], "uuid2"], + ["192.168.1.5", 9042, "10.0.0.2", 7040, "a", "dc1", "rack1", None, "uuid2"], + ["192.168.1.5", 9042, "10.0.0.2", 7040, "a", "dc1", "rack1", ["2", "102", "202"], None]]]) + refresh_and_validate_added_hosts() + def test_refresh_nodes_and_tokens_uses_preloaded_results_if_given(self): """ refresh_nodes_and_tokens uses preloaded results if given for shared table queries @@ -311,7 +345,7 @@ def test_refresh_nodes_and_tokens_add_host(self): self.connection.peer_results[1].append( - ["192.168.1.3", "10.0.0.3", "a", "dc1", "rack1", ["3", "103", "203"]] + ["192.168.1.3", "10.0.0.3", "a", "dc1", "rack1", ["3", "103", "203"], "uuid3"] ) self.cluster.scheduler.schedule = lambda delay, f, *args, **kwargs: f(*args, **kwargs) self.control_connection.refresh_node_list_and_token_map() @@ -319,6 +353,7 @@ self.assertEqual(self.cluster.added_hosts[0].address, "192.168.1.3") self.assertEqual(self.cluster.added_hosts[0].datacenter, "dc1") self.assertEqual(self.cluster.added_hosts[0].rack, "rack1") + self.assertEqual(self.cluster.added_hosts[0].host_id, "uuid3") def test_refresh_nodes_and_tokens_remove_host(self): del self.connection.peer_results[1][1] @@ -482,7 +517,7 @@ del self.connection.peer_results[:] self.connection.peer_results.extend(self.connection.peer_results_v2) self.connection.peer_results[1].append( - ["192.168.1.3", 555, "10.0.0.3", 666, "a", "dc1", "rack1", ["3", "103", "203"]] + ["192.168.1.3", 555, "10.0.0.3", 666, "a", "dc1", "rack1", ["3", "103", "203"], "uuid3"] ) self.connection.wait_for_responses = Mock(return_value=_node_meta_results( self.connection.local_results, self.connection.peer_results)) @@ -502,7 +537,7 @@ del self.connection.peer_results[:] self.connection.peer_results.extend(self.connection.peer_results_v2) self.connection.peer_results[1].append( - ["192.168.1.3", -1, "10.0.0.3", 0, "a", "dc1", "rack1", ["3", "103", "203"]] + ["192.168.1.3", -1, "10.0.0.3", 0, "a", "dc1", "rack1", 
["3", "103", "203"], "uuid3"] ) self.connection.wait_for_responses = Mock(return_value=_node_meta_results( self.connection.local_results, self.connection.peer_results)) diff -Nru python-cassandra-driver-3.24.0/tests/unit/test_segment.py python-cassandra-driver-3.25.0/tests/unit/test_segment.py --- python-cassandra-driver-3.24.0/tests/unit/test_segment.py 1970-01-01 00:00:00.000000000 +0000 +++ python-cassandra-driver-3.25.0/tests/unit/test_segment.py 2021-03-18 14:48:29.000000000 +0000 @@ -0,0 +1,218 @@ +# Copyright DataStax, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +try: + import unittest2 as unittest +except ImportError: + import unittest # noqa + +import six + +from cassandra import DriverException +from cassandra.segment import Segment, CrcException +from cassandra.connection import segment_codec_no_compression, segment_codec_lz4 + + +def to_bits(b): + if six.PY2: + b = six.byte2int(b) + return '{:08b}'.format(b) + +class SegmentCodecTest(unittest.TestCase): + + small_msg = b'b' * 50 + max_msg = b'b' * Segment.MAX_PAYLOAD_LENGTH + large_msg = b'b' * (Segment.MAX_PAYLOAD_LENGTH + 1) + + @staticmethod + def _header_to_bits(data): + # unpack a header to bits + # data should be the little endian bytes sequence + if len(data) > 6: # compressed + data = data[:5] + bits = ''.join([to_bits(b) for b in reversed(data)]) + # return the compressed payload length, the uncompressed payload length, + # the self contained flag and the padding as bits + return bits[23:40] + bits[6:23] + bits[5:6] + bits[:5] + else: # uncompressed + data = data[:3] + bits = ''.join([to_bits(b) for b in reversed(data)]) + # return the payload length, the self contained flag and + # the padding as bits + return bits[7:24] + bits[6:7] + bits[:6] + + def test_encode_uncompressed_header(self): + buffer = six.BytesIO() + segment_codec_no_compression.encode_header(buffer, len(self.small_msg), -1, True) + self.assertEqual(buffer.tell(), 6) + self.assertEqual( + self._header_to_bits(buffer.getvalue()), + "00000000000110010" + "1" + "000000") + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_encode_compressed_header(self): + buffer = six.BytesIO() + compressed_length = len(segment_codec_lz4.compress(self.small_msg)) + segment_codec_lz4.encode_header(buffer, compressed_length, len(self.small_msg), True) + + self.assertEqual(buffer.tell(), 8) + self.assertEqual( + self._header_to_bits(buffer.getvalue()), + "{:017b}".format(compressed_length) + "00000000000110010" + "1" + "00000") + + def 
test_encode_uncompressed_header_with_max_payload(self): + buffer = six.BytesIO() + segment_codec_no_compression.encode_header(buffer, len(self.max_msg), -1, True) + self.assertEqual(buffer.tell(), 6) + self.assertEqual( + self._header_to_bits(buffer.getvalue()), + "11111111111111111" + "1" + "000000") + + def test_encode_header_fails_if_payload_too_big(self): + buffer = six.BytesIO() + for codec in [c for c in [segment_codec_no_compression, segment_codec_lz4] if c is not None]: + with self.assertRaises(DriverException): + codec.encode_header(buffer, len(self.large_msg), -1, False) + + def test_encode_uncompressed_header_not_self_contained_msg(self): + buffer = six.BytesIO() + # simulate the first chunk with the max size + segment_codec_no_compression.encode_header(buffer, len(self.max_msg), -1, False) + self.assertEqual(buffer.tell(), 6) + self.assertEqual( + self._header_to_bits(buffer.getvalue()), + ("11111111111111111" + "0" # not self contained + "000000")) + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_encode_compressed_header_with_max_payload(self): + buffer = six.BytesIO() + compressed_length = len(segment_codec_lz4.compress(self.max_msg)) + segment_codec_lz4.encode_header(buffer, compressed_length, len(self.max_msg), True) + self.assertEqual(buffer.tell(), 8) + self.assertEqual( + self._header_to_bits(buffer.getvalue()), + "{:017b}".format(compressed_length) + "11111111111111111" + "1" + "00000") + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_encode_compressed_header_not_self_contained_msg(self): + buffer = six.BytesIO() + # simulate the first chunk with the max size + compressed_length = len(segment_codec_lz4.compress(self.max_msg)) + segment_codec_lz4.encode_header(buffer, compressed_length, len(self.max_msg), False) + self.assertEqual(buffer.tell(), 8) + self.assertEqual( + self._header_to_bits(buffer.getvalue()), + ("{:017b}".format(compressed_length) + + "11111111111111111" + "0" # not self 
contained + "00000")) + + def test_decode_uncompressed_header(self): + buffer = six.BytesIO() + segment_codec_no_compression.encode_header(buffer, len(self.small_msg), -1, True) + buffer.seek(0) + header = segment_codec_no_compression.decode_header(buffer) + self.assertEqual(header.uncompressed_payload_length, -1) + self.assertEqual(header.payload_length, len(self.small_msg)) + self.assertEqual(header.is_self_contained, True) + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_decode_compressed_header(self): + buffer = six.BytesIO() + compressed_length = len(segment_codec_lz4.compress(self.small_msg)) + segment_codec_lz4.encode_header(buffer, compressed_length, len(self.small_msg), True) + buffer.seek(0) + header = segment_codec_lz4.decode_header(buffer) + self.assertEqual(header.uncompressed_payload_length, len(self.small_msg)) + self.assertEqual(header.payload_length, compressed_length) + self.assertEqual(header.is_self_contained, True) + + def test_decode_header_fails_if_corrupted(self): + buffer = six.BytesIO() + segment_codec_no_compression.encode_header(buffer, len(self.small_msg), -1, True) + # corrupt one byte + buffer.seek(buffer.tell()-1) + buffer.write(b'0') + buffer.seek(0) + + with self.assertRaises(CrcException): + segment_codec_no_compression.decode_header(buffer) + + def test_decode_uncompressed_self_contained_segment(self): + buffer = six.BytesIO() + segment_codec_no_compression.encode(buffer, self.small_msg) + + buffer.seek(0) + header = segment_codec_no_compression.decode_header(buffer) + segment = segment_codec_no_compression.decode(buffer, header) + + self.assertEqual(header.is_self_contained, True) + self.assertEqual(header.uncompressed_payload_length, -1) + self.assertEqual(header.payload_length, len(self.small_msg)) + self.assertEqual(segment.payload, self.small_msg) + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_decode_compressed_self_contained_segment(self): + buffer = six.BytesIO() + 
segment_codec_lz4.encode(buffer, self.small_msg) + + buffer.seek(0) + header = segment_codec_lz4.decode_header(buffer) + segment = segment_codec_lz4.decode(buffer, header) + + self.assertEqual(header.is_self_contained, True) + self.assertEqual(header.uncompressed_payload_length, len(self.small_msg)) + self.assertGreater(header.uncompressed_payload_length, header.payload_length) + self.assertEqual(segment.payload, self.small_msg) + + def test_decode_multi_segments(self): + buffer = six.BytesIO() + segment_codec_no_compression.encode(buffer, self.large_msg) + + buffer.seek(0) + # We should have 2 segments to read + headers = [] + segments = [] + headers.append(segment_codec_no_compression.decode_header(buffer)) + segments.append(segment_codec_no_compression.decode(buffer, headers[0])) + headers.append(segment_codec_no_compression.decode_header(buffer)) + segments.append(segment_codec_no_compression.decode(buffer, headers[1])) + + self.assertTrue(all([h.is_self_contained is False for h in headers])) + decoded_msg = segments[0].payload + segments[1].payload + self.assertEqual(decoded_msg, self.large_msg) + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_decode_fails_if_corrupted(self): + buffer = six.BytesIO() + segment_codec_lz4.encode(buffer, self.small_msg) + buffer.seek(buffer.tell()-1) + buffer.write(b'0') + buffer.seek(0) + header = segment_codec_lz4.decode_header(buffer) + with self.assertRaises(CrcException): + segment_codec_lz4.decode(buffer, header) + + @unittest.skipUnless(segment_codec_lz4, ' lz4 not installed') + def test_decode_tiny_msg_not_compressed(self): + buffer = six.BytesIO() + segment_codec_lz4.encode(buffer, b'b') + buffer.seek(0) + header = segment_codec_lz4.decode_header(buffer) + segment = segment_codec_lz4.decode(buffer, header) + self.assertEqual(header.uncompressed_payload_length, 0) + self.assertEqual(header.payload_length, 1) + self.assertEqual(segment.payload, b'b') diff -Nru 
python-cassandra-driver-3.24.0/tox.ini python-cassandra-driver-3.25.0/tox.ini --- python-cassandra-driver-3.24.0/tox.ini 2020-06-17 17:45:15.000000000 +0000 +++ python-cassandra-driver-3.25.0/tox.ini 2021-03-18 14:48:29.000000000 +0000 @@ -1,5 +1,5 @@ [tox] -envlist = py{27,34,35,36,37,38},pypy +envlist = py{27,35,36,37,38},pypy [base] deps = nose @@ -12,9 +12,11 @@ pure-sasl kerberos futurist +lz4_dependency = py27,py35,py36,py37,py38: lz4 [testenv] deps = {[base]deps} + {[base]lz4_dependency} setenv = LIBEV_EMBED=0 CARES_EMBED=0 @@ -25,6 +27,7 @@ [testenv:gevent_loop] deps = {[base]deps} + {[base]lz4_dependency} gevent>=1.4,<1.5 setenv = LIBEV_EMBED=0 @@ -37,6 +40,7 @@ [testenv:eventlet_loop] deps = {[base]deps} + {[base]lz4_dependency} gevent>=1.4,<1.5 setenv = LIBEV_EMBED=0