diff --git a/cassandra/cqlengine/connection.py b/cassandra/cqlengine/connection.py
index 516ff0e4e..cd9f2ac78 100644
--- a/cassandra/cqlengine/connection.py
+++ b/cassandra/cqlengine/connection.py
@@ -323,7 +323,7 @@ def setup(
     :param int consistency: The global default :class:`~.ConsistencyLevel` - default is the same as :attr:`.Session.default_consistency_level`
     :param bool lazy_connect: True if should not connect until first use
     :param bool retry_connect: True if we should retry to connect even if there was a connection failure initially
-    :param \*\*kwargs: Pass-through keyword arguments for :class:`cassandra.cluster.Cluster`
+    :param kwargs: Pass-through keyword arguments for :class:`cassandra.cluster.Cluster`
     """
 
     from cassandra.cqlengine import models
diff --git a/cassandra/cqlengine/query.py b/cassandra/cqlengine/query.py
index 40134e884..e10233db9 100644
--- a/cassandra/cqlengine/query.py
+++ b/cassandra/cqlengine/query.py
@@ -205,8 +205,8 @@ def add_callback(self, fn, *args, **kwargs):
 
         :param fn: Callable object
        :type fn: callable
-        :param \*args: Positional arguments to be passed to the callback at the time of execution
-        :param \*\*kwargs: Named arguments to be passed to the callback at the time of execution
+        :param args: Positional arguments to be passed to the callback at the time of execution
+        :param kwargs: Named arguments to be passed to the callback at the time of execution
         """
         if not callable(fn):
             raise ValueError("Value for argument 'fn' is {0} and is not a callable object.".format(type(fn)))
@@ -276,8 +276,8 @@ class ContextQuery(object):
     A Context manager to allow a Model to switch context easily. Presently, the context only
     specifies a keyspace for model IO.
 
-    :param \*args: One or more models. A model should be a class type, not an instance.
-    :param \*\*kwargs: (optional) Context parameters: can be *keyspace* or *connection*
+    :param args: One or more models. A model should be a class type, not an instance.
+    :param kwargs: (optional) Context parameters: can be *keyspace* or *connection*
 
     For example:
 
diff --git a/cassandra/datastax/cloud/__init__.py b/cassandra/datastax/cloud/__init__.py
index 0f042ff1c..73b4fc4f3 100644
--- a/cassandra/datastax/cloud/__init__.py
+++ b/cassandra/datastax/cloud/__init__.py
@@ -22,7 +22,7 @@
 
 _HAS_SSL = True
 try:
-    from ssl import SSLContext, PROTOCOL_TLS, CERT_REQUIRED
+    from ssl import SSLContext, PROTOCOL_TLS_CLIENT, CERT_REQUIRED
 except:
     _HAS_SSL = False
 
@@ -169,7 +169,7 @@ def parse_metadata_info(config, http_data):
 
 
 def _ssl_context_from_cert(ca_cert_location, cert_location, key_location):
-    ssl_context = SSLContext(PROTOCOL_TLS)
+    ssl_context = SSLContext(PROTOCOL_TLS_CLIENT)
     ssl_context.load_verify_locations(ca_cert_location)
     ssl_context.verify_mode = CERT_REQUIRED
     ssl_context.load_cert_chain(certfile=cert_location, keyfile=key_location)
diff --git a/cassandra/util.py b/cassandra/util.py
index 06d338f2e..aef2b4e99 100644
--- a/cassandra/util.py
+++ b/cassandra/util.py
@@ -40,7 +40,7 @@
 from cassandra import DriverException
 
 DATETIME_EPOC = datetime.datetime(1970, 1, 1)
-UTC_DATETIME_EPOC = datetime.datetime.utcfromtimestamp(0)
+UTC_DATETIME_EPOC = datetime.datetime.fromtimestamp(0, tz=datetime.timezone.utc)
 
 _nan = float('nan')
 
diff --git a/pytest.ini b/pytest.ini
index 084627342..d45f9c2cb 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -3,3 +3,17 @@ log_format = %(asctime)s.%(msecs)03d %(levelname)s [%(module)s:%(lineno)s]: %(me
 log_level = DEBUG
 log_date_format = %Y-%m-%d %H:%M:%S
 xfail_strict=true
+
+filterwarnings =
+    error
+    ignore::pytest.PytestCollectionWarning
+    ignore::ResourceWarning
+    ignore:distutils Version classes are deprecated:DeprecationWarning:eventlet.support.greenlets
+    ignore:X509Extension support in pyOpenSSL is deprecated.:DeprecationWarning
+    ignore:CRL support in pyOpenSSL is deprecated:DeprecationWarning
+    ignore:sign\(\) is deprecated:DeprecationWarning
+    ignore:verify\(\) is deprecated:DeprecationWarning
+    ignore:pkg_resources is deprecated as an API:DeprecationWarning:gevent.events
+    ignore:.*pkg_resources.declare_namespace.*:DeprecationWarning
+    ignore:"@coroutine" decorator is deprecated since Python 3.8:DeprecationWarning:asynctest.*
+    ignore:The asyncore module is deprecated and will be removed in Python 3.12:DeprecationWarning:asyncore.*
diff --git a/tests/integration/cqlengine/__init__.py b/tests/integration/cqlengine/__init__.py
index 5b7d16c53..a8ba17c01 100644
--- a/tests/integration/cqlengine/__init__.py
+++ b/tests/integration/cqlengine/__init__.py
@@ -77,7 +77,7 @@ def wrapped_function(*args, **kwargs):
             # DeMonkey Patch our code
             cassandra.cqlengine.connection.execute = original_function
             # Check to see if we have a pre-existing test case to work from.
-            if len(args) is 0:
+            if len(args) == 0:
                 test_case = unittest.TestCase("__init__")
             else:
                 test_case = args[0]
diff --git a/tests/integration/cqlengine/query/test_queryoperators.py b/tests/integration/cqlengine/query/test_queryoperators.py
index fd148bafc..fbf666cf2 100644
--- a/tests/integration/cqlengine/query/test_queryoperators.py
+++ b/tests/integration/cqlengine/query/test_queryoperators.py
@@ -154,6 +154,6 @@ def test_named_table_pk_token_function(self):
         query = named.all().limit(1)
         first_page = list(query)
         last = first_page[-1]
-        self.assertTrue(len(first_page) is 1)
+        self.assertTrue(len(first_page) == 1)
         next_page = list(query.filter(pk__token__gt=functions.Token(last.key)))
-        self.assertTrue(len(next_page) is 1)
+        self.assertTrue(len(next_page) == 1)
diff --git a/tests/integration/standard/test_cluster.py b/tests/integration/standard/test_cluster.py
index 43356dbd8..0bb588854 100644
--- a/tests/integration/standard/test_cluster.py
+++ b/tests/integration/standard/test_cluster.py
@@ -1,3 +1,4 @@
+
 # Copyright DataStax, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -150,7 +151,7 @@ def test_raise_error_on_control_connection_timeout(self):
         get_node(1).pause()
         cluster = TestCluster(contact_points=['127.0.0.1'], connect_timeout=1)
 
-        with self.assertRaisesRegex(NoHostAvailable, "OperationTimedOut\('errors=Timed out creating connection \(1 seconds\)"):
+        with self.assertRaisesRegex(NoHostAvailable, r"OperationTimedOut\('errors=Timed out creating connection \(1 seconds\)"):
             cluster.connect()
         cluster.shutdown()
 
diff --git a/tests/integration/standard/test_connection.py b/tests/integration/standard/test_connection.py
index a1b05c3d6..0db000978 100644
--- a/tests/integration/standard/test_connection.py
+++ b/tests/integration/standard/test_connection.py
@@ -180,7 +180,7 @@ def wait_for_connections(self, host, cluster):
         while(retry < 300):
             retry += 1
             connections = self.fetch_connections(host, cluster)
-            if len(connections) is not 0:
+            if len(connections) != 0:
                 return connections
             time.sleep(.1)
         self.fail("No new connections found")
@@ -190,7 +190,7 @@ def wait_for_no_connections(self, host, cluster):
         while(retry < 100):
             retry += 1
             connections = self.fetch_connections(host, cluster)
-            if len(connections) is 0:
+            if len(connections) == 0:
                 return
             time.sleep(.5)
         self.fail("Connections never cleared")
diff --git a/tests/integration/standard/test_metadata.py b/tests/integration/standard/test_metadata.py
index 86f48f88d..aaff62e89 100644
--- a/tests/integration/standard/test_metadata.py
+++ b/tests/integration/standard/test_metadata.py
@@ -1640,7 +1640,7 @@ def test_function_no_parameters(self):
 
         with self.VerifiedFunction(self, **kwargs) as vf:
             fn_meta = self.keyspace_function_meta[vf.signature]
-            self.assertRegex(fn_meta.as_cql_query(), "CREATE FUNCTION.*%s\(\) .*" % kwargs['name'])
+            self.assertRegex(fn_meta.as_cql_query(), r"CREATE FUNCTION.*%s\(\) .*" % kwargs['name'])
 
     def test_functions_follow_keyspace_alter(self):
         """
@@ -1688,12 +1688,12 @@ def test_function_cql_called_on_null(self):
         kwargs['called_on_null_input'] = True
         with self.VerifiedFunction(self, **kwargs) as vf:
             fn_meta = self.keyspace_function_meta[vf.signature]
-            self.assertRegex(fn_meta.as_cql_query(), "CREATE FUNCTION.*\) CALLED ON NULL INPUT RETURNS .*")
+            self.assertRegex(fn_meta.as_cql_query(), r"CREATE FUNCTION.*\) CALLED ON NULL INPUT RETURNS .*")
 
         kwargs['called_on_null_input'] = False
         with self.VerifiedFunction(self, **kwargs) as vf:
             fn_meta = self.keyspace_function_meta[vf.signature]
-            self.assertRegex(fn_meta.as_cql_query(), "CREATE FUNCTION.*\) RETURNS NULL ON NULL INPUT RETURNS .*")
+            self.assertRegex(fn_meta.as_cql_query(), r"CREATE FUNCTION.*\) RETURNS NULL ON NULL INPUT RETURNS .*")
 
     @requires_java_udf
diff --git a/tests/integration/standard/test_query.py b/tests/integration/standard/test_query.py
index bc0505131..64016655a 100644
--- a/tests/integration/standard/test_query.py
+++ b/tests/integration/standard/test_query.py
@@ -167,7 +167,7 @@ def test_client_ip_in_trace(self):
         client_ip = trace.client
 
         # Ip address should be in the local_host range
-        pat = re.compile("127.0.0.\d{1,3}")
+        pat = re.compile(r"127.0.0.\d{1,3}")
 
         # Ensure that ip is set
         self.assertIsNotNone(client_ip, "Client IP was not set in trace with C* >= 2.2")
diff --git a/tests/integration/standard/test_scylla_cloud.py b/tests/integration/standard/test_scylla_cloud.py
index d1a22f882..c719c0ca2 100644
--- a/tests/integration/standard/test_scylla_cloud.py
+++ b/tests/integration/standard/test_scylla_cloud.py
@@ -1,4 +1,5 @@
 import logging
+import warnings
 import os.path
 from unittest import TestCase
 from ccmlib.utils.ssl_utils import generate_ssl_stores
@@ -11,7 +12,9 @@
 from cassandra.io.libevreactor import LibevConnection
 supported_connection_classes = [LibevConnection, TwistedConnection]
 try:
-    from cassandra.io.asyncorereactor import AsyncoreConnection
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=DeprecationWarning, message="The asyncore module is deprecated")
+        from cassandra.io.asyncorereactor import AsyncoreConnection
     supported_connection_classes += [AsyncoreConnection]
 except ImportError:
     pass
diff --git a/tests/unit/advanced/test_graph.py b/tests/unit/advanced/test_graph.py
index 2870b9b1e..8c3610765 100644
--- a/tests/unit/advanced/test_graph.py
+++ b/tests/unit/advanced/test_graph.py
@@ -255,6 +255,7 @@ def test_with_graph_protocol(self):
 
     def test_init_unknown_kwargs(self):
         with warnings.catch_warnings(record=True) as w:
+            warnings.simplefilter("always")
             GraphOptions(unknown_param=42)
         self.assertEqual(len(w), 1)
         self.assertRegex(str(w[0].message), r"^Unknown keyword.*GraphOptions.*")
diff --git a/tests/unit/advanced/test_insights.py b/tests/unit/advanced/test_insights.py
index 4f1dd7ac1..ede8e4a70 100644
--- a/tests/unit/advanced/test_insights.py
+++ b/tests/unit/advanced/test_insights.py
@@ -14,6 +14,7 @@
 
 import unittest
 
+import pytest
 import logging
 
 from mock import sentinel
@@ -103,6 +104,7 @@ def superclass_sentinel_serializer(obj):
 class TestConfigAsDict(unittest.TestCase):
 
     # graph/query.py
+    @pytest.mark.filterwarnings("ignore:Unknown keyword argument received for GraphOptions:UserWarning")
     def test_graph_options(self):
         self.maxDiff = None
 
diff --git a/tests/unit/advanced/test_policies.py b/tests/unit/advanced/test_policies.py
index b8e4a4e75..4e1148956 100644
--- a/tests/unit/advanced/test_policies.py
+++ b/tests/unit/advanced/test_policies.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
 import unittest
+import pytest
 
 from mock import Mock
@@ -29,6 +30,7 @@ def get_host(self, addr):
         return self.hosts.get(addr)
 
 
+@pytest.mark.filterwarnings("ignore:DSELoadBalancingPolicy will be removed:DeprecationWarning")
 class DSELoadBalancingPolicyTest(unittest.TestCase):
 
     def test_no_target(self):
diff --git a/tests/unit/cython/bytesio_testhelper.pyx b/tests/unit/cython/bytesio_testhelper.pyx
index 7ba91bc4c..37e76ab33 100644
--- a/tests/unit/cython/bytesio_testhelper.pyx
+++ b/tests/unit/cython/bytesio_testhelper.pyx
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# cython: language_level=3
+
 from cassandra.bytesio cimport BytesIOReader
 
 def test_read1(assert_equal, assert_raises):
diff --git a/tests/unit/cython/types_testhelper.pyx b/tests/unit/cython/types_testhelper.pyx
index 55fd31083..7f59b8419 100644
--- a/tests/unit/cython/types_testhelper.pyx
+++ b/tests/unit/cython/types_testhelper.pyx
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# cython: language_level=3
+
 import calendar
 import datetime
 import time
diff --git a/tests/unit/cython/utils_testhelper.pyx b/tests/unit/cython/utils_testhelper.pyx
index fe67691aa..10127f3b4 100644
--- a/tests/unit/cython/utils_testhelper.pyx
+++ b/tests/unit/cython/utils_testhelper.pyx
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# cython: language_level=3
+
 import datetime
 
 from cassandra.cython_utils cimport datetime_from_timestamp
diff --git a/tests/unit/io/utils.py b/tests/unit/io/utils.py
index 0e8eec52a..6f93e63f8 100644
--- a/tests/unit/io/utils.py
+++ b/tests/unit/io/utils.py
@@ -122,7 +122,7 @@ def submit_and_wait_for_completion(unit_test, create_timer, start, end, incremen
         pending_callbacks.append(callback)
 
     # wait for all the callbacks associated with the timers to be invoked
-    while len(pending_callbacks) is not 0:
+    while len(pending_callbacks) != 0:
         for callback in pending_callbacks:
             if callback.was_invoked():
                 pending_callbacks.remove(callback)
@@ -232,7 +232,7 @@ def make_error_body(self, code, msg):
     def make_msg(self, header, body=bytes()):
         return header + uint32_pack(len(body)) + body
 
-    def test_successful_connection(self):
+    def _test_successful_connection(self):
         c = self.make_connection()
 
         # let it write the OptionsMessage
@@ -254,6 +254,9 @@ def test_successful_connection(self):
         self.assertTrue(c.connected_event.is_set())
         return c
 
+    def test_successful_connection(self):
+        self._test_successful_connection()
+
     def test_eagain_on_buffer_size(self):
         self._check_error_recovery_on_buffer_size(errno.EAGAIN)
 
@@ -271,7 +274,7 @@ def test_sslwantwrite_on_buffer_size(self):
                                                  error_class=ssl.SSLError)
 
     def _check_error_recovery_on_buffer_size(self, error_code, error_class=socket_error):
-        c = self.test_successful_connection()
+        c = self._test_successful_connection()
 
         # current data, used by the recv side_effect
         message_chunks = None
diff --git a/tests/unit/test_cluster.py b/tests/unit/test_cluster.py
index 3334e650a..2372eaf19 100644
--- a/tests/unit/test_cluster.py
+++ b/tests/unit/test_cluster.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
 import unittest
+import pytest
 
 import logging
@@ -274,6 +275,9 @@ def test_default_exec_parameters(self):
         self.assertEqual(cluster.profile_manager.default.row_factory, named_tuple_factory)
 
     @mock_session_pools
+    @pytest.mark.filterwarnings("ignore:DowngradingConsistencyRetryPolicy:DeprecationWarning")
+    @pytest.mark.filterwarnings("ignore:Legacy execution parameters will be removed in 4.0:DeprecationWarning")
+    @pytest.mark.filterwarnings("ignore:Setting the consistency level at the session level will be removed in 4.0:DeprecationWarning")
     def test_default_legacy(self):
         cluster = Cluster(load_balancing_policy=RoundRobinPolicy(), default_retry_policy=DowngradingConsistencyRetryPolicy())
         self.assertEqual(cluster._config_mode, _ConfigMode.LEGACY)
@@ -321,6 +325,8 @@ def test_serial_consistency_level_validation(self):
             ep = ExecutionProfile(RoundRobinPolicy(), serial_consistency_level=42)
 
     @mock_session_pools
+    @pytest.mark.filterwarnings("ignore:DowngradingConsistencyRetryPolicy:DeprecationWarning")
+    @pytest.mark.filterwarnings("ignore:Legacy execution parameters will be removed in 4.0:DeprecationWarning")
     def test_statement_params_override_legacy(self):
         cluster = Cluster(load_balancing_policy=RoundRobinPolicy(), default_retry_policy=DowngradingConsistencyRetryPolicy())
         self.assertEqual(cluster._config_mode, _ConfigMode.LEGACY)
@@ -342,6 +348,7 @@ def test_statement_params_override_legacy(self):
         self._verify_response_future_profile(rf, expected_profile)
 
     @mock_session_pools
+    @pytest.mark.filterwarnings("ignore:DowngradingConsistencyRetryPolicy:DeprecationWarning")
     def test_statement_params_override_profile(self):
         non_default_profile = ExecutionProfile(RoundRobinPolicy(), *[object() for _ in range(2)])
         cluster = Cluster(execution_profiles={'non-default': non_default_profile})
@@ -366,6 +373,9 @@ def test_statement_params_override_profile(self):
         self._verify_response_future_profile(rf, expected_profile)
 
     @mock_session_pools
+    @pytest.mark.filterwarnings("ignore:DowngradingConsistencyRetryPolicy:DeprecationWarning")
+    @pytest.mark.filterwarnings("ignore:Legacy execution parameters will be removed in 4.0:DeprecationWarning")
+    @pytest.mark.filterwarnings("ignore:Setting the consistency level at the session level will be removed in 4.0:DeprecationWarning")
    def test_no_profile_with_legacy(self):
         # don't construct with both
         self.assertRaises(ValueError, Cluster, load_balancing_policy=RoundRobinPolicy(), execution_profiles={'a': ExecutionProfile()})
@@ -392,6 +402,7 @@ def test_no_profile_with_legacy(self):
         self.assertRaises(ValueError, session.execute_async, "query", execution_profile='some name here')
 
     @mock_session_pools
+    @pytest.mark.filterwarnings("ignore:Setting the consistency level at the session level will be removed in 4.0:DeprecationWarning")
     def test_no_legacy_with_profile(self):
         cluster_init = Cluster(execution_profiles={'name': ExecutionProfile()})
         cluster_add = Cluster()
@@ -512,6 +523,7 @@ def _check_warning_on_no_lbp_with_contact_points(self, cluster_kwargs):
         self.assertIn('please specify a load-balancing policy', warning_message)
         self.assertIn("contact_points = ['127.0.0.1']", warning_message)
 
+    @pytest.mark.filterwarnings("ignore:Legacy execution parameters will be removed in 4.0:DeprecationWarning")
     def test_no_warning_on_contact_points_with_lbp_legacy_mode(self):
         """
         Test that users aren't warned when they instantiate a Cluster object
diff --git a/tests/unit/test_exception.py b/tests/unit/test_exception.py
index b39b22239..5c8e8d9ec 100644
--- a/tests/unit/test_exception.py
+++ b/tests/unit/test_exception.py
@@ -29,7 +29,7 @@ def extract_consistency(self, msg):
         :param msg: message with consistency value
         :return: String representing consistency value
         """
-        match = re.search("'consistency':\s+'([\w\s]+)'", msg)
+        match = re.search(r"'consistency':\s+'([\w\s]+)'", msg)
         return match and match.group(1)
 
     def test_timeout_consistency(self):
diff --git a/tests/unit/test_metadata.py b/tests/unit/test_metadata.py
index 94fed1345..616cf31cd 100644
--- a/tests/unit/test_metadata.py
+++ b/tests/unit/test_metadata.py
@@ -848,9 +848,9 @@ def test_strip_frozen(self):
         argument_to_expected_results = [
             ('int', 'int'),
             ('tuple', 'tuple'),
-            (r'map<"!@#$%^&*()[]\\ frozen >>>", int>', r'map<"!@#$%^&*()[]\ frozen >>>", int>'),  # A valid UDT name
+            (r'map<"!@#$%^&*()[]\ frozen >>>", int>', r'map<"!@#$%^&*()[]\ frozen >>>", int>'),  # A valid UDT name
             ('frozen>', 'tuple'),
-            (r'frozen>>", int>>', r'map<"!@#$%^&*()[]\ frozen >>>", int>'),
+            (r'frozen>>", int>>', r'map<"!@#$%^&*()[]\ frozen >>>", int>'),
             ('frozen>, int>>, frozen>>>>>', 'map, int>, map>>'),
         ]
 
diff --git a/tests/unit/test_policies.py b/tests/unit/test_policies.py
index 15bd1ea95..1d266f9a7 100644
--- a/tests/unit/test_policies.py
+++ b/tests/unit/test_policies.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import unittest
+import pytest
 
 from itertools import islice, cycle
 from mock import Mock, patch, call
@@ -1179,6 +1180,7 @@ def test_unavailable(self):
         self.assertEqual(consistency, None)
 
 
+@pytest.mark.filterwarnings("ignore:DowngradingConsistencyRetryPolicy:DeprecationWarning")
 class DowngradingConsistencyRetryPolicyTest(unittest.TestCase):
 
     def test_read_timeout(self):
diff --git a/tests/unit/test_response_future.py b/tests/unit/test_response_future.py
index ef667d081..345e3f2c0 100644
--- a/tests/unit/test_response_future.py
+++ b/tests/unit/test_response_future.py
@@ -81,7 +81,7 @@ def test_result_message(self):
         expected_result = (object(), object())
         rf._set_result(None, None, None, self.make_mock_response(expected_result[0], expected_result[1]))
 
-        result = rf.result()[0]
+        result = rf.result().one()
         self.assertEqual(result, expected_result)
 
     def test_unknown_result_class(self):
@@ -127,7 +127,7 @@ def test_other_result_message_kind(self):
         rf.send_request()
         result = Mock(spec=ResultMessage, kind=999, results=[1, 2, 3])
         rf._set_result(None, None, None, result)
-        self.assertEqual(rf.result()[0], result)
+        self.assertEqual(rf.result().one(), result)
 
     def test_heartbeat_defunct_deadlock(self):
         """
@@ -395,7 +395,7 @@ def test_first_pool_shutdown(self):
         expected_result = (object(), object())
         rf._set_result(None, None, None, self.make_mock_response(expected_result[0], expected_result[1]))
 
-        result = rf.result()[0]
+        result = rf.result().one()
         self.assertEqual(result, expected_result)
 
     def test_timeout_getting_connection_from_pool(self):
@@ -419,7 +419,7 @@ def test_timeout_getting_connection_from_pool(self):
         expected_result = (object(), object())
         rf._set_result(None, None, None, self.make_mock_response(expected_result[0], expected_result[1]))
-        self.assertEqual(rf.result()[0], expected_result)
+        self.assertEqual(rf.result().one(), expected_result)
 
         # make sure the exception is recorded correctly
         self.assertEqual(rf._errors, {'ip1': exc})
 
@@ -437,7 +437,7 @@ def test_callback(self):
 
         rf._set_result(None, None, None, self.make_mock_response(expected_result[0], expected_result[1]))
 
-        result = rf.result()[0]
+        result = rf.result().one()
         self.assertEqual(result, expected_result)
 
         callback.assert_called_once_with([expected_result], arg, **kwargs)
@@ -487,7 +487,7 @@ def test_multiple_callbacks(self):
 
         rf._set_result(None, None, None, self.make_mock_response(expected_result[0], expected_result[1]))
 
-        result = rf.result()[0]
+        result = rf.result().one()
         self.assertEqual(result, expected_result)
 
         callback.assert_called_once_with([expected_result], arg, **kwargs)
@@ -560,7 +560,7 @@ def test_add_callbacks(self):
                          errback=self.assertIsInstance, errback_args=(Exception,))
 
         rf._set_result(None, None, None, self.make_mock_response(expected_result[0], expected_result[1]))
-        self.assertEqual(rf.result()[0], expected_result)
+        self.assertEqual(rf.result().one(), expected_result)
 
         callback.assert_called_once_with([expected_result], arg, **kwargs)
diff --git a/tests/unit/test_resultset.py b/tests/unit/test_resultset.py
index 97002d90d..5163ac962 100644
--- a/tests/unit/test_resultset.py
+++ b/tests/unit/test_resultset.py
@@ -14,6 +14,7 @@
 from cassandra.query import named_tuple_factory, dict_factory, tuple_factory
 
 import unittest
+import pytest
 
 from mock import Mock, PropertyMock, patch
 
@@ -51,6 +52,7 @@ def test_iter_paged_with_empty_pages(self):
         itr = iter(rs)
         self.assertListEqual(list(itr), expected)
 
+    @pytest.mark.filterwarnings("ignore:ResultSet indexing support will be removed in 4.0:DeprecationWarning")
     def test_list_non_paged(self):
         # list access on RS for backwards-compatibility
         expected = list(range(10))
@@ -78,6 +80,7 @@ def test_has_more_pages(self):
         self.assertTrue(rs.has_more_pages)
         self.assertFalse(rs.has_more_pages)
 
+    @pytest.mark.filterwarnings("ignore:ResultSet indexing support will be removed in 4.0:DeprecationWarning")
     def test_iterate_then_index(self):
         # RuntimeError if indexing with no pages
         expected = list(range(10))
@@ -113,6 +116,7 @@ def test_iterate_then_index(self):
         self.assertFalse(rs)
         self.assertFalse(list(rs))
 
+    @pytest.mark.filterwarnings("ignore:ResultSet indexing support will be removed in 4.0:DeprecationWarning")
     def test_index_list_mode(self):
         # no pages
         expected = list(range(10))
@@ -152,7 +156,7 @@ def test_eq(self):
 
         # results can be iterated or indexed once we're materialized
         self.assertListEqual(list(rs), expected)
-        self.assertEqual(rs[9], expected[9])
+        self.assertEqual(list(rs)[9], expected[9])
         self.assertTrue(rs)
 
         # pages
@@ -165,7 +169,7 @@ def test_eq(self):
 
         # results can be iterated or indexed once we're materialized
         self.assertListEqual(list(rs), expected)
-        self.assertEqual(rs[9], expected[9])
+        self.assertEqual(list(rs)[9], expected[9])
         self.assertTrue(rs)
 
     def test_bool(self):
diff --git a/tests/unit/test_types.py b/tests/unit/test_types.py
index b77c9dcdb..270108128 100644
--- a/tests/unit/test_types.py
+++ b/tests/unit/test_types.py
@@ -193,7 +193,7 @@ def test_empty_value(self):
 
     def test_datetype(self):
         now_time_seconds = time.time()
-        now_datetime = datetime.datetime.utcfromtimestamp(now_time_seconds)
+        now_datetime = datetime.datetime.fromtimestamp(now_time_seconds, tz=utc_timezone)
 
         # Cassandra timestamps in millis
         now_timestamp = now_time_seconds * 1e3
@@ -204,7 +204,7 @@ def test_datetype(self):
         # deserialize
         # epoc
         expected = 0
-        self.assertEqual(DateType.deserialize(int64_pack(1000 * expected), 0), datetime.datetime.utcfromtimestamp(expected))
+        self.assertEqual(DateType.deserialize(int64_pack(1000 * expected), 0), datetime.datetime.fromtimestamp(expected, tz=utc_timezone).replace(tzinfo=None))
 
         # beyond 32b
         expected = 2 ** 33
@@ -333,7 +333,7 @@ def test_month_rounding_creation_failure(self):
         @jira_ticket PYTHON-912
         """
         feb_stamp = ms_timestamp_from_datetime(
-            datetime.datetime(2018, 2, 25, 18, 59, 59, 0)
+            datetime.datetime(2018, 2, 25, 18, 59, 59, 0, tzinfo=utc_timezone)
         )
 
         dr = DateRange(OPEN_BOUND, DateRangeBound(feb_stamp, DateRangePrecision.MONTH))
@@ -342,7 +342,7 @@ def test_month_rounding_creation_failure(self):
 
         # Leap year
         feb_stamp_leap_year = ms_timestamp_from_datetime(
-            datetime.datetime(2016, 2, 25, 18, 59, 59, 0)
+            datetime.datetime(2016, 2, 25, 18, 59, 59, 0, tzinfo=utc_timezone)
         )
 
         dr = DateRange(OPEN_BOUND, DateRangeBound(feb_stamp_leap_year, DateRangePrecision.MONTH))
@@ -370,7 +370,7 @@ def test_deserialize_single_value(self):
         self.assertEqual(
             DateRangeType.deserialize(serialized, 5),
             util.DateRange(value=util.DateRangeBound(
-                value=datetime.datetime(2017, 2, 1, 15, 42, 12, 404000),
+                value=datetime.datetime(2017, 2, 1, 15, 42, 12, 404000, tzinfo=utc_timezone),
                 precision='HOUR')
             )
         )
@@ -385,11 +385,11 @@ def test_deserialize_closed_range(self):
             DateRangeType.deserialize(serialized, 5),
             util.DateRange(
                 lower_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 0, 0),
+                    value=datetime.datetime(2017, 2, 1, 0, 0, tzinfo=utc_timezone),
                     precision='DAY'
                 ),
                 upper_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 15, 42, 12, 404000),
+                    value=datetime.datetime(2017, 2, 1, 15, 42, 12, 404000, tzinfo=utc_timezone),
                     precision='MILLISECOND'
                 )
             )
@@ -404,7 +404,7 @@ def test_deserialize_open_high(self):
             deserialized,
             util.DateRange(
                 lower_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 15, 0),
+                    value=datetime.datetime(2017, 2, 1, 15, 0, tzinfo=utc_timezone),
                     precision='HOUR'
                 ),
                 upper_bound=util.OPEN_BOUND
@@ -421,7 +421,7 @@ def test_deserialize_open_low(self):
             util.DateRange(
                 lower_bound=util.OPEN_BOUND,
                 upper_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 15, 42, 20, 1000),
+                    value=datetime.datetime(2017, 2, 1, 15, 42, 20, 1000, tzinfo=utc_timezone),
                     precision='MINUTE'
                 )
             )
@@ -442,7 +442,7 @@ def test_serialize_single_value(self):
             deserialized,
             util.DateRange(
                 value=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 15, 42, 12),
+                    value=datetime.datetime(2017, 2, 1, 15, 42, 12, tzinfo=utc_timezone),
                     precision='SECOND'
                 )
             )
@@ -459,11 +459,11 @@ def test_serialize_closed_range(self):
             deserialized,
             util.DateRange(
                 lower_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 15, 42, 12),
+                    value=datetime.datetime(2017, 2, 1, 15, 42, 12, tzinfo=utc_timezone),
                     precision='SECOND'
                 ),
                 upper_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 12, 31),
+                    value=datetime.datetime(2017, 12, 31, tzinfo=utc_timezone),
                     precision='YEAR'
                 )
             )
@@ -478,7 +478,7 @@ def test_serialize_open_high(self):
             deserialized,
             util.DateRange(
                 lower_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1),
+                    value=datetime.datetime(2017, 2, 1, tzinfo=utc_timezone),
                     precision='DAY'
                 ),
                 upper_bound=util.OPEN_BOUND
@@ -494,7 +494,7 @@ def test_serialize_open_low(self):
             deserialized,
             util.DateRange(
                 lower_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 15),
+                    value=datetime.datetime(2017, 2, 1, 15, tzinfo=utc_timezone),
                     precision='HOUR'
                 ),
                 upper_bound=util.OPEN_BOUND
@@ -555,8 +555,8 @@ def test_serialize_zero_datetime(self):
         @test_category data_types
         """
         DateRangeType.serialize(util.DateRange(
-            lower_bound=(datetime.datetime(1970, 1, 1), 'YEAR'),
-            upper_bound=(datetime.datetime(1970, 1, 1), 'YEAR')
+            lower_bound=(datetime.datetime(1970, 1, 1, tzinfo=utc_timezone), 'YEAR'),
+            upper_bound=(datetime.datetime(1970, 1, 1, tzinfo=utc_timezone), 'YEAR')
         ), 5)
 
     def test_deserialize_zero_datetime(self):