From 77c5909d3f735d528efd9355433b2d3f4aa967dc Mon Sep 17 00:00:00 2001 From: Uma Annamalai Date: Tue, 28 Jun 2022 13:40:10 -0700 Subject: [PATCH] Add support for aioredis. (#577) * Add aioredis Instrumentation (#567) * Add aioredis instrumentation Co-authored-by: Uma Annamalai Co-authored-by: Nyenty Ayuk Co-authored-by: ccedacero-nr * [Mega-Linter] Apply linters fixes * Bump Tests * Fix double wrapping Co-authored-by: Uma Annamalai Co-authored-by: Nyenty Ayuk Co-authored-by: ccedacero-nr Co-authored-by: TimPansino * Add aioredis test infrastructure. (#568) * Add aioredis test infra. * Fix flake8 errors. * Aredis concurrency bug reproduction. (#569) Co-authored-by: Uma Annamalai Co-authored-by: Uma Annamalai * Add aioredis tests (#573) * Add get and set tests. * Add more testing for aioredis. * Add aioredis testing. Co-authored-by: Tim Pansino Co-authored-by: Cristian Cedacero Co-authored-by: Nyenty Ayuk-Enow * Patch broken tests * Final aioredis testing cleanup Co-authored-by: Nyenty Ayuk Co-authored-by: ccedacero-nr Co-authored-by: Uma Annamalai * Parametrize multiple db tests. * Add missing arg. * Fix typo. * Add missing comma. * Add background_task decorator. * Parametrize instance info tests. 
* Fix formatting Co-authored-by: Tim Pansino Co-authored-by: Cristian Cedacero Co-authored-by: Nyenty Ayuk-Enow Co-authored-by: Tim Pansino Co-authored-by: ccedacero-nr Co-authored-by: Uma Annamalai Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> * Fix AIORedis Concurrency Bug (#574) * Add test for concurrency bug * Fix aioredis concurrency * Fix func signature * Fix ARedis Concurrency Bug (#570) * Patch aredis concurrency bug * Remove xfail marker * Format * Move fixture import Co-authored-by: Uma Annamalai * Increase concurrency of redis tests (#575) * Instrument All Redis Client Methods (#576) * Initial test files * Fully instrument uninstrumented redis client methods Co-authored-by: Uma Annamalai Co-authored-by: ccedacero-nr Co-authored-by: Nyenty Ayuk * Fix older redis client tests * Fix missing redis client method * Remove sentinel from commands list * Fix sentinels again Co-authored-by: Uma Annamalai Co-authored-by: ccedacero-nr Co-authored-by: Nyenty Ayuk * Add aioredis v1 support (#579) * Add aioredis v1 tests * Fix aioredis v1 Co-authored-by: Uma Annamalai * Adjust multiple dbs tests * Fix megalinter default base * Fix megalinter base take two * Fix aioredis version parser * Uncomment instance info tests * Fix import issues Co-authored-by: Uma Annamalai Co-authored-by: Timothy Pansino <11214426+TimPansino@users.noreply.github.com> Co-authored-by: Uma Annamalai Co-authored-by: Nyenty Ayuk Co-authored-by: ccedacero-nr Co-authored-by: TimPansino Co-authored-by: Tim Pansino Co-authored-by: Cristian Cedacero Co-authored-by: Tim Pansino --- .github/workflows/mega-linter.yml | 1 + .github/workflows/tests.yml | 4 +- newrelic/config.py | 32 ++ newrelic/hooks/datastore_aioredis.py | 127 +++++ newrelic/hooks/datastore_aredis.py | 60 +- newrelic/hooks/datastore_redis.py | 516 ++++++++++++++---- tests/datastore_aioredis/conftest.py | 54 ++ .../test_custom_conn_pool.py | 165 ++++++ .../test_execute_command.py | 135 +++++ 
tests/datastore_aioredis/test_get_and_set.py | 106 ++++ .../datastore_aioredis/test_instance_info.py | 127 +++++ tests/datastore_aioredis/test_multiple_dbs.py | 188 +++++++ tests/datastore_aioredis/test_span_event.py | 152 ++++++ tests/datastore_aioredis/test_trace_node.py | 128 +++++ .../test_uninstrumented_methods.py | 92 ++++ tests/datastore_aredis/conftest.py | 1 + tests/datastore_aredis/test_multiple_dbs.py | 156 ++++-- .../test_uninstrumented_methods.py | 48 ++ tests/datastore_redis/test_multiple_dbs.py | 2 +- .../test_uninstrumented_methods.py | 118 ++++ tox.ini | 5 + 21 files changed, 2027 insertions(+), 190 deletions(-) create mode 100644 newrelic/hooks/datastore_aioredis.py create mode 100644 tests/datastore_aioredis/conftest.py create mode 100644 tests/datastore_aioredis/test_custom_conn_pool.py create mode 100644 tests/datastore_aioredis/test_execute_command.py create mode 100644 tests/datastore_aioredis/test_get_and_set.py create mode 100644 tests/datastore_aioredis/test_instance_info.py create mode 100644 tests/datastore_aioredis/test_multiple_dbs.py create mode 100644 tests/datastore_aioredis/test_span_event.py create mode 100644 tests/datastore_aioredis/test_trace_node.py create mode 100644 tests/datastore_aioredis/test_uninstrumented_methods.py create mode 100644 tests/datastore_aredis/test_uninstrumented_methods.py create mode 100644 tests/datastore_redis/test_uninstrumented_methods.py diff --git a/.github/workflows/mega-linter.yml b/.github/workflows/mega-linter.yml index 4a717c9af0..75ab9a4b18 100644 --- a/.github/workflows/mega-linter.yml +++ b/.github/workflows/mega-linter.yml @@ -40,6 +40,7 @@ jobs: # All available variables are described in documentation # https://megalinter.github.io/configuration/ VALIDATE_ALL_CODEBASE: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} # Validates all source when push on main, else just the git diff with main. 
Set 'true' if you always want to lint all sources + DEFAULT_BRANCH: ${{ github.event_name == 'pull_request' && github.base_ref || 'main' }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # ADD YOUR CUSTOM ENV VARIABLES HERE TO OVERRIDE VALUES OF .mega-linter.yml AT THE ROOT OF YOUR REPOSITORY diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 8864b82201..b96a101865 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -509,11 +509,11 @@ jobs: redis: env: - TOTAL_GROUPS: 1 + TOTAL_GROUPS: 2 strategy: matrix: - group-number: [1] + group-number: [1, 2] runs-on: ubuntu-latest timeout-minutes: 30 diff --git a/newrelic/config.py b/newrelic/config.py index 11601551c2..1c3571a546 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -2603,6 +2603,14 @@ def _process_module_builtin_defaults(): "instrument_aredis_connection", ) + _process_module_definition("aioredis.client", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_client") + + _process_module_definition("aioredis.commands", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_client") + + _process_module_definition( + "aioredis.connection", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_connection" + ) + _process_module_definition( "elasticsearch.client", "newrelic.hooks.datastore_elasticsearch", @@ -2691,6 +2699,30 @@ def _process_module_builtin_defaults(): "redis.commands.core", "newrelic.hooks.datastore_redis", "instrument_redis_commands_core" ) + _process_module_definition( + "redis.commands.sentinel", "newrelic.hooks.datastore_redis", "instrument_redis_commands_sentinel" + ) + + _process_module_definition( + "redis.commands.json.commands", "newrelic.hooks.datastore_redis", "instrument_redis_commands_json_commands" + ) + + _process_module_definition( + "redis.commands.search.commands", "newrelic.hooks.datastore_redis", "instrument_redis_commands_search_commands" + ) + + _process_module_definition( + "redis.commands.timeseries.commands", 
"newrelic.hooks.datastore_redis", "instrument_redis_commands_timeseries_commands" + ) + + _process_module_definition( + "redis.commands.bf.commands", "newrelic.hooks.datastore_redis", "instrument_redis_commands_bf_commands" + ) + + _process_module_definition( + "redis.commands.graph.commands", "newrelic.hooks.datastore_redis", "instrument_redis_commands_graph_commands" + ) + _process_module_definition("motor", "newrelic.hooks.datastore_motor", "patch_motor") _process_module_definition( diff --git a/newrelic/hooks/datastore_aioredis.py b/newrelic/hooks/datastore_aioredis.py new file mode 100644 index 0000000000..13f08fd19a --- /dev/null +++ b/newrelic/hooks/datastore_aioredis.py @@ -0,0 +1,127 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from newrelic.api.datastore_trace import DatastoreTrace +from newrelic.api.time_trace import current_trace +from newrelic.api.transaction import current_transaction +from newrelic.common.object_wrapper import wrap_function_wrapper +from newrelic.hooks.datastore_redis import ( + _redis_client_methods, + _redis_multipart_commands, + _redis_operation_re, +) + + +def _conn_attrs_to_dict(connection): + host = getattr(connection, "host", None) + port = getattr(connection, "port", None) + if not host and not port and hasattr(connection, "_address"): + host, port = connection._address + return { + "host": host, + "port": port, + "path": getattr(connection, "path", None), + "db": getattr(connection, "db", getattr(connection, "_db", None)), + } + + +def _instance_info(kwargs): + host = kwargs.get("host") or "localhost" + port_path_or_id = str(kwargs.get("port") or kwargs.get("path", 6379)) + db = str(kwargs.get("db") or 0) + + return (host, port_path_or_id, db) + + +def _wrap_AioRedis_method_wrapper(module, instance_class_name, operation): + async def _nr_wrapper_AioRedis_method_(wrapped, instance, args, kwargs): + transaction = current_transaction() + if transaction is None: + return await wrapped(*args, **kwargs) + + with DatastoreTrace(product="Redis", target=None, operation=operation): + return await wrapped(*args, **kwargs) + + name = "%s.%s" % (instance_class_name, operation) + wrap_function_wrapper(module, name, _nr_wrapper_AioRedis_method_) + + +async def wrap_Connection_send_command(wrapped, instance, args, kwargs): + transaction = current_transaction() + if not transaction: + return await wrapped(*args, **kwargs) + + host, port_path_or_id, db = (None, None, None) + + try: + dt = transaction.settings.datastore_tracer + if dt.instance_reporting.enabled or dt.database_name_reporting.enabled: + conn_kwargs = _conn_attrs_to_dict(instance) + host, port_path_or_id, db = _instance_info(conn_kwargs) + except Exception: + pass + + # Older Redis clients would when sending 
multi part commands pass + # them in as separate arguments to send_command(). Need to therefore + # detect those and grab the next argument from the set of arguments. + + operation = args[0].strip().lower() + + # If it's not a multi part command, there's no need to trace it, so + # we can return early. + + if operation.split()[0] not in _redis_multipart_commands: # Set the datastore info on the DatastoreTrace containing this function call. + trace = current_trace() + + # Find DatastoreTrace no matter how many other traces are in between + while trace is not None and not isinstance(trace, DatastoreTrace): + trace = getattr(trace, "parent", None) + + if trace is not None: + trace.host = host + trace.port_path_or_id = port_path_or_id + trace.database_name = db + + return await wrapped(*args, **kwargs) + + # Convert multi args to single arg string + + if operation in _redis_multipart_commands and len(args) > 1: + operation = "%s %s" % (operation, args[1].strip().lower()) + + operation = _redis_operation_re.sub("_", operation) + + with DatastoreTrace( + product="Redis", target=None, operation=operation, host=host, port_path_or_id=port_path_or_id, database_name=db + ): + return await wrapped(*args, **kwargs) + + +def instrument_aioredis_client(module): + # StrictRedis is just an alias of Redis, no need to wrap it as well.
+ if hasattr(module, "Redis"): + class_ = getattr(module, "Redis") + for operation in _redis_client_methods: + if hasattr(class_, operation): + _wrap_AioRedis_method_wrapper(module, "Redis", operation) + + +def instrument_aioredis_connection(module): + if hasattr(module, "Connection"): + if hasattr(module.Connection, "send_command"): + wrap_function_wrapper(module, "Connection.send_command", wrap_Connection_send_command) + + if hasattr(module, "RedisConnection"): + if hasattr(module.RedisConnection, "execute"): + wrap_function_wrapper(module, "RedisConnection.execute", wrap_Connection_send_command) \ No newline at end of file diff --git a/newrelic/hooks/datastore_aredis.py b/newrelic/hooks/datastore_aredis.py index 4edd8e4b24..a63da57c7b 100644 --- a/newrelic/hooks/datastore_aredis.py +++ b/newrelic/hooks/datastore_aredis.py @@ -12,11 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. - from newrelic.api.datastore_trace import DatastoreTrace +from newrelic.api.time_trace import current_trace from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import wrap_function_wrapper -from newrelic.hooks.datastore_redis import _conn_attrs_to_dict, _instance_info, _redis_client_methods, _redis_multipart_commands, _redis_operation_re +from newrelic.hooks.datastore_redis import ( + _conn_attrs_to_dict, + _instance_info, + _redis_client_methods, + _redis_multipart_commands, + _redis_operation_re, +) def _wrap_Aredis_method_wrapper_(module, instance_class_name, operation): @@ -25,35 +31,18 @@ async def _nr_wrapper_Aredis_method_(wrapped, instance, args, kwargs): if transaction is None: return await wrapped(*args, **kwargs) - dt = DatastoreTrace(product="Redis", target=None, operation=operation) - - transaction._nr_datastore_instance_info = (None, None, None) - - with dt: - result = await wrapped(*args, **kwargs) - - host, port_path_or_id, db = 
transaction._nr_datastore_instance_info - dt.host = host - dt.port_path_or_id = port_path_or_id - dt.database_name = db - return result + with DatastoreTrace(product="Redis", target=None, operation=operation): + return await wrapped(*args, **kwargs) name = "%s.%s" % (instance_class_name, operation) wrap_function_wrapper(module, name, _nr_wrapper_Aredis_method_) -def instrument_aredis_client(module): - if hasattr(module, "StrictRedis"): - for name in _redis_client_methods: - if hasattr(module.StrictRedis, name): - _wrap_Aredis_method_wrapper_(module, "StrictRedis", name) - - -async def _nr_Connection_send_command_wrapper_(wrapped, instance, args, kwargs): +async def wrap_Connection_send_command(wrapped, instance, args, kwargs): transaction = current_transaction() if not transaction: return await wrapped(*args, **kwargs) - + host, port_path_or_id, db = (None, None, None) try: @@ -64,8 +53,6 @@ async def _nr_Connection_send_command_wrapper_(wrapped, instance, args, kwargs): except Exception: pass - transaction._nr_datastore_instance_info = (host, port_path_or_id, db) - # Older Redis clients would when sending multi part commands pass # them in as separate arguments to send_command(). Need to therefore # detect those and grab the next argument from the set of arguments. @@ -76,6 +63,18 @@ async def _nr_Connection_send_command_wrapper_(wrapped, instance, args, kwargs): # we can return early. if operation.split()[0] not in _redis_multipart_commands: + # Set the datastore info on the DatastoreTrace containing this function call. 
+ trace = current_trace() + + # Find DatastoreTrace no matter how many other traces are in between + while trace is not None and not isinstance(trace, DatastoreTrace): + trace = getattr(trace, "parent", None) + + if trace is not None: + trace.host = host + trace.port_path_or_id = port_path_or_id + trace.database_name = db + return await wrapped(*args, **kwargs) # Convert multi args to single arg string @@ -89,7 +88,14 @@ async def _nr_Connection_send_command_wrapper_(wrapped, instance, args, kwargs): product="Redis", target=None, operation=operation, host=host, port_path_or_id=port_path_or_id, database_name=db ): return await wrapped(*args, **kwargs) - + + +def instrument_aredis_client(module): + if hasattr(module, "StrictRedis"): + for name in _redis_client_methods: + if hasattr(module.StrictRedis, name): + _wrap_Aredis_method_wrapper_(module, "StrictRedis", name) + def instrument_aredis_connection(module): - wrap_function_wrapper(module.Connection, "send_command", _nr_Connection_send_command_wrapper_) + wrap_function_wrapper(module.Connection, "send_command", wrap_Connection_send_command) diff --git a/newrelic/hooks/datastore_redis.py b/newrelic/hooks/datastore_redis.py index 9587b206d9..f25c76e655 100644 --- a/newrelic/hooks/datastore_redis.py +++ b/newrelic/hooks/datastore_redis.py @@ -18,169 +18,431 @@ from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import wrap_function_wrapper -_redis_client_methods = ( +_redis_client_methods = { + "acl_cat", + "acl_deluser", + "acl_dryrun", + "acl_genpass", + "acl_getuser", + "acl_help", + "acl_list", + "acl_load", + "acl_log_reset", + "acl_log", + "acl_save", + "acl_setuser", + "acl_users", + "acl_whoami", + "add_document_hash", + "add_document", + "add", + "addnx", + "aggregate", + "aliasadd", + "aliasdel", + "aliasupdate", + "alter_schema_add", + "alter", + "append", + "arrappend", + "arrindex", + "arrinsert", + "arrlen", + "arrpop", + "arrtrim", + "auth", "bgrewriteaof",
"bgsave", + "bitcount", + "bitfield", + "bitop_and", + "bitop_not", + "bitop_or", + "bitop_xor", + "bitop", + "bitpos", + "blmove", + "blmpop", + "blpop", + "brpop", + "brpoplpush", + "bzmpop", + "bzpopmax", + "bzpopmin", + "cdf", + "clear", + "client_getname", + "client_getredir", + "client_id", + "client_info", + "client_kill_filter", "client_kill", "client_list", - "client_getname", + "client_no_evict", + "client_pause", + "client_reply", "client_setname", + "client_tracking", + "client_trackinginfo", + "client_unblock", + "client_unpause", + "client", + "cluster_add_slots", + "cluster_addslots", + "cluster_count_failure_report", + "cluster_count_failure_reports", + "cluster_count_key_in_slots", + "cluster_countkeysinslot", + "cluster_del_slots", + "cluster_delslots", + "cluster_failover", + "cluster_forget", + "cluster_get_keys_in_slot", + "cluster_get_keys_in_slots", + "cluster_info", + "cluster_keyslot", + "cluster_meet", + "cluster_nodes", + "cluster_replicate", + "cluster_reset_all_nodes", + "cluster_reset", + "cluster_save_config", + "cluster_set_config_epoch", + "cluster_setslot", + "cluster_slaves", + "cluster_slots", + "cluster", + "command_count", + "command_docs", + "command_getkeys", + "command_getkeysandflags", + "command_info", + "command_list", + "command", + "commit", "config_get", - "config_set", "config_resetstat", "config_rewrite", + "config_set", + "config", + "copy", + "count", + "create_index", + "create", + "createrule", "dbsize", "debug_object", - "echo", - "flushall", - "flushdb", - "info", - "lastsave", - "object", - "ping", - "save", - "sentinel", - "sentinel_get_master_addr_by_name", - "sentinel_master", - "sentinel_masters", - "sentinel_monitor", - "sentinel_remove", - "sentinel_sentinels", - "sentinel_set", - "sentinel_slaves", - "shutdown", - "slaveof", - "slowlog_get", - "slowlog_reset", - "time", - "append", - "bitcount", - "bitop", - "bitpos", + "debug_segfault", + "debug_sleep", + "debug", "decr", + "decrby", + 
"delete_document", "delete", + "deleterule", + "dict_add", + "dict_del", + "dict_dump", + "drop_index", + "dropindex", "dump", + "echo", + "eval_ro", + "eval", + "evalsha_ro", + "evalsha", + "execution_plan", "exists", "expire", "expireat", + "expiretime", + "explain_cli", + "explain", + "failover", + "fcall_ro", + "fcall", + "flushall", + "flushdb", + "forget", + "function_delete", + "function_dump", + "function_flush", + "function_kill", + "function_list", + "function_load", + "function_restore", + "function_stats", + "geoadd", + "geodist", + "geohash", + "geopos", + "georadius", + "georadiusbymember", + "geosearch", + "geosearchstore", "get", "getbit", + "getdel", + "getex", "getrange", "getset", + "hdel", + "hello", + "hexists", + "hget", + "hgetall", + "hincrby", + "hincrbyfloat", + "hkeys", + "hlen", + "hmget", + "hmset_dict", + "hmset", + "hrandfield", + "hscan_inter", + "hscan", + "hset", + "hsetnx", + "hstrlen", + "hvals", "incr", "incrby", "incrbyfloat", + "info", + "initbydim", + "initbyprob", + "insert", + "insertnx", "keys", + "lastsave", + "latency_histogram", + "lcs", + "lindex", + "linsert", + "list", + "llen", + "lmove", + "lmpop", + "loadchunk", + "lolwut", + "lpop", + "lpos", + "lpush", + "lpushx", + "lrange", + "lrem", + "lset", + "ltrim", + "madd", + "max", + "memory_doctor", + "memory_help", + "memory_malloc_stats", + "memory_purge", + "memory_stats", + "memory_usage", + "merge", + "mexists", "mget", + "migrate_keys", + "migrate", + "min", + "module_list", + "module_load", + "module_loadex", + "module_unload", + "monitor", + "move", + "mrange", + "mrevrange", "mset", "msetnx", - "move", + "numincrby", + "object_encoding", + "object_idletime", + "object_refcount", + "object", + "objkeys", + "objlen", "persist", "pexpire", "pexpireat", + "pexpiretime", + "pfadd", + "pfcount", + "pfmerge", + "ping", + "profile", "psetex", + "psubscribe", + "psync", "pttl", + "publish", + "pubsub_channels", + "pubsub_numpat", + "pubsub_numsub", + "pubsub", + 
"punsubscribe", + "quantile", + "query", + "queryindex", + "quit", "randomkey", + "range", + "readonly", + "readwrite", "rename", "renamenx", + "replicaof", + "reserve", + "reset", + "resp", "restore", - "set", - "setbit", - "setex", - "setnx", - "setrange", - "strlen", - "substr", - "ttl", - "type", - "watch", - "unwatch", - "blpop", - "brpop", - "brpoplpush", - "lindex", - "linsert", - "llen", - "lpop", - "lpush", - "lpushx", - "lrange", - "lrem", - "lset", - "ltrim", + "revrange", + "role", "rpop", "rpoplpush", "rpush", "rpushx", - "sort", - "scan", - "scan_iter", - "sscan", - "sscan_iter", - "hscan", - "hscan_inter", - "zscan", - "zscan_iter", "sadd", + "save", + "scan_iter", + "scan", + "scandump", "scard", + "script_debug", + "script_exists", + "script_flush", + "script_kill", + "script_load", "sdiff", "sdiffstore", + "search", + "select", + "sentinel_ckquorum", + "sentinel_failover", + "sentinel_flushconfig", + "sentinel_get_master_addr_by_name", + "sentinel_master", + "sentinel_masters", + "sentinel_monitor", + "sentinel_remove", + "sentinel_reset", + "sentinel_sentinels", + "sentinel_set", + "sentinel_slaves", + "set", + "setbit", + "setex", + "setnx", + "setrange", + "shutdown", "sinter", + "sintercard", "sinterstore", "sismember", + "slaveof", + "slowlog_get", + "slowlog_len", + "slowlog_reset", + "slowlog", "smembers", + "smismember", "smove", + "sort_ro", + "sort", + "spellcheck", "spop", "srandmember", "srem", + "sscan_iter", + "sscan", + "stralgo", + "strappend", + "strlen", + "subscribe", + "substr", + "sugadd", + "sugdel", + "sugget", + "suglen", "sunion", "sunionstore", + "swapdb", + "sync", + "syndump", + "synupdate", + "tagvals", + "time", + "toggle", + "touch", + "ttl", + "type", + "unlink", + "unsubscribe", + "unwatch", + "wait", + "watch", + "xack", + "xadd", + "xautoclaim", + "xclaim", + "xdel", + "xgroup_create", + "xgroup_createconsumer", + "xgroup_del_consumer", + "xgroup_delconsumer", + "xgroup_destroy", + "xgroup_set_id", + 
"xgroup_setid", + "xinfo_consumers", + "xinfo_groups", + "xinfo_help", + "xinfo_stream", + "xlen", + "xpending_range", + "xpending", + "xrange", + "xread_group", + "xread", + "xreadgroup", + "xrevrange", + "xtrim", "zadd", + "zaddoption", "zcard", "zcount", + "zdiff", + "zdiffstore", "zincrby", + "zinter", + "zintercard", "zinterstore", "zlexcount", + "zmpop", + "zmscore", + "zpopmax", + "zpopmin", + "zrandmember", "zrange", "zrangebylex", "zrangebyscore", + "zrangestore", "zrank", "zrem", "zremrangebylex", "zremrangebyrank", "zremrangebyscore", "zrevrange", + "zrevrangebylex", "zrevrangebyscore", "zrevrank", + "zscan_iter", + "zscan", "zscore", + "zunion", "zunionstore", - "pfadd", - "pfcount", - "pfmerge", - "hdel", - "hexists", - "hget", - "hgetall", - "hincrby", - "hincrbyfloat", - "hkeys", - "hlen", - "hset", - "hsetnx", - "hmset", - "hmget", - "hvals", - "publish", - "eval", - "evalsha", - "script_exists", - "script_flush", - "script_kill", - "script_load", - "setex", - "lrem", - "zadd", -) +} _redis_multipart_commands = set(["client", "cluster", "command", "config", "debug", "sentinel", "slowlog", "script"]) @@ -229,28 +491,6 @@ def _nr_wrapper_Redis_method_(wrapped, instance, args, kwargs): wrap_function_wrapper(module, name, _nr_wrapper_Redis_method_) -def instrument_redis_client(module): - if hasattr(module, "StrictRedis"): - for name in _redis_client_methods: - if name in vars(module.StrictRedis): - _wrap_Redis_method_wrapper_(module, "StrictRedis", name) - - if hasattr(module, "Redis"): - for name in _redis_client_methods: - if name in vars(module.Redis): - _wrap_Redis_method_wrapper_(module, "Redis", name) - - -def instrument_redis_commands_core(module): - for name in _redis_client_methods: - if hasattr(module, "CoreCommands"): - if hasattr(module.CoreCommands, name): - _wrap_Redis_method_wrapper_(module, "CoreCommands", name) - if hasattr(module, "DataAccessCommands"): - if hasattr(module.DataAccessCommands, name): - 
_wrap_Redis_method_wrapper_(module, "DataAccessCommands", name) - - def _nr_Connection_send_command_wrapper_(wrapped, instance, args, kwargs): transaction = current_transaction() @@ -292,5 +532,57 @@ def _nr_Connection_send_command_wrapper_(wrapped, instance, args, kwargs): return wrapped(*args, **kwargs) +def instrument_redis_client(module): + if hasattr(module, "StrictRedis"): + for name in _redis_client_methods: + if name in vars(module.StrictRedis): + _wrap_Redis_method_wrapper_(module, "StrictRedis", name) + + if hasattr(module, "Redis"): + for name in _redis_client_methods: + if name in vars(module.Redis): + _wrap_Redis_method_wrapper_(module, "Redis", name) + + +def instrument_redis_commands_core(module): + _instrument_redis_commands_module(module, "CoreCommands") + + +def instrument_redis_commands_sentinel(module): + _instrument_redis_commands_module(module, "SentinelCommands") + + +def instrument_redis_commands_json_commands(module): + _instrument_redis_commands_module(module, "JSONCommands") + + +def instrument_redis_commands_search_commands(module): + _instrument_redis_commands_module(module, "SearchCommands") + + +def instrument_redis_commands_timeseries_commands(module): + _instrument_redis_commands_module(module, "TimeSeriesCommands") + + +def instrument_redis_commands_graph_commands(module): + _instrument_redis_commands_module(module, "GraphCommands") + + +def instrument_redis_commands_bf_commands(module): + _instrument_redis_commands_module(module, "BFCommands") + _instrument_redis_commands_module(module, "CFCommands") + _instrument_redis_commands_module(module, "CMSCommands") + _instrument_redis_commands_module(module, "TDigestCommands") + _instrument_redis_commands_module(module, "TOPKCommands") + + +def _instrument_redis_commands_module(module, class_name): + for name in _redis_client_methods: + if hasattr(module, class_name): + class_instance = getattr(module, class_name) + if hasattr(class_instance, name): + _wrap_Redis_method_wrapper_(module, 
class_name, name) + + def instrument_redis_connection(module): wrap_function_wrapper(module, "Connection.send_command", _nr_Connection_send_command_wrapper_) diff --git a/tests/datastore_aioredis/conftest.py b/tests/datastore_aioredis/conftest.py new file mode 100644 index 0000000000..d144af2dfb --- /dev/null +++ b/tests/datastore_aioredis/conftest.py @@ -0,0 +1,54 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import aioredis +import asyncio +import pytest + +from testing_support.fixtures import ( # noqa: F401 + code_coverage_fixture, + collector_agent_registration_fixture, + collector_available_fixture, +) + +AIOREDIS_VERSION = tuple(int(x) for x in aioredis.__version__.split(".")[:2]) + +_coverage_source = [ + "newrelic.hooks.datastore_aioredis", +] + +code_coverage = code_coverage_fixture(source=_coverage_source) + +_default_settings = { + "transaction_tracer.explain_threshold": 0.0, + "transaction_tracer.transaction_threshold": 0.0, + "transaction_tracer.stack_trace_threshold": 0.0, + "debug.log_data_collector_payloads": True, + "debug.record_transaction_failure": True, +} + +collector_agent_registration = collector_agent_registration_fixture( + app_name="Python Agent Test (datastore_aioredis)", + default_settings=_default_settings, + linked_applications=["Python Agent Test (datastore)"], +) + + +event_loop = asyncio.get_event_loop() +asyncio.set_event_loop(event_loop) + + +@pytest.fixture() +def loop(): + yield event_loop 
diff --git a/tests/datastore_aioredis/test_custom_conn_pool.py b/tests/datastore_aioredis/test_custom_conn_pool.py new file mode 100644 index 0000000000..155765e5ae --- /dev/null +++ b/tests/datastore_aioredis/test_custom_conn_pool.py @@ -0,0 +1,165 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" The purpose of these tests is to confirm that using a non-standard +connection pool that does not have a `connection_kwargs` attribute +will not result in an error. 
+""" + +import asyncio +import pytest +import aioredis + +from conftest import event_loop, loop, AIOREDIS_VERSION + +from newrelic.api.background_task import background_task + +# from testing_support.fixture.event_loop import event_loop as loop +from testing_support.fixtures import validate_transaction_metrics, override_application_settings +from testing_support.db_settings import redis_settings +from testing_support.util import instance_hostname + +DB_SETTINGS = redis_settings()[0] + + +class FakeConnectionPool(object): + """Connection Pool without connection_kwargs attribute.""" + + def __init__(self, connection): + self.connection = connection + + async def get_connection(self, name=None, *keys, **options): + return self.connection + + async def release(self, connection): + self.connection.disconnect() + + async def execute(self, *args, **kwargs): + return await self.connection.execute(*args, **kwargs) + + # def execute_command(self, *args, **kwargs): + # return self.connection.execute_command(*args, **kwargs) + + +# Settings + +_enable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": True, +} +_disable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": False, +} + + +_base_scoped_metrics = ( + ("Datastore/operation/Redis/get", 1), + ("Datastore/operation/Redis/set", 1), + ("Datastore/operation/Redis/client_list", 1), +) + +_base_rollup_metrics = ( + ("Datastore/all", 3), + ("Datastore/allOther", 3), + ("Datastore/Redis/all", 3), + ("Datastore/Redis/allOther", 3), + ("Datastore/operation/Redis/get", 1), + ("Datastore/operation/Redis/set", 1), + ("Datastore/operation/Redis/client_list", 1), +) + + +_disable_scoped_metrics = list(_base_scoped_metrics) +_disable_rollup_metrics = list(_base_rollup_metrics) + +_enable_scoped_metrics = list(_base_scoped_metrics) +_enable_rollup_metrics = list(_base_rollup_metrics) + +_host = instance_hostname(DB_SETTINGS["host"]) +_port = DB_SETTINGS["port"] + +_instance_metric_name = 
"Datastore/instance/Redis/%s/%s" % (_host, _port) + +_enable_rollup_metrics.append((_instance_metric_name, 3)) + +_disable_rollup_metrics.append((_instance_metric_name, None)) + + +# Operations +async def exercise_redis(client): + await client.set("key", "value") + await client.get("key") + if hasattr(client, "execute_command"): + await client.execute_command("CLIENT", "LIST", parse="LIST") + else: + await client.execute("CLIENT", "LIST") + + +if AIOREDIS_VERSION >= (2, 0): + clients = [ + aioredis.Redis(host=DB_SETTINGS["host"], port=_port, db=0), + aioredis.StrictRedis(host=DB_SETTINGS["host"], port=_port, db=0), + ] +else: + clients = [ + event_loop.run_until_complete(aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], _port), db=0)), + ] + + +@pytest.mark.parametrize("client", clients) +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_custom_conn_pool:test_fake_conn_pool_enable_instance", + scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_fake_conn_pool_enable_instance(client, loop): + # Get a real connection + conn = getattr(client, "_pool_or_conn", None) + if conn is None: + conn = loop.run_until_complete(client.connection_pool.get_connection("GET")) + + # Replace the original connection pool with one that doesn't + # have the `connection_kwargs` attribute. 
+ + fake_pool = FakeConnectionPool(conn) + client.connection_pool = fake_pool + client._pool_or_conn = fake_pool + assert not hasattr(client.connection_pool, "connection_kwargs") + + loop.run_until_complete(exercise_redis(client)) + + +@pytest.mark.parametrize("client", clients) +@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_custom_conn_pool:test_fake_conn_pool_disable_instance", + scoped_metrics=_disable_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_fake_conn_pool_disable_instance(client, loop): + # Get a real connection + conn = loop.run_until_complete(client.connection_pool.get_connection("GET")) + + # Replace the original connection pool with one that doesn't + # have the `connection_kwargs` attribute. + + fake_pool = FakeConnectionPool(conn) + client.connection_pool = fake_pool + assert not hasattr(client.connection_pool, "connection_kwargs") + + loop.run_until_complete(exercise_redis(client)) diff --git a/tests/datastore_aioredis/test_execute_command.py b/tests/datastore_aioredis/test_execute_command.py new file mode 100644 index 0000000000..690007d6c6 --- /dev/null +++ b/tests/datastore_aioredis/test_execute_command.py @@ -0,0 +1,135 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +import aioredis +from newrelic.api.background_task import background_task + +from testing_support.fixtures import validate_transaction_metrics, override_application_settings +from conftest import event_loop, loop, AIOREDIS_VERSION +from testing_support.db_settings import redis_settings +from testing_support.util import instance_hostname + +DB_SETTINGS = redis_settings()[0] + +SKIP_IF_AIOREDIS_V1 = pytest.mark.skipif(AIOREDIS_VERSION < (2, 0), reason="Single arg commands not supported.") + +_enable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": True, +} + +_disable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": False, +} + +_base_scoped_metrics = (("Datastore/operation/Redis/client_list", 1),) + +_base_rollup_metrics = ( + ("Datastore/all", 1), + ("Datastore/allOther", 1), + ("Datastore/Redis/all", 1), + ("Datastore/Redis/allOther", 1), + ("Datastore/operation/Redis/client_list", 1), +) + +_disable_scoped_metrics = list(_base_scoped_metrics) +_disable_rollup_metrics = list(_base_rollup_metrics) + +_enable_scoped_metrics = list(_base_scoped_metrics) +_enable_rollup_metrics = list(_base_rollup_metrics) + +_host = instance_hostname(DB_SETTINGS["host"]) +_port = DB_SETTINGS["port"] + +_instance_metric_name = "Datastore/instance/Redis/%s/%s" % (_host, _port) + +_enable_rollup_metrics.append((_instance_metric_name, 1)) + +_disable_rollup_metrics.append((_instance_metric_name, None)) + + +async def exercise_redis_multi_args(client): + if hasattr(client, "execute_command"): + await client.execute_command("CLIENT", "LIST", parse="LIST") + else: + await client.execute("CLIENT", "LIST") + + +async def exercise_redis_single_arg(client): + await client.execute_command("CLIENT LIST") + + +if AIOREDIS_VERSION >= (2, 0): + clients = [ + aioredis.Redis(host=DB_SETTINGS["host"], port=_port, db=0), + aioredis.StrictRedis(host=DB_SETTINGS["host"], port=_port, db=0), + ] +else: + clients = [ + 
event_loop.run_until_complete(aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], _port), db=0)), + ] + + +@SKIP_IF_AIOREDIS_V1 +@pytest.mark.parametrize("client", clients) +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_execute_command:test_redis_execute_command_as_one_arg_enable", + scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_redis_execute_command_as_one_arg_enable(client, loop): + loop.run_until_complete(exercise_redis_single_arg(client)) + + +@SKIP_IF_AIOREDIS_V1 +@pytest.mark.parametrize("client", clients) +@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_execute_command:test_redis_execute_command_as_one_arg_disable", + scoped_metrics=_disable_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_redis_execute_command_as_one_arg_disable(client, loop): + loop.run_until_complete(exercise_redis_single_arg(client)) + + +@pytest.mark.parametrize("client", clients) +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_execute_command:test_redis_execute_command_as_two_args_enable", + scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_redis_execute_command_as_two_args_enable(client, loop): + loop.run_until_complete(exercise_redis_multi_args(client)) + + +@pytest.mark.parametrize("client", clients) +@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_execute_command:test_redis_execute_command_as_two_args_disable", + scoped_metrics=_disable_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_redis_execute_command_as_two_args_disable(client, loop): + 
loop.run_until_complete(exercise_redis_multi_args(client)) diff --git a/tests/datastore_aioredis/test_get_and_set.py b/tests/datastore_aioredis/test_get_and_set.py new file mode 100644 index 0000000000..b363f14d53 --- /dev/null +++ b/tests/datastore_aioredis/test_get_and_set.py @@ -0,0 +1,106 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import aioredis + +from newrelic.api.background_task import background_task + +from conftest import event_loop, loop, AIOREDIS_VERSION +from testing_support.fixtures import validate_transaction_metrics, override_application_settings +from testing_support.db_settings import redis_settings +from testing_support.util import instance_hostname + +DB_SETTINGS = redis_settings()[0] + + +_enable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": True, +} + +_disable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": False, +} + + +_base_scoped_metrics = ( + ("Datastore/operation/Redis/get", 1), + ("Datastore/operation/Redis/set", 1), +) + +_base_rollup_metrics = ( + ("Datastore/all", 2), + ("Datastore/allOther", 2), + ("Datastore/Redis/all", 2), + ("Datastore/Redis/allOther", 2), + ("Datastore/operation/Redis/get", 1), + ("Datastore/operation/Redis/set", 1), +) + +_disable_scoped_metrics = list(_base_scoped_metrics) +_disable_rollup_metrics = list(_base_rollup_metrics) + +_enable_scoped_metrics = list(_base_scoped_metrics) +_enable_rollup_metrics = 
list(_base_rollup_metrics) + +_host = instance_hostname(DB_SETTINGS["host"]) +_port = DB_SETTINGS["port"] + +_instance_metric_name = "Datastore/instance/Redis/%s/%s" % (_host, _port) + +_enable_rollup_metrics.append((_instance_metric_name, 2)) + +_disable_rollup_metrics.append((_instance_metric_name, None)) + + +if AIOREDIS_VERSION >= (2, 0): + clients = [ + aioredis.Redis(host=DB_SETTINGS["host"], port=_port, db=0), + aioredis.StrictRedis(host=DB_SETTINGS["host"], port=_port, db=0), + ] +else: + clients = [ + event_loop.run_until_complete(aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], _port), db=0)), + ] + + +async def exercise_redis(client): + await client.set("key", "value") + await client.get("key") + + +@pytest.mark.parametrize("client", clients) +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_get_and_set:test_redis_client_operation_enable_instance", + scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_redis_client_operation_enable_instance(client, loop): + loop.run_until_complete(exercise_redis(client)) + + +@pytest.mark.parametrize("client", clients) +@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_get_and_set:test_redis_client_operation_disable_instance", + scoped_metrics=_disable_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_redis_client_operation_disable_instance(client, loop): + loop.run_until_complete(exercise_redis(client)) diff --git a/tests/datastore_aioredis/test_instance_info.py b/tests/datastore_aioredis/test_instance_info.py new file mode 100644 index 0000000000..ffb5ab31dc --- /dev/null +++ b/tests/datastore_aioredis/test_instance_info.py @@ -0,0 +1,127 @@ +# Copyright 2010 New Relic, Inc. 
+ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from inspect import isawaitable +import pytest +import aioredis + +from newrelic.hooks.datastore_aioredis import _conn_attrs_to_dict, _instance_info +from conftest import event_loop, loop, AIOREDIS_VERSION + +_instance_info_tests = [ + ({}, ("localhost", "6379", "0")), + ({"host": None}, ("localhost", "6379", "0")), + ({"host": ""}, ("localhost", "6379", "0")), + ({"db": None}, ("localhost", "6379", "0")), + ({"db": ""}, ("localhost", "6379", "0")), + ({"host": "127.0.0.1", "port": 1234, "db": 2}, ("127.0.0.1", "1234", "2")), +] + + +SKIP_IF_AIOREDIS_V1 = pytest.mark.skipif(AIOREDIS_VERSION < (2, 0), reason="Single arg commands not supported.") + +if AIOREDIS_VERSION >= (2, 0): + clients = [aioredis.Redis, aioredis.StrictRedis] + class DisabledConnection(aioredis.Connection): + @staticmethod + async def connect(*args, **kwargs): + pass + + + class DisabledUnixConnection(aioredis.UnixDomainSocketConnection, DisabledConnection): + pass + +else: + clients = [] + DisabledConnection, DisabledUnixConnection = None, None + + + +@SKIP_IF_AIOREDIS_V1 +@pytest.mark.parametrize("client", clients) +@pytest.mark.parametrize("kwargs,expected", _instance_info_tests) +def test_strict_redis_client_instance_info(client, kwargs, expected, loop): + r = client(**kwargs) + if isawaitable(r): + r = loop.run_until_complete(r) + conn_kwargs = r.connection_pool.connection_kwargs + assert _instance_info(conn_kwargs) == expected + + 
+@SKIP_IF_AIOREDIS_V1 +@pytest.mark.parametrize("client", clients) +@pytest.mark.parametrize("kwargs,expected", _instance_info_tests) +def test_strict_redis_connection_instance_info(client, kwargs, expected, loop): + r = client(**kwargs) + if isawaitable(r): + r = loop.run_until_complete(r) + r.connection_pool.connection_class = DisabledConnection + connection = loop.run_until_complete(r.connection_pool.get_connection("SELECT")) + try: + conn_kwargs = _conn_attrs_to_dict(connection) + assert _instance_info(conn_kwargs) == expected + finally: + r.connection_pool.release(connection) + + +_instance_info_from_url_tests = [ + (("redis://localhost:1234/",), {}, ("localhost", "1234", "0")), + (("redis://localhost:1234",), {}, ("localhost", "1234", "0")), + (("redis://user:password@localhost:6379",), {}, ("localhost", "6379", "0")), + (("redis://localhost:6379/2",), {}, ("localhost", "6379", "2")), + (("redis://localhost:6379",), {"db": 2}, ("localhost", "6379", "2")), + (("redis://@127.0.0.1:6379",), {}, ("127.0.0.1", "6379", "0")), + (("redis://:1234/",), {}, ("localhost", "1234", "0")), + (("redis://@:1234/",), {}, ("localhost", "1234", "0")), + (("redis://localhost:1234/garbage",), {}, ("localhost", "1234", "0")), + (("redis://127.0.0.1",), {}, ("127.0.0.1", "6379", "0")), + (("rediss://localhost:6379/2/",), {}, ("localhost", "6379", "2")), # rediss: Not a typo + (("redis://localhost:6379",), {"host": "someotherhost"}, ("localhost", "6379", "0")), + (("redis://localhost:6379/2",), {"db": 3}, ("localhost", "6379", "2")), + (("redis://localhost:6379/2/?db=111",), {}, ("localhost", "6379", "111")), + (("redis://localhost:6379?db=2",), {}, ("localhost", "6379", "2")), + (("redis://localhost:6379/2?db=111",), {}, ("localhost", "6379", "111")), + (("unix:///path/to/socket.sock",), {}, ("localhost", "/path/to/socket.sock", "0")), + (("unix:///path/to/socket.sock?db=2",), {}, ("localhost", "/path/to/socket.sock", "2")), + (("unix:///path/to/socket.sock",), {"db": 2}, 
("localhost", "/path/to/socket.sock", "2")), +] + + +@SKIP_IF_AIOREDIS_V1 +@pytest.mark.parametrize("client", clients) +@pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) +def test_strict_redis_client_from_url(client, args, kwargs, expected): + r = client.from_url(*args, **kwargs) + conn_kwargs = r.connection_pool.connection_kwargs + assert _instance_info(conn_kwargs) == expected + + +@SKIP_IF_AIOREDIS_V1 +@pytest.mark.parametrize("client", clients) +@pytest.mark.parametrize("args,kwargs,expected", _instance_info_from_url_tests) +def test_strict_redis_connection_from_url(client, args, kwargs, expected, loop): + r = client.from_url(*args, **kwargs) + if r.connection_pool.connection_class in (aioredis.Connection, aioredis.connection.SSLConnection): + r.connection_pool.connection_class = DisabledConnection + elif r.connection_pool.connection_class is aioredis.UnixDomainSocketConnection: + r.connection_pool.connection_class = DisabledUnixConnection + else: + assert False, r.connection_pool.connection_class + + connection = loop.run_until_complete(r.connection_pool.get_connection("SELECT")) + try: + conn_kwargs = _conn_attrs_to_dict(connection) + assert _instance_info(conn_kwargs) == expected + finally: + r.connection_pool.release(connection) diff --git a/tests/datastore_aioredis/test_multiple_dbs.py b/tests/datastore_aioredis/test_multiple_dbs.py new file mode 100644 index 0000000000..248fb847d0 --- /dev/null +++ b/tests/datastore_aioredis/test_multiple_dbs.py @@ -0,0 +1,188 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import aioredis +from newrelic.api.background_task import background_task + +from testing_support.fixtures import validate_transaction_metrics, override_application_settings +from conftest import event_loop, loop, AIOREDIS_VERSION +from testing_support.db_settings import redis_settings +from testing_support.util import instance_hostname + +DB_SETTINGS = redis_settings() + +_enable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": True, +} +_disable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": False, +} + +_base_scoped_metrics = ( + ("Datastore/operation/Redis/get", 1), + ("Datastore/operation/Redis/set", 1), + ("Datastore/operation/Redis/client_list", 1), +) + +_base_rollup_metrics = ( + ("Datastore/all", 3), + ("Datastore/allOther", 3), + ("Datastore/Redis/all", 3), + ("Datastore/Redis/allOther", 3), + ("Datastore/operation/Redis/get", 1), + ("Datastore/operation/Redis/set", 1), + ("Datastore/operation/Redis/client_list", 1), +) + +_concurrent_scoped_metrics = [ + ("Datastore/operation/Redis/get", 2), + ("Datastore/operation/Redis/set", 2), +] + +_concurrent_rollup_metrics = [ + ("Datastore/all", 4), + ("Datastore/allOther", 4), + ("Datastore/Redis/all", 4), + ("Datastore/Redis/allOther", 4), + ("Datastore/operation/Redis/set", 2), + ("Datastore/operation/Redis/get", 2), +] + +_disable_scoped_metrics = list(_base_scoped_metrics) +_disable_rollup_metrics = list(_base_rollup_metrics) + +_enable_scoped_metrics = list(_base_scoped_metrics) +_enable_rollup_metrics = list(_base_rollup_metrics) + + 
+if len(DB_SETTINGS) > 1: + redis_instance_1 = DB_SETTINGS[0] + redis_instance_2 = DB_SETTINGS[1] + + _host_1 = instance_hostname(redis_instance_1["host"]) + _port_1 = redis_instance_1["port"] + + _host_2 = instance_hostname(redis_instance_2["host"]) + _port_2 = redis_instance_2["port"] + + instance_metric_name_1 = "Datastore/instance/Redis/%s/%s" % (_host_1, _port_1) + instance_metric_name_2 = "Datastore/instance/Redis/%s/%s" % (_host_2, _port_2) + + _enable_rollup_metrics.extend( + [ + (instance_metric_name_1, 2), + (instance_metric_name_2, 1), + ] + ) + + _disable_rollup_metrics.extend( + [ + (instance_metric_name_1, None), + (instance_metric_name_2, None), + ] + ) + _concurrent_rollup_metrics.extend( + [ + (instance_metric_name_1, 2), + (instance_metric_name_2, 2), + ] + ) + + if AIOREDIS_VERSION >= (2, 0): + client_set = [ + ( + aioredis.Redis(host=DB_SETTINGS[0]["host"], port=DB_SETTINGS[0]["port"], db=0), + aioredis.Redis(host=DB_SETTINGS[1]["host"], port=DB_SETTINGS[1]["port"], db=0), + ), + ( + aioredis.StrictRedis(host=DB_SETTINGS[0]["host"], port=DB_SETTINGS[0]["port"], db=0), + aioredis.StrictRedis(host=DB_SETTINGS[1]["host"], port=DB_SETTINGS[1]["port"], db=0), + ), + ] + else: + client_set = [ + ( + event_loop.run_until_complete( + aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS[0]["host"], DB_SETTINGS[0]["port"]), db=0) + ), + event_loop.run_until_complete( + aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS[1]["host"], DB_SETTINGS[1]["port"]), db=0) + ), + ) + ] +else: + client_set = [] + + +async def exercise_redis(client_1, client_2): + await client_1.set("key", "value") + await client_1.get("key") + + if hasattr(client_2, "execute_command"): + await client_2.execute_command("CLIENT", "LIST", parse="LIST") + else: + await client_2.execute("CLIENT", "LIST") + + +@pytest.mark.skipif(len(DB_SETTINGS) < 2, reason="Env not configured with multiple databases") +@pytest.mark.parametrize("client_set", client_set) 
+@override_application_settings(_enable_instance_settings)
+@validate_transaction_metrics(
+    "test_multiple_dbs:test_multiple_datastores_enabled",
+    scoped_metrics=_enable_scoped_metrics,
+    rollup_metrics=_enable_rollup_metrics,
+    background_task=True,
+)
+@background_task()
+def test_multiple_datastores_enabled(client_set, loop):
+    loop.run_until_complete(exercise_redis(client_set[0], client_set[1]))
+
+
+@pytest.mark.skipif(len(DB_SETTINGS) < 2, reason="Env not configured with multiple databases")
+@pytest.mark.parametrize("client_set", client_set)
+@override_application_settings(_disable_instance_settings)
+@validate_transaction_metrics(
+    "test_multiple_dbs:test_multiple_datastores_disabled",
+    scoped_metrics=_disable_scoped_metrics,
+    rollup_metrics=_disable_rollup_metrics,
+    background_task=True,
+)
+@background_task()
+def test_multiple_datastores_disabled(client_set, loop):
+    loop.run_until_complete(exercise_redis(client_set[0], client_set[1]))
+
+
+@pytest.mark.skipif(len(DB_SETTINGS) < 2, reason="Env not configured with multiple databases")
+@pytest.mark.parametrize("client_set", client_set)
+@validate_transaction_metrics(
+    "test_multiple_dbs:test_concurrent_calls",
+    scoped_metrics=_concurrent_scoped_metrics,
+    rollup_metrics=_concurrent_rollup_metrics,
+    background_task=True,
+)
+@override_application_settings(_enable_instance_settings)
+@background_task()
+def test_concurrent_calls(client_set, loop):
+    # Concurrent calls made with original instrumentation taken from synchronous Redis
+    # instrumentation had a bug where datastore info on concurrent calls to multiple instances
+    # would result in all instances reporting as the host/port of the final call made.
+ + import asyncio + + async def exercise_concurrent(): + await asyncio.gather(*(client.set("key-%d" % i, i) for i, client in enumerate(client_set))) + await asyncio.gather(*(client.get("key-%d" % i) for i, client in enumerate(client_set))) + + loop.run_until_complete(exercise_concurrent()) diff --git a/tests/datastore_aioredis/test_span_event.py b/tests/datastore_aioredis/test_span_event.py new file mode 100644 index 0000000000..0ab8923ca8 --- /dev/null +++ b/tests/datastore_aioredis/test_span_event.py @@ -0,0 +1,152 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +import aioredis + +from newrelic.api.transaction import current_transaction +from newrelic.api.background_task import background_task + +from testing_support.db_settings import redis_settings +from conftest import event_loop, loop, AIOREDIS_VERSION +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_span_events import validate_span_events +from testing_support.util import instance_hostname + + +DB_SETTINGS = redis_settings()[0] + +_enable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": True, + "distributed_tracing.enabled": True, + "span_events.enabled": True, +} +_disable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": False, + "distributed_tracing.enabled": True, + "span_events.enabled": True, +} + +if AIOREDIS_VERSION >= (2, 0): + clients = [ + aioredis.Redis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), + aioredis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), + ] +else: + clients = [ + event_loop.run_until_complete( + aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], DB_SETTINGS["port"]), db=0) + ), + ] + + +async def _exercise_db(client): + await client.set("key", "value") + await client.get("key") + + if hasattr(client, "execute_command"): + await client.execute_command("CLIENT", "LIST", parse="LIST") + else: + await client.execute("CLIENT", "LIST") + + +@pytest.mark.parametrize("client", clients) +@pytest.mark.parametrize("db_instance_enabled", (True, False)) +@pytest.mark.parametrize("instance_enabled", (True, False)) +def test_span_events(client, instance_enabled, db_instance_enabled, loop): + guid = "dbb533c53b749e0b" + priority = 0.5 + + common = { + "type": "Span", + "transactionId": guid, + "priority": priority, + "sampled": True, + "category": "datastore", + 
"component": "Redis", + "span.kind": "client", + } + exact_agents = {} + + if instance_enabled: + settings = _enable_instance_settings.copy() + hostname = instance_hostname(DB_SETTINGS["host"]) + exact_agents.update( + { + "peer.address": "%s:%s" % (hostname, DB_SETTINGS["port"]), + "peer.hostname": hostname, + } + ) + else: + settings = _disable_instance_settings.copy() + exact_agents.update( + { + "peer.address": "Unknown:Unknown", + "peer.hostname": "Unknown", + } + ) + + if db_instance_enabled and instance_enabled: + exact_agents.update( + { + "db.instance": "0", + } + ) + unexpected_agents = () + else: + settings["attributes.exclude"] = ["db.instance"] + unexpected_agents = ("db.instance",) + + query_1 = common.copy() + query_1["name"] = "Datastore/operation/Redis/set" + + query_2 = common.copy() + query_2["name"] = "Datastore/operation/Redis/get" + + query_3 = common.copy() + query_3["name"] = "Datastore/operation/Redis/client_list" + + @validate_span_events( + count=1, + exact_intrinsics=query_1, + unexpected_intrinsics=("db.instance"), + exact_agents=exact_agents, + unexpected_agents=unexpected_agents, + ) + @validate_span_events( + count=1, + exact_intrinsics=query_2, + unexpected_intrinsics=("db.instance"), + exact_agents=exact_agents, + unexpected_agents=unexpected_agents, + ) + @validate_span_events( + count=1, + exact_intrinsics=query_3, + unexpected_intrinsics=("db.instance"), + exact_agents=exact_agents, + unexpected_agents=unexpected_agents, + ) + @override_application_settings(settings) + @background_task(name="span_events") + def _test(): + txn = current_transaction() + txn.guid = guid + txn._priority = priority + txn._sampled = True + loop.run_until_complete(_exercise_db(client)) + + _test() diff --git a/tests/datastore_aioredis/test_trace_node.py b/tests/datastore_aioredis/test_trace_node.py new file mode 100644 index 0000000000..fb1ac8545c --- /dev/null +++ b/tests/datastore_aioredis/test_trace_node.py @@ -0,0 +1,128 @@ +# Copyright 2010 New 
Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import aioredis +import pytest + +from conftest import event_loop, loop, AIOREDIS_VERSION +from testing_support.fixtures import validate_tt_collector_json, override_application_settings +from testing_support.util import instance_hostname +from testing_support.db_settings import redis_settings + +from newrelic.api.background_task import background_task + +DB_SETTINGS = redis_settings()[0] + +_enable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": True, +} +_disable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": False, +} +_instance_only_settings = { + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": False, +} +_database_only_settings = { + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": True, +} + + +_enabled_required = { + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": str(DB_SETTINGS["port"]), + "db.instance": "0", +} +_enabled_forgone = {} + +_disabled_required = {} +_disabled_forgone = { + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", + "db.instance": "VALUE NOT USED", +} + + +_instance_only_required = { + "host": instance_hostname(DB_SETTINGS["host"]), + "port_path_or_id": 
str(DB_SETTINGS["port"]), +} +_instance_only_forgone = { + "db.instance": "0", +} + +_database_only_required = { + "db.instance": "0", +} +_database_only_forgone = { + "host": "VALUE NOT USED", + "port_path_or_id": "VALUE NOT USED", +} + +if AIOREDIS_VERSION >= (2, 0): + clients = [ + aioredis.Redis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), + aioredis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), + ] +else: + clients = [ + event_loop.run_until_complete( + aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], DB_SETTINGS["port"]), db=0) + ), + ] + + +async def exercise_redis(client): + await client.set("key", "value") + await client.get("key") + + if hasattr(client, "execute_command"): + await client.execute_command("CLIENT", "LIST", parse="LIST") + else: + await client.execute("CLIENT", "LIST") + + +@pytest.mark.parametrize("client", clients) +@override_application_settings(_enable_instance_settings) +@validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone) +@background_task() +def test_trace_node_datastore_params_enable_instance(client, loop): + loop.run_until_complete(exercise_redis(client)) + + +@pytest.mark.parametrize("client", clients) +@override_application_settings(_disable_instance_settings) +@validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone) +@background_task() +def test_trace_node_datastore_params_disable_instance(client, loop): + loop.run_until_complete(exercise_redis(client)) + + +@pytest.mark.parametrize("client", clients) +@override_application_settings(_instance_only_settings) +@validate_tt_collector_json(datastore_params=_instance_only_required, datastore_forgone_params=_instance_only_forgone) +@background_task() +def test_trace_node_datastore_params_instance_only(client, loop): + loop.run_until_complete(exercise_redis(client)) + + +@pytest.mark.parametrize("client", clients) 
+@override_application_settings(_database_only_settings) +@validate_tt_collector_json(datastore_params=_database_only_required, datastore_forgone_params=_database_only_forgone) +@background_task() +def test_trace_node_datastore_params_database_only(client, loop): + loop.run_until_complete(exercise_redis(client)) diff --git a/tests/datastore_aioredis/test_uninstrumented_methods.py b/tests/datastore_aioredis/test_uninstrumented_methods.py new file mode 100644 index 0000000000..43a2fe1797 --- /dev/null +++ b/tests/datastore_aioredis/test_uninstrumented_methods.py @@ -0,0 +1,92 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +import aioredis + +from conftest import event_loop, loop, AIOREDIS_VERSION + +from testing_support.db_settings import redis_settings + +DB_SETTINGS = redis_settings()[0] + +if AIOREDIS_VERSION >= (2, 0): + clients = [ + aioredis.Redis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), + aioredis.StrictRedis(host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], db=0), + ] +else: + clients = [ + event_loop.run_until_complete( + aioredis.create_redis("redis://%s:%d" % (DB_SETTINGS["host"], DB_SETTINGS["port"]), db=0) + ), + ] + + +IGNORED_METHODS = { + "address", + "channels", + "close", + "closed", + "connection_pool", + "connection", + "db", + "encoding", + "execute_command", + "execute", + "from_url", + "hscan_iter", + "ihscan", + "in_pubsub", + "in_transaction", + "initialize", + "iscan", + "isscan", + "izscan", + "lock", + "multi_exec", + "parse_response", + "patterns", + "pipeline", + "publish_json", + "register_script", + "response_callbacks", + "RESPONSE_CALLBACKS", + "SET_IF_EXIST", + "SET_IF_NOT_EXIST", + "set_response_callback", + "SHUTDOWN_NOSAVE", + "SHUTDOWN_SAVE", + "single_connection_client", + "transaction", + "wait_closed", + "xinfo", + "ZSET_AGGREGATE_MAX", + "ZSET_AGGREGATE_MIN", + "ZSET_AGGREGATE_SUM", + "ZSET_EXCLUDE_BOTH", + "ZSET_EXCLUDE_MAX", + "ZSET_EXCLUDE_MIN", + "ZSET_IF_EXIST", + "ZSET_IF_NOT_EXIST", +} + + +@pytest.mark.parametrize("client", clients) +def test_uninstrumented_methods(client): + methods = {m for m in dir(client) if not m[0] == "_"} + is_wrapped = lambda m: hasattr(getattr(client, m), "__wrapped__") + uninstrumented = {m for m in methods - IGNORED_METHODS if not is_wrapped(m)} + + assert not uninstrumented, "Uninstrumented methods: %s" % sorted(uninstrumented) diff --git a/tests/datastore_aredis/conftest.py b/tests/datastore_aredis/conftest.py index 20eece262a..8f43d088b3 100644 --- a/tests/datastore_aredis/conftest.py +++ b/tests/datastore_aredis/conftest.py @@ -14,6 +14,7 @@ import pytest +from 
testing_support.fixture.event_loop import event_loop as loop # noqa: F401 from testing_support.fixtures import (code_coverage_fixture, collector_agent_registration_fixture, collector_available_fixture) diff --git a/tests/datastore_aredis/test_multiple_dbs.py b/tests/datastore_aredis/test_multiple_dbs.py index f766784afe..e16ae94831 100644 --- a/tests/datastore_aredis/test_multiple_dbs.py +++ b/tests/datastore_aredis/test_multiple_dbs.py @@ -12,46 +12,60 @@ # See the License for the specific language governing permissions and # limitations under the License. -import pytest import aredis - -from newrelic.api.background_task import background_task - -from testing_support.fixture.event_loop import event_loop as loop -from testing_support.fixtures import (validate_transaction_metrics, - override_application_settings) +import pytest from testing_support.db_settings import redis_settings +from testing_support.fixtures import ( + override_application_settings, + validate_transaction_metrics, +) from testing_support.util import instance_hostname +from newrelic.api.background_task import background_task + DB_MULTIPLE_SETTINGS = redis_settings() # Settings _enable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': True, + "datastore_tracer.instance_reporting.enabled": True, } _disable_instance_settings = { - 'datastore_tracer.instance_reporting.enabled': False, + "datastore_tracer.instance_reporting.enabled": False, } # Metrics _base_scoped_metrics = ( - ('Datastore/operation/Redis/get', 1), - ('Datastore/operation/Redis/set', 1), - ('Datastore/operation/Redis/client_list', 1), + ("Datastore/operation/Redis/get", 1), + ("Datastore/operation/Redis/set", 1), + ("Datastore/operation/Redis/client_list", 1), ) _base_rollup_metrics = ( - ('Datastore/all', 3), - ('Datastore/allOther', 3), - ('Datastore/Redis/all', 3), - ('Datastore/Redis/allOther', 3), - ('Datastore/operation/Redis/get', 1), - ('Datastore/operation/Redis/get', 1), - 
('Datastore/operation/Redis/client_list', 1), + ("Datastore/all", 3), + ("Datastore/allOther", 3), + ("Datastore/Redis/all", 3), + ("Datastore/Redis/allOther", 3), + ("Datastore/operation/Redis/get", 1), + ("Datastore/operation/Redis/set", 1), + ("Datastore/operation/Redis/client_list", 1), ) +_concurrent_scoped_metrics = [ + ("Datastore/operation/Redis/get", 2), + ("Datastore/operation/Redis/set", 2), +] + +_concurrent_rollup_metrics = [ + ("Datastore/all", 4), + ("Datastore/allOther", 4), + ("Datastore/Redis/all", 4), + ("Datastore/Redis/allOther", 4), + ("Datastore/operation/Redis/set", 2), + ("Datastore/operation/Redis/get", 2), +] + _disable_scoped_metrics = list(_base_scoped_metrics) _disable_rollup_metrics = list(_base_rollup_metrics) @@ -62,59 +76,105 @@ redis_1 = DB_MULTIPLE_SETTINGS[0] redis_2 = DB_MULTIPLE_SETTINGS[1] - host_1 = instance_hostname(redis_1['host']) - port_1 = redis_1['port'] + host_1 = instance_hostname(redis_1["host"]) + port_1 = redis_1["port"] - host_2 = instance_hostname(redis_2['host']) - port_2 = redis_2['port'] + host_2 = instance_hostname(redis_2["host"]) + port_2 = redis_2["port"] - instance_metric_name_1 = 'Datastore/instance/Redis/%s/%s' % (host_1, port_1) - instance_metric_name_2 = 'Datastore/instance/Redis/%s/%s' % (host_2, port_2) + instance_metric_name_1 = "Datastore/instance/Redis/%s/%s" % (host_1, port_1) + instance_metric_name_2 = "Datastore/instance/Redis/%s/%s" % (host_2, port_2) - _enable_rollup_metrics.extend([ + _enable_rollup_metrics.extend( + [ (instance_metric_name_1, 2), (instance_metric_name_2, 1), - ]) + ] + ) - _disable_rollup_metrics.extend([ + _disable_rollup_metrics.extend( + [ (instance_metric_name_1, None), (instance_metric_name_2, None), - ]) + ] + ) + + _concurrent_rollup_metrics.extend( + [ + (instance_metric_name_1, 2), + (instance_metric_name_2, 2), + ] + ) + async def exercise_redis(client_1, client_2): - await client_1.set('key', 'value') - await client_1.get('key') + await client_1.set("key", 
"value") + await client_1.get("key") + + await client_2.execute_command("CLIENT", "LIST", parse="LIST") - await client_2.execute_command('CLIENT', 'LIST', parse='LIST') -@pytest.mark.skipif(len(DB_MULTIPLE_SETTINGS) < 2, - reason='Test environment not configured with multiple databases.') +@pytest.mark.skipif(len(DB_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") @override_application_settings(_enable_instance_settings) -@validate_transaction_metrics('test_multiple_dbs:test_multiple_datastores_enabled', - scoped_metrics=_enable_scoped_metrics, - rollup_metrics=_enable_rollup_metrics, - background_task=True) +@validate_transaction_metrics( + "test_multiple_dbs:test_multiple_datastores_enabled", + scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) @background_task() def test_multiple_datastores_enabled(loop): redis1 = DB_MULTIPLE_SETTINGS[0] redis2 = DB_MULTIPLE_SETTINGS[1] - client_1 = aredis.StrictRedis(host=redis1['host'], port=redis1['port'], db=0) - client_2 = aredis.StrictRedis(host=redis2['host'], port=redis2['port'], db=1) + client_1 = aredis.StrictRedis(host=redis1["host"], port=redis1["port"], db=0) + client_2 = aredis.StrictRedis(host=redis2["host"], port=redis2["port"], db=1) loop.run_until_complete(exercise_redis(client_1, client_2)) -@pytest.mark.skipif(len(DB_MULTIPLE_SETTINGS) < 2, - reason='Test environment not configured with multiple databases.') + +@pytest.mark.skipif(len(DB_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") @override_application_settings(_disable_instance_settings) -@validate_transaction_metrics('test_multiple_dbs:test_multiple_datastores_disabled', - scoped_metrics=_disable_scoped_metrics, - rollup_metrics=_disable_rollup_metrics, - background_task=True) +@validate_transaction_metrics( + "test_multiple_dbs:test_multiple_datastores_disabled", + scoped_metrics=_disable_scoped_metrics, + 
rollup_metrics=_disable_rollup_metrics, + background_task=True, +) @background_task() def test_multiple_datastores_disabled(loop): redis1 = DB_MULTIPLE_SETTINGS[0] redis2 = DB_MULTIPLE_SETTINGS[1] - client_1 = aredis.StrictRedis(host=redis1['host'], port=redis1['port'], db=0) - client_2 = aredis.StrictRedis(host=redis2['host'], port=redis2['port'], db=1) + client_1 = aredis.StrictRedis(host=redis1["host"], port=redis1["port"], db=0) + client_2 = aredis.StrictRedis(host=redis2["host"], port=redis2["port"], db=1) loop.run_until_complete(exercise_redis(client_1, client_2)) + + +@pytest.mark.skipif(len(DB_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") +@validate_transaction_metrics( + "test_multiple_dbs:test_concurrent_calls", + scoped_metrics=_concurrent_scoped_metrics, + rollup_metrics=_concurrent_rollup_metrics, + background_task=True, +) +@override_application_settings(_enable_instance_settings) +@background_task() +def test_concurrent_calls(loop): + # Concurrent calls made with original instrumentation taken from synchronous Redis + # instrumentation had a bug where datastore info on concurrent calls to multiple instances + # would result in all instances reporting as the host/port of the final call made. 
+ + import asyncio + + redis1 = DB_MULTIPLE_SETTINGS[0] + redis2 = DB_MULTIPLE_SETTINGS[1] + + client_1 = aredis.StrictRedis(host=redis1["host"], port=redis1["port"], db=0) + client_2 = aredis.StrictRedis(host=redis2["host"], port=redis2["port"], db=1) + clients = (client_1, client_2) + + async def exercise_concurrent(): + await asyncio.gather(*(client.set("key-%d" % i, i) for i, client in enumerate(clients))) + await asyncio.gather(*(client.get("key-%d" % i) for i, client in enumerate(clients))) + + loop.run_until_complete(exercise_concurrent()) diff --git a/tests/datastore_aredis/test_uninstrumented_methods.py b/tests/datastore_aredis/test_uninstrumented_methods.py new file mode 100644 index 0000000000..38901e5c5d --- /dev/null +++ b/tests/datastore_aredis/test_uninstrumented_methods.py @@ -0,0 +1,48 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import aredis + +from testing_support.db_settings import redis_settings + +DB_SETTINGS = redis_settings()[0] + +strict_redis_client = aredis.StrictRedis(host=DB_SETTINGS['host'], port=DB_SETTINGS['port'], db=0) + + +IGNORED_METHODS = { + "cache", + "connection_pool", + "execute_command", + "from_url", + "hscan_iter", + "lock", + "NODES_FLAGS", + "parse_response", + "pipeline", + "register_script", + "response_callbacks", + "RESPONSE_CALLBACKS", + "RESULT_CALLBACKS", + "sentinel", + "set_response_callback", + "transaction", +} + +def test_uninstrumented_methods(): + methods = {m for m in dir(strict_redis_client) if not m[0] == "_"} + is_wrapped = lambda m: hasattr(getattr(strict_redis_client, m), "__wrapped__") + uninstrumented = {m for m in methods - IGNORED_METHODS if not is_wrapped(m)} + + assert not uninstrumented, "Uninstrumented methods: %s" % sorted(uninstrumented) diff --git a/tests/datastore_redis/test_multiple_dbs.py b/tests/datastore_redis/test_multiple_dbs.py index 8f570101d1..67f7b7fe8f 100644 --- a/tests/datastore_redis/test_multiple_dbs.py +++ b/tests/datastore_redis/test_multiple_dbs.py @@ -47,7 +47,7 @@ ('Datastore/Redis/all', 3), ('Datastore/Redis/allOther', 3), ('Datastore/operation/Redis/get', 1), - ('Datastore/operation/Redis/get', 1), + ('Datastore/operation/Redis/set', 1), ('Datastore/operation/Redis/client_list', 1), ) diff --git a/tests/datastore_redis/test_uninstrumented_methods.py b/tests/datastore_redis/test_uninstrumented_methods.py new file mode 100644 index 0000000000..314f9f2038 --- /dev/null +++ b/tests/datastore_redis/test_uninstrumented_methods.py @@ -0,0 +1,118 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import redis + +from testing_support.db_settings import redis_settings + +DB_SETTINGS = redis_settings()[0] + +redis_client = redis.Redis(host=DB_SETTINGS['host'], port=DB_SETTINGS['port'], db=0) +strict_redis_client = redis.StrictRedis(host=DB_SETTINGS['host'], port=DB_SETTINGS['port'], db=0) + + +IGNORED_METHODS = { + 'MODULE_CALLBACKS', + 'MODULE_VERSION', + 'NAME', + "append_bucket_size", + "append_capacity", + "append_error", + "append_expansion", + "append_items_and_increments", + "append_items", + "append_max_iterations", + "append_no_create", + "append_no_scale", + "append_values_and_weights", + "append_weights", + "client_tracking_off", + "client_tracking_on", + "client", + "close", + "commandmixin", + "connection_pool", + "connection", + "debug_segfault", + "execute_command", + "from_url", + "get_connection_kwargs", + "get_encoder", + "hscan_iter", + "load_external_module", + "lock", + "parse_response", + "pipeline", + "register_script", + "response_callbacks", + "RESPONSE_CALLBACKS", + "sentinel", + "set_file", + "set_path", + "set_response_callback", + "transaction", + "BatchIndexer", + "batch_indexer", + "get_params_args", + "index_name", + "load_document", + "add_edge", + "add_node", + "bulk", + "call_procedure", + "edges", + "flush", + "get_label", + "get_property", + "get_relation", + "labels", + "list_keys", + "name", + "nodes", + "property_keys", + "relationship_types", + "version", + +} + +REDIS_MODULES = { + "bf", + "cf", + "cms", + "ft", + "graph", + "json", + "tdigest", + "topk", + "ts", +} + +IGNORED_METHODS |= 
REDIS_MODULES + + +@pytest.mark.parametrize("client", (redis_client, strict_redis_client)) +def test_uninstrumented_methods(client): + methods = {m for m in dir(client) if not m[0] == "_"} + is_wrapped = lambda m: hasattr(getattr(client, m), "__wrapped__") + uninstrumented = {m for m in methods - IGNORED_METHODS if not is_wrapped(m)} + + for module in REDIS_MODULES: + if hasattr(client, module): + module_client = getattr(client, module)() + module_methods = {m for m in dir(module_client) if not m[0] == "_"} + is_wrapped = lambda m: hasattr(getattr(module_client, m), "__wrapped__") + uninstrumented |= {m for m in module_methods - IGNORED_METHODS if not is_wrapped(m)} + + assert not uninstrumented, "Uninstrumented methods: %s" % sorted(uninstrumented) diff --git a/tox.ini b/tox.ini index 717b5bcfea..0bbe82560b 100644 --- a/tox.ini +++ b/tox.ini @@ -86,6 +86,8 @@ envlist = solr-datastore_pysolr-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, redis-datastore_redis-{py27,py36,py37,py38,pypy,pypy36}-redis03, redis-datastore_redis-{py36,py37,py38,py39,py310,pypy36}-redis{0400,latest}, + redis-datastore_aioredis-{py36,py37,py38,py39,py310,pypy36}-aioredislatest, + redis-datastore_aioredis-py39-aioredis01, redis-datastore_aredis-{py36,py37,py38,py39,pypy36}-aredislatest, solr-datastore_solrpy-{py27,pypy}-solrpy{00,01}, python-datastore_sqlite-{py27,py36,py37,py38,py39,py310,pypy,pypy36}, @@ -227,6 +229,8 @@ deps = datastore_redis-redis0400: redis<4.1 datastore_redis-redis03: redis<4.0 datastore_redis-{py27,pypy}: rb + datastore_aioredis-aioredislatest: aioredis + datastore_aioredis-aioredis01: aioredis<2 datastore_aredis-aredislatest: aredis datastore_solrpy-solrpy00: solrpy<1.0 datastore_solrpy-solrpy01: solrpy<2.0 @@ -399,6 +403,7 @@ changedir = datastore_pymysql: tests/datastore_pymysql datastore_pysolr: tests/datastore_pysolr datastore_redis: tests/datastore_redis + datastore_aioredis: tests/datastore_aioredis datastore_aredis: tests/datastore_aredis datastore_solrpy: 
tests/datastore_solrpy datastore_sqlite: tests/datastore_sqlite