diff --git a/rma/rule/Hash.py b/rma/rule/Hash.py
index ee78517..dfbc7b9 100644
--- a/rma/rule/Hash.py
+++ b/rma/rule/Hash.py
@@ -18,6 +18,7 @@ def __init__(self, info, redis):
         self.values = []
         self.encoding = info["encoding"]
         self.ttl = info["ttl"]
+        self.idleTime = info["idleTime"]

         for key, value in redis.hscan_iter(key_name, '*'):
             self.keys.append(key)
@@ -66,7 +67,7 @@ class HashAggregator(object):
     def __init__(self, all_obj, total):
         self.total_elements = total

-        g00, g0, g1, g2, g3, v1, v2, ttl = tee(all_obj, 8)
+        g00, g0, g1, g2, g3, v1, v2, ttl, idleTime = tee(all_obj, 9)

         self.encoding = pref_encoding([obj.encoding for obj in g00], redis_encoding_id_to_str)
         self.system = sum(obj.system for obj in g0)
@@ -87,6 +88,10 @@ def __init__(self, all_obj, total):
         self.ttlMin = min(ttls)
         self.ttlMax = max(ttls)
         self.ttlAvg = statistics.mean( ttls ) if len(ttls) > 1 else min(ttls)
+        idleTimes = [obj.idleTime for obj in idleTime]
+        self.idleTimeMin = min(idleTimes)
+        self.idleTimeMax = max(idleTimes)
+        self.idleTimeAvg = statistics.mean(idleTimes) if len(idleTimes) > 1 else min(idleTimes)

     def __enter__(self):
         return self
@@ -106,7 +111,7 @@ def __init__(self, redis):
     def analyze(self, keys, total=0):
         key_stat = {
             'headers': ['Match', "Count", "Avg field count", "Key mem", "Real", "Ratio", "Value mem", "Real", "Ratio",
-                        "System", "Encoding", "Total mem", "Total aligned", "TTL Min", "TTL Max", "TTL Avg."],
+                        "System", "Encoding", "Total mem", "Total aligned", "TTL Min", "TTL Max", "TTL Avg", "idleTime Min", "idleTime Max", "idleTime Avg."],
             'data': []
         }

@@ -135,13 +140,16 @@ def analyze(self, keys, total=0):
                 agg.ttlMin,
                 agg.ttlMax,
                 agg.ttlAvg,
+                agg.idleTimeMin,
+                agg.idleTimeMax,
+                agg.idleTimeAvg,
             ]

             key_stat['data'].append(stat_entry)

         key_stat['data'].sort(key=lambda x: x[12], reverse=True)
         key_stat['data'].append(
-            make_total_row(key_stat['data'], ['Total:', sum, 0, sum, sum, 0, sum, sum, 0, sum, '', sum, sum, min, max, math.nan]))
+            make_total_row(key_stat['data'], ['Total:', sum, 0, sum, sum, 0, sum, sum, 0, sum, '', sum, sum, min, max, math.nan, min, max, math.nan]))

         progress.close()

diff --git a/rma/rule/KeyString.py b/rma/rule/KeyString.py
index 014a88b..18f5dd4 100644
--- a/rma/rule/KeyString.py
+++ b/rma/rule/KeyString.py
@@ -6,9 +6,10 @@ import math


 class StringEntry(object):
-    def __init__(self, value="", ttl=-1):
+    def __init__(self, value="", ttl=-1, idleTime=-1):
         self.encoding = get_string_encoding(value)
         self.ttl = ttl
+        self.idleTime = idleTime
         self.useful_bytes = len(value)
         self.free_bytes = 0
         self.aligned = size_of_aligned_string(value, encoding=self.encoding)
@@ -36,7 +37,7 @@ def analyze(self, keys, total=0):
         :return:
         """
         key_stat = {
-            'headers': ['Match', "Count", "Useful", "Real", "Ratio", "Encoding", "Min", "Max", "Avg", "TTL Min", "TTL Max", "TTL Avg."],
+            'headers': ['Match', "Count", "Useful", "Real", "Ratio", "Encoding", "Min", "Max", "Avg", "TTL Min", "TTL Max", "TTL Avg.", "idleTime Min", "idleTime Max", "idleTime Avg"],
             'data': []
         }

@@ -46,8 +47,8 @@ def analyze(self, keys, total=0):
                         leave=False)

         for pattern, data in keys.items():
-            used_bytes_iter, aligned_iter, encoding_iter, ttl_iter = tee(
-                progress_iterator((StringEntry(value=x["name"], ttl=x["ttl"]) for x in data), progress), 4)
+            used_bytes_iter, aligned_iter, encoding_iter, ttl_iter, idle_time_iter = tee(
+                progress_iterator((StringEntry(value=x["name"], ttl=x["ttl"], idleTime=x["idleTime"]) for x in data), progress), 5)

             total_elements = len(data)
             if total_elements == 0:
@@ -70,15 +71,19 @@ def analyze(self, keys, total=0):
             min_ttl = min(ttls)
             max_ttl = max(ttls)
             avg_ttl = statistics.mean(ttls) if len(ttls) > 1 else min(ttls)
+            idle_times = [obj.idleTime for obj in idle_time_iter]
+            min_idle_time = min(idle_times)
+            max_idle_time = max(idle_times)
+            avg_idle_time = statistics.mean(idle_times) if len(idle_times) > 1 else min(idle_times)

             stat_entry = [
                 pattern, total_elements, used_user, aligned, aligned / used_user, prefered_encoding,
-                min_value, max(max_iter), avg, min_ttl, max_ttl, avg_ttl
+                min_value, max(max_iter), avg, min_ttl, max_ttl, avg_ttl, min_idle_time, max_idle_time, avg_idle_time
             ]
             key_stat['data'].append(stat_entry)

         key_stat['data'].sort(key=lambda x: x[1], reverse=True)
-        key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, sum, 0, '', 0, 0, 0, min, max, math.nan]))
+        key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, sum, 0, '', 0, 0, 0, min, max, math.nan, min, max, math.nan]))

         progress.close()

diff --git a/rma/rule/List.py b/rma/rule/List.py
index b2ecadd..297487e 100644
--- a/rma/rule/List.py
+++ b/rma/rule/List.py
@@ -15,6 +15,7 @@ def __init__(self, info, redis):
         key_name = info["name"]
         self.encoding = info['encoding']
         self.ttl = info['ttl']
+        self.idleTime = info["idleTime"]
         self.values = redis.lrange(key_name, 0, -1)
         self.count = len(self.values)

@@ -46,8 +47,8 @@ class ListAggregator(object):
     def __init__(self, all_obj, total):
         self.total_elements = total

-        encode_iter, sys_iter, avg_iter, stdev_iter, min_iter, max_iter, value_used_iter, value_align_iter, ttl_iter = \
-            tee(all_obj, 9)
+        encode_iter, sys_iter, avg_iter, stdev_iter, min_iter, max_iter, value_used_iter, value_align_iter, ttl_iter, idle_time_iter = \
+            tee(all_obj, 10)

         self.encoding = pref_encoding([obj.encoding for obj in encode_iter], redis_encoding_id_to_str)
         self.system = sum(obj.system for obj in sys_iter)
@@ -75,6 +76,10 @@ def __init__(self, all_obj, total):
         self.ttlMin = min(ttls)
         self.ttlMax = max(ttls)
         self.ttlAvg = statistics.mean( ttls ) if len(ttls) > 1 else min(ttls)
+        idleTimes = [obj.idleTime for obj in idle_time_iter]
+        self.idleTimeMin = min(idleTimes)
+        self.idleTimeMax = max(idleTimes)
+        self.idleTimeAvg = statistics.mean(idleTimes) if len(idleTimes) > 1 else min(idleTimes)

     def __enter__(self):
         return self
@@ -93,7 +98,7 @@ def __init__(self, redis):

     def analyze(self, keys, total=0):
         key_stat = {
-            'headers': ['Match', "Count", "Avg Count", "Min Count", "Max Count", "Stdev Count", "Value mem", "Real", "Ratio", "System", "Encoding", "Total", 'TTL Min', 'TTL Max', 'TTL Avg'],
+            'headers': ['Match', "Count", "Avg Count", "Min Count", "Max Count", "Stdev Count", "Value mem", "Real", "Ratio", "System", "Encoding", "Total", 'TTL Min', 'TTL Max', 'TTL Avg', "idleTime Min", "idleTime Max", "idleTime Avg"],
             'data': []
         }

@@ -121,13 +126,16 @@ def analyze(self, keys, total=0):
                 agg.ttlMin,
                 agg.ttlMax,
                 agg.ttlAvg,
+                agg.idleTimeMin,
+                agg.idleTimeMax,
+                agg.idleTimeAvg,
             ]

             key_stat['data'].append(stat_entry)
             progress.update()

         key_stat['data'].sort(key=lambda x: x[8], reverse=True)
-        key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, 0, 0, 0, 0, sum, sum, 0, sum, '', sum, min, max, math.nan]))
+        key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, 0, 0, 0, 0, sum, sum, 0, sum, '', sum, min, max, math.nan, min, max, math.nan]))

         progress.close()

diff --git a/rma/rule/Set.py b/rma/rule/Set.py
index 814e3d3..440b472 100644
--- a/rma/rule/Set.py
+++ b/rma/rule/Set.py
@@ -18,6 +18,7 @@ def __init__(self, info, redis):
         self.values = [v for v in redis.sscan_iter(key_name, '*', 1000)]
         self.encoding = info["encoding"]
         self.ttl = info["ttl"]
+        self.idleTime = info["idleTime"]
         self.count = len(self.values)

         if self.encoding == REDIS_ENCODING_ID_HASHTABLE:
@@ -39,7 +40,7 @@ class SetAggregator(object):
     def __init__(self, all_obj, total):
         self.total_elements = total

-        g00, g0, g3, v1, v2, v3, ttl = tee(all_obj, 7)
+        g00, g0, g3, v1, v2, v3, ttl, idleTime = tee(all_obj, 8)

         self.encoding = pref_encoding([obj.encoding for obj in g00], redis_encoding_id_to_str)
         self.system = sum(obj.system for obj in g0)
@@ -59,6 +60,10 @@ def __init__(self, all_obj, total):
         self.ttlMin = min(ttls)
         self.ttlMax = max(ttls)
         self.ttlAvg = statistics.mean( ttls ) if len(ttls) > 1 else min(ttls)
+        idleTimes = [obj.idleTime for obj in idleTime]
+        self.idleTimeMin = min(idleTimes)
+        self.idleTimeMax = max(idleTimes)
+        self.idleTimeAvg = statistics.mean(idleTimes) if len(idleTimes) > 1 else min(idleTimes)

     def __enter__(self):
         return self
@@ -77,7 +82,7 @@ def __init__(self, redis):

     def analyze(self, keys, total=0):
         key_stat = {
-            'headers': ['Match', "Count", "Avg Count", "Value mem", "Real", "Ratio", "System*", "Encoding", "Total", "TTL Min", "TTL Max", "TTL Avg."],
+            'headers': ['Match', "Count", "Avg Count", "Value mem", "Real", "Ratio", "System*", "Encoding", "Total", "TTL Min", "TTL Max", "TTL Avg.", "idleTime Min", "idleTime Max", "idleTime Avg."],
             'data': []
         }

@@ -102,12 +107,15 @@ def analyze(self, keys, total=0):
                 agg.ttlMin,
                 agg.ttlMax,
                 agg.ttlAvg,
+                agg.idleTimeMin,
+                agg.idleTimeMax,
+                agg.idleTimeAvg,
             ]

             key_stat['data'].append(stat_entry)

         key_stat['data'].sort(key=lambda x: x[8], reverse=True)
-        key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, 0, sum, sum, 0, sum, '', sum, min, max, math.nan]))
+        key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, 0, sum, sum, 0, sum, '', sum, min, max, math.nan, min, max, math.nan]))

         progress.close()

diff --git a/rma/rule/ValueString.py b/rma/rule/ValueString.py
index 4e7affb..71fa750 100644
--- a/rma/rule/ValueString.py
+++ b/rma/rule/ValueString.py
@@ -33,6 +33,7 @@ def __init__(self, redis, info, use_debug=True):
         key_name = info["name"]
         self.encoding = info["encoding"]
         self.ttl = info["ttl"]
+        self.idleTime = info["idleTime"]
         self.logger = logging.getLogger(__name__)

         if self.encoding == REDIS_ENCODING_ID_INT:
@@ -65,7 +66,7 @@ def __init__(self, redis):

     def analyze(self, keys, total=0):
         key_stat = {
-            'headers': ['Match', "Count", "Useful", "Free", "Real", "Ratio", "Encoding", "Min", "Max", "Avg", "TTL Min", "TTL Max", "TTL Avg"],
+            'headers': ['Match', "Count", "Useful", "Free", "Real", "Ratio", "Encoding", "Min", "Max", "Avg", "TTL Min", "TTL Max", "TTL Avg", "idleTime Min", "idleTime Max", "idleTime Avg"],
             'data': []
         }

@@ -81,6 +82,7 @@ def analyze(self, keys, total=0):
             aligned_bytes = []
             encodings = []
             ttl = []
+            idleTime = []

             for key_info in progress_iterator(data, progress):
                 try:
@@ -90,6 +92,7 @@ def analyze(self, keys, total=0):
                     aligned_bytes.append(stat.aligned)
                     encodings.append(stat.encoding)
                     ttl.append(stat.ttl)
+                    idleTime.append(stat.idleTime)
                 except RedisError as e:
                     # This code works in real time so key me be deleted and this code fail
                     error_string = repr(e)
@@ -111,6 +114,9 @@ def analyze(self, keys, total=0):
             min_ttl = min(ttl) if len(ttl) >= 1 else -1
             max_ttl = max(ttl) if len(ttl) >= 1 else -1
             mean_ttl = statistics.mean(ttl) if len(ttl) > 1 else min_ttl
+            min_idle_time = min(idleTime) if len(idleTime) >= 1 else -1
+            max_idle_time = max(idleTime) if len(idleTime) >= 1 else -1
+            mean_idle_time = statistics.mean(idleTime) if len(idleTime) > 1 else min_idle_time

             stat_entry = [
                 pattern,
@@ -126,11 +132,14 @@ def analyze(self, keys, total=0):
                 min_ttl,
                 max_ttl,
                 mean_ttl,
+                min_idle_time,
+                max_idle_time,
+                mean_idle_time,
             ]
             key_stat['data'].append(stat_entry)

         key_stat['data'].sort(key=lambda e: e[1], reverse=True)
-        key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, 0, sum, 0, '', 0, 0, 0, min, max, math.nan]))
+        key_stat['data'].append(make_total_row(key_stat['data'], ['Total:', sum, sum, 0, sum, 0, '', 0, 0, 0, min, max, math.nan, min, max, math.nan]))

         progress.close()

diff --git a/rma/scanner.py b/rma/scanner.py
index 79258c7..44a07c5 100644
--- a/rma/scanner.py
+++ b/rma/scanner.py
@@ -32,7 +32,8 @@ def __init__(self, redis, match="*", accepted_types=None):
                 local type = redis.call("TYPE", KEYS[i])
                 local encoding = redis.call("OBJECT", "ENCODING",KEYS[i])
                 local ttl = redis.call("TTL", KEYS[i])
-                ret[i] = {type["ok"], encoding, ttl}
+                local idleTime = redis.call("OBJECT", "IDLETIME", KEYS[i])
+                ret[i] = {type["ok"], encoding, ttl, idleTime}
             end
             return cmsgpack.pack(ret)
         """)
@@ -76,7 +77,8 @@ def resolve_with_pipe(self, ret):
             pipe.type(key)
             pipe.object('ENCODING', key)
             pipe.ttl(key)
-        key_with_types = [{'type': x, 'encoding': y, 'ttl': z} for x, y, z in chunker(pipe.execute(), 3)]
+            pipe.object('IDLETIME', key)
+        key_with_types = [{'type': x, 'encoding': y, 'ttl': z, 'idleTime': i} for x, y, z, i in chunker(pipe.execute(), 4)]
         return key_with_types

     def scan(self, limit=1000):
@@ -86,7 +88,7 @@ def scan(self, limit=1000):
             total = 0
             for key_tuple in self.batch_scan():
                 key_info, key_name = key_tuple
-                key_type, key_encoding, key_ttl = key_info
+                key_type, key_encoding, key_ttl, key_idle_time = key_info
                 if not key_name:
                     self.logger.warning(
                         '\r\nWarning! Scan iterator return key with empty name `` and type %s', key_type)
@@ -98,7 +100,8 @@ def scan(self, limit=1000):
                     'name': key_name.decode("utf-8", "replace"),
                     'type': to_id,
                     'encoding': redis_encoding_str_to_id(key_encoding),
-                    'ttl': key_ttl
+                    'ttl': key_ttl,
+                    'idleTime': key_idle_time
                 }

                 yield key_info_obj
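For reference, the per-key metadata round-trip that resolve_with_pipe performs after this change can be exercised on its own with plain redis-py. The sketch below is illustrative only and not part of the patch; the connection settings and sample key names are placeholders.

# Standalone sketch (assumes redis-py and a reachable Redis instance).
# It mirrors the TYPE / OBJECT ENCODING / TTL / OBJECT IDLETIME batch that
# resolve_with_pipe sends through a pipeline in this patch.
import redis

r = redis.Redis(host="localhost", port=6379)  # placeholder connection settings
sample_keys = [b"user:1", b"user:2"]          # placeholder key names

pipe = r.pipeline(transaction=False)
for key in sample_keys:
    pipe.type(key)
    pipe.object('ENCODING', key)
    pipe.ttl(key)
    # OBJECT IDLETIME reports seconds since the key was last read or written;
    # note that Redis rejects it when an LFU maxmemory-policy is active.
    pipe.object('IDLETIME', key)

replies = pipe.execute()
# Group the flat reply list into 4-tuples, the same shape that
# chunker(pipe.execute(), 4) produces inside resolve_with_pipe.
for key, info in zip(sample_keys, zip(*[iter(replies)] * 4)):
    key_type, encoding, ttl, idle_time = info
    print(key, key_type, encoding, ttl, idle_time)

Fetching IDLETIME inside the same pipeline (or the same Lua loop) keeps the cost at one extra command per key, which is the approach both code paths in the patch take.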