diff --git a/piestats/cli.py b/piestats/cli.py index f45c91b..b7d8d89 100644 --- a/piestats/cli.py +++ b/piestats/cli.py @@ -60,7 +60,7 @@ def update(config_path): r = redis.Redis(**config.redis_connect) for server in config.servers: - print('Updating stats for {server.url_slug} ({server.title}) ({server.log_source})'.format(server=server)) + print(('Updating stats for {server.url_slug} ({server.title}) ({server.log_source})'.format(server=server))) # Redis key name manager keys = Keys(config, server) @@ -87,7 +87,7 @@ def update(config_path): # Trim old events retention.run_retention() - print('Updating took {0} seconds'.format(round(time.time() - start, 2))) + print(('Updating took {0} seconds'.format(round(time.time() - start, 2)))) @cli.command() @@ -116,7 +116,7 @@ def __init__(self, config={}): super(App, self).__init__() def load_config(self): - for key, value in self.options.iteritems(): + for key, value in self.options.items(): if key in self.cfg.settings and value is not None: self.cfg.set(key.lower(), value) diff --git a/piestats/compat.py b/piestats/compat.py new file mode 100644 index 0000000..aed4de3 --- /dev/null +++ b/piestats/compat.py @@ -0,0 +1,54 @@ +# Helpers to convert byte responses from redis and log lines to strings + +# There are likely lots more relevant encodings that are missing +encodings = ('utf-8', 'cp1252', 'cp1251', 'cp1250', 'cp1140', 'cp1254', 'cp1256') + + +def kill_bytes(item): + ''' Given a single variable, if it is bytes decode to string ''' + if isinstance(item, bytes): + last_exception = None + + # Try some known encodings + for encoding in encodings: + try: + return item.decode(encoding) + except UnicodeDecodeError as e: + last_exception = e + + # If none work, raise the last exception we got + raise last_exception + + else: + return item + + +def strip_bytes_from_list(iterable): + ''' Given iterable, convert each item within to a string if it is bytes ''' + if not iterable: + return iterable + + return [kill_bytes(item) for item in iterable] + + +def strip_bytes_from_stream(stream): + ''' + Given an iterable of lists/tuples, yield each list/tuple back with the items inside converted to non-bytes + Good for ZREVRANGE WITHSCORES + ''' + if not stream: + return stream + + for item in stream: + if isinstance(item, (list, tuple)): + yield strip_bytes_from_list(item) + else: + yield kill_bytes(item) + + +def strip_bytes_from_dict(d): + ''' Given a dict, return it with bytes replaced with strings in both keys and values ''' + if isinstance(d, dict): + return {kill_bytes(key): kill_bytes(value) for key, value in d.items()} + else: + return d diff --git a/piestats/config.py b/piestats/config.py index 26afbe1..22e90f4 100644 --- a/piestats/config.py +++ b/piestats/config.py @@ -9,7 +9,7 @@ class Config(): def __init__(self, conf_path): ''' Pass me path to config file; I'll load usefulness out of it ''' with open(conf_path, 'r') as h: - data = yaml.load(h) + data = yaml.safe_load(h) if not isinstance(data, dict): raise RuntimeError('Loaded yaml is garbage: {0}'.format(data)) diff --git a/piestats/keys.py b/piestats/keys.py index 3f18ba8..83d9d5c 100644 --- a/piestats/keys.py +++ b/piestats/keys.py @@ -50,21 +50,21 @@ def kills_per_day(self, day): ''' Plain keys for number of kills that happened on ``day`` ''' return '%s:kills_per_day:%s' % (self.key_prefix, day) - def player_hash(self, player): - ''' Hash of data for player ``player``''' - return '%s:player:%s' % (self.key_prefix, player) + def player_hash(self, player_id): + ''' Hash of data for player 
``player_id``''' + return '%s:player:%d' % (self.key_prefix, player_id) - def map_hash(self, _map): - ''' Hash of data for map ``_map``''' - return '%s:map:%s' % (self.key_prefix, _map) + def map_hash(self, map_name): + ''' Hash of data for map ``map_name``''' + return '%s:map:%s' % (self.key_prefix, map_name) - def player_top_enemies(self, player): + def player_top_enemies(self, player_id): ''' Sorted set of people being killed by ``player`` ''' - return '%s:player_top_enemies:%s' % (self.key_prefix, player) + return '%s:player_top_enemies:%d' % (self.key_prefix, player_id) - def player_top_victims(self, player): + def player_top_victims(self, player_id): ''' Sorted set of people killing ``player`` ''' - return '%s:player_top_victims:%s' % (self.key_prefix, player) + return '%s:player_top_victims:%d' % (self.key_prefix, player_id) def weapon_top_killers(self, weapon): ''' Sorted set containing the amount of kills a player using a weapon has ''' @@ -72,7 +72,7 @@ def weapon_top_killers(self, weapon): def round_hash(self, round_id): ''' Hash of data for round ID ``round_id``''' - return '%s:round_data:%s' % (self.key_prefix, round_id) + return '%s:round_data:%d' % (self.key_prefix, round_id) def player_id_to_names(self, player_id): - return '%s:player_id_to_names:%s' % (self.key_prefix, player_id) + return '%s:player_id_to_names:%d' % (self.key_prefix, player_id) diff --git a/piestats/models/kill.py b/piestats/models/kill.py index 3f275cc..bd36b40 100644 --- a/piestats/models/kill.py +++ b/piestats/models/kill.py @@ -1,6 +1,7 @@ from datetime import datetime import msgpack from piestats.models.base import JsonSerializableModel +from piestats.compat import strip_bytes_from_list team_names = ( 'none', @@ -62,7 +63,7 @@ def from_redis(cls, item): Class factory to instantiate a new instance of this class based on a msgpack representation ''' - return cls(*msgpack.loads(item, use_list=False)) + return cls(*strip_bytes_from_list(msgpack.loads(item, use_list=False))) def to_redis(self): ''' @@ -81,6 +82,6 @@ def resolve_player_ids(self, resolver): ''' Translate player IDs to names ''' - self.killer = resolver(self.killer) - self.victim = resolver(self.victim) + self.killer = resolver(int(self.killer)) + self.victim = resolver(int(self.victim)) return self diff --git a/piestats/models/map.py b/piestats/models/map.py index 5f16a41..f07e7c6 100644 --- a/piestats/models/map.py +++ b/piestats/models/map.py @@ -11,7 +11,7 @@ class Map(JsonSerializableModel): def __init__(self, *args, **kwargs): self.info = kwargs self.wepstats = defaultdict(lambda: defaultdict(int)) - for key, value in kwargs.iteritems(): + for key, value in kwargs.items(): if key.startswith('kills:'): stat, wep = key.split(':') self.wepstats[wep][stat] = int(value) diff --git a/piestats/models/player.py b/piestats/models/player.py index 4cf78f8..61eff2b 100644 --- a/piestats/models/player.py +++ b/piestats/models/player.py @@ -14,7 +14,7 @@ def __init__(self, *args, **kwargs): self.info = kwargs self.wepstats = defaultdict(lambda: defaultdict(int)) self.mapstats = defaultdict(lambda: defaultdict(int)) - for key, value in kwargs.iteritems(): + for key, value in kwargs.items(): try: value = int(value) diff --git a/piestats/models/round.py b/piestats/models/round.py index 842473d..3fee8aa 100644 --- a/piestats/models/round.py +++ b/piestats/models/round.py @@ -17,7 +17,7 @@ def __init__(self, *args, **kwargs): self.weaponstats = defaultdict(lambda: defaultdict(int)) self._winning_player = None - for key, value in kwargs.iteritems(): + for 
key, value in kwargs.items(): if key.startswith('scores_player:'): _, team, player_id = key.split(':', 2) player_id = int(player_id) @@ -115,7 +115,7 @@ def winning_player(self): if not self._winning_player: self._winning_player = Player( - name=sorted(((player.kills, player.name) for player in self.players.itervalues()), reverse=True)[0][1] + name=sorted(((player.kills, player.name) for player in self.players.values()), reverse=True)[0][1] ) return self._winning_player @@ -152,7 +152,7 @@ def resolve_players(self, results): new_players = {} - for player_id, data in self.playerstats.iteritems(): + for player_id, data in self.playerstats.items(): player = results.get_player_fields(player_id, ['lastcountry']) if not player: continue diff --git a/piestats/status.py b/piestats/status.py index d87e620..4235e06 100644 --- a/piestats/status.py +++ b/piestats/status.py @@ -34,25 +34,25 @@ def parse_refresh(self, sock): } # See http://wiki.soldat.pl/index.php/Refresh for docs on the binary response - for i in xrange(0, 32): + for i in range(0, 32): info['players'][i]['name'] = unpack('25p', sock.recv(25))[0] - for i in xrange(0, 32): + for i in range(0, 32): info['players'][i]['team'] = unpack('B', sock.recv(1))[0] - for i in xrange(0, 32): + for i in range(0, 32): info['players'][i]['kills'] = unpack('H', sock.recv(2))[0] - for i in xrange(0, 32): + for i in range(0, 32): info['players'][i]['deaths'] = unpack('H', sock.recv(2))[0] - for i in xrange(0, 32): + for i in range(0, 32): info['players'][i]['ping'] = unpack('B', sock.recv(1))[0] - for i in xrange(0, 32): + for i in range(0, 32): info['players'][i]['id'] = unpack('B', sock.recv(1))[0] - for i in xrange(0, 32): + for i in range(0, 32): info['players'][i]['ip'] = '.'.join(map(str, unpack('4B', sock.recv(4)))) info['score'] = { @@ -72,7 +72,7 @@ def parse_refresh(self, sock): empty_players = set() # Post processing of player results - for key, player in info['players'].iteritems(): + for key, player in info['players'].items(): # Disregard this player if the name field is empty if player['name'] == '': @@ -106,11 +106,11 @@ def parse_refresh(self, sock): pass # Make the players object just an array - info['players'] = info['players'].values() + info['players'] = list(info['players'].values()) # Convenience info['minutesLeft'] = info['currentTime'] / 60 / 60 - info['botCount'] = len(filter(lambda x: x['bot'], info['players'])) + info['botCount'] = len([x for x in info['players'] if x['bot']]) return info diff --git a/piestats/update/filemanager/__init__.py b/piestats/update/filemanager/__init__.py index 917d56c..fcee899 100644 --- a/piestats/update/filemanager/__init__.py +++ b/piestats/update/filemanager/__init__.py @@ -6,19 +6,19 @@ class FileManager(): def __init__(self): - raise NotImplemented('Should be implemented') + raise NotImplementedError('Should be implemented') def get_file_paths(self): - raise NotImplemented('Should be implemented') + raise NotImplementedError('Should be implemented') def get_files(self): - raise NotImplemented('Should be implemented') + raise NotImplementedError('Should be implemented') def get_data(self): - raise NotImplemented('Should be implemented') + raise NotImplementedError('Should be implemented') def filename_key(self): - raise NotImplemented('Should be implemented') + raise NotImplementedError('Should be implemented') @contextmanager def initialize(self): diff --git a/piestats/update/filemanager/ftp.py b/piestats/update/filemanager/ftp.py index bb5bd4d..67f78dc 100644 --- 
a/piestats/update/filemanager/ftp.py +++ b/piestats/update/filemanager/ftp.py @@ -68,7 +68,7 @@ def get_files(self, sub_path, pattern='*'): try: size = self.ftp.size(path) except ftplib.error_perm: - print 'Could not get size of %s' % path + print('Could not get size of %s' % path) continue key = self.filename_key(path) @@ -80,7 +80,7 @@ def get_files(self, sub_path, pattern='*'): if size > pos: if progressbar.is_hidden: - print('Reading {filename} from offset {pos}'.format(filename=path, pos=pos)) + print(('Reading {filename} from offset {pos}'.format(filename=path, pos=pos))) yield path, pos self.r.hset(self.keys.log_positions, key, size) diff --git a/piestats/update/filemanager/local.py b/piestats/update/filemanager/local.py index 1a8ed93..8fb5e46 100644 --- a/piestats/update/filemanager/local.py +++ b/piestats/update/filemanager/local.py @@ -39,7 +39,7 @@ def get_files(self, sub_path, pattern='*'): pos = int(prev) if size > pos: if progressbar.is_hidden: - print('Reading {filename} from offset {pos}'.format(filename=path, pos=pos)) + print(('Reading {filename} from offset {pos}'.format(filename=path, pos=pos))) yield path, pos self.r.hset(self.keys.log_positions, key, size) diff --git a/piestats/update/filemanager/ssh.py b/piestats/update/filemanager/ssh.py index 5927d64..a0615a8 100644 --- a/piestats/update/filemanager/ssh.py +++ b/piestats/update/filemanager/ssh.py @@ -51,7 +51,7 @@ def get_files(self, sub_path, pattern='*'): pos = int(prev) if size > pos: if progressbar.is_hidden: - print('Reading {filename} from offset {pos}'.format(filename=path, pos=pos)) + print(('Reading {filename} from offset {pos}'.format(filename=path, pos=pos))) yield path, pos self.r.hset(self.keys.log_positions, key, size) diff --git a/piestats/update/hwid.py b/piestats/update/hwid.py index 1f9de24..2b3a041 100644 --- a/piestats/update/hwid.py +++ b/piestats/update/hwid.py @@ -1,5 +1,6 @@ from time import time from collections import deque +from piestats.compat import kill_bytes class BoundedCache: @@ -26,7 +27,7 @@ def set(self, key, value): if len(self.dict) > self.maxsize: evict = self.keys.pop() self.dict.pop(evict) - print '\n\nEvicted %s (size %s)\n\n' % (key, len(self.dict)) + print('\n\nEvicted %s (size %s)\n\n' % (key, len(self.dict))) self.dict[key] = value self.keys.appendleft(key) @@ -41,6 +42,8 @@ def __init__(self, r, keys): def get_player_id_from_name(self, name): ''' get what player id this name currently maps to. if there is none, instantiate one ''' + name = kill_bytes(name) + # Refer to our local cache cached = self.player_name_cache.get(name) if cached: @@ -50,11 +53,11 @@ def get_player_id_from_name(self, name): # Case where we have track of player joining if existing_name_id is not None: - player_id = existing_name_id + player_id = int(existing_name_id) # Case where we don't. Bot? Start it off. else: - player_id = self.r.incr(self.keys.last_player_id) + player_id = int(self.r.incr(self.keys.last_player_id)) self.r.hset(self.keys.name_to_id, name, player_id) self.r.zadd(self.keys.player_id_to_names(player_id), name, time()) @@ -67,13 +70,23 @@ def register_hwid(self, name, hwid, date): crux of hwid dedupe functionality. called based on the "player join" console log lines. 
instantiate a player ID for this name or tie it to an existing name ''' + name = kill_bytes(name) + hwid = kill_bytes(hwid) + date = int(date) + # Try to formulate proper mapping of hwid to id existing_hwid_id = self.r.hget(self.keys.hwid_to_id, hwid) existing_name_id = self.r.hget(self.keys.name_to_id, name) + if existing_hwid_id is not None: + existing_hwid_id = int(existing_hwid_id) + + if existing_name_id is not None: + existing_name_id = int(existing_name_id) + # No ID for this combo set yet? Map it if existing_hwid_id is None and existing_name_id is None: - player_id = self.r.incr(self.keys.last_player_id) + player_id = int(self.r.incr(self.keys.last_player_id)) self.r.hset(self.keys.hwid_to_id, hwid, player_id) self.r.hset(self.keys.name_to_id, name, player_id) @@ -93,8 +106,8 @@ def register_hwid(self, name, hwid, date): # Garbage fire of many-to-many mapping of HWID to playername if existing_hwid_id != existing_name_id: player_id = existing_hwid_id - print ('\nMismatch of IDs. HWID "%s" (%s) does not match Name "%s" (id %s). Defaulting to %s' % ( - hwid, existing_hwid_id, name, existing_name_id, player_id)) + print(('\nMismatch of IDs. HWID "%s" (%s) does not match Name "%s" (id %s). Defaulting to %s' % ( + hwid, existing_hwid_id, name, existing_name_id, player_id))) self.r.hset(self.keys.hwid_to_id, hwid, player_id) self.r.hset(self.keys.name_to_id, name, player_id) diff --git a/piestats/update/lock.py b/piestats/update/lock.py index 9fdebf2..08b560c 100644 --- a/piestats/update/lock.py +++ b/piestats/update/lock.py @@ -23,7 +23,7 @@ def acquire_lock(): yield return - print 'Acquiring lock (ensuring only one update is running at once)' + print('Acquiring lock (ensuring only one update is running at once)') try: fcntl.lockf(handle, fcntl.LOCK_EX) @@ -31,7 +31,7 @@ def acquire_lock(): # This should never fail and instead just block if another instance has the lock, # but in case it does fail, don't bail the updater except Exception as e: - print 'Failed acquiring lock: %s' % e + print('Failed acquiring lock: %s' % e) try: yield diff --git a/piestats/update/manageevents.py b/piestats/update/manageevents.py index 049f7e4..d6ffd2b 100644 --- a/piestats/update/manageevents.py +++ b/piestats/update/manageevents.py @@ -11,6 +11,7 @@ import select import msgpack import os +from piestats.compat import strip_bytes_from_dict try: import GeoIP @@ -37,7 +38,7 @@ def start_procs(self, nproc): self.kill_event = multiprocessing.Event() my_pid = os.getpid() - for number in xrange(nproc): + for number in range(nproc): r_pipe, w_pipe = multiprocessing.Pipe(False) proc = multiprocessing.Process(target=self.worker, args=(number, r_pipe, my_pid)) proc.start() @@ -91,7 +92,7 @@ def apply_kill(self, kill, incr=1): Apply a kill, incrementing (or decrementing) all relevant metrics ''' if abs(incr) != 1: - print 'Invalid increment value for kill: {kill}'.format(kill=kill) + print('Invalid increment value for kill: {kill}'.format(kill=kill)) return # Convert victim and killer to their IDs @@ -200,9 +201,9 @@ def __init__(self, r, keys, server): try: self.geoip = GeoIP.open(pkg_resources.resource_filename('piestats.update', 'GeoIP.dat'), GeoIP.GEOIP_MMAP_CACHE) except Exception as e: - print 'Failed loading geoip file %s' % e + print('Failed loading geoip file %s' % e) else: - print 'GeoIP looking up not supported' + print('GeoIP looking up not supported') def __enter__(self): ''' When our context manager is initialized, initialize our kill queue ''' @@ -275,7 +276,7 @@ def update_map(self, map, date): # 
Finish up old round's stats if old_map: - old_round_data = self.r.hgetall(self.keys.round_hash(old_round_id)) + old_round_data = strip_bytes_from_dict(self.r.hgetall(self.keys.round_hash(old_round_id))) if old_round_data: old_round = Round(**old_round_data) @@ -295,13 +296,13 @@ def update_map(self, map, date): else: old_round_id = self.r.get(self.keys.last_round_id) if old_round_id: - old_round_data = self.r.hgetall(self.keys.round_hash(old_round_id)) + old_round_data = strip_bytes_from_dict(self.r.hgetall(self.keys.round_hash(old_round_id))) if old_round_data: old_round = Round(**old_round_data) # If it has no kills or no scores or anything else just delete it if old_round.empty: - print 'Killing old empty round id %s' % old_round_id + print('Killing old empty round id %s' % old_round_id) self.r.delete(self.keys.round_hash(old_round_id)) self.r.zrem(self.keys.round_log, old_round_id) diff --git a/piestats/update/parseevents.py b/piestats/update/parseevents.py index 357f99f..b89d68b 100644 --- a/piestats/update/parseevents.py +++ b/piestats/update/parseevents.py @@ -8,6 +8,7 @@ from piestats.update.mapimage import generate_map_svg from piestats.models.events import EventPlayerJoin, EventNextMap, EventScore, EventInvalidMap, EventRequestMap, EventBareLog, MapList from piestats.models.kill import Kill +from piestats.compat import kill_bytes flag_round_map_prefixes = ('ctf_', 'inf_', 'tw_') @@ -20,22 +21,22 @@ def __init__(self, retention, filemanager, r, keys): self.retention = retention self.filemanager = filemanager - kill_regex = ('(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) \((?P\d)\) (?P.+) killed \((?P\d)\) (?P.+) ' + kill_regex = (r'(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) \((?P\d)\) (?P.+) killed \((?P\d)\) (?P.+) ' 'with (?PAk-74|Barrett M82A1|Chainsaw|Cluster Grenades|Combat Knife|Desert Eagles|' 'FN Minimi|Grenade|Hands|HK MP5|LAW|M79|Ruger 77|Selfkill|Spas-12|Stationary gun|Steyr AUG' '|USSOCOM|XM214 Minigun|Bow|Flame Bow)') self.event_regex = ( - (EventPlayerJoin, re.compile('(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) (?P.+) joining game \((?P[^:]+):\d+\) HWID:(?P\S+)')), - (EventNextMap, re.compile('(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) Next map: (?P[^$]+)')), - (EventNextMap, re.compile('(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) /map (?P[^(\s]+)')), - (EventRequestMap, re.compile('(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) \[.+\] !map (?P[^(\s]+)')), - (EventInvalidMap, re.compile('\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d Map not found \((?P\S+)\)')), - (EventScore, re.compile('(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) (?P.+) scores for (?PAlpha|Bravo) Team$')), + (EventPlayerJoin, re.compile(r'(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) (?P.+) joining game \((?P[^:]+):\d+\) HWID:(?P\S+)')), + (EventNextMap, re.compile(r'(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) Next map: (?P[^$]+)')), + (EventNextMap, re.compile(r'(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) /map (?P[^(\s]+)')), + (EventRequestMap, re.compile(r'(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) \[.+\] !map (?P[^(\s]+)')), + (EventInvalidMap, re.compile(r'\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d Map not found \((?P\S+)\)')), + (EventScore, re.compile(r'(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) (?P.+) scores for (?PAlpha|Bravo) Team$')), (Kill, re.compile(kill_regex)), # Make absolutely sure this is last - (EventBareLog, re.compile('(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) (?P[^$]+)$')), + (EventBareLog, re.compile(r'(?P\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) (?P[^$]+)$')), ) self.requested_map = None @@ -49,7 +50,7 @@ def build_map_names(self): map_paths = self.filemanager.get_file_paths('maps', '*.pms') - print 'Parsing 
%d maps' % len(map_paths) + print('Parsing %d maps' % len(map_paths)) for map_path in map_paths: @@ -74,7 +75,7 @@ def build_map_names(self): try: reader.parse(BytesIO(content)) except Exception as e: - print 'Failed reading map %s: %s' % (map_path, e) + print('Failed reading map %s: %s' % (map_path, e)) continue # If we already have the generate svg for this map, don't generate it again @@ -82,9 +83,9 @@ def build_map_names(self): try: generated_svg = generate_map_svg(reader) self.r.hset(self.keys.map_hash(map_filename), 'svg_image', generated_svg) - print 'Saved generated SVG for %s' % map_filename + print('Saved generated SVG for %s' % map_filename) except Exception as e: - print 'Failed generating SVG for %s: %s' % (map_filename, e) + print('Failed generating SVG for %s: %s' % (map_filename, e)) if not map_title: title = reader.header.Name.text[:reader.header.Name.length].strip() @@ -106,7 +107,13 @@ def parse_line(self, line): Run our regexes against a line and return the first event object that matches ''' for event, regex in self.event_regex: - m = regex.match(line.strip()) + try: + line = kill_bytes(line.strip()) + except UnicodeDecodeError: + print('Could not parse line "%s"' % line) + continue + + m = regex.match(line) if not m: continue diff --git a/piestats/update/pms_parser/__init__.py b/piestats/update/pms_parser/__init__.py index 37ebf0f..456be32 100644 --- a/piestats/update/pms_parser/__init__.py +++ b/piestats/update/pms_parser/__init__.py @@ -47,7 +47,7 @@ def parse(self, h): logging.info('read header. now reading polys') # All polygons - for i in xrange(self.header.PolyCount): + for i in range(self.header.PolyCount): polygon = T_Polygon() if not h.readinto(polygon): logging.error('Failed reading polygon #{}'.format(i)) @@ -59,9 +59,9 @@ def parse(self, h): # Skip sector data we don't immediately care about sector_division = self._get_long(h) num_sectors = self._get_long(h) - for i in xrange(((num_sectors * 2) + 1) * ((num_sectors * 2) + 1)): + for i in range(((num_sectors * 2) + 1) * ((num_sectors * 2) + 1)): sector_polys = unpack('H', h.read(2))[0] - for j in xrange(sector_polys): + for j in range(sector_polys): h.read(2) self.min_x = sector_division * -num_sectors @@ -73,7 +73,7 @@ def parse(self, h): # All props (scenery placements) prop_count = self._get_long(h) - for i in xrange(prop_count): + for i in range(prop_count): prop = T_Prop() if not h.readinto(prop): logging.error('Failed reading prop #{}'.format(i)) @@ -84,7 +84,7 @@ def parse(self, h): # All sceneries (map prop style to scenery filename) scenery_count = self._get_long(h) - for i in xrange(scenery_count): + for i in range(scenery_count): scenery = T_Scenery() if not h.readinto(scenery): logging.error('Failed reading scenery #{}'.format(i)) @@ -95,7 +95,7 @@ def parse(self, h): # Colliders collider_count = self._get_long(h) - for i in xrange(collider_count): + for i in range(collider_count): collider = T_Collider() if not h.readinto(collider): logging.error('Failed reading collider #{}'.format(i)) @@ -106,7 +106,7 @@ def parse(self, h): # Spawnpoints spawnpoint_count = self._get_long(h) - for i in xrange(spawnpoint_count): + for i in range(spawnpoint_count): spawnpoint = T_Spawnpoint() if not h.readinto(spawnpoint): logging.error('Failed reading spawnpoint #{}'.format(i)) @@ -118,7 +118,7 @@ def parse(self, h): # Waypoints waypoint_count = self._get_long(h) - for i in xrange(waypoint_count): + for i in range(waypoint_count): waypoint = T_Waypoint() if not h.readinto(waypoint): logging.error('Failed 
reading waypoint #{}'.format(i)) diff --git a/piestats/update/pms_parser/color.py b/piestats/update/pms_parser/color.py index f827219..55270cd 100644 --- a/piestats/update/pms_parser/color.py +++ b/piestats/update/pms_parser/color.py @@ -1,4 +1,4 @@ -from __future__ import division + import ctypes @@ -13,4 +13,4 @@ class T_Color(ctypes.Structure): @property def for_gl_color(self): - return [self.__getattribute__(key) / 255 for key in 'red', 'green', 'blue', 'alpha'] + return [self.__getattribute__(key) / 255 for key in ('red', 'green', 'blue', 'alpha')] diff --git a/piestats/update/retention.py b/piestats/update/retention.py index 881ac65..6a0a29f 100644 --- a/piestats/update/retention.py +++ b/piestats/update/retention.py @@ -22,7 +22,7 @@ def too_old_unix(self, seconds): return self.oldest_allowed_unix > seconds def too_old_filename(self, filename): - m = re.match('consolelog-(?P\d+)-(?P\d+)-(?P\d+)-\d+.txt', filename) + m = re.match(r'consolelog-(?P\d+)-(?P\d+)-(?P\d+)-\d+.txt', filename) if not m: return False @@ -39,7 +39,7 @@ def run_retention(self): ''' kill_ids = self.r.zrangebyscore(self.keys.kill_log, -1, self.oldest_allowed_unix) - print 'Processing retention.. trimming events up until %s' % datetime.utcfromtimestamp(self.oldest_allowed_unix) + print('Processing retention.. trimming events up until %s' % datetime.utcfromtimestamp(self.oldest_allowed_unix)) if not kill_ids: return diff --git a/piestats/web/app.py b/piestats/web/app.py index ff87ffb..cfd9b0e 100644 --- a/piestats/web/app.py +++ b/piestats/web/app.py @@ -2,6 +2,7 @@ import re import logging import urllib +import urllib.parse from collections import OrderedDict from datetime import datetime, timedelta @@ -33,7 +34,7 @@ '.woff': 'application/font-woff'} _filename_ascii_strip_re = re.compile(r'[^A-Za-z0-9_.-]') -_safe_username_re = re.compile('^[^.][a-zA-Z0-9-\. ]+$') +_safe_username_re = re.compile(r'^[^.][a-zA-Z0-9-\. 
]+$') def secure_filename(filename): @@ -74,7 +75,7 @@ def player_url(server, username): if not username: return None if bad_username(username): - return '/{server}/player?name={username}'.format(server=server, username=urllib.quote_plus(username)) + return '/{server}/player?name={username}'.format(server=server, username=urllib.parse.quote_plus(username)) else: return '/{server}/player/{username}'.format(server=server, username=username) @@ -114,6 +115,7 @@ def more_data(self, req): pretty_datetime=pretty_datetime(req.context['config'].timezone), enumerate=enumerate, len=len, + list=list, pretty_duration=pretty_duration, req=req ) diff --git a/piestats/web/helpers.py b/piestats/web/helpers.py index a195268..b737ee3 100644 --- a/piestats/web/helpers.py +++ b/piestats/web/helpers.py @@ -1,5 +1,5 @@ import re -trailing_name_count_matcher = re.compile('(.+)\((\d+)\)$') +trailing_name_count_matcher = re.compile(r'(.+)\((\d+)\)$') def remove_redundant_player_names(names): diff --git a/piestats/web/results.py b/piestats/web/results.py index 7d7ae63..5d0dd30 100644 --- a/piestats/web/results.py +++ b/piestats/web/results.py @@ -8,6 +8,8 @@ from piestats.models.kill import Kill from piestats.models.round import Round from piestats.web.helpers import remove_redundant_player_names +from piestats.compat import ( + strip_bytes_from_list, strip_bytes_from_dict, strip_bytes_from_stream, kill_bytes) class Results(): @@ -48,6 +50,7 @@ def get_top_killers(self, startat=0, incr=20): ''' list of Player objects sorted by those with the most kills desc ''' results = self.r.zrevrange(self.keys.top_players, startat, startat + incr, withscores=True) for player_id, kills in results: + player_id = int(player_id) more = self.hmget_with_keys(self.keys.player_hash(player_id), ['deaths', 'lastseen', 'firstseen', 'lastcountry', 'scores:Alpha', 'scores:Bravo']) yield Player(name=self.get_name_from_id(player_id), kills=kills, @@ -56,7 +59,7 @@ def get_top_killers(self, startat=0, incr=20): def get_top_maps(self, startat=0, incr=20): ''' list of Map objects sorted by those with the most plays desc ''' - results = self.r.zrevrange(self.keys.top_maps, startat, startat + incr, withscores=True) + results = strip_bytes_from_stream(self.r.zrevrange(self.keys.top_maps, startat, startat + incr, withscores=True)) for name, plays in results: more = self.hmget_with_keys(self.keys.map_hash(name), ['scores:Alpha', 'scores:Bravo', 'wins:bravo', 'wins:alpha', 'kills', 'flags']) yield Map(name=name, @@ -64,30 +67,31 @@ def get_top_maps(self, startat=0, incr=20): **more ) - def get_player(self, _player): + def get_player(self, player_name): ''' given a player id, get a Player object ''' - _player_id = self.get_id_from_name(_player) - if not _player_id: + player_id = self.get_id_from_name(player_name) + if not player_id: return None - info = self.r.hgetall(self.keys.player_hash(_player_id)) + info = strip_bytes_from_dict(self.r.hgetall(self.keys.player_hash(player_id))) if not info: return None - info['names'] = self.get_all_names_from_id(_player_id) - return Player(name=self.get_name_from_id(_player_id), **info) + info['names'] = self.get_all_names_from_id(player_id) + return Player(name=self.get_name_from_id(player_id), **info) - def get_round(self, _round): + def get_round(self, round_id): ''' given a round id, get a Round object ''' - info = self.r.hgetall(self.keys.round_hash(_round)) + round_id = int(round_id) + info = strip_bytes_from_dict(self.r.hgetall(self.keys.round_hash(round_id))) if not info: return None - return 
Round(round_id=_round, **info).resolve_players(self) + return Round(round_id=round_id, **info).resolve_players(self) - def get_map(self, _map, get_svg=False): + def get_map(self, map_name, get_svg=False): ''' given a map name, get a Map object ''' # Manually get list of all keys we have for this map, so # if we don't want to get the gigantic svg xml blob, we # can selectively remove it - keys = self.r.hkeys(self.keys.map_hash(_map)) + keys = strip_bytes_from_list(self.r.hkeys(self.keys.map_hash(map_name))) # No keys returned? Map doesn't exist if not keys: @@ -101,7 +105,7 @@ def get_map(self, _map, get_svg=False): keys = list(keys) - info = self.hmget_with_keys(self.keys.map_hash(_map), keys) + info = self.hmget_with_keys(self.keys.map_hash(map_name), keys) if not info: return None @@ -112,47 +116,48 @@ def get_map(self, _map, get_svg=False): if not get_svg and has_svg: info['svg_image'] = None - info['plays'] = self.r.zscore(self.keys.top_maps, _map) or 0 - return Map(name=_map, **info) + info['plays'] = self.r.zscore(self.keys.top_maps, map_name) or 0 + return Map(name=map_name, **info) - def get_player_fields_by_name(self, player_name, fields=[]): + def get_player_fields_by_name(self, player_name, fields): ''' given a player name and some keys, get back a populated Player object ''' player_id = self.get_id_from_name(player_name) if not player_id: return return self.get_player_fields(player_id, fields) - def get_player_fields(self, _player_id, fields=[]): + def get_player_fields(self, player_id, fields): ''' given a player id and some keys, get back a populated Player object ''' - info = {} - for key in fields: - info[key] = self.r.hget(self.keys.player_hash(_player_id), key) - return Player(name=self.get_name_from_id(_player_id), **info) + player_id = int(player_id) + info = self.hmget_with_keys(self.keys.player_hash(player_id), fields) + return Player(name=self.get_name_from_id(player_id), **info) - def get_player_top_enemies(self, _player, startat=0, incr=20): + def get_player_top_enemies(self, player_name, startat=0, incr=20): ''' given a player name, get list of Player objects for that player, sorted by number of times they killed us desc ''' - _player_id = self.get_id_from_name(_player) - if not _player_id: + player_id = self.get_id_from_name(player_name) + if not player_id: return - results = self.r.zrevrange(self.keys.player_top_enemies(_player_id), 0, startat + incr, withscores=True) + results = self.r.zrevrange(self.keys.player_top_enemies(player_id), 0, startat + incr, withscores=True) for enemy_id, their_kills in results: + enemy_id = int(enemy_id) more = self.hmget_with_keys(self.keys.player_hash(enemy_id), ['lastcountry']) - my_deaths = float(self.r.zscore(self.keys.player_top_enemies(enemy_id), _player_id) or 0) + my_deaths = float(self.r.zscore(self.keys.player_top_enemies(enemy_id), player_id) or 0) more['kd'] = '%.2f' % (their_kills / my_deaths if my_deaths > 0 else 0) yield Player(name=self.get_name_from_id(enemy_id), kills=their_kills, **more ) - def get_player_top_victims(self, _player, startat=0, incr=20): + def get_player_top_victims(self, player_name, startat=0, incr=20): ''' given a player name, get list of Player objects for that player, sorted by number of times we killed them us desc ''' - _player_id = self.get_id_from_name(_player) - if not _player_id: + player_id = self.get_id_from_name(player_name) + if not player_id: return - results = self.r.zrevrange(self.keys.player_top_victims(_player_id), 0, startat + incr, withscores=True) + results = 
self.r.zrevrange(self.keys.player_top_victims(player_id), 0, startat + incr, withscores=True) for victim_id, my_kills in results: + victim_id = int(victim_id) more = self.hmget_with_keys(self.keys.player_hash(victim_id), ['lastcountry']) - their_deaths = float(self.r.zscore(self.keys.player_top_victims(victim_id), _player_id) or 0) + their_deaths = float(self.r.zscore(self.keys.player_top_victims(victim_id), player_id) or 0) more['kd'] = '%.2f' % (my_kills / their_deaths if their_deaths > 0 else 0) yield Player(name=self.get_name_from_id(victim_id), kills=my_kills, @@ -163,6 +168,7 @@ def get_last_kills(self, startat=0, incr=20): ''' given pagination, get list of most recent Kill objects ''' kill_ids = self.r.zrevrange(self.keys.kill_log, startat, startat + incr) for kill_id in kill_ids: + kill_id = int(kill_id) kill_data = self.r.hget(self.keys.kill_data, kill_id) if kill_data: yield Kill.from_redis(kill_data).resolve_player_ids(self.get_name_from_id) @@ -172,14 +178,15 @@ def get_last_rounds(self, startat=0, incr=20): startat += 1 round_ids = self.r.zrevrange(self.keys.round_log, startat, startat + incr) for round_id in round_ids: - round_data = self.r.hgetall(self.keys.round_hash(round_id)) + round_id = int(round_id) + round_data = strip_bytes_from_dict(self.r.hgetall(self.keys.round_hash(round_id))) if round_data: round_data['round_id'] = round_id yield Round(**round_data).resolve_players(self) def get_top_weapons(self): ''' get list of tuples of weapon to kills ''' - results = self.r.zrevrange(self.keys.top_weapons, 0, 20, withscores=True) + results = strip_bytes_from_stream(self.r.zrevrange(self.keys.top_weapons, 0, 20, withscores=True)) return map(lambda x: (x[0], int(x[1])), results) def get_kills_for_date_range(self, startdate=None, previous_days=7): @@ -202,12 +209,12 @@ def get_kills_for_date_range(self, startdate=None, previous_days=7): def get_top_countries(self, limit=10): ''' get list of tuples of countries and players from that country ''' - return self.r.zrevrange(self.keys.top_countries, 0, limit, withscores=True) + return strip_bytes_from_stream(self.r.zrevrange(self.keys.top_countries, 0, limit, withscores=True)) def player_search(self, name): ''' search for players based on name fragment. 
return list of Player() objects sorted desc by last seen''' # escape glob characters so they act as expected as search terms - name = name.replace('*', '\*').replace('?', '\?') + name = name.replace('*', r'\*').replace('?', r'\?') player_ids = set() @@ -222,6 +229,7 @@ def player_search(self, name): res = self.r.hscan(self.keys.player_search, cursor, '*{name}*'.format(name=name.lower()), max_names) cursor = res[0] for name in res[1].values(): + name = kill_bytes(name) player_id = self.get_id_from_name(name) if player_id: player_ids.add(player_id) @@ -243,14 +251,19 @@ def get_name_from_id(self, player_id): def get_all_names_from_id(self, player_id): ''' get all names this ID is tied to, sorted by most recent use descending ''' - names_with_scores = self.r.zrevrange(self.keys.player_id_to_names(player_id), 0, -1, withscores=True) + player_id = int(player_id) + names_with_scores = list(strip_bytes_from_stream(self.r.zrevrange(self.keys.player_id_to_names(player_id), 0, -1, withscores=True))) filtered = remove_redundant_player_names([r[0] for r in names_with_scores]) names_with_scores_dict = dict(names_with_scores) return [(k, names_with_scores_dict[k]) for k in filtered] def get_id_from_name(self, name): ''' get latest id this name is tied to ''' - return self.r.hget(self.keys.name_to_id, name) + result = self.r.hget(self.keys.name_to_id, name) + if result: + return int(result) + else: + return None def hmget_with_keys(self, hash_name, keys): ''' like self.r.hgetall except specify the keys you get back ''' @@ -260,4 +273,6 @@ def hmget_with_keys(self, hash_name, keys): if not data: return {} + data = strip_bytes_from_list(data) + return dict(zip(keys, data)) diff --git a/piestats/web/templates/index.html b/piestats/web/templates/index.html index dc9fed5..0b33a32 100644 --- a/piestats/web/templates/index.html +++ b/piestats/web/templates/index.html @@ -41,7 +41,7 @@
Top countries
// Our latest kills line chart var killsCtx = document.getElementById('chart_latest_kills').getContext('2d'); var data = { - labels: {{killsperdate.keys()[::-1]|safe}}, + labels: {{list(killsperdate.keys())[::-1]|safe}}, datasets: [ { label: 'Kills per day', @@ -51,7 +51,7 @@
Top countries
pointStrokeColor: '#fff', pointHighlightFill: '#fff', pointHighlightStroke: 'rgba(220,220,220,1)', - data: {{killsperdate.values()[::-1]|safe}} + data: {{list(killsperdate.values())[::-1]|safe}} } ] }; diff --git a/requirements.txt b/requirements.txt index 0cff6d2..0282f2d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ +2to3==1.0 asn1crypto==0.24.0 Babel==2.5.1 bcrypt==3.1.4 @@ -5,13 +6,14 @@ cffi==1.11.2 click==6.7 configparser==3.5.0 cryptography==2.3.1 +entrypoints==0.3 enum34==1.1.6 falcon==1.3.0 -flake8==3.5.0 +flake8==3.7.7 GeoIP==1.3.2 -gevent==1.2.2 -greenlet==0.4.12 -gunicorn==19.7.1 +gevent==1.4.0 +greenlet==0.4.15 +gunicorn==19.9.0 idna==2.6 ipaddress==1.0.19 IPy==0.83 @@ -20,20 +22,21 @@ MarkupSafe==1.0 mccabe==0.6.1 msgpack==0.5.0 paramiko==2.4.2 +pkg-resources==0.0.0 pluggy==0.6.0 py==1.5.2 pyasn1==0.4.2 -pycodestyle==2.3.1 +pycodestyle==2.5.0 pycparser==2.18 -pyflakes==1.6.0 +pyflakes==2.1.1 PyNaCl==1.2.1 pyparsing==2.3.0 python-mimeparse==1.6.0 pytz==2017.3 -pyyaml>=4.2b1 +PyYAML==5.1b3 redis==2.10.6 setproctitle==1.1.10 six==1.11.0 tox==2.9.1 ujson==1.35 -virtualenv==15.1.0 +virtualenv==16.4.3 diff --git a/setup.cfg b/setup.cfg index 5e1a9c8..df8dc40 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,4 +1,4 @@ [flake8] max-line-length = 160 exclude = env/* -ignore = E111,E722,E114,E402 +ignore = E111,E722,E114,E402,W503,W504
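
A short usage sketch of the new `piestats.compat` helpers may help reviewers: under Python 3, redis-py hands back `bytes` for hash keys/values and sorted-set members, and this patch routes those responses through the compat helpers before building models. The snippet below is illustrative only and not part of the patch; the sample values are made up, it assumes the patched `piestats` package is importable, and no Redis server is needed to run it.

```python
# Illustrative sketch, not part of the patch: exercising the new compat helpers
# with made-up values shaped like redis-py responses under Python 3.
from piestats.compat import kill_bytes, strip_bytes_from_dict, strip_bytes_from_stream

# HGETALL-style responses come back as bytes -> bytes; the models expect str.
raw_round = {b'map': b'ctf_Ash', b'kills': b'42'}  # hypothetical sample data
assert strip_bytes_from_dict(raw_round) == {'map': 'ctf_Ash', 'kills': '42'}

# ZREVRANGE ... WITHSCORES-style responses are (member, score) pairs.
raw_top_maps = [(b'ctf_Ash', 120.0), (b'ctf_Voland', 88.0)]  # hypothetical sample data
assert list(strip_bytes_from_stream(raw_top_maps)) == [['ctf_Ash', 120.0], ['ctf_Voland', 88.0]]

# kill_bytes() falls back through several codepages because old console logs
# are not reliably UTF-8; non-bytes values pass through untouched.
assert kill_bytes(b'Jos\xe9') == 'José'
assert kill_bytes('plain str') == 'plain str'

# int() accepts ASCII-digit bytes, which is why the explicit int() casts the
# patch adds around redis-stored IDs work whether the value is bytes or str.
assert int(b'17') == 17
```

The codepage list in `compat.encodings` is a judgment call; as the module's own comment notes, more encodings may need to be added if undecodable nicknames turn up in the logs.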