From 891d47655cd79a3f3886e34a93405686798ba155 Mon Sep 17 00:00:00 2001 From: Robby Dermody Date: Mon, 9 May 2016 14:17:25 -0400 Subject: [PATCH 01/36] fix counterwallet#774 --- counterblock/lib/modules/betting.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/counterblock/lib/modules/betting.py b/counterblock/lib/modules/betting.py index 2c4f4b7f..a009f6d8 100644 --- a/counterblock/lib/modules/betting.py +++ b/counterblock/lib/modules/betting.py @@ -114,7 +114,7 @@ def get_feed(address_or_url = ''): 'info_status': 'valid' } result = {} - feeds = config.mongo_db.feeds.find(spec=conditions, projection={'_id': False}, limit=1) + feeds = config.mongo_db.feeds.find(conditions, projection={'_id': False}, limit=1) for feed in feeds: if 'targets' not in feed['info_data'] or ('type' in feed['info_data'] and feed['info_data']['type'] in ['all', 'cfd']): feed['info_data']['next_broadcast'] = util.next_interval_date(feed['info_data']['broadcast_date']) From 438aa72e4ac23313261c049e2230961a0643418b Mon Sep 17 00:00:00 2001 From: robby-dermody Date: Tue, 17 May 2016 16:56:14 +0000 Subject: [PATCH 02/36] port to python3 --- .gitignore | 3 + counterblock/__init__.py | 4 - counterblock/armory_utxsvr.py | 137 ------------------ counterblock/lib/blockchain.py | 2 +- counterblock/lib/blockfeed.py | 12 +- counterblock/lib/cache.py | 4 +- counterblock/lib/config.py | 4 +- counterblock/lib/database.py | 2 +- counterblock/lib/log.py | 4 + counterblock/lib/module.py | 20 +-- counterblock/lib/modules/assets.py | 6 +- counterblock/lib/modules/betting.py | 6 +- counterblock/lib/modules/counterwallet.py | 14 +- .../lib/modules/counterwallet_iofeeds.py | 14 +- counterblock/lib/modules/dex/__init__.py | 15 +- .../lib/modules/dex/assets_trading.py | 14 +- counterblock/lib/modules/dex/dex.py | 6 +- counterblock/lib/modules/transaction_stats.py | 6 +- counterblock/lib/processor/__init__.py | 8 +- counterblock/lib/processor/api.py | 35 +++-- counterblock/lib/tests/tests.py | 26 ++-- counterblock/lib/util.py | 115 +++++++-------- counterblock/server.py | 11 +- requirements.txt | 5 + setup.py | 65 ++++----- 25 files changed, 196 insertions(+), 342 deletions(-) delete mode 100755 counterblock/armory_utxsvr.py create mode 100644 requirements.txt diff --git a/.gitignore b/.gitignore index 071d4a16..f35199bd 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ # Packages *.egg *.egg-info +.eggs build eggs parts @@ -44,3 +45,5 @@ schema.py #Docs docs/_build +#backup files +*.bak diff --git a/counterblock/__init__.py b/counterblock/__init__.py index 795baf16..9ff757f5 100644 --- a/counterblock/__init__.py +++ b/counterblock/__init__.py @@ -3,7 +3,3 @@ def server_main(): from counterblock import server server.main() - -def armory_utxsvr_main(): - from counterblock import armory_utxsvr - armory_utxsvr.main() diff --git a/counterblock/armory_utxsvr.py b/counterblock/armory_utxsvr.py deleted file mode 100755 index 840a5bed..00000000 --- a/counterblock/armory_utxsvr.py +++ /dev/null @@ -1,137 +0,0 @@ -#! 
/usr/bin/env python3 -""" -server for creating unsigned armory offline transactions -""" -import sys -import logging -import argparse -import json -import time -import threading -import requests -import datetime - -import flask -from flask import request -import jsonrpc -from jsonrpc import dispatcher - -sys.path.append("/usr/lib/armory/") -from armoryengine.ALL import * - -ARMORY_UTXSVR_PORT_MAINNET = 6590 -ARMORY_UTXSVR_PORT_TESTNET = 6591 -app = flask.Flask(__name__) -is_testnet = False -bitcoind_url = None - -def call_rpc(method, params): - headers = {'content-type': 'application/json'} - if not isinstance(params, list): params = [params,] - payload = json.dumps({"method": method, "params": params, "jsonrpc": "2.0", "id": 0}) - response = requests.post(bitcoind_url, headers=headers, data=payload, timeout=10) - response_json = response.json() - if 'error' not in response_json.keys() or response_json['error'] == None: - return response_json['result'] - raise Exception("API request got error response: %s" % response_json) - -@dispatcher.add_method -def serialize_unsigned_tx(unsigned_tx_hex, public_key_hex): - print("REQUEST(serialize_unsigned_tx) -- unsigned_tx_hex: '%s', public_key_hex: '%s'" % ( - unsigned_tx_hex, public_key_hex)) - - try: - unsigned_tx_bin = hex_to_binary(unsigned_tx_hex) - pytx = PyTx().unserialize(unsigned_tx_bin) - - #compose a txmap manually via bitcoind's getrawtransaction call because armory's way of - # doing it (TheBDM.bdv().getTxByHash()) seems to not always work in 0.93.3+ ... - tx_map = {} - for txin in pytx.inputs: - outpt = txin.outpoint - txhash = outpt.txHash - txhash_hex = binary_to_hex(txhash, BIGENDIAN) - try: - raw_tx_result = call_rpc("getrawtransaction", [txhash_hex, 1]) - except Exception as e: - raise Exception("Could not locate input txhash %s: %s" % (txhash_hex, e)) - return - tx_map[txhash] = PyTx().unserialize(hex_to_binary(raw_tx_result['hex'])) - - utx = UnsignedTransaction(pytx=pytx, pubKeyMap=hex_to_binary(public_key_hex), txMap=tx_map) - unsigned_tx_ascii = utx.serializeAscii() - except Exception, e: - raise Exception("Could not serialize transaction: %s" % e) - - return unsigned_tx_ascii - -@dispatcher.add_method -def convert_signed_tx_to_raw_hex(signed_tx_ascii): - """Converts a signed tx from armory's offline format to a raw hex tx that bitcoind can broadcast/use""" - print("REQUEST(convert_signed_tx_to_raw_hex) -- signed_tx_ascii:\n'%s'\n" % (signed_tx_ascii,)) - - try: - utx = UnsignedTransaction() - utx.unserializeAscii(signed_tx_ascii) - except Exception, e: - raise Exception("Could not decode transaction: %s" % e) - - #see if the tx is signed - if not utx.evaluateSigningStatus().canBroadcast: - raise Exception("Passed transaction is not signed") - - try: - pytx = utx.getSignedPyTx() - raw_tx_bin = pytx.serialize() - raw_tx_hex = binary_to_hex(raw_tx_bin) - except Exception, e: - raise Exception("Could not serialize transaction: %s" % e) - - return raw_tx_hex - -@app.route('/', methods=["POST",]) -@app.route('/api/', methods=["POST",]) -def handle_post(): - request_json = flask.request.get_data().decode('utf-8') - rpc_response = jsonrpc.JSONRPCResponseManager.handle(request_json, dispatcher) - rpc_response_json = json.dumps(rpc_response.data).encode() - response = flask.Response(rpc_response_json, 200, mimetype='application/json') - return response - -def blockchainLoaded(args): - print("**** Initializing Flask (HTTP) server ...") - app.run(host="127.0.0.1", port=ARMORY_UTXSVR_PORT_MAINNET if not is_testnet else 
ARMORY_UTXSVR_PORT_TESTNET, threaded=True) - print("**** Ready to serve ...") - -def newBlock(args): - print('**** NEW BLOCK: Current height is %s' % TheBDM.getTopBlockHeight()) - -def main(): - global is_testnet, bitcoind_url - - print("**** Starting up ...") - parser = argparse.ArgumentParser(description='Armory offline transaction generator daemon') - parser.add_argument('--testnet', action='store_true', help='Run for testnet') - parser.add_argument('bitcoind_url', help='bitcoind RPC endpoint URL, e.g. "http://rpc:rpcpass@localhost:8332"') - parser_args = parser.parse_args() - - btcdir = "/home/xcp/.bitcoin" + ("/testnet3" if parser_args.testnet else '') - is_testnet = parser_args.testnet - bitcoind_url = parser_args.bitcoind_url - - print("**** Initializing armory ...") - #require armory to be installed, adding the configured armory path to PYTHONPATH - TheBDM.btcdir = btcdir - TheBDM.RegisterEventForSignal(blockchainLoaded, FINISH_LOAD_BLOCKCHAIN_ACTION) - TheBDM.RegisterEventForSignal(newBlock, NEW_BLOCK_ACTION) - TheBDM.goOnline() - - try: - while(True): - time.sleep(1) - except KeyboardInterrupt: - print("******** Exiting *********") - exit(0) - -if __name__ == '__main__': - main() diff --git a/counterblock/lib/blockchain.py b/counterblock/lib/blockchain.py index ee8b8015..fe4c523a 100644 --- a/counterblock/lib/blockchain.py +++ b/counterblock/lib/blockchain.py @@ -122,7 +122,7 @@ def gettransaction_batch(txhash_list): 'skip_missing': True}, abort_on_error=True)['result'] txes = {} - for tx_hash, tx in raw_txes.iteritems(): + for tx_hash, tx in raw_txes.items(): if tx is None: txes[tx_hash] = None continue diff --git a/counterblock/lib/blockfeed.py b/counterblock/lib/blockfeed.py index dd7d2a69..6e6b49b0 100644 --- a/counterblock/lib/blockfeed.py +++ b/counterblock/lib/blockfeed.py @@ -9,7 +9,7 @@ import logging import datetime import decimal -import ConfigParser +import configparser import time import itertools import pymongo @@ -87,7 +87,7 @@ def publish_mempool_tx(): # TODO: Better handling of double parsing try: result = function['function'](tx, json.loads(tx['bindings'])) or None - except pymongo.errors.DuplicateKeyError, e: + except pymongo.errors.DuplicateKeyError as e: logging.exception(e) if result == 'ABORT_THIS_MESSAGE_PROCESSING' or result == 'continue': break @@ -114,7 +114,7 @@ def parse_message(msg): # TODO: Better handling of double parsing try: result = function['function'](msg, msg_data) or None - except pymongo.errors.DuplicateKeyError, e: + except pymongo.errors.DuplicateKeyError as e: logging.exception(e) if result in ('ABORT_THIS_MESSAGE_PROCESSING', 'continue', #just abort further MessageProcessors for THIS message @@ -196,13 +196,13 @@ def parse_block(block_data): if iteration % 10 == 0: logger.info("Heartbeat (%s, block: %s, caught up: %s)" % ( iteration, config.state['my_latest_block']['block_index'], fuzzy_is_caught_up())) - logger.info("iteration: ap %s/%s, cp_latest_block_index: %s, my_latest_block: %s" % (autopilot, autopilot_runner, + logger.debug("iteration: ap %s/%s, cp_latest_block_index: %s, my_latest_block: %s" % (autopilot, autopilot_runner, config.state['cp_latest_block_index'], config.state['my_latest_block']['block_index'])) if not autopilot or autopilot_runner == 0: try: cp_running_info = util.jsonrpc_api("get_running_info", abort_on_error=True)['result'] - except Exception, e: + except Exception as e: logger.warn("Cannot contact counterparty-server get_running_info: %s" % e) time.sleep(3) continue @@ -284,7 +284,7 @@ def 
parse_block(block_data): block_data = cache.get_block_info(cur_block_index, prefetch=min(100, (config.state['cp_latest_block_index'] - config.state['my_latest_block']['block_index'])), min_message_index=config.state['last_message_index'] + 1 if config.state['last_message_index'] != -1 else None) - except Exception, e: + except Exception as e: logger.warn(str(e) + " Waiting 3 seconds before trying again...") time.sleep(3) continue diff --git a/counterblock/lib/cache.py b/counterblock/lib/cache.py index 43709e41..98876e24 100644 --- a/counterblock/lib/cache.py +++ b/counterblock/lib/cache.py @@ -29,7 +29,7 @@ def get_block_info(block_index, prefetch=0, min_message_index=None): blockinfo_cache.clear() blocks = util.call_jsonrpc_api('get_blocks', - {'block_indexes': range(block_index, block_index + prefetch), + {'block_indexes': list(range(block_index, block_index + prefetch)), 'min_message_index': min_message_index}, abort_on_error=True)['result'] for block in blocks: @@ -55,7 +55,7 @@ def cached_function(*args, **kwargs): 'result': json.dumps(result) }) return result - except Exception, e: + except Exception as e: logger.exception(e) else: #logger.info("result from cache ({}, {}, {})".format(func.__name__, block_index, function_signature)) diff --git a/counterblock/lib/config.py b/counterblock/lib/config.py index 2b874ec2..e4ef2606 100644 --- a/counterblock/lib/config.py +++ b/counterblock/lib/config.py @@ -68,13 +68,13 @@ def init_data_dir(args): def load(args): import os - import ConfigParser + import configparser import email.utils assert data_dir and config_dir and log_dir #Read config file - configfile = ConfigParser.ConfigParser() + configfile = configparser.ConfigParser() if args.config_file: config_path = args.config_file else: diff --git a/counterblock/lib/database.py b/counterblock/lib/database.py index ecc868c3..c53d17f3 100644 --- a/counterblock/lib/database.py +++ b/counterblock/lib/database.py @@ -82,7 +82,7 @@ def rollback(max_block_index): NOTE: after calling this function, you should always trigger a "continue" statement to reiterate the processing loop (which will get a new cp_latest_block from counterpartyd and resume as appropriate) """ - assert isinstance(max_block_index, (int, long)) and max_block_index >= config.BLOCK_FIRST + assert isinstance(max_block_index, int) and max_block_index >= config.BLOCK_FIRST if not config.mongo_db.processed_blocks.find_one({"block_index": max_block_index}): raise Exception("Can't roll back to specified block index: %i doesn't exist in database" % max_block_index) diff --git a/counterblock/lib/log.py b/counterblock/lib/log.py index 81477886..06c8e8eb 100644 --- a/counterblock/lib/log.py +++ b/counterblock/lib/log.py @@ -35,6 +35,10 @@ def set_up(verbose): socketio_log = logging.getLogger('socketio') socketio_log.setLevel(logging.DEBUG if verbose else logging.WARNING) socketio_log.propagate = False + + #requests/urllib3 logging (make it not so chatty) + logging.getLogger('requests').setLevel(logging.WARNING) + logging.getLogger('urllib3').setLevel(logging.CRITICAL) #Transaction log tx_logger = logging.getLogger("transaction_log") #get transaction logger diff --git a/counterblock/lib/module.py b/counterblock/lib/module.py index 96a39681..0f8a7980 100644 --- a/counterblock/lib/module.py +++ b/counterblock/lib/module.py @@ -42,9 +42,9 @@ def get_mod_params_dict(params): #Read module configuration file module_conf = ConfigObj(os.path.join(config.config_dir, CONFIG_FILENAME % config.net_path_part)) - for key, container in module_conf.items(): + 
for key, container in list(module_conf.items()): if key == 'LoadModule': - for module, user_settings in container.items(): + for module, user_settings in list(container.items()): try: params = get_mod_params_dict(user_settings) if params['enabled'] is True: @@ -58,12 +58,12 @@ def get_mod_params_dict(params): logger.warn("Invalid config header %s in %s" % (key, CONFIG_FILENAME % config.net_path_part)) continue #print(processor_functions) - for func_name, user_settings in container.items(): + for func_name, user_settings in list(container.items()): #print(func_name, user_settings) if func_name in processor_functions: params = get_mod_params_dict(user_settings) #print(func_name, params) - for param_name, param_value in params.items(): + for param_name, param_value in list(params.items()): processor_functions[func_name][param_name] = param_value else: logger.warn("Attempted to configure a non-existent processor %s" % func_name) @@ -73,7 +73,7 @@ def toggle(mod, enabled=True): try: imp.find_module(mod) except: - print("Unable to find module %s" % mod) + print(("Unable to find module %s" % mod)) return mod_config_path = os.path.join(config.config_dir, CONFIG_FILENAME % config.net_path_part) module_conf = ConfigObj(mod_config_path) @@ -87,12 +87,12 @@ def toggle(mod, enabled=True): if not "LoadModule" in module_conf: module_conf['LoadModule'] = {} module_conf['LoadModule'][mod] = enabled module_conf.write() - print("%s Module %s" %("Enabled" if enabled else "Disabled", mod)) + print(("%s Module %s" %("Enabled" if enabled else "Disabled", mod))) def list_all(): mod_config_path = os.path.join(config.config_dir, CONFIG_FILENAME % config.net_path_part) module_conf = ConfigObj(mod_config_path) - for name, modules in module_conf.items(): - print("Configuration for %s" %name) - for module, settings in modules.items(): - print(" %s %s: %s" %(("Module" if name == "LoadModule" else "Function"), module, settings)) + for name, modules in list(module_conf.items()): + print(("Configuration for %s" %name)) + for module, settings in list(modules.items()): + print((" %s %s: %s" %(("Module" if name == "LoadModule" else "Function"), module, settings))) diff --git a/counterblock/lib/modules/assets.py b/counterblock/lib/modules/assets.py index 2a9c95bb..6f81c17f 100644 --- a/counterblock/lib/modules/assets.py +++ b/counterblock/lib/modules/assets.py @@ -9,11 +9,11 @@ import datetime import logging import decimal -import urllib +import urllib.request, urllib.parse, urllib.error import json import base64 import pymongo -import ConfigParser +import configparser import dateutil.parser @@ -348,7 +348,7 @@ def get_asset_history(asset, reverse=False): history = [] raw = asset['_history'] + [asset,] #oldest to newest. 
add on the current state prev = None - for i in xrange(len(raw)): #oldest to newest + for i in range(len(raw)): #oldest to newest if i == 0: assert raw[i]['_change_type'] == 'created' history.append({ diff --git a/counterblock/lib/modules/betting.py b/counterblock/lib/modules/betting.py index 2c4f4b7f..7ed974ac 100644 --- a/counterblock/lib/modules/betting.py +++ b/counterblock/lib/modules/betting.py @@ -9,9 +9,9 @@ import datetime import logging import decimal -import urllib +import urllib.request, urllib.parse, urllib.error import json -import ConfigParser +import configparser import base64 import pymongo @@ -104,7 +104,7 @@ def get_user_bets(addresses = [], status="open"): return { 'bets': bets, - 'feeds': get_feeds_by_source_addresses(sources.keys()) + 'feeds': get_feeds_by_source_addresses(list(sources.keys())) } @API.add_method diff --git a/counterblock/lib/modules/counterwallet.py b/counterblock/lib/modules/counterwallet.py index 5d874cc4..1dfc89da 100644 --- a/counterblock/lib/modules/counterwallet.py +++ b/counterblock/lib/modules/counterwallet.py @@ -9,12 +9,12 @@ import datetime import logging import decimal -import urllib +import urllib.request, urllib.parse, urllib.error import json import pymongo import flask import jsonrpc -import ConfigParser +import configparser import dateutil.parser @@ -31,7 +31,7 @@ module_config = {} def _read_config(): - configfile = ConfigParser.ConfigParser() + configfile = configparser.ConfigParser() config_path = os.path.join(config.config_dir, 'counterwallet%s.conf' % config.net_path_part) logger.info("Loading config at: %s" % config_path) try: @@ -357,7 +357,7 @@ def gen_stats_for_network(network): ts = time.mktime(datetime.datetime(e['_id']['year'], e['_id']['month'], e['_id']['day']).timetuple()) assert ts in new_entries if e['_id']['referer'] is None: continue - referer_key = urllib.quote(e['_id']['referer']).replace('.', '%2E') + referer_key = urllib.parse.quote(e['_id']['referer']).replace('.', '%2E') if 'referers' not in new_entries[ts]: new_entries[ts]['referers'] = {} if e['_id']['referer'] not in new_entries[ts]['referers']: new_entries[ts]['referers'][referer_key] = 0 new_entries[ts]['referers'][referer_key] += 1 @@ -407,8 +407,8 @@ def gen_stats_for_network(network): if new_entries: #insert the rest #logger.info("Stats, new entries: %s" % new_entries.values()) - config.mongo_db.wallet_stats.insert(new_entries.values()) - logger.info("Added wallet statistics for %i full days" % len(new_entries.values())) + config.mongo_db.wallet_stats.insert(list(new_entries.values())) + logger.info("Added wallet statistics for %i full days" % len(list(new_entries.values()))) gen_stats_for_network('mainnet') gen_stats_for_network('testnet') @@ -455,7 +455,7 @@ def init(): module_config['COUNTERWALLET_CONFIG_JSON'] = '{}' try: module_config['COUNTERWALLET_CONFIG'] = json.loads(module_config['COUNTERWALLET_CONFIG_JSON']) - except Exception, e: + except Exception as e: logger.error("Exception loading counterwallet client-side config: %s" % e) #init GEOIP diff --git a/counterblock/lib/modules/counterwallet_iofeeds.py b/counterblock/lib/modules/counterwallet_iofeeds.py index 92e22687..cb0d0031 100644 --- a/counterblock/lib/modules/counterwallet_iofeeds.py +++ b/counterblock/lib/modules/counterwallet_iofeeds.py @@ -13,7 +13,7 @@ import socket import collections import json -import ConfigParser +import configparser import zmq.green as zmq import pymongo @@ -32,7 +32,7 @@ zmq_publisher_eventfeed = None #set on init def _read_config(): - configfile = 
ConfigParser.ConfigParser() + configfile = configparser.ConfigParser() config_path = os.path.join(config.config_dir, 'counterwallet_iofeeds.conf') logger.info("Loading config at: %s" % config_path) try: @@ -104,7 +104,7 @@ def get_chat_handle(wallet_id): @API.add_method def store_chat_handle(wallet_id, handle): """Set or update a chat handle""" - if not isinstance(handle, basestring): + if not isinstance(handle, str): raise Exception("Invalid chat handle: bad data type") if not re.match(r'^[\sA-Za-z0-9_-]{4,12}$', handle): raise Exception("Invalid chat handle: bad syntax/length") @@ -324,7 +324,7 @@ def on_command(self, command, args): config.mongo_db.chat_handles.save(p) #make the change active immediately handle_lower = handle.lower() - for sessid, socket in self.socket.server.sockets.iteritems(): + for sessid, socket in self.socket.server.sockets.items(): if socket.session.get('handle', None).lower() == handle_lower: socket.session['is_op'] = p['is_op'] if self.socket.session['is_primary_server']: #let all users know @@ -348,7 +348,7 @@ def on_command(self, command, args): config.mongo_db.chat_handles.save(p) #make the change active immediately handle_lower = handle.lower() - for sessid, socket in self.socket.server.sockets.iteritems(): + for sessid, socket in self.socket.server.sockets.items(): if socket.session.get('handle', None).lower() == handle_lower: socket.session['banned_until'] = p['banned_until'] if self.socket.session['is_primary_server']: #let all users know @@ -365,7 +365,7 @@ def on_command(self, command, args): config.mongo_db.chat_handles.save(p) #make the change active immediately handle_lower = handle.lower() - for sessid, socket in self.socket.server.sockets.iteritems(): + for sessid, socket in self.socket.server.sockets.items(): if socket.session.get('handle', None).lower() == handle_lower: socket.session['banned_until'] = None if self.socket.session['is_primary_server']: #let all users know @@ -391,7 +391,7 @@ def on_command(self, command, args): config.mongo_db.chat_handles.save(p) #make the change active immediately handle_lower = handle.lower() - for sessid, socket in self.socket.server.sockets.iteritems(): + for sessid, socket in self.socket.server.sockets.items(): if socket.session.get('handle', None).lower() == handle_lower: socket.session['handle'] = new_handle if self.socket.session['is_primary_server']: #let all users know diff --git a/counterblock/lib/modules/dex/__init__.py b/counterblock/lib/modules/dex/__init__.py index 45772be7..637a2af3 100644 --- a/counterblock/lib/modules/dex/__init__.py +++ b/counterblock/lib/modules/dex/__init__.py @@ -11,11 +11,11 @@ import datetime import logging import decimal -import urllib +import urllib.request, urllib.parse, urllib.error import json import operator import base64 -import ConfigParser +import configparser import pymongo from bson.son import SON @@ -26,6 +26,7 @@ from counterblock.lib.processor import MessageProcessor, MempoolMessageProcessor, BlockProcessor, StartUpProcessor, CaughtUpProcessor, RollbackProcessor, API, start_task from . 
import assets_trading, dex +EIGHT_PLACES = Decimal(10) ** -8 COMPILE_MARKET_PAIR_INFO_PERIOD = 10 * 60 #in seconds (this is every 10 minutes currently) COMPILE_ASSET_MARKET_INFO_PERIOD = 30 * 60 #in seconds (this is every 30 minutes currently) @@ -191,7 +192,7 @@ def get_market_price_history(asset1, asset2, start_ts=None, end_ts=None, as_dict midline = [((r['high'] + r['low']) / 2.0) for r in result] if as_dict: - for i in xrange(len(result)): + for i in range(len(result)): result[i]['interval_time'] = int(time.mktime(datetime.datetime( result[i]['_id']['year'], result[i]['_id']['month'], result[i]['_id']['day'], result[i]['_id']['hour']).timetuple()) * 1000) result[i]['midline'] = midline[i] @@ -199,7 +200,7 @@ def get_market_price_history(asset1, asset2, start_ts=None, end_ts=None, as_dict return result else: list_result = [] - for i in xrange(len(result)): + for i in range(len(result)): list_result.append([ int(time.mktime(datetime.datetime( result[i]['_id']['year'], result[i]['_id']['month'], result[i]['_id']['day'], result[i]['_id']['hour']).timetuple()) * 1000), @@ -357,7 +358,7 @@ def make_book(orders, isBidBook): book.setdefault(id, {'unit_price': unit_price, 'quantity': 0, 'count': 0}) book[id]['quantity'] += remaining #base quantity outstanding book[id]['count'] += 1 #num orders at this price level - book = sorted(book.itervalues(), key=operator.itemgetter('unit_price'), reverse=isBidBook) + book = sorted(iter(book.values()), key=operator.itemgetter('unit_price'), reverse=isBidBook) #^ convert to list and sort -- bid book = descending, ask book = ascending return book @@ -560,10 +561,10 @@ def parse_trade_book(msg, msg_data): } trade['unit_price'] = float( ( D(trade['quote_quantity_normalized']) / D(trade['base_quantity_normalized']) ).quantize( - D('.00000000'), rounding=decimal.ROUND_HALF_EVEN)) + EIGHT_PLACES, rounding=decimal.ROUND_HALF_EVEN), context=decimal.Context(prec=20)) trade['unit_price_inverse'] = float( ( D(trade['base_quantity_normalized']) / D(trade['quote_quantity_normalized']) ).quantize( - D('.00000000'), rounding=decimal.ROUND_HALF_EVEN)) + EIGHT_PLACES, rounding=decimal.ROUND_HALF_EVEN), context=decimal.Context(prec=20)) config.mongo_db.trades.insert(trade) logger.info("Procesed Trade from tx %s :: %s" % (msg['message_index'], trade)) diff --git a/counterblock/lib/modules/dex/assets_trading.py b/counterblock/lib/modules/dex/assets_trading.py index 5e9d7401..c9ed7b88 100644 --- a/counterblock/lib/modules/dex/assets_trading.py +++ b/counterblock/lib/modules/dex/assets_trading.py @@ -19,7 +19,7 @@ def get_market_price(price_data, vol_data): assert len(price_data) == len(vol_data) assert len(price_data) <= config.MARKET_PRICE_DERIVE_NUM_POINTS - market_price = util.weighted_average(zip(price_data, vol_data)) + market_price = util.weighted_average(list(zip(price_data, vol_data))) return market_price def get_market_price_summary(asset1, asset2, with_last_trades=0, start_dt=None, end_dt=None): @@ -55,8 +55,8 @@ def get_market_price_summary(asset1, asset2, with_last_trades=0, start_dt=None, last_trades.reverse() #from newest to oldest market_price = get_market_price( - [last_trades[i]['unit_price'] for i in xrange(min(len(last_trades), config.MARKET_PRICE_DERIVE_NUM_POINTS))], - [(last_trades[i]['base_quantity_normalized'] + last_trades[i]['quote_quantity_normalized']) for i in xrange(min(len(last_trades), config.MARKET_PRICE_DERIVE_NUM_POINTS))]) + [last_trades[i]['unit_price'] for i in range(min(len(last_trades), config.MARKET_PRICE_DERIVE_NUM_POINTS))], + 
[(last_trades[i]['base_quantity_normalized'] + last_trades[i]['quote_quantity_normalized']) for i in range(min(len(last_trades), config.MARKET_PRICE_DERIVE_NUM_POINTS))]) result = { 'market_price': float(D(market_price)), 'base_asset': base_asset, @@ -156,7 +156,7 @@ def get_xcp_btc_price_info(asset, mps_xcp_btc, xcp_btc_price, btc_xcp_price, wit price_summary_in_btc['market_price'] = calc_inverse(price_summary_in_btc['market_price']) price_summary_in_btc['base_asset'] = config.BTC price_summary_in_btc['quote_asset'] = config.XCP - for i in xrange(len(price_summary_in_btc['last_trades'])): + for i in range(len(price_summary_in_btc['last_trades'])): #[0]=block_time, [1]=unit_price, [2]=base_quantity_normalized, [3]=quote_quantity_normalized, [4]=block_index price_summary_in_btc['last_trades'][i][1] = calc_inverse(price_summary_in_btc['last_trades'][i][1]) price_summary_in_btc['last_trades'][i][2], price_summary_in_btc['last_trades'][i][3] = \ @@ -368,7 +368,7 @@ def compile_7d_market_info(asset): ]) _7d_history_in_xcp = list(_7d_history) _7d_history_in_btc = copy.deepcopy(_7d_history_in_xcp) - for i in xrange(len(_7d_history_in_btc)): + for i in range(len(_7d_history_in_btc)): _7d_history_in_btc[i]['price'] = calc_inverse(_7d_history_in_btc[i]['price']) _7d_history_in_btc[i]['vol'] = calc_inverse(_7d_history_in_btc[i]['vol']) @@ -458,7 +458,7 @@ def get_price(base_quantity_normalized, quote_quantity_normalized): #compose price data, relative to BTC and XCP mps_xcp_btc, xcp_btc_price, btc_xcp_price = get_price_primatives() - for pair, e in pair_data.iteritems(): + for pair, e in pair_data.items(): base_asset, quote_asset = pair.split('/') _24h_vol_in_btc = None _24h_vol_in_xcp = None @@ -510,7 +510,7 @@ def get_price(base_quantity_normalized, quote_quantity_normalized): #remove any old pairs that were not just updated config.mongo_db.asset_pair_market_info.remove({'last_updated': {'$lt': end_dt}}) - logger.info("Recomposed 24h trade statistics for %i asset pairs: %s" % (len(pair_data), ', '.join(pair_data.keys()))) + logger.info("Recomposed 24h trade statistics for %i asset pairs: %s" % (len(pair_data), ', '.join(list(pair_data.keys())))) def compile_asset_market_info(): """Run through all assets and compose and store market ranking information.""" diff --git a/counterblock/lib/modules/dex/dex.py b/counterblock/lib/modules/dex/dex.py index 74ccab16..d75bd74b 100644 --- a/counterblock/lib/modules/dex/dex.py +++ b/counterblock/lib/modules/dex/dex.py @@ -28,7 +28,7 @@ def calculate_price(base_quantity, quote_quantity, base_divisibility, quote_divi decimal.setcontext(decimal.Context(prec=8, rounding=decimal.ROUND_HALF_EVEN)) return price - except Exception, e: + except Exception as e: logging.exception(e) decimal.setcontext(decimal.Context(prec=8, rounding=decimal.ROUND_HALF_EVEN)) raise(e) @@ -253,7 +253,7 @@ def get_market_orders(asset1, asset2, addresses=[], supplies=None, min_fee_provi try: fee_provided = order['fee_provided'] / (order['give_quantity'] / 100) market_order['fee_provided'] = format(D(order['fee_provided']) / (D(order['give_quantity']) / D(100)), '.2f') - except Exception, e: + except Exception as e: fee_provided = min_fee_provided - 1 # exclude exclude = fee_provided < min_fee_provided @@ -262,7 +262,7 @@ def get_market_orders(asset1, asset2, addresses=[], supplies=None, min_fee_provi try: fee_required = order['fee_required'] / (order['get_quantity'] / 100) market_order['fee_required'] = format(D(order['fee_required']) / (D(order['get_quantity']) / D(100)), '.2f') - except 
Exception, e: + except Exception as e: fee_required = max_fee_required + 1 # exclude exclude = fee_required > max_fee_required diff --git a/counterblock/lib/modules/transaction_stats.py b/counterblock/lib/modules/transaction_stats.py index 2d21fd63..d2ba493e 100644 --- a/counterblock/lib/modules/transaction_stats.py +++ b/counterblock/lib/modules/transaction_stats.py @@ -10,7 +10,7 @@ import datetime import logging import json -import ConfigParser +import configparser import pymongo from bson.son import SON @@ -68,7 +68,7 @@ def get_transaction_stats(start_ts=None, end_ts=None): time_val = int(time.mktime(datetime.datetime(e['_id']['year'], e['_id']['month'], e['_id']['day']).timetuple()) * 1000) times.setdefault(time_val, True) categories[e['_id']['category']][time_val] = e['count'] - times_list = times.keys() + times_list = list(times.keys()) times_list.sort() #fill in each array with all found timestamps for e in categories: @@ -78,7 +78,7 @@ def get_transaction_stats(start_ts=None, end_ts=None): categories[e] = a #replace with array data #take out to final data structure categories_list = [] - for k, v in categories.iteritems(): + for k, v in categories.items(): categories_list.append({'name': k, 'data': v}) return categories_list diff --git a/counterblock/lib/processor/__init__.py b/counterblock/lib/processor/__init__.py index 8968b0fb..31865c6a 100644 --- a/counterblock/lib/processor/__init__.py +++ b/counterblock/lib/processor/__init__.py @@ -24,7 +24,7 @@ def wrapped_f(*args, **kwargs): exceptions = tuple(self._error_handlers.keys()) try: return func(*args, **kwargs) - except exceptions, exception: + except exceptions as exception: for type in self._error_handlers: if isinstance(exception, type): handler, greenlet = self._error_handlers[type] @@ -125,8 +125,8 @@ def build_method_map(self, prototype): for method in dir(prototype) if not method.startswith('_')) - for attr, method in prototype.items(): - if callable(method): + for attr, method in list(prototype.items()): + if isinstance(method, collections.Callable): self[attr] = method @@ -156,7 +156,7 @@ def __repr__(self): def active_functions(self): if not self.active_functions_data: - self.active_functions_data = sorted((func for func in self.values() if func['enabled']), key=lambda x: x['priority'], reverse=True) + self.active_functions_data = sorted((func for func in list(self.values()) if func['enabled']), key=lambda x: x['priority'], reverse=True) return self.active_functions_data def run_active_functions(self, *args, **kwargs): diff --git a/counterblock/lib/processor/api.py b/counterblock/lib/processor/api.py index d6768128..be283ce9 100644 --- a/counterblock/lib/processor/api.py +++ b/counterblock/lib/processor/api.py @@ -8,13 +8,12 @@ import operator import logging import copy -import urllib +import urllib.request, urllib.parse, urllib.error import functools from logging import handlers as logging_handlers from gevent import wsgi -from geventhttpclient import HTTPClient -from geventhttpclient.url import URL +import grequests import flask import jsonrpc import pymongo @@ -103,10 +102,10 @@ def get_chain_txns_status(txn_hashes): def get_last_n_messages(count=100): if count > 1000: raise Exception("The count is too damn high") - message_indexes = range(max(config.state['last_message_index'] - count, 0) + 1, config.state['last_message_index'] + 1) + message_indexes = list(range(max(config.state['last_message_index'] - count, 0) + 1, config.state['last_message_index'] + 1)) msgs = util.call_jsonrpc_api("get_messages_by_index", 
{ 'message_indexes': message_indexes }, abort_on_error=True)['result'] - for i in xrange(len(msgs)): + for i in range(len(msgs)): msgs[i] = messages.decorate_message_for_feed(msgs[i]) return msgs @@ -303,7 +302,7 @@ def get_address_history(address, start_block=None, end_block=None): txns = [] d = get_address_history(address, start_block=start_block_index, end_block=end_block_index) #mash it all together - for category, entries in d.iteritems(): + for category, entries in d.items(): if category in ['balances',]: continue for e in entries: @@ -331,7 +330,7 @@ def proxy_to_counterpartyd(method='', params=[]): if result: try: result = json.loads(result) - except Exception, e: + except Exception as e: logging.warn("Error loading JSON from cache: %s, cached data: '%s'" % (e, result)) result = None #skip from reading from cache and just make the API call @@ -373,7 +372,7 @@ def handle_get(): data_json = flask.request.get_data().decode('utf-8') data = json.loads(data_json) assert 'csp-report' in data - except Exception, e: + except Exception as e: obj_error = jsonrpc.exceptions.JSONRPCInvalidRequest(data="Invalid JSON-RPC 2.0 request format") return flask.Response(obj_error.json.encode(), 200, mimetype='application/json') @@ -400,22 +399,22 @@ def handle_get(): "params": [], } try: - url = URL("http://127.0.0.1:%s/api/" % config.RPC_PORT) - client = HTTPClient.from_url(url) - r = client.post(url.request_uri, body=json.dumps(payload), headers={'content-type': 'application/json'}) - except Exception, e: + url = "http://127.0.0.1:%s/api/" % config.RPC_PORT + r = grequests.map((grequests.post(url, data=json.dumps(payload), + headers={'content-type': 'application/json', 'Connection': 'close'}),))[0] + if r is None: + raise Exception("result is None") + except Exception as e: cb_result_valid = False cb_result_error_code = "GOT EXCEPTION: %s" % e else: if r.status_code != 200: cb_result_valid = False cb_result_error_code = "GOT STATUS %s" % r.status_code if r else 'COULD NOT CONTACT' - cb_result = json.loads(r.read()) - if 'error' in r: + cb_result = r.json() + if 'error' in cb_result: cb_result_valid = False - cb_result_error_code = "GOT ERROR: %s" % r['error'] - finally: - client.close() + cb_result_error_code = "GOT ERROR: %s" % cb_result['error'] cb_e = time.time() result = { @@ -496,7 +495,7 @@ def handle_post(): try: assert 'method' in request_data tx_logger.info("TRANSACTION --- %s ||| REQUEST: %s ||| RESPONSE: %s" % (request_data['method'], request_json, rpc_response_json)) - except Exception, e: + except Exception as e: logger.info("Could not log transaction: Invalid format: %s" % e) response = flask.Response(rpc_response_json, 200, mimetype='application/json') diff --git a/counterblock/lib/tests/tests.py b/counterblock/lib/tests/tests.py index 5880868e..f3a86009 100644 --- a/counterblock/lib/tests/tests.py +++ b/counterblock/lib/tests/tests.py @@ -13,7 +13,7 @@ import appdirs default_dbhash_file = None -standard_collections = [u'asset_pair_market_info', u'tracked_assets', u'balance_changes', u'transaction_stats', u'trades', u'processed_blocks'] +standard_collections = ['asset_pair_market_info', 'tracked_assets', 'balance_changes', 'transaction_stats', 'trades', 'processed_blocks'] early_exit_block = 313000 logger = logging.getLogger(__name__) @@ -88,44 +88,44 @@ def compare_md5_database_hashes(): db_info = get_db_info_from_file() while len(db_info) > 1: head_label, db_hash = db_info.popitem() - for other_label, other_hash in db_info.items(): - print("Comparing DB hashes for Git Heads: %s And 
%s" %(head_label, other_label)) - for col, col_hash in db_hash.items(): + for other_label, other_hash in list(db_info.items()): + print(("Comparing DB hashes for Git Heads: %s And %s" %(head_label, other_label))) + for col, col_hash in list(db_hash.items()): if not other_hash.get(col): - print("Collection does not exist %s in %s skipping..." %(col, other_label)) + print(("Collection does not exist %s in %s skipping..." %(col, other_label))) continue try: assert(col_hash == other_hash[col]) msg = "OK..." except: msg = "Failed..." - print("Comparing Collections %s, %s == %s %s" %(col, col_hash, other_hash[col], msg)) + print(("Comparing Collections %s, %s == %s %s" %(col, col_hash, other_hash[col], msg))) def compare_default_database_hashes(): db_info = get_db_info_from_file() while len(db_info) > 1: head_label, db_hash = db_info.popitem() - for other_label, other_hash in db_info.items(): - print("Comparing DB hashes for Git Heads: %s And %s" %(head_label, other_label)) - for i, j in db_hash.items(): + for other_label, other_hash in list(db_info.items()): + print(("Comparing DB hashes for Git Heads: %s And %s" %(head_label, other_label))) + for i, j in list(db_hash.items()): if i == 'collections': - for col, col_hash in j.items(): + for col, col_hash in list(j.items()): if not other_hash[i].get(col): - print("Collection does not exist %s in %s skipping..." %(col, other_label)) + print(("Collection does not exist %s in %s skipping..." %(col, other_label))) continue try: assert(col_hash == other_hash[i][col]) msg = "OK..." except: msg = "Failed..." - print("Comparing Collections %s, %s == %s %s" %(col, col_hash, other_hash[i][col], msg)) + print(("Comparing Collections %s, %s == %s %s" %(col, col_hash, other_hash[i][col], msg))) else: try: assert(j == other_hash[i]) msg = "OK..." except: msg = "Failed..." 
- print("Comparing %s, %s == %s %s" %(i, j, other_hash[i], msg)) + print(("Comparing %s, %s == %s %s" %(i, j, other_hash[i], msg))) if __name__ == '__main__': compare_md5_database_hashes() diff --git a/counterblock/lib/util.py b/counterblock/lib/util.py index 356317ea..15eb023b 100644 --- a/counterblock/lib/util.py +++ b/counterblock/lib/util.py @@ -8,7 +8,7 @@ import decimal import cgi import itertools -import StringIO +import io import subprocess import calendar import hashlib @@ -18,9 +18,8 @@ import gevent import gevent.pool import gevent.ssl +import grequests import pymongo -from geventhttpclient import HTTPClient -from geventhttpclient.url import URL import lxml.html from PIL import Image from jsonschema import FormatChecker, Draft4Validator, FormatError @@ -40,7 +39,7 @@ def sanitize_eliteness(text): def http_basic_auth_str(username, password): """Returns a Basic Auth string.""" - authstr = 'Basic ' + str(base64.b64encode(('%s:%s' % (username, password)).encode('latin1')).strip()) + authstr = 'Basic ' + base64.b64encode(('%s:%s' % (username, password)).encode('latin1')).decode("utf-8").strip() return authstr def is_valid_url(url, suffix='', allow_localhost=False, allow_no_protocol=False): @@ -81,21 +80,20 @@ def jsonrpc_api(method, params=None, endpoint=None, auth=None, abort_on_error=Fa if 'result' not in result: raise AssertionError("Could not contact counterpartyd") return result - except Exception, e: + except Exception as e: retry += 1 logger.warn(str(e) + " -- Waiting {} seconds before trying again...".format(retry_interval)) time.sleep(retry_interval) continue def call_jsonrpc_api(method, params=None, endpoint=None, auth=None, abort_on_error=False): - socket.setdefaulttimeout(JSONRPC_API_REQUEST_TIMEOUT) if not endpoint: endpoint = config.COUNTERPARTY_RPC if not auth: auth = config.COUNTERPARTY_AUTH if not params: params = {} - + payload = { "id": 0, "jsonrpc": "2.0", @@ -106,28 +104,26 @@ def call_jsonrpc_api(method, params=None, endpoint=None, auth=None, abort_on_err headers = { 'Content-Type': 'application/json', - 'Connection':'close', #no keepalive + 'Connection': 'close', # no keepalive } - if auth: #auth should be a (username, password) tuple, if specified + if auth: # auth should be a (username, password) tuple, if specified headers['Authorization'] = http_basic_auth_str(auth[0], auth[1]) + try: - u = URL(endpoint) - client = HTTPClient.from_url(u, connection_timeout=JSONRPC_API_REQUEST_TIMEOUT, - network_timeout=JSONRPC_API_REQUEST_TIMEOUT) - r = client.post(u.request_uri, body=json.dumps(payload), headers=headers) - except Exception, e: + r = grequests.map((grequests.post(endpoint, data=json.dumps(payload), timeout=JSONRPC_API_REQUEST_TIMEOUT, headers=headers),))[0] + if r is None: + raise Exception("result is None") + except Exception as e: raise Exception("Got call_jsonrpc_api request error: %s" % e) else: if r.status_code != 200: if abort_on_error: - raise Exception("Bad status code returned: '%s'. result body: '%s'." % (r.status_code, r.read())) + raise Exception("Bad status code returned: '%s'. result body: '%s'." % (r.status_code, r.text)) else: - logging.warning("Bad status code returned: '%s'. result body: '%s'." % (r.status_code, r.read())) + logging.warning("Bad status code returned: '%s'. result body: '%s'." 
% (r.status_code, r.text)) result = None else: - result = json.loads(r.read()) - finally: - client.close() + result = r.json() if abort_on_error and 'error' in result and result['error'] is not None: raise Exception("Got back error from server: %s" % result['error']) @@ -144,32 +140,26 @@ def get_url(url, abort_on_error=False, is_json=True, fetch_timeout=5, auth=None, headers['Authorization'] = http_basic_auth_str(auth[0], auth[1]) try: - u = URL(url) - client_kwargs = {'connection_timeout': fetch_timeout, 'network_timeout': fetch_timeout, 'insecure': True} - if u.scheme == "https": client_kwargs['ssl_options'] = {'cert_reqs': gevent.ssl.CERT_NONE} - client = HTTPClient.from_url(u, **client_kwargs) if post_data is not None: if is_json: headers['content-type'] = 'application/json' - r = client.post(u.request_uri, body=post_data, headers=headers) + r = grequests.map((grequests.post(url, data=post_data, timeout=fetch_timeout, headers=headers, verify=False),))[0] else: - r = client.get(u.request_uri, headers=headers) - except Exception, e: + r = grequests.map((grequests.get(url, timeout=fetch_timeout, headers=headers, verify=False),))[0] + if r is None: + raise Exception("result is None") + except Exception as e: raise Exception("Got get_url request error: %s" % e) else: if r.status_code != 200 and abort_on_error: - raise Exception("Bad status code returned: '%s'. result body: '%s'." % (r.status_code, r.read())) - result = r.read() - result = json.loads(result) if result and is_json else result - finally: - client.close() - return result + raise Exception("Bad status code returned: '%s'. result body: '%s'." % (r.status_code, r.text)) + return r.json() if r.text and is_json else r.text def grouper(n, iterable, fillmissing=False, fillvalue=None): #Modified from http://stackoverflow.com/a/1625013 "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx" args = [iter(iterable)] * n - data = itertools.izip_longest(*args, fillvalue=fillvalue) + data = itertools.zip_longest(*args, fillvalue=fillvalue) if not fillmissing: data = [[e for e in g if e != fillvalue] for g in data] return data @@ -189,7 +179,7 @@ def comparer(left, right): def cumsum(iterable): values = list(iterable) - for pos in xrange(1, len(values)): + for pos in range(1, len(values)): values[pos] += values[pos - 1] return values @@ -215,7 +205,7 @@ def json_dthandler(obj): #give datetime objects to javascript as epoch ts in ms (i.e. 
* 1000) return int(time.mktime(obj.timetuple())) * 1000 else: - raise TypeError, 'Object of type %s with value of %s is not JSON serializable' % (type(obj), repr(obj)) + raise TypeError('Object of type %s with value of %s is not JSON serializable' % (type(obj), repr(obj))) def stream_fetch(urls, completed_callback, urls_group_size=50, urls_group_time_spacing=0, max_fetch_size=4*1024, fetch_timeout=1, is_json=True, per_request_complete_callback=None): @@ -223,12 +213,10 @@ def stream_fetch(urls, completed_callback, urls_group_size=50, urls_group_time_s def make_stream_request(url): try: - u = URL(url) - client_kwargs = {'connection_timeout': fetch_timeout, 'network_timeout': fetch_timeout, 'insecure': True} - if u.scheme == "https": client_kwargs['ssl_options'] = {'cert_reqs': gevent.ssl.CERT_NONE} - client = HTTPClient.from_url(u, **client_kwargs) - r = client.get(u.request_uri, headers={'Connection':'close'}) - except Exception, e: + r = grequests.map((grequests.get(url, timeout=fetch_timeout, headers={'Connection':'close'}, verify=False, stream=True),))[0] + if r is None: + raise Exception("result is None") + except Exception as e: data = (False, "Got exception: %s" % e) else: if r.status_code != 200: @@ -236,20 +224,21 @@ def make_stream_request(url): else: try: #read up to max_fetch_size - raw_data = r.read(max_fetch_size) + raw_data = r.iter_content(chunk_size=max_fetch_size) if is_json: #try to convert to JSON try: data = json.loads(raw_data) - except Exception, e: + except Exception as e: data = (False, "Invalid JSON data: %s" % e) else: data = (True, data) else: #keep raw data = (True, raw_data) - except Exception, e: + except Exception as e: data = (False, "Request error: %s" % e) finally: - client.close() + if r: + r.close() if per_request_complete_callback: per_request_complete_callback(url, data) @@ -277,7 +266,7 @@ def process_group(group): urls = list(set(urls)) #remove duplicates (so we only fetch any given URL, once) groups = grouper(urls_group_size, urls) - for i in xrange(len(groups)): + for i in range(len(groups)): #logger.debug("Stream fetching group %i of %i..." 
% (i, len(groups))) group = groups[i] if urls_group_time_spacing and i != 0: @@ -296,24 +285,24 @@ def make_data_dir(subfolder): try: #fetch the image data try: - u = URL(url) - client_kwargs = {'connection_timeout': fetch_timeout, 'network_timeout': fetch_timeout, 'insecure': True} - if u.scheme == "https": client_kwargs['ssl_options'] = {'cert_reqs': gevent.ssl.CERT_NONE} - client = HTTPClient.from_url(u, **client_kwargs) - r = client.get(u.request_uri, headers={'Connection':'close'}) - raw_image_data = r.read(max_size) #read up to max_size - except Exception, e: + r = grequests.map((grequests.get(url, timeout=fetch_timeout, headers={'Connection':'close'}, verify=False, stream=True),))[0] + if r is None: + raise Exception("result is None") + + raw_image_data = r.iter_content(chunk_size=max_size) #read up to max_size + except Exception as e: raise Exception("Got fetch_image request error: %s" % e) else: if r.status_code != 200: raise Exception("Bad status code returned from fetch_image: '%s'" % (r.status_code)) finally: - client.close() + if r: + r.close() #decode image data try: - image = Image.open(StringIO.StringIO(raw_image_data)) - except Exception, e: + image = Image.open(io.StringIO(raw_image_data)) + except Exception as e: raise Exception("Unable to parse image data at: %s" % url) if image.format.lower() not in formats: raise Exception("Image is not a PNG: %s (got %s)" % (url, image.format)) if image.size != dimensions: raise Exception("Image size is not 48x48: %s (got %s)" % (url, image.size)) @@ -323,14 +312,14 @@ def make_data_dir(subfolder): image.save(imagePath) os.system("exiftool -q -overwrite_original -all= %s" % imagePath) #strip all metadata, just in case return True - except Exception, e: + except Exception as e: logger.warn(e) return False def date_param(strDate): try: return calendar.timegm(dateutil.parser.parse(strDate).utctimetuple()) - except Exception, e: + except Exception as e: return False def parse_iso8601_interval(value): @@ -355,20 +344,20 @@ def is_valid_json(data, schema): def next_interval_date(interval): try: generator = parse_iso8601_interval(interval) - except Exception, e: + except Exception as e: return None def ts(dt): return time.mktime(dt.timetuple()) previous = None - next = generator.next() + next = next(generator) now = datetime.datetime.now() while ts(next) < ts(now) and next != previous: try: previous = next - next = generator.next() - except Exception, e: + next = next(generator) + except Exception as e: break if ts(next) < ts(now): @@ -379,4 +368,4 @@ def ts(dt): def subprocess_cmd(command): process = subprocess.Popen(command,stdout=subprocess.PIPE, shell=True) proc_stdout = process.communicate()[0].strip() - print proc_stdout + print(proc_stdout) diff --git a/counterblock/server.py b/counterblock/server.py index 053a451b..b83e139f 100755 --- a/counterblock/server.py +++ b/counterblock/server.py @@ -1,11 +1,16 @@ -#! /usr/bin/env python +#! /usr/bin/env python3 """ counterblockd server """ #import before importing other modules import gevent -from gevent import monkey; monkey.patch_all() +from gevent import monkey +# now, import grequests as it will perform monkey_patch_all() +# letting grequests do it avoids us double monkey patching... (ugh...) +import grequests # this will monkey patch +if not monkey.is_module_patched("os"): # if this fails, it's because gevent stopped monkey patching for us + monkey.patch_all() import sys import os @@ -87,7 +92,7 @@ def main(): config.init(args) log.set_up(args.verbose) - #Log unhandled errors. 
+ #log unhandled errors. def handle_exception(exc_type, exc_value, exc_traceback): logger.error("Unhandled Exception", exc_info=(exc_type, exc_value, exc_traceback)) sys.excepthook = handle_exception diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..4ae0bd1d --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +--index-url https://pypi.python.org/simple/ + +hg+https://bitbucket.org/noppo/gevent-websocket/@python3-support#egg=gevent-websocket +git+https://github.com/Rhizi/gevent-socketio.git@python3-compat#egg=gevent-socketio +-e . diff --git a/setup.py b/setup.py index bfbd6ea5..d1fe122b 100755 --- a/setup.py +++ b/setup.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 from setuptools import setup, find_packages import os @@ -8,40 +8,31 @@ from counterblock.lib import config required_packages = [ - 'appdirs>=1.4.0', - 'prettytable>=0.7.2', - 'python-dateutil>=2.4.0', - 'flask>=0.10.1', - 'json-rpc>=1.10.2', - 'pytest>=2.6.3', - 'pycoin>=0.52', - 'python-bitcoinlib>=0.2.1', - 'pymongo>=3.1', - 'gevent<=1.0.2,<1.1.0', - 'gevent-websocket>=0.9.5', - 'gevent-socketio>=0.3.6', - 'geventhttpclient<=1.2.0', - 'redis>=2.10.3', - 'grequests<=0.2.1,<0.3.0', - 'pyzmq>=14.4.1', - 'pillow>=2.6.1', - 'lxml>=3.4.1', - 'jsonschema>=2.4.0', - 'strict_rfc3339>=0.5', - 'rfc3987>=1.3.4', - 'aniso8601>=0.82', - 'pygeoip>=0.3.2', - 'colorama>=0.3.2', - 'configobj>=5.0.6', - 'repoze.lru>=0.6' -] - -required_repos = [ - #'https://github.com/robby-dermody/gevent/archive/e3cd7272178af7da0b85583cd2820f2520a8ba54.zip#egg=gevent-1.2', - 'https://github.com/gevent/gevent/archive/52924e3f68fff76c249dac7d867bae262e1196cc.zip#egg=gevent-1.2', - 'https://github.com/gwik/geventhttpclient/archive/83ded6980a2e37025acbe5a93a52ceedd8f9338a.zip#egg=geventhttpclient-1.2.0', - 'https://github.com/natecode/grequests/archive/ea00e193074fc11d71b4ff74138251f6055ca364.zip#egg=grequests-0.2.1', - #^ grequests (waiting until the next point release with natecode's pull request factored in) + 'appdirs', + 'prettytable', + 'python-dateutil', + 'flask', + 'json-rpc', + 'pytest', + 'pycoin', + 'python-bitcoinlib', + 'pymongo', + 'gevent', + 'gevent-websocket', + 'gevent-socketio', + 'grequests', + 'redis', + 'pyzmq', + 'pillow', + 'lxml', + 'jsonschema', + 'strict_rfc3339', + 'rfc3987', + 'aniso8601', + 'pygeoip', + 'colorama', + 'configobj', + 'repoze.lru' ] setup_options = { @@ -63,14 +54,12 @@ 'provides': ['counterblock'], 'packages': find_packages(), 'zip_safe': False, - 'dependency_links': required_repos, - 'setup_requires': ['appdirs==1.4.0','cython>=0.23.4'], + 'setup_requires': ['appdirs',], 'install_requires': required_packages, 'include_package_data': True, 'entry_points': { 'console_scripts': [ 'counterblock = counterblock:server_main', - 'armory_utxsvr = counterblock:armory_utxsvr_main', ] }, 'package_data': { From e0d0e2b65ce7ca1d92471a444113d14330de88b3 Mon Sep 17 00:00:00 2001 From: robby-dermody Date: Tue, 17 May 2016 18:17:44 +0000 Subject: [PATCH 03/36] pep8 --- counterblock/__init__.py | 4 +- counterblock/lib/blockchain.py | 63 ++- counterblock/lib/blockfeed.py | 188 +++---- counterblock/lib/cache.py | 29 +- counterblock/lib/config.py | 76 +-- counterblock/lib/database.py | 62 +-- counterblock/lib/log.py | 29 +- counterblock/lib/messages.py | 52 +- counterblock/lib/module.py | 68 +-- counterblock/lib/modules/__init__.py | 10 +- counterblock/lib/modules/assets.py | 277 +++++----- counterblock/lib/modules/betting.py | 141 +++--- 
counterblock/lib/modules/counterwallet.py | 224 +++++---- .../lib/modules/counterwallet_iofeeds.py | 318 ++++++------ counterblock/lib/modules/dex/__init__.py | 373 +++++++------- .../lib/modules/dex/assets_trading.py | 425 ++++++++-------- counterblock/lib/modules/dex/dex.py | 84 ++-- counterblock/lib/modules/transaction_stats.py | 44 +- counterblock/lib/processor/__init__.py | 62 +-- counterblock/lib/processor/api.py | 475 +++++++++--------- counterblock/lib/processor/caughtup.py | 2 +- counterblock/lib/processor/messages.py | 16 +- counterblock/lib/processor/startup.py | 18 +- counterblock/lib/tests/tests.py | 121 ++--- counterblock/lib/util.py | 128 +++-- counterblock/server.py | 44 +- setup.py | 4 +- 27 files changed, 1821 insertions(+), 1516 deletions(-) diff --git a/counterblock/__init__.py b/counterblock/__init__.py index 9ff757f5..762badda 100644 --- a/counterblock/__init__.py +++ b/counterblock/__init__.py @@ -1,4 +1,6 @@ -import os, sys +import os +import sys + def server_main(): from counterblock import server diff --git a/counterblock/lib/blockchain.py b/counterblock/lib/blockchain.py index fe4c523a..262a03c6 100644 --- a/counterblock/lib/blockchain.py +++ b/counterblock/lib/blockchain.py @@ -16,27 +16,33 @@ decimal.getcontext().prec = 8 logger = logging.getLogger(__name__) + def round_out(num): """round out to 8 decimal places""" - return float(D(num)) + return float(D(num)) + def normalize_quantity(quantity, divisible=True): """Goes from satoshis to normal human readable format""" if divisible: - return float((D(quantity) / D(config.UNIT))) - else: return quantity + return float((D(quantity) / D(config.UNIT))) + else: + return quantity + def denormalize_quantity(quantity, divisible=True): """Goes from normal human readable format to satoshis""" if divisible: return int(quantity * config.UNIT) - else: return quantity + else: + return quantity + def get_btc_supply(normalize=False, at_block_index=None): """returns the total supply of BTC (based on what bitcoind says the current block height is)""" block_count = config.state['my_latest_block']['block_index'] if at_block_index is None else at_block_index blocks_remaining = block_count - total_supply = 0 + total_supply = 0 reward = 50.0 while blocks_remaining > 0: if blocks_remaining >= 210000: @@ -46,9 +52,10 @@ def get_btc_supply(normalize=False, at_block_index=None): else: total_supply += (blocks_remaining * reward) blocks_remaining = 0 - + return total_supply if normalize else int(total_supply * config.UNIT) + def pubkey_to_address(pubkey_hex): sec = binascii.unhexlify(pubkey_hex) compressed = encoding.is_sec_compressed(sec) @@ -56,22 +63,27 @@ def pubkey_to_address(pubkey_hex): address_prefix = b'\x6f' if config.TESTNET else b'\x00' return encoding.public_pair_to_bitcoin_address(public_pair, compressed=compressed, address_prefix=address_prefix) + def bitcoind_rpc(command, params): - return util.call_jsonrpc_api(command, - params = params, - endpoint = config.BACKEND_URL_NOAUTH, - auth = config.BACKEND_AUTH, - abort_on_error = True)['result'] - + return util.call_jsonrpc_api( + command, + params=params, + endpoint=config.BACKEND_URL_NOAUTH, + auth=config.BACKEND_AUTH, + abort_on_error=True)['result'] + + def is_multisig(address): array = address.split('_') return (len(array) > 1) + def get_btc_balance(address, confirmed=True): all_unspent, confirmed_unspent = get_unspent_txouts(address, return_confirmed=True) unspent = confirmed_unspent if confirmed else all_unspent return sum(out['amount'] for out in unspent) + def 
listunspent(address): outputs = get_unspent_txouts(address) utxo = [] @@ -89,11 +101,12 @@ def listunspent(address): utxo.append(newtxo) return utxo + def getaddressinfo(address): all_unspent, confirmed_unspent = get_unspent_txouts(address, return_confirmed=True) balance = sum(out['amount'] for out in confirmed_unspent) unconfirmed_balance = sum(out['amount'] for out in all_unspent) - balance - + if is_multisig(address): array = address.split('_') # TODO: filter transactions @@ -115,18 +128,19 @@ def getaddressinfo(address): 'transactions': transactions } + def gettransaction_batch(txhash_list): raw_txes = util.call_jsonrpc_api("getrawtransaction_batch", { 'txhash_list': txhash_list, 'verbose': True, 'skip_missing': True}, abort_on_error=True)['result'] txes = {} - + for tx_hash, tx in raw_txes.items(): if tx is None: txes[tx_hash] = None continue - + valueOut = 0 for vout in tx['vout']: valueOut += vout['value'] @@ -143,14 +157,16 @@ def gettransaction_batch(txhash_list): 'vout': tx['vout'] } return txes - + + def gettransaction(tx_hash): - return gettransaction_batch([tx_hash,])[tx_hash] + return gettransaction_batch([tx_hash, ])[tx_hash] + def get_pubkey_from_transactions(address, raw_transactions): - #for each transaction we got back, extract the vin, pubkey, go through, convert it to binary, and see if it reduces down to the given address + # for each transaction we got back, extract the vin, pubkey, go through, convert it to binary, and see if it reduces down to the given address for tx in raw_transactions: - #parse the pubkey out of the first sent transaction + # parse the pubkey out of the first sent transaction for vin in tx['vin']: scriptsig = vin['scriptSig'] asm = scriptsig['asm'].split(' ') @@ -162,25 +178,29 @@ def get_pubkey_from_transactions(address, raw_transactions): pass return None + def get_pubkey_for_address(address): if is_multisig(address): array = address.split('_') addresses = array[1:-1] else: addresses = [address] - + pubkeys = [] for address in addresses: raw_transactions = search_raw_transactions(address) pubkey = get_pubkey_from_transactions(address, raw_transactions) - if pubkey: pubkeys.append(pubkey) + if pubkey: + pubkeys.append(pubkey) return pubkeys + def search_raw_transactions(address, unconfirmed=True): return util.call_jsonrpc_api("search_raw_transactions", {'address': address, 'unconfirmed': unconfirmed}, abort_on_error=True)['result'] + def get_unspent_txouts(source, return_confirmed=False): """returns a list of unspent outputs for a specific address @return: A list of dicts, with each entry in the dict having the following keys: @@ -191,5 +211,6 @@ def get_unspent_txouts(source, return_confirmed=False): else: return txouts + def broadcast_tx(signed_tx_hex): return bitcoind_rpc('sendrawtransaction', [signed_tx_hex]) diff --git a/counterblock/lib/blockfeed.py b/counterblock/lib/blockfeed.py index 6e6b49b0..52340fe0 100644 --- a/counterblock/lib/blockfeed.py +++ b/counterblock/lib/blockfeed.py @@ -18,9 +18,10 @@ from counterblock.lib import config, util, blockchain, cache, database from counterblock.lib.processor import MessageProcessor, MempoolMessageProcessor, BlockProcessor, CaughtUpProcessor -D = decimal.Decimal +D = decimal.Decimal logger = logging.getLogger(__name__) + def fuzzy_is_caught_up(): """We don't want to give users 525 errors or login errors if counterblockd/counterparty-server is in the process of getting caught up, but we DO if counterblockd is either clearly out of date with the blockchain, or reinitializing its database""" @@ 
-28,40 +29,41 @@ def fuzzy_is_caught_up(): return False if not config.state['cp_backend_block_index'] or not config.state['my_latest_block']['block_index']: return False - if config.state['my_latest_block']['block_index'] < config.state['cp_backend_block_index'] - 1: #"fuzzy" part + if config.state['my_latest_block']['block_index'] < config.state['cp_backend_block_index'] - 1: # "fuzzy" part return False return True - + + def process_cp_blockfeed(): config.LATEST_BLOCK_INIT = {'block_index': config.BLOCK_FIRST, 'block_time': None, 'block_hash': None} - #initialize state - config.state['cur_block'] = {'block_index': 0, } #block being currently processed - config.state['my_latest_block'] = {'block_index': 0 } #last block that was successfully processed by counterblockd - config.state['last_message_index'] = -1 #initialize (last processed message index) - config.state['cp_latest_block_index'] = 0 #last block that was successfully processed by counterparty - config.state['cp_backend_block_index'] = 0 #the latest block height as reported by the cpd blockchain backend - config.state['cp_caught_up'] = False #whether counterparty-server is caught up to the backend (e.g. bitcoind) + # initialize state + config.state['cur_block'] = {'block_index': 0, } # block being currently processed + config.state['my_latest_block'] = {'block_index': 0} # last block that was successfully processed by counterblockd + config.state['last_message_index'] = -1 # initialize (last processed message index) + config.state['cp_latest_block_index'] = 0 # last block that was successfully processed by counterparty + config.state['cp_backend_block_index'] = 0 # the latest block height as reported by the cpd blockchain backend + config.state['cp_caught_up'] = False # whether counterparty-server is caught up to the backend (e.g. 
bitcoind) config.state['caught_up_started_events'] = False - #^ set after we are caught up and start up the recurring events that depend on us being caught up with the blockchain - - #enabled processor functions + #^ set after we are caught up and start up the recurring events that depend on us being caught up with the blockchain + + # enabled processor functions logger.debug("Enabled Message Processor Functions {0}".format(MessageProcessor.active_functions())) logger.debug("Enabled Block Processor Functions {0}".format(BlockProcessor.active_functions())) - + def publish_mempool_tx(): """fetch new tx from mempool""" tx_hashes = [] mempool_txs = config.mongo_db.mempool.find(projection={'tx_hash': True}) for mempool_tx in mempool_txs: tx_hashes.append(str(mempool_tx['tx_hash'])) - + params = None if len(tx_hashes) > 0: params = { 'filters': [ - {'field':'tx_hash', 'op': 'NOT IN', 'value': tx_hashes}, - {'field':'category', 'op': 'IN', 'value': ['sends', 'btcpays', 'issuances', 'dividends']} + {'field': 'tx_hash', 'op': 'NOT IN', 'value': tx_hashes}, + {'field': 'category', 'op': 'IN', 'value': ['sends', 'btcpays', 'issuances', 'dividends']} ], 'filterop': 'AND' } @@ -76,7 +78,7 @@ def publish_mempool_tx(): 'timestamp': new_tx['timestamp'], 'viewed_in_block': config.state['my_latest_block']['block_index'] } - + config.mongo_db.mempool.insert(tx) del(tx['_id']) tx['_category'] = tx['category'] @@ -92,23 +94,24 @@ def publish_mempool_tx(): if result == 'ABORT_THIS_MESSAGE_PROCESSING' or result == 'continue': break elif result: - raise Exception("Message processor returned unknown code -- processor: '%s', result: '%s'" % + raise Exception( + "Message processor returned unknown code -- processor: '%s', result: '%s'" % (function, result)) - + def clean_mempool_tx(): """clean mempool transactions older than MAX_REORG_NUM_BLOCKS blocks""" config.mongo_db.mempool.remove( {"viewed_in_block": {"$lt": config.state['my_latest_block']['block_index'] - config.MAX_REORG_NUM_BLOCKS}}) - def parse_message(msg): + def parse_message(msg): msg_data = json.loads(msg['bindings']) logger.debug("Received message %s: %s ..." % (msg['message_index'], msg)) - - #out of order messages should not happen (anymore), but just to be sure + + # out of order messages should not happen (anymore), but just to be sure if msg['message_index'] != config.state['last_message_index'] + 1 and config.state['last_message_index'] != -1: raise Exception("Message index mismatch. 
Next message's message_index: %s, last_message_index: %s" % ( msg['message_index'], config.state['last_message_index'])) - + for function in MessageProcessor.active_functions(): logger.debug('MessageProcessor: starting {}'.format(function['function'])) # TODO: Better handling of double parsing @@ -117,11 +120,13 @@ def parse_message(msg): except pymongo.errors.DuplicateKeyError as e: logging.exception(e) - if result in ('ABORT_THIS_MESSAGE_PROCESSING', 'continue', #just abort further MessageProcessors for THIS message - 'ABORT_BLOCK_PROCESSING'): #abort all further block processing, including that of all messages in the block + if result in ( + 'ABORT_THIS_MESSAGE_PROCESSING', 'continue', # just abort further MessageProcessors for THIS message + 'ABORT_BLOCK_PROCESSING'): # abort all further block processing, including that of all messages in the block break elif result not in (True, False, None): - raise Exception("Message processor returned unknown code -- processor: '%s', result: '%s'" % + raise Exception( + "Message processor returned unknown code -- processor: '%s', result: '%s'" % (function, result)) config.state['last_message_index'] = msg['message_index'] @@ -132,38 +137,38 @@ def parse_block(block_data): config.state['cur_block']['block_time_obj'] \ = datetime.datetime.utcfromtimestamp(config.state['cur_block']['block_time']) config.state['cur_block']['block_time_str'] = config.state['cur_block']['block_time_obj'].isoformat() - - for msg in config.state['cur_block']['_messages']: + + for msg in config.state['cur_block']['_messages']: result = parse_message(msg) - if result == 'ABORT_BLOCK_PROCESSING': #reorg + if result == 'ABORT_BLOCK_PROCESSING': # reorg return False - - #run block processor Functions + + # run block processor Functions BlockProcessor.run_active_functions() - #block successfully processed, track this in our DB + # block successfully processed, track this in our DB new_block = { 'block_index': config.state['cur_block']['block_index'], 'block_time': config.state['cur_block']['block_time_obj'], 'block_hash': config.state['cur_block']['block_hash'], } config.mongo_db.processed_blocks.insert(new_block) - - config.state['my_latest_block'] = new_block + + config.state['my_latest_block'] = new_block logger.info("Block: %i of %i [message height=%s]" % ( config.state['my_latest_block']['block_index'], - config.state['cp_backend_block_index'] \ - if config.state['cp_backend_block_index'] else '???', + config.state['cp_backend_block_index'] + if config.state['cp_backend_block_index'] else '???', config.state['last_message_index'] if config.state['last_message_index'] != -1 else '???')) return True - - #grab our stored preferences, and rebuild the database if necessary + + # grab our stored preferences, and rebuild the database if necessary app_config = config.mongo_db.app_config.find() assert app_config.count() in [0, 1] - if ( app_config.count() == 0 - or config.REPARSE_FORCED - or app_config[0]['db_version'] != config.DB_VERSION - or app_config[0]['running_testnet'] != config.TESTNET): + if(app_config.count() == 0 or + config.REPARSE_FORCED or + app_config[0]['db_version'] != config.DB_VERSION or + app_config[0]['running_testnet'] != config.TESTNET): if app_config.count(): logger.warn("counterblockd database version UPDATED (from %i to %i) or testnet setting changed (from %s to %s), or REINIT forced (%s). REBUILDING FROM SCRATCH ..." 
% ( app_config[0]['db_version'], config.DB_VERSION, app_config[0]['running_testnet'], @@ -174,31 +179,33 @@ def parse_block(block_data): config.state['my_latest_block'] = config.LATEST_BLOCK_INIT else: app_config = app_config[0] - #get the last processed block out of mongo + # get the last processed block out of mongo my_latest_block = config.mongo_db.processed_blocks.find_one(sort=[("block_index", pymongo.DESCENDING)]) if my_latest_block: - #remove any data we have for blocks higher than this (would happen if counterblockd or mongo died + # remove any data we have for blocks higher than this (would happen if counterblockd or mongo died # or errored out while processing a block) database.rollback(my_latest_block['block_index']) else: - #no block state in the database yet + # no block state in the database yet config.state['my_latest_block'] = config.LATEST_BLOCK_INIT - - #avoid contacting counterparty-server (on reparse, to speed up) + + # avoid contacting counterparty-server (on reparse, to speed up) autopilot = False autopilot_runner = 0 iteration = 0 - #start polling counterparty-server for new blocks + # start polling counterparty-server for new blocks cp_running_info = None while True: iteration += 1 if iteration % 10 == 0: - logger.info("Heartbeat (%s, block: %s, caught up: %s)" % ( - iteration, config.state['my_latest_block']['block_index'], fuzzy_is_caught_up())) - logger.debug("iteration: ap %s/%s, cp_latest_block_index: %s, my_latest_block: %s" % (autopilot, autopilot_runner, - config.state['cp_latest_block_index'], config.state['my_latest_block']['block_index'])) - + logger.info( + "Heartbeat (%s, block: %s, caught up: %s)" + % (iteration, config.state['my_latest_block']['block_index'], fuzzy_is_caught_up())) + logger.debug( + "iteration: ap %s/%s, cp_latest_block_index: %s, my_latest_block: %s" % (autopilot, autopilot_runner, + config.state['cp_latest_block_index'], config.state['my_latest_block']['block_index'])) + if not autopilot or autopilot_runner == 0: try: cp_running_info = util.jsonrpc_api("get_running_info", abort_on_error=True)['result'] @@ -206,12 +213,12 @@ def parse_block(block_data): logger.warn("Cannot contact counterparty-server get_running_info: %s" % e) time.sleep(3) continue - - #wipe our state data if necessary, if counterparty-server has moved on to a new DB version + + # wipe our state data if necessary, if counterparty-server has moved on to a new DB version wipeState = False updatePrefs = False - - #Checking appconfig against old running info (when batch-fetching) is redundant + + # Checking appconfig against old running info (when batch-fetching) is redundant if app_config['counterpartyd_db_version_major'] is None \ or app_config['counterpartyd_db_version_minor'] is None \ or app_config['counterpartyd_running_testnet'] is None: @@ -226,8 +233,8 @@ def parse_block(block_data): elif cp_running_info['version_minor'] != app_config['counterpartyd_db_version_minor']: logger.warn( "counterparty-server MINOR DB version change (we built from %s.%s, counterparty-server is at %s.%s). Wiping our state data." 
% ( - app_config['counterpartyd_db_version_major'], app_config['counterpartyd_db_version_minor'], - cp_running_info['version_major'], cp_running_info['version_minor'])) + app_config['counterpartyd_db_version_major'], app_config['counterpartyd_db_version_minor'], + cp_running_info['version_major'], cp_running_info['version_minor'])) wipeState = True updatePrefs = True elif cp_running_info.get('running_testnet', False) != app_config['counterpartyd_running_testnet']: @@ -238,17 +245,17 @@ def parse_block(block_data): if wipeState: app_config = database.reset_db_state() if updatePrefs: - app_config['counterpartyd_db_version_major'] = cp_running_info['version_major'] + app_config['counterpartyd_db_version_major'] = cp_running_info['version_major'] app_config['counterpartyd_db_version_minor'] = cp_running_info['version_minor'] app_config['counterpartyd_running_testnet'] = cp_running_info['running_testnet'] config.mongo_db.app_config.update({}, app_config) - #reset my latest block record + # reset my latest block record config.state['my_latest_block'] = config.LATEST_BLOCK_INIT config.state['caught_up'] = False - - #work up to what block counterpartyd is at + + # work up to what block counterpartyd is at try: - if cp_running_info['last_block']: #should normally exist, unless counterparty-server had an error getting it + if cp_running_info['last_block']: # should normally exist, unless counterparty-server had an error getting it assert cp_running_info['last_block']['block_index'] config.state['cp_latest_block_index'] = cp_running_info['last_block']['block_index'] elif cp_running_info['db_caught_up']: @@ -256,7 +263,8 @@ def parse_block(block_data): else: assert False except: - logger.warn("counterparty-server not returning a valid last processed block (probably is reparsing or was just restarted)." + logger.warn( + "counterparty-server not returning a valid last processed block (probably is reparsing or was just restarted)." + " Waiting 3 seconds before trying again... 
(Data returned: %s, we have: %s)" % ( cp_running_info, config.state['cp_latest_block_index'])) time.sleep(3) @@ -264,67 +272,69 @@ def parse_block(block_data): config.state['cp_backend_block_index'] = cp_running_info['bitcoin_block_count'] config.state['cp_caught_up'] = cp_running_info['db_caught_up'] - + if config.state['my_latest_block']['block_index'] < config.state['cp_latest_block_index']: - #need to catch up + # need to catch up config.state['caught_up'] = False - - #TODO: Autopilot and autopilot runner are redundant + + # TODO: Autopilot and autopilot runner are redundant if config.state['cp_latest_block_index'] - config.state['my_latest_block']['block_index'] > 500: - #we are safely far from the tip, switch to bulk-everything + # we are safely far from the tip, switch to bulk-everything autopilot = True if autopilot_runner == 0: autopilot_runner = 500 autopilot_runner -= 1 else: autopilot = False - + cur_block_index = config.state['my_latest_block']['block_index'] + 1 try: - block_data = cache.get_block_info(cur_block_index, + block_data = cache.get_block_info( + cur_block_index, prefetch=min(100, (config.state['cp_latest_block_index'] - config.state['my_latest_block']['block_index'])), min_message_index=config.state['last_message_index'] + 1 if config.state['last_message_index'] != -1 else None) except Exception as e: logger.warn(str(e) + " Waiting 3 seconds before trying again...") time.sleep(3) continue - + # clean api block cache - if config.state['cp_latest_block_index'] - cur_block_index <= config.MAX_REORG_NUM_BLOCKS: #only when we are near the tip + if config.state['cp_latest_block_index'] - cur_block_index <= config.MAX_REORG_NUM_BLOCKS: # only when we are near the tip cache.clean_block_cache(cur_block_index) try: result = parse_block(block_data) - except Exception as e: #if anything bubbles up + except Exception as e: # if anything bubbles up logger.exception("Unhandled exception while processing block. Rolling back, waiting 3 seconds and retrying. Error was: %s" % e) - #counterparty-server might have gone away... + # counterparty-server might have gone away... my_latest_block = config.mongo_db.processed_blocks.find_one(sort=[("block_index", pymongo.DESCENDING)]) if my_latest_block: database.rollback(my_latest_block['block_index']) - - #disable autopilot this next iteration to force us to check up against counterparty-server + + # disable autopilot this next iteration to force us to check up against counterparty-server # (it will be re-enabled later on in that same iteration if we are far enough from the tip) autopilot = False time.sleep(3) continue - if result is False: #reorg, or block processing otherwise not completed - autopilot = False - - if config.state['cp_latest_block_index'] - cur_block_index < config.MAX_REORG_NUM_BLOCKS: #only when we are near the tip + if result is False: # reorg, or block processing otherwise not completed + autopilot = False + + if config.state['cp_latest_block_index'] - cur_block_index < config.MAX_REORG_NUM_BLOCKS: # only when we are near the tip clean_mempool_tx() elif config.state['my_latest_block']['block_index'] > config.state['cp_latest_block_index']: # should get a reorg message. Just to be on the safe side, prune back MAX_REORG_NUM_BLOCKS blocks # before what counterpartyd is saying if we see this - logger.error("Very odd: Ahead of counterparty-server with block indexes! Pruning back %s blocks to be safe." + logger.error( + "Very odd: Ahead of counterparty-server with block indexes! Pruning back %s blocks to be safe." 
% config.MAX_REORG_NUM_BLOCKS) database.rollback(config.state['cp_latest_block_index'] - config.MAX_REORG_NUM_BLOCKS) else: #...we may be caught up (to counterpartyd), but counterpartyd may not be (to the blockchain). And if it isn't, we aren't config.state['caught_up'] = config.state['cp_caught_up'] - - #this logic here will cover a case where we shut down counterblockd, then start it up again quickly... + + # this logic here will cover a case where we shut down counterblockd, then start it up again quickly... # in that case, there are no new blocks for it to parse, so config.state['last_message_index'] would otherwise remain 0. # With this logic, we will correctly initialize config.state['last_message_index'] to the last message ID of the last processed block if config.state['last_message_index'] == -1 or config.state['my_latest_block']['block_index'] == 0: @@ -334,12 +344,12 @@ def parse_block(block_data): config.state['my_latest_block']['block_index'] = cp_running_info['last_block']['block_index'] logger.info("Detected blocks caught up on startup. Setting last message idx to %s, current block index to %s ..." % ( config.state['last_message_index'], config.state['my_latest_block']['block_index'])) - + if config.state['caught_up'] and not config.state['caught_up_started_events']: - #start up recurring events that depend on us being fully caught up with the blockchain to run + # start up recurring events that depend on us being fully caught up with the blockchain to run CaughtUpProcessor.run_active_functions() config.state['caught_up_started_events'] = True publish_mempool_tx() - time.sleep(2) #counterblockd itself is at least caught up, wait a bit to query again for the latest block from cpd + time.sleep(2) # counterblockd itself is at least caught up, wait a bit to query again for the latest block from cpd diff --git a/counterblock/lib/cache.py b/counterblock/lib/cache.py index 98876e24..7468b6cb 100644 --- a/counterblock/lib/cache.py +++ b/counterblock/lib/cache.py @@ -5,37 +5,43 @@ import gevent import redis import redis.connection -redis.connection.socket = gevent.socket #make redis play well with gevent +redis.connection.socket = gevent.socket # make redis play well with gevent from counterblock.lib import config, util logger = logging.getLogger(__name__) -blockinfo_cache = {} +blockinfo_cache = {} ## -## REDIS-RELATED +# REDIS-RELATED ## + + def get_redis_connection(): logger.info("Connecting to redis @ %s" % config.REDIS_CONNECT) return redis.StrictRedis(host=config.REDIS_CONNECT, port=config.REDIS_PORT, db=config.REDIS_DATABASE) ## -## NOT REDIS RELATED +# NOT REDIS RELATED ## + + def get_block_info(block_index, prefetch=0, min_message_index=None): global blockinfo_cache if block_index in blockinfo_cache: return blockinfo_cache[block_index] - + blockinfo_cache.clear() - blocks = util.call_jsonrpc_api('get_blocks', - {'block_indexes': list(range(block_index, block_index + prefetch)), - 'min_message_index': min_message_index}, - abort_on_error=True)['result'] + blocks = util.call_jsonrpc_api( + 'get_blocks', + {'block_indexes': list(range(block_index, block_index + prefetch)), + 'min_message_index': min_message_index}, + abort_on_error=True)['result'] for block in blocks: blockinfo_cache[block['block_index']] = block return blockinfo_cache[block_index] + def block_cache(func): """decorator""" def cached_function(*args, **kwargs): @@ -50,7 +56,7 @@ def cached_function(*args, **kwargs): try: result = func(*args, **kwargs) config.mongo_db.counterblockd_cache.insert({ - 'block_index': 
block_index, + 'block_index': block_index, 'function': function_signature, 'result': json.dumps(result) }) @@ -61,9 +67,10 @@ def cached_function(*args, **kwargs): #logger.info("result from cache ({}, {}, {})".format(func.__name__, block_index, function_signature)) result = json.loads(cached_result['result']) return result - + return cached_function + def clean_block_cache(block_index): #logger.info("clean block cache lower than {}".format(block_index)) config.mongo_db.counterblockd_cache.remove({'block_index': {'$lt': block_index}}) diff --git a/counterblock/lib/config.py b/counterblock/lib/config.py index e4ef2606..842f25f5 100644 --- a/counterblock/lib/config.py +++ b/counterblock/lib/config.py @@ -1,15 +1,15 @@ # -*- coding: utf-8 -*- ## -## CONSTANTS +# CONSTANTS ## -VERSION = "1.3.1" #should keep up with counterblockd repo's release tag +VERSION = "1.3.1" # should keep up with counterblockd repo's release tag -DB_VERSION = 23 #a db version increment will cause counterblockd to rebuild its database off of counterpartyd +DB_VERSION = 23 # a db version increment will cause counterblockd to rebuild its database off of counterpartyd UNIT = 100000000 -MARKET_PRICE_DERIVE_NUM_POINTS = 8 #number of last trades over which to derive the market price (via WVAP) +MARKET_PRICE_DERIVE_NUM_POINTS = 8 # number of last trades over which to derive the market price (via WVAP) # FROM counterpartyd # NOTE: These constants must match those in counterpartyd/lib/py @@ -24,28 +24,28 @@ XCP_NAME = "Counterparty" APP_NAME = "counterblock" -MAX_REORG_NUM_BLOCKS = 10 #max reorg we'd likely ever see -MAX_FORCED_REORG_NUM_BLOCKS = 20 #but let us go deeper when messages are out of sync +MAX_REORG_NUM_BLOCKS = 10 # max reorg we'd likely ever see +MAX_FORCED_REORG_NUM_BLOCKS = 20 # but let us go deeper when messages are out of sync -QUOTE_ASSETS = ['BTC', 'XBTC', 'XCP'] # define the priority for quote asset -MARKET_LIST_QUOTE_ASSETS = ['XCP', 'XBTC', 'BTC'] # define the order in the market list +QUOTE_ASSETS = ['BTC', 'XBTC', 'XCP'] # define the priority for quote asset +MARKET_LIST_QUOTE_ASSETS = ['XCP', 'XBTC', 'BTC'] # define the order in the market list DEFAULT_BACKEND_PORT_TESTNET = 18332 DEFAULT_BACKEND_PORT = 8332 ## -## STATE +# STATE ## -mongo_db = None #will be set on server init +mongo_db = None # will be set on server init state = { - 'caught_up': False #atomic state variable, set to True when counterpartyd AND counterblockd are caught up - #the rest of this is added dynamically + 'caught_up': False # atomic state variable, set to True when counterpartyd AND counterblockd are caught up + # the rest of this is added dynamically } ## -## METHODS +# METHODS ## def init_data_dir(args): import os @@ -56,7 +56,7 @@ def init_data_dir(args): if not os.path.isdir(data_dir): os.makedirs(data_dir) - global config_dir + global config_dir config_dir = appdirs.user_config_dir(appauthor=XCP_NAME, appname=APP_NAME, roaming=True) if not os.path.isdir(config_dir): os.makedirs(config_dir) @@ -65,15 +65,16 @@ def init_data_dir(args): log_dir = appdirs.user_log_dir(appauthor=XCP_NAME, appname=APP_NAME) if not os.path.isdir(log_dir): os.makedirs(log_dir) - + + def load(args): import os import configparser import email.utils - + assert data_dir and config_dir and log_dir - - #Read config file + + # Read config file configfile = configparser.ConfigParser() if args.config_file: config_path = args.config_file @@ -81,7 +82,7 @@ def load(args): config_path = os.path.join(config_dir, 'server.conf') configfile.read(config_path) 
has_config = configfile.has_section('Default') - + # testnet global TESTNET if args.testnet: @@ -93,18 +94,18 @@ def load(args): global net_path_part net_path_part = '.testnet' if TESTNET else '' - - #first block + + # first block global BLOCK_FIRST if TESTNET: BLOCK_FIRST = 310000 else: BLOCK_FIRST = 278270 - #forced reparse? + # forced reparse? global REPARSE_FORCED REPARSE_FORCED = args.reparse - + ############## # THINGS WE CONNECT TO @@ -129,7 +130,7 @@ def load(args): assert int(BACKEND_PORT) > 1 and int(BACKEND_PORT) < 65535 except: raise Exception("Please specify a valid port number for the backend-port configuration parameter") - + global BACKEND_USER if args.backend_user: BACKEND_USER = args.backend_user @@ -148,13 +149,13 @@ def load(args): global BACKEND_AUTH BACKEND_AUTH = (BACKEND_USER, BACKEND_PASSWORD) if (BACKEND_USER and BACKEND_PASSWORD) else None - + global BACKEND_URL BACKEND_URL = 'http://' + BACKEND_USER + ':' + BACKEND_PASSWORD + '@' + BACKEND_CONNECT + ':' + str(BACKEND_PORT) global BACKEND_URL_NOAUTH BACKEND_URL_NOAUTH = 'http://' + BACKEND_CONNECT + ':' + str(BACKEND_PORT) + '/' - + # counterpartyd RPC connection global COUNTERPARTY_CONNECT if args.counterparty_connect: @@ -176,7 +177,7 @@ def load(args): assert int(COUNTERPARTY_PORT) > 1 and int(COUNTERPARTY_PORT) < 65535 except: raise Exception("Please specify a valid port number for the counterparty-port configuration parameter") - + global COUNTERPARTY_USER if args.counterparty_user: COUNTERPARTY_USER = args.counterparty_user @@ -195,7 +196,7 @@ def load(args): global COUNTERPARTY_RPC COUNTERPARTY_RPC = 'http://' + COUNTERPARTY_CONNECT + ':' + str(COUNTERPARTY_PORT) + '/api/' - + global COUNTERPARTY_AUTH COUNTERPARTY_AUTH = (COUNTERPARTY_USER, COUNTERPARTY_PASSWORD) if (COUNTERPARTY_USER and COUNTERPARTY_PASSWORD) else None @@ -220,7 +221,7 @@ def load(args): assert int(MONGODB_PORT) > 1 and int(MONGODB_PORT) < 65535 except: raise Exception("Please specify a valid port number for the mongodb-port configuration parameter") - + global MONGODB_DATABASE if args.mongodb_database: MONGODB_DATABASE = args.mongodb_database @@ -290,7 +291,7 @@ def load(args): ############## # THINGS WE SERVE - + global RPC_HOST if args.rpc_host: RPC_HOST = args.rpc_host @@ -319,12 +320,12 @@ def load(args): RPC_ALLOW_CORS = configfile.getboolean('Default', 'rpc-allow-cors') else: RPC_ALLOW_CORS = True - - #Other things + + # Other things global SUBDIR_ASSET_IMAGES - SUBDIR_ASSET_IMAGES = "asset_img%s" % net_path_part #goes under the data dir and stores retrieved asset images + SUBDIR_ASSET_IMAGES = "asset_img%s" % net_path_part # goes under the data dir and stores retrieved asset images global SUBDIR_FEED_IMAGES - SUBDIR_FEED_IMAGES = "feed_img%s" % net_path_part #goes under the data dir and stores retrieved feed images + SUBDIR_FEED_IMAGES = "feed_img%s" % net_path_part # goes under the data dir and stores retrieved feed images ############## # OTHER SETTINGS @@ -332,7 +333,7 @@ def load(args): # System (logging, pids, etc) global COUNTERBLOCKD_DIR COUNTERBLOCKD_DIR = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir)) - + global LOG if args.log_file: LOG = args.log_file @@ -366,7 +367,7 @@ def load(args): assert LOG_NUM_FILES > 0 and LOG_NUM_FILES <= 100 except: raise Exception("Please specify a valid log-num-files value (must be less than 100)") - + global TX_LOG if args.tx_log_file: TX_LOG = args.tx_log_file @@ -392,10 +393,11 @@ def load_schemas(): global ASSET_SCHEMA ASSET_SCHEMA = 
json.load(open(os.path.join(COUNTERBLOCKD_DIR, 'schemas', 'asset.schema.json'))) - + global FEED_SCHEMA FEED_SCHEMA = json.load(open(os.path.join(COUNTERBLOCKD_DIR, 'schemas', 'feed.schema.json'))) + def init(args): init_data_dir(args) load(args) diff --git a/counterblock/lib/database.py b/counterblock/lib/database.py index c53d17f3..df9a4c56 100644 --- a/counterblock/lib/database.py +++ b/counterblock/lib/database.py @@ -7,86 +7,93 @@ logger = logging.getLogger(__name__) + def get_connection(): """Connect to mongodb, returning a connection object""" logger.info("Connecting to mongoDB backend ...") mongo_client = pymongo.MongoClient(config.MONGODB_CONNECT, config.MONGODB_PORT) - mongo_db = mongo_client[config.MONGODB_DATABASE] #will create if it doesn't exist + mongo_db = mongo_client[config.MONGODB_DATABASE] # will create if it doesn't exist if config.MONGODB_USER and config.MONGODB_PASSWORD: if not mongo_db.authenticate(config.MONGODB_USER, config.MONGODB_PASSWORD): raise Exception("Could not authenticate to mongodb with the supplied username and password.") return mongo_db + def init_base_indexes(): """insert mongo indexes if need-be (i.e. for newly created database)""" - ##COLLECTIONS THAT ARE PURGED AS A RESULT OF A REPARSE - #processed_blocks + # COLLECTIONS THAT ARE PURGED AS A RESULT OF A REPARSE + # processed_blocks config.mongo_db.processed_blocks.ensure_index('block_index', unique=True) - ##COLLECTIONS THAT ARE *NOT* PURGED AS A RESULT OF A REPARSE - #mempool + # COLLECTIONS THAT ARE *NOT* PURGED AS A RESULT OF A REPARSE + # mempool config.mongo_db.mempool.ensure_index('tx_hash') + def get_block_indexes_for_dates(start_dt=None, end_dt=None): """Returns a 2 tuple (start_block, end_block) result for the block range that encompasses the given start_date and end_date unix timestamps""" if start_dt is None: start_block_index = config.BLOCK_FIRST else: - start_block = config.mongo_db.processed_blocks.find_one({"block_time": {"$lte": start_dt} }, sort=[("block_time", pymongo.DESCENDING)]) + start_block = config.mongo_db.processed_blocks.find_one({"block_time": {"$lte": start_dt}}, sort=[("block_time", pymongo.DESCENDING)]) start_block_index = config.BLOCK_FIRST if not start_block else start_block['block_index'] - + if end_dt is None: end_block_index = config.state['my_latest_block']['block_index'] else: - end_block = config.mongo_db.processed_blocks.find_one({"block_time": {"$gte": end_dt} }, sort=[("block_time", pymongo.ASCENDING)]) + end_block = config.mongo_db.processed_blocks.find_one({"block_time": {"$gte": end_dt}}, sort=[("block_time", pymongo.ASCENDING)]) if not end_block: end_block_index = config.mongo_db.processed_blocks.find_one(sort=[("block_index", pymongo.DESCENDING)])['block_index'] else: end_block_index = end_block['block_index'] return (start_block_index, end_block_index) + def get_block_time(block_index): """TODO: implement result caching to avoid having to go out to the database""" - block = config.mongo_db.processed_blocks.find_one({"block_index": block_index }) - if not block: return None + block = config.mongo_db.processed_blocks.find_one({"block_index": block_index}) + if not block: + return None return block['block_time'] + def reset_db_state(): """boom! 
blow away all applicable collections in mongo""" config.mongo_db.processed_blocks.drop() - - #create/update default app_config object + + # create/update default app_config object config.mongo_db.app_config.update({}, { - 'db_version': config.DB_VERSION, #counterblockd database version - 'running_testnet': config.TESTNET, - 'counterpartyd_db_version_major': None, - 'counterpartyd_db_version_minor': None, - 'counterpartyd_running_testnet': None, - 'last_block_assets_compiled': config.BLOCK_FIRST, #for asset data compilation in tasks.py (resets on reparse as well) + 'db_version': config.DB_VERSION, # counterblockd database version + 'running_testnet': config.TESTNET, + 'counterpartyd_db_version_major': None, + 'counterpartyd_db_version_minor': None, + 'counterpartyd_running_testnet': None, + 'last_block_assets_compiled': config.BLOCK_FIRST, # for asset data compilation in tasks.py (resets on reparse as well) }, upsert=True) app_config = config.mongo_db.app_config.find()[0] - - #reinitialize some internal counters + + # reinitialize some internal counters config.state['my_latest_block'] = {'block_index': 0} config.state['last_message_index'] = -1 - - #call any rollback processors for any extension modules + + # call any rollback processors for any extension modules RollbackProcessor.run_active_functions(None) - + return app_config + def rollback(max_block_index): """called if there are any records for blocks higher than this in the database? If so, they were impartially created and we should get rid of them - + NOTE: after calling this function, you should always trigger a "continue" statement to reiterate the processing loop (which will get a new cp_latest_block from counterpartyd and resume as appropriate) """ assert isinstance(max_block_index, int) and max_block_index >= config.BLOCK_FIRST if not config.mongo_db.processed_blocks.find_one({"block_index": max_block_index}): raise Exception("Can't roll back to specified block index: %i doesn't exist in database" % max_block_index) - - logger.warn("Pruning to block %i ..." % (max_block_index)) + + logger.warn("Pruning to block %i ..." 
% (max_block_index)) config.mongo_db.processed_blocks.remove({"block_index": {"$gt": max_block_index}}) config.state['last_message_index'] = -1 @@ -94,6 +101,5 @@ def rollback(max_block_index): cache.blockinfo_cache.clear() config.state['my_latest_block'] = config.mongo_db.processed_blocks.find_one({"block_index": max_block_index}) or config.LATEST_BLOCK_INIT - #call any rollback processors for any extension modules + # call any rollback processors for any extension modules RollbackProcessor.run_active_functions(max_block_index) - diff --git a/counterblock/lib/log.py b/counterblock/lib/log.py index 06c8e8eb..3f7e5282 100644 --- a/counterblock/lib/log.py +++ b/counterblock/lib/log.py @@ -3,45 +3,46 @@ from counterblock.lib import config + def set_up(verbose): global MAX_LOG_SIZE - MAX_LOG_SIZE = config.LOG_SIZE_KB * 1024 #max log size of 20 MB before rotation (make configurable later) + MAX_LOG_SIZE = config.LOG_SIZE_KB * 1024 # max log size of 20 MB before rotation (make configurable later) global MAX_LOG_COUNT MAX_LOG_COUNT = config.LOG_NUM_FILES # Initialize logging (to file and console) - logger = logging.getLogger() #get root logger + logger = logging.getLogger() # get root logger logger.setLevel(logging.DEBUG if verbose else logging.INFO) - - #Color logging on console for warnings and errors + + # Color logging on console for warnings and errors logging.addLevelName(logging.WARNING, "\033[1;31m%s\033[1;0m" % logging.getLevelName(logging.WARNING)) logging.addLevelName(logging.ERROR, "\033[1;41m%s\033[1;0m" % logging.getLevelName(logging.ERROR)) - - #Console logging + + # Console logging console = logging.StreamHandler() console.setLevel(logging.DEBUG if verbose else logging.INFO) formatter = logging.Formatter('%(levelname)s:%(module)s: %(message)s') console.setFormatter(formatter) logger.addHandler(console) - - #File logging (rotated) + + # File logging (rotated) fileh = logging.handlers.RotatingFileHandler(config.LOG, maxBytes=MAX_LOG_SIZE, backupCount=MAX_LOG_COUNT) fileh.setLevel(logging.DEBUG if verbose else logging.INFO) formatter = logging.Formatter('%(asctime)s:%(levelname)s:%(module)s:%(message)s', '%Y-%m-%d-T%H:%M:%S%z') fileh.setFormatter(formatter) logger.addHandler(fileh) - - #socketio logging (don't show on console in normal operation) + + # socketio logging (don't show on console in normal operation) socketio_log = logging.getLogger('socketio') socketio_log.setLevel(logging.DEBUG if verbose else logging.WARNING) socketio_log.propagate = False - #requests/urllib3 logging (make it not so chatty) + # requests/urllib3 logging (make it not so chatty) logging.getLogger('requests').setLevel(logging.WARNING) logging.getLogger('urllib3').setLevel(logging.CRITICAL) - - #Transaction log - tx_logger = logging.getLogger("transaction_log") #get transaction logger + + # Transaction log + tx_logger = logging.getLogger("transaction_log") # get transaction logger tx_logger.setLevel(logging.DEBUG if verbose else logging.INFO) tx_fileh = logging.handlers.RotatingFileHandler(config.TX_LOG, maxBytes=MAX_LOG_SIZE, backupCount=MAX_LOG_COUNT) tx_fileh.setLevel(logging.DEBUG if verbose else logging.INFO) diff --git a/counterblock/lib/messages.py b/counterblock/lib/messages.py index 92671569..fbcb3c3a 100644 --- a/counterblock/lib/messages.py +++ b/counterblock/lib/messages.py @@ -7,59 +7,62 @@ logger = logging.getLogger(__name__) + def decorate_message(message, for_txn_history=False): - #insert custom fields in certain events... 
- #even invalid actions need these extra fields for proper reporting to the client (as the reporting message + # insert custom fields in certain events... + # even invalid actions need these extra fields for proper reporting to the client (as the reporting message # is produced via PendingActionViewModel.calcText) -- however make it able to deal with the queried data not existing in this case assert '_category' in message if for_txn_history: - message['_command'] = 'insert' #history data doesn't include this + message['_command'] = 'insert' # history data doesn't include this block_index = message['block_index'] if 'block_index' in message else message['tx1_block_index'] message['_block_time'] = database.get_block_time(block_index) - message['_tx_index'] = message['tx_index'] if 'tx_index' in message else message.get('tx1_index', None) + message['_tx_index'] = message['tx_index'] if 'tx_index' in message else message.get('tx1_index', None) if message['_category'] in ['bet_expirations', 'order_expirations', 'bet_match_expirations', 'order_match_expirations']: - message['_tx_index'] = 0 #add tx_index to all entries (so we can sort on it secondarily in history view), since these lack it - + message['_tx_index'] = 0 # add tx_index to all entries (so we can sort on it secondarily in history view), since these lack it + if message['_category'] in ['credits', 'debits']: - #find the last balance change on record - bal_change = config.mongo_db.balance_changes.find_one({ 'address': message['address'], 'asset': message['asset'] }, + # find the last balance change on record + bal_change = config.mongo_db.balance_changes.find_one( + {'address': message['address'], 'asset': message['asset']}, sort=[("block_time", pymongo.DESCENDING)]) message['_quantity_normalized'] = abs(bal_change['quantity_normalized']) if bal_change else None message['_balance'] = bal_change['new_balance'] if bal_change else None message['_balance_normalized'] = bal_change['new_balance_normalized'] if bal_change else None - if message['_category'] in ['orders',] and message['_command'] == 'insert': + if message['_category'] in ['orders', ] and message['_command'] == 'insert': get_asset_info = config.mongo_db.tracked_assets.find_one({'asset': message['get_asset']}) give_asset_info = config.mongo_db.tracked_assets.find_one({'asset': message['give_asset']}) message['_get_asset_divisible'] = get_asset_info['divisible'] if get_asset_info else None message['_give_asset_divisible'] = give_asset_info['divisible'] if give_asset_info else None - - if message['_category'] in ['order_matches',] and message['_command'] == 'insert': + + if message['_category'] in ['order_matches', ] and message['_command'] == 'insert': forward_asset_info = config.mongo_db.tracked_assets.find_one({'asset': message['forward_asset']}) backward_asset_info = config.mongo_db.tracked_assets.find_one({'asset': message['backward_asset']}) message['_forward_asset_divisible'] = forward_asset_info['divisible'] if forward_asset_info else None message['_backward_asset_divisible'] = backward_asset_info['divisible'] if backward_asset_info else None - - if message['_category'] in ['orders', 'order_matches',]: + + if message['_category'] in ['orders', 'order_matches', ]: message['_btc_below_dust_limit'] = ( - ('forward_asset' in message and message['forward_asset'] == config.BTC and message['forward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF) - or ('backward_asset' in message and message['backward_asset'] == config.BTC and message['backward_quantity'] <= 
config.ORDER_BTC_DUST_LIMIT_CUTOFF) + ('forward_asset' in message and message['forward_asset'] == config.BTC and message['forward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF) + or ('backward_asset' in message and message['backward_asset'] == config.BTC and message['backward_quantity'] <= config.ORDER_BTC_DUST_LIMIT_CUTOFF) ) - if message['_category'] in ['dividends', 'sends',]: + if message['_category'] in ['dividends', 'sends', ]: asset_info = config.mongo_db.tracked_assets.find_one({'asset': message['asset']}) message['_divisible'] = asset_info['divisible'] if asset_info else None - - if message['_category'] in ['issuances',]: + + if message['_category'] in ['issuances', ]: message['_quantity_normalized'] = blockchain.normalize_quantity(message['quantity'], message['divisible']) return message + def decorate_message_for_feed(msg, msg_data=None): """This function takes a message from counterpartyd's message feed and mutates it a bit to be suitable to be sent through the counterblockd message feed to an end-client""" if not msg_data: msg_data = json.loads(msg['bindings']) - + message = copy.deepcopy(msg_data) message['_message_index'] = msg['message_index'] message['_command'] = msg['command'] @@ -70,14 +73,15 @@ def decorate_message_for_feed(msg, msg_data=None): message = decorate_message(message) return message + def get_address_cols_for_entity(entity): if entity in ['debits', 'credits']: - return ['address',] - elif entity in ['issuances',]: - return ['issuer',] + return ['address', ] + elif entity in ['issuances', ]: + return ['issuer', ] elif entity in ['sends', 'dividends', 'bets', 'cancels', 'orders', 'burns', 'broadcasts', 'btcpays']: - return ['source',] - #elif entity in ['order_matches', 'bet_matches']: + return ['source', ] + # elif entity in ['order_matches', 'bet_matches']: elif entity in ['order_matches', 'order_expirations', 'order_match_expirations', 'bet_matches', 'bet_expirations', 'bet_match_expirations']: return ['tx0_address', 'tx1_address'] diff --git a/counterblock/lib/module.py b/counterblock/lib/module.py index 0f8a7980..ea948570 100644 --- a/counterblock/lib/module.py +++ b/counterblock/lib/module.py @@ -10,89 +10,95 @@ CONFIG_FILENAME = 'modules%s.conf' + def load_all(): """Loads 3rd party plugin modules (note that this does not yet run startup processors, etc) """ - def load_module(module_path): + def load_module(module_path): logger.info('Loading Plugin Module %s' % module_path) module_path_only = os.path.join(*module_path.split('/')[:-1]) module_path_full = os.path.join(os.path.dirname( os.path.abspath(os.path.join(__file__, os.pardir))), module_path_only) module_name = module_path.split('/')[-1] - f, fl, dsc = imp.find_module(module_name, [module_path_full,]) + f, fl, dsc = imp.find_module(module_name, [module_path_full, ]) imp.load_module(module_name, f, fl, dsc) logger.debug('Plugin Module Loaded %s' % module_name) - + def get_mod_params_dict(params): if not isinstance(params, list): - params = [params] - params_dict = {} - - try: + params = [params] + params_dict = {} + + try: params_dict['priority'] = float(params[0]) except: params_dict['enabled'] = False if "false" == params[0].lower() else True - - if len(params) > 1: + + if len(params) > 1: try: - params_dict['priority'] = float(params[1]) + params_dict['priority'] = float(params[1]) except: params_dict['enabled'] = False if "false" == params[1].lower() else True return params_dict - #Read module configuration file + # Read module configuration file module_conf = 
ConfigObj(os.path.join(config.config_dir, CONFIG_FILENAME % config.net_path_part)) for key, container in list(module_conf.items()): if key == 'LoadModule': - for module, user_settings in list(container.items()): + for module, user_settings in list(container.items()): try: params = get_mod_params_dict(user_settings) if params['enabled'] is True: - load_module(module) + load_module(module) except Exception as e: logger.warn("Failed to load Module %s. Reason: %s" % (module, e)) elif 'Processor' in key: try: processor_functions = processor.__dict__[key] - except: + except: logger.warn("Invalid config header %s in %s" % (key, CONFIG_FILENAME % config.net_path_part)) continue - #print(processor_functions) - for func_name, user_settings in list(container.items()): + # print(processor_functions) + for func_name, user_settings in list(container.items()): #print(func_name, user_settings) if func_name in processor_functions: params = get_mod_params_dict(user_settings) #print(func_name, params) - for param_name, param_value in list(params.items()): + for param_name, param_value in list(params.items()): processor_functions[func_name][param_name] = param_value else: logger.warn("Attempted to configure a non-existent processor %s" % func_name) logger.debug(processor_functions) + def toggle(mod, enabled=True): try: imp.find_module(mod) - except: - print(("Unable to find module %s" % mod)) + except: + print(("Unable to find module %s" % mod)) return mod_config_path = os.path.join(config.config_dir, CONFIG_FILENAME % config.net_path_part) module_conf = ConfigObj(mod_config_path) try: try: - if module_conf['LoadModule'][mod][0] in ['True', 'False']: + if module_conf['LoadModule'][mod][0] in ['True', 'False']: module_conf['LoadModule'][mod][0] = enabled - else: module_conf['LoadModule'][mod][1] = enabled - except: module_conf['LoadModule'][mod].insert(0, enabled) - except: - if not "LoadModule" in module_conf: module_conf['LoadModule'] = {} - module_conf['LoadModule'][mod] = enabled + else: + module_conf['LoadModule'][mod][1] = enabled + except: + module_conf['LoadModule'][mod].insert(0, enabled) + except: + if not "LoadModule" in module_conf: + module_conf['LoadModule'] = {} + module_conf['LoadModule'][mod] = enabled module_conf.write() - print(("%s Module %s" %("Enabled" if enabled else "Disabled", mod))) - + print(("%s Module %s" % ("Enabled" if enabled else "Disabled", mod))) + + def list_all(): mod_config_path = os.path.join(config.config_dir, CONFIG_FILENAME % config.net_path_part) module_conf = ConfigObj(mod_config_path) - for name, modules in list(module_conf.items()): - print(("Configuration for %s" %name)) - for module, settings in list(modules.items()): - print((" %s %s: %s" %(("Module" if name == "LoadModule" else "Function"), module, settings))) + for name, modules in list(module_conf.items()): + print(("Configuration for %s" % name)) + for module, settings in list(modules.items()): + print((" %s %s: %s" % (("Module" if name == "LoadModule" else "Function"), module, settings))) diff --git a/counterblock/lib/modules/__init__.py b/counterblock/lib/modules/__init__.py index a8906251..c623bfb2 100644 --- a/counterblock/lib/modules/__init__.py +++ b/counterblock/lib/modules/__init__.py @@ -1,13 +1,13 @@ NON_CORE_DEPDENDENT_TASKS_FIRST_PRIORITY = 9999 -#MessageProcessor -#priority constants for message processors between modules +# MessageProcessor +# priority constants for message processors between modules ASSETS_PRIORITY_PARSE_ISSUANCE = NON_CORE_DEPDENDENT_TASKS_FIRST_PRIORITY - 0 
ASSETS_PRIORITY_BALANCE_CHANGE = NON_CORE_DEPDENDENT_TASKS_FIRST_PRIORITY - 1 DEX_PRIORITY_PARSE_TRADEBOOK = NON_CORE_DEPDENDENT_TASKS_FIRST_PRIORITY - 2 BETTING_PRIORITY_PARSE_BROADCAST = NON_CORE_DEPDENDENT_TASKS_FIRST_PRIORITY - 3 -CWIOFEEDS_PRIORITY_PARSE_FOR_SOCKETIO = NON_CORE_DEPDENDENT_TASKS_FIRST_PRIORITY - 4 #comes last +CWIOFEEDS_PRIORITY_PARSE_FOR_SOCKETIO = NON_CORE_DEPDENDENT_TASKS_FIRST_PRIORITY - 4 # comes last -#MempoolMessageProcessor -CWIOFEEDS_PRIORITY_PUBLISH_MEMPOOL = NON_CORE_DEPDENDENT_TASKS_FIRST_PRIORITY - 0 \ No newline at end of file +# MempoolMessageProcessor +CWIOFEEDS_PRIORITY_PUBLISH_MEMPOOL = NON_CORE_DEPDENDENT_TASKS_FIRST_PRIORITY - 0 diff --git a/counterblock/lib/modules/assets.py b/counterblock/lib/modules/assets.py index 6f81c17f..ec98ad48 100644 --- a/counterblock/lib/modules/assets.py +++ b/counterblock/lib/modules/assets.py @@ -9,7 +9,9 @@ import datetime import logging import decimal -import urllib.request, urllib.parse, urllib.error +import urllib.request +import urllib.parse +import urllib.error import json import base64 import pymongo @@ -26,6 +28,7 @@ D = decimal.Decimal logger = logging.getLogger(__name__) + def inc_fetch_retry(asset, max_retry=ASSET_MAX_RETRY, new_status='error', errors=[]): asset['fetch_info_retry'] += 1 asset['errors'] = errors @@ -33,42 +36,47 @@ def inc_fetch_retry(asset, max_retry=ASSET_MAX_RETRY, new_status='error', errors asset['info_status'] = new_status config.mongo_db.asset_extended_info.save(asset) + def process_asset_info(asset, info_data): def sanitize_json_data(data): data['asset'] = util.sanitize_eliteness(data['asset']) - if 'description' in data: data['description'] = util.sanitize_eliteness(data['description']) - if 'website' in data: data['website'] = util.sanitize_eliteness(data['website']) - if 'pgpsig' in data: data['pgpsig'] = util.sanitize_eliteness(data['pgpsig']) + if 'description' in data: + data['description'] = util.sanitize_eliteness(data['description']) + if 'website' in data: + data['website'] = util.sanitize_eliteness(data['website']) + if 'pgpsig' in data: + data['pgpsig'] = util.sanitize_eliteness(data['pgpsig']) return data # sanity check assert asset['info_status'] == 'needfetch' assert 'info_url' in asset - assert util.is_valid_url(asset['info_url'], allow_no_protocol=True) #already validated in the fetch + assert util.is_valid_url(asset['info_url'], allow_no_protocol=True) # already validated in the fetch errors = util.is_valid_json(info_data, config.ASSET_SCHEMA) - + if not isinstance(info_data, dict) or 'asset' not in info_data: errors.append('Invalid data format') elif asset['asset'] != info_data['asset']: errors.append('asset field does not match asset name') - + if len(errors) > 0: inc_fetch_retry(asset, new_status='invalid', errors=errors) - return (False, errors) + return (False, errors) asset['info_status'] = 'valid' - #fetch any associated images... - #TODO: parallelize this 2nd level asset image fetching ... (e.g. just compose a list here, and process it in later on) + # fetch any associated images... + # TODO: parallelize this 2nd level asset image fetching ... (e.g. 
just compose a list here, and process it in later on) if 'image' in info_data: - info_data['valid_image'] = util.fetch_image(info_data['image'], - config.SUBDIR_ASSET_IMAGES, asset['asset'], fetch_timeout=5) - + info_data['valid_image'] = util.fetch_image( + info_data['image'], config.SUBDIR_ASSET_IMAGES, asset['asset'], fetch_timeout=5) + asset['info_data'] = sanitize_json_data(info_data) config.mongo_db.asset_extended_info.save(asset) return (True, None) + def task_compile_extended_asset_info(): assets = list(config.mongo_db.asset_extended_info.find({'info_status': 'needfetch'})) asset_info_urls = [] @@ -81,7 +89,7 @@ def asset_fetch_complete_hook(urls_data): info_url = ('http://' + asset['info_url']) \ if not asset['info_url'].startswith('http://') and not asset['info_url'].startswith('https://') else asset['info_url'] assert info_url in urls_data - if not urls_data[info_url][0]: #request was not successful + if not urls_data[info_url][0]: # request was not successful inc_fetch_retry(asset, max_retry=ASSET_MAX_RETRY, errors=[urls_data[info_url][1]]) logger.warn("Fetch for asset at %s not successful: %s (try %i of %i)" % ( info_url, urls_data[info_url][1], asset['fetch_info_retry'], ASSET_MAX_RETRY)) @@ -91,29 +99,36 @@ def asset_fetch_complete_hook(urls_data): logger.info("Processing for asset %s at %s not successful: %s" % (asset['asset'], info_url, result[1])) else: logger.debug("Processing for asset %s at %s successful" % (asset['asset'], info_url)) - - #compose and fetch all info URLs in all assets with them + + # compose and fetch all info URLs in all assets with them for asset in assets: if not asset['info_url']: continue - + if asset.get('disabled', False): logger.info("ExtendedAssetInfo: Skipping disabled asset %s" % asset['asset']) continue - #may or may not end with .json. may or may not start with http:// or https:// - asset_info_urls.append(('http://' + asset['info_url']) \ - if not asset['info_url'].startswith('http://') and not asset['info_url'].startswith('https://') else asset['info_url']) + # may or may not end with .json. 
may or may not start with http:// or https:// + asset_info_urls.append(( + ('http://' + asset['info_url']) + if not asset['info_url'].startswith('http://') and not asset['info_url'].startswith('https://') + else asset['info_url'])) asset_info_urls_str = ', '.join(asset_info_urls) - asset_info_urls_str = (asset_info_urls_str[:2000] + ' ...') if len(asset_info_urls_str) > 2000 else asset_info_urls_str #truncate if necessary + asset_info_urls_str = ( + (asset_info_urls_str[:2000] + ' ...') + if len(asset_info_urls_str) > 2000 + else asset_info_urls_str) # truncate if necessary if len(asset_info_urls): logger.info('Fetching enhanced asset info for %i assets: %s' % (len(asset_info_urls), asset_info_urls_str)) - util.stream_fetch(asset_info_urls, asset_fetch_complete_hook, - fetch_timeout=10, max_fetch_size=4*1024, urls_group_size=20, urls_group_time_spacing=20, + util.stream_fetch( + asset_info_urls, asset_fetch_complete_hook, + fetch_timeout=10, max_fetch_size=4 * 1024, urls_group_size=20, urls_group_time_spacing=20, per_request_complete_callback=lambda url, data: logger.debug("Asset info URL %s retrieved, result: %s" % (url, data))) - - start_task(task_compile_extended_asset_info, delay=60 * 60) #call again in 60 minutes + + start_task(task_compile_extended_asset_info, delay=60 * 60) # call again in 60 minutes + @API.add_method def get_normalized_balances(addresses): @@ -126,34 +141,36 @@ def get_normalized_balances(addresses): raise Exception("addresses must be a list of addresses, even if it just contains one address") if not len(addresses): raise Exception("Invalid address list supplied") - + filters = [] for address in addresses: filters.append({'field': 'address', 'op': '==', 'value': address}) - + mappings = {} - result = util.call_jsonrpc_api("get_balances", + result = util.call_jsonrpc_api( + "get_balances", {'filters': filters, 'filterop': 'or'}, abort_on_error=True)['result'] isowner = {} - owned_assets = config.mongo_db.tracked_assets.find( { '$or': [{'owner': a } for a in addresses] }, { '_history': 0, '_id': 0 } ) + owned_assets = config.mongo_db.tracked_assets.find( + {'$or': [{'owner': a} for a in addresses]}, {'_history': 0, '_id': 0}) for o in owned_assets: - isowner[o['owner'] + o['asset']] = o + isowner[o['owner'] + o['asset']] = o data = [] for d in result: if not d['quantity'] and ((d['address'] + d['asset']) not in isowner): - continue #don't include balances with a zero asset value + continue # don't include balances with a zero asset value asset_info = config.mongo_db.tracked_assets.find_one({'asset': d['asset']}) - divisible = True # XCP and BTC + divisible = True # XCP and BTC if asset_info and 'divisible' in asset_info: divisible = asset_info['divisible'] d['normalized_quantity'] = blockchain.normalize_quantity(d['quantity'], divisible) d['owner'] = (d['address'] + d['asset']) in isowner mappings[d['address'] + d['asset']] = d data.append(d) - - #include any owned assets for each address, even if their balance is zero + + # include any owned assets for each address, even if their balance is zero for key in isowner: if key not in mappings: o = isowner[key] @@ -167,11 +184,12 @@ def get_normalized_balances(addresses): return data + @API.add_method def get_escrowed_balances(addresses): - addresses_holder = ','.join(['?' for e in range(0,len(addresses))]) + addresses_holder = ','.join(['?' 
for e in range(0, len(addresses))]) - sql ='''SELECT (source || '_' || give_asset) AS source_asset, source AS address, give_asset AS asset, SUM(give_remaining) AS quantity + sql = '''SELECT (source || '_' || give_asset) AS source_asset, source AS address, give_asset AS asset, SUM(give_remaining) AS quantity FROM orders WHERE source IN ({}) AND status = ? AND give_asset != ? GROUP BY source_asset'''.format(addresses_holder) @@ -223,9 +241,10 @@ def get_escrowed_balances(addresses): return escrowed_balances + @API.add_method def get_assets_info(assetsList): - assets = assetsList #TODO: change the parameter name at some point in the future...shouldn't be using camel case here + assets = assetsList # TODO: change the parameter name at some point in the future...shouldn't be using camel case here if not isinstance(assets, list): raise Exception("assets must be a list of asset names, even if it just contains one entry") assets_info = [] @@ -251,7 +270,7 @@ def get_assets_info(assetsList): # User-created asset. tracked_asset = config.mongo_db.tracked_assets.find_one({'asset': asset}, {'_id': 0, '_history': 0}) if not tracked_asset: - continue #asset not found, most likely + continue # asset not found, most likely assets_info.append({ 'asset': asset, 'owner': tracked_asset['owner'], @@ -262,11 +281,12 @@ def get_assets_info(assetsList): 'issuer': tracked_asset['owner']}) return assets_info + @API.add_method def get_base_quote_asset(asset1, asset2): """Given two arbitrary assets, returns the base asset and the quote asset. """ - #DEPRECATED 1.5 + # DEPRECATED 1.5 base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2) base_asset_info = config.mongo_db.tracked_assets.find_one({'asset': base_asset}) quote_asset_info = config.mongo_db.tracked_assets.find_one({'asset': quote_asset}) @@ -281,19 +301,21 @@ def get_base_quote_asset(asset1, asset2): 'pair_name': pair_name } + @API.add_method def get_owned_assets(addresses): """Gets a list of owned assets for one or more addresses""" result = config.mongo_db.tracked_assets.find({ 'owner': {"$in": addresses} - }, {"_id":0}).sort("asset", pymongo.ASCENDING) + }, {"_id": 0}).sort("asset", pymongo.ASCENDING) return list(result) + @API.add_method def get_asset_pair_market_info(asset1=None, asset2=None, limit=50): """Given two arbitrary assets, returns the base asset and the quote asset. """ - #DEPRECATED 1.5 + # DEPRECATED 1.5 assert (asset1 and asset2) or (asset1 is None and asset2 is None) if asset1 and asset2: base_asset, quote_asset = util.assets_to_asset_pair(asset1, asset2) @@ -303,26 +325,28 @@ def get_asset_pair_market_info(asset1=None, asset2=None, limit=50): #^ sort by this for now, may want to sort by a market_cap value in the future return list(pair_info) or [] + @API.add_method def get_asset_extended_info(asset): ext_info = config.mongo_db.asset_extended_info.find_one({'asset': asset}, {'_id': 0}) return ext_info or False + @API.add_method def get_asset_history(asset, reverse=False): """ Returns a list of changes for the specified asset, from its inception to the current time. - + @param asset: The asset to retrieve a history on @param reverse: By default, the history is returned in the order of oldest to newest. Set this parameter to True to return items in the order of newest to oldest. 
- + @return: Changes are returned as a list of dicts, with each dict having the following format: * type: One of 'created', 'issued_more', 'changed_description', 'locked', 'transferred', 'called_back' * 'at_block': The block number this change took effect * 'at_block_time': The block time this change took effect - + * IF type = 'created': Has the following fields, as specified when the asset was initially created: * owner, description, divisible, locked, total_issued, total_issued_normalized * IF type = 'issued_more': @@ -340,15 +364,15 @@ def get_asset_history(asset, reverse=False): * IF type = 'called_back': * 'percentage': The percentage of the asset called back (between 0 and 100) """ - asset = config.mongo_db.tracked_assets.find_one({ 'asset': asset }, {"_id":0}) + asset = config.mongo_db.tracked_assets.find_one({'asset': asset}, {"_id": 0}) if not asset: raise Exception("Unrecognized asset") - - #run down through _history and compose a diff log + + # run down through _history and compose a diff log history = [] - raw = asset['_history'] + [asset,] #oldest to newest. add on the current state + raw = asset['_history'] + [asset, ] # oldest to newest. add on the current state prev = None - for i in range(len(raw)): #oldest to newest + for i in range(len(raw)): # oldest to newest if i == 0: assert raw[i]['_change_type'] == 'created' history.append({ @@ -364,7 +388,7 @@ def get_asset_history(asset, reverse=False): }) prev = raw[i] continue - + assert prev if raw[i]['_change_type'] == 'locked': history.append({ @@ -388,7 +412,7 @@ def get_asset_history(asset, reverse=False): 'prev_description': prev['description'], 'new_description': raw[i]['description'], }) - else: #issue additional + else: # issue additional assert raw[i]['total_issued'] - prev['total_issued'] > 0 history.append({ 'type': 'issued_more', @@ -400,11 +424,13 @@ def get_asset_history(asset, reverse=False): 'total_issued_normalized': raw[i]['total_issued_normalized'], }) prev = raw[i] - + final_history = history - if reverse: final_history.reverse() + if reverse: + final_history.reverse() return final_history + @API.add_method def get_balance_history(asset, addresses, normalize=True, start_ts=None, end_ts=None): """Retrieves the ordered balance history for a given address (or list of addresses) and asset pair, within the specified date range @@ -414,15 +440,15 @@ def get_balance_history(asset, addresses, normalize=True, start_ts=None, end_ts= """ if not isinstance(addresses, list): raise Exception("addresses must be a list of addresses, even if it just contains one address") - + asset_info = config.mongo_db.tracked_assets.find_one({'asset': asset}) if not asset_info: raise Exception("Asset does not exist.") - + now_ts = time.mktime(datetime.datetime.utcnow().timetuple()) - if not end_ts: #default to current datetime + if not end_ts: # default to current datetime end_ts = now_ts - if not start_ts: #default to 30 days before the end date + if not start_ts: # default to 30 days before the end date start_ts = end_ts - (30 * 24 * 60 * 60) results = [] for address in addresses: @@ -433,7 +459,7 @@ def get_balance_history(asset, addresses, normalize=True, start_ts=None, end_ts= "$gte": datetime.datetime.utcfromtimestamp(start_ts) } if end_ts == now_ts else { "$gte": datetime.datetime.utcfromtimestamp(start_ts), - "$lte": datetime.datetime.utcfromtimestamp(end_ts) + "$lte": datetime.datetime.utcfromtimestamp(end_ts) } }).sort("block_time", pymongo.ASCENDING) entry = { @@ -441,16 +467,19 @@ def get_balance_history(asset, addresses, 
normalize=True, start_ts=None, end_ts= 'data': [ (time.mktime(r['block_time'].timetuple()) * 1000, r['new_balance_normalized'] if normalize else r['new_balance'] - ) for r in result] + ) for r in result] } results.append(entry) return results + @MessageProcessor.subscribe(priority=ASSETS_PRIORITY_PARSE_ISSUANCE) def parse_issuance(msg, msg_data): - if msg['category'] != 'issuances': return - if msg_data['status'] != 'valid': return - + if msg['category'] != 'issuances': + return + if msg_data['status'] != 'valid': + return + cur_block_index = config.state['cur_block']['block_index'] cur_block = config.state['cur_block'] @@ -458,19 +487,20 @@ def modify_extended_asset_info(asset, description): """adds an asset to asset_extended_info collection if the description is a valid json link. or, if the link is not a valid json link, will remove the asset entry from the table if it exists""" if util.is_valid_url(description, suffix='.json', allow_no_protocol=True): - config.mongo_db.asset_extended_info.update({'asset': asset}, + config.mongo_db.asset_extended_info.update( + {'asset': asset}, {'$set': { 'info_url': description, 'info_status': 'needfetch', - 'fetch_info_retry': 0, # retry ASSET_MAX_RETRY times to fetch info from info_url + 'fetch_info_retry': 0, # retry ASSET_MAX_RETRY times to fetch info from info_url 'info_data': {}, 'errors': [] }}, upsert=True) #^ valid info_status settings: needfetch, valid, invalid, error - #additional fields will be added later in events, once the asset info is pulled + # additional fields will be added later in events, once the asset info is pulled else: - config.mongo_db.asset_extended_info.remove({ 'asset': asset }) - #remove any saved asset image data + config.mongo_db.asset_extended_info.remove({'asset': asset}) + # remove any saved asset image data imagePath = os.path.join(config.data_dir, config.SUBDIR_ASSET_IMAGES, asset + '.png') if os.path.exists(imagePath): os.remove(imagePath) @@ -478,49 +508,49 @@ def modify_extended_asset_info(asset, description): tracked_asset = config.mongo_db.tracked_assets.find_one( {'asset': msg_data['asset']}, {'_id': 0, '_history': 0}) #^ pulls the tracked asset without the _id and history fields. 
This may be None - - if msg_data['locked']: #lock asset + + if msg_data['locked']: # lock asset assert tracked_asset is not None config.mongo_db.tracked_assets.update( {'asset': msg_data['asset']}, {"$set": { '_at_block': cur_block_index, - '_at_block_time': cur_block['block_time_obj'], + '_at_block_time': cur_block['block_time_obj'], '_change_type': 'locked', 'locked': True, - }, - "$push": {'_history': tracked_asset } }, upsert=False) + }, + "$push": {'_history': tracked_asset}}, upsert=False) logger.info("Locking asset %s" % (msg_data['asset'],)) - elif msg_data['transfer']: #transfer asset + elif msg_data['transfer']: # transfer asset assert tracked_asset is not None config.mongo_db.tracked_assets.update( {'asset': msg_data['asset']}, {"$set": { '_at_block': cur_block_index, - '_at_block_time': cur_block['block_time_obj'], + '_at_block_time': cur_block['block_time_obj'], '_change_type': 'transferred', 'owner': msg_data['issuer'], - }, - "$push": {'_history': tracked_asset } }, upsert=False) + }, + "$push": {'_history': tracked_asset}}, upsert=False) logger.info("Transferring asset %s to address %s" % (msg_data['asset'], msg_data['issuer'])) - elif msg_data['quantity'] == 0 and tracked_asset is not None: #change description + elif msg_data['quantity'] == 0 and tracked_asset is not None: # change description config.mongo_db.tracked_assets.update( {'asset': msg_data['asset']}, {"$set": { '_at_block': cur_block_index, - '_at_block_time': cur_block['block_time_obj'], + '_at_block_time': cur_block['block_time_obj'], '_change_type': 'changed_description', 'description': msg_data['description'], - }, - "$push": {'_history': tracked_asset } }, upsert=False) + }, + "$push": {'_history': tracked_asset}}, upsert=False) modify_extended_asset_info(msg_data['asset'], msg_data['description']) logger.info("Changing description for asset %s to '%s'" % (msg_data['asset'], msg_data['description'])) - else: #issue new asset or issue addition qty of an asset - if not tracked_asset: #new issuance + else: # issue new asset or issue addition qty of an asset + if not tracked_asset: # new issuance tracked_asset = { '_change_type': 'created', - '_at_block': cur_block_index, #the block ID this asset is current for - '_at_block_time': cur_block['block_time_obj'], + '_at_block': cur_block_index, # the block ID this asset is current for + '_at_block_time': cur_block['block_time_obj'], #^ NOTE: (if there are multiple asset tracked changes updates in a single block for the same # asset, the last one with _at_block == that block id in the history array is the # final version for that asset at that block @@ -531,52 +561,53 @@ def modify_extended_asset_info(asset, description): 'locked': False, 'total_issued': int(msg_data['quantity']), 'total_issued_normalized': blockchain.normalize_quantity(msg_data['quantity'], msg_data['divisible']), - '_history': [] #to allow for block rollbacks + '_history': [] # to allow for block rollbacks } config.mongo_db.tracked_assets.insert(tracked_asset) logger.info("Tracking new asset: %s" % msg_data['asset']) modify_extended_asset_info(msg_data['asset'], msg_data['description']) - else: #issuing additional of existing asset + else: # issuing additional of existing asset assert tracked_asset is not None config.mongo_db.tracked_assets.update( {'asset': msg_data['asset']}, {"$set": { '_at_block': cur_block_index, - '_at_block_time': cur_block['block_time_obj'], + '_at_block_time': cur_block['block_time_obj'], '_change_type': 'issued_more', - }, - "$inc": { - 'total_issued': 
msg_data['quantity'], - 'total_issued_normalized': blockchain.normalize_quantity(msg_data['quantity'], msg_data['divisible']) - }, - "$push": {'_history': tracked_asset} }, upsert=False) + }, + "$inc": { + 'total_issued': msg_data['quantity'], + 'total_issued_normalized': blockchain.normalize_quantity(msg_data['quantity'], msg_data['divisible']) + }, + "$push": {'_history': tracked_asset}}, upsert=False) logger.info("Adding additional %s quantity for asset %s" % ( blockchain.normalize_quantity(msg_data['quantity'], msg_data['divisible']), msg_data['asset'])) return True -@MessageProcessor.subscribe(priority=ASSETS_PRIORITY_BALANCE_CHANGE) #must come after parse_issuance -def parse_balance_change(msg, msg_data): - #track balance changes for each address + +@MessageProcessor.subscribe(priority=ASSETS_PRIORITY_BALANCE_CHANGE) # must come after parse_issuance +def parse_balance_change(msg, msg_data): + # track balance changes for each address bal_change = None - if msg['category'] in ['credits', 'debits',]: + if msg['category'] in ['credits', 'debits', ]: actionName = 'credit' if msg['category'] == 'credits' else 'debit' address = msg_data['address'] - asset_info = config.mongo_db.tracked_assets.find_one({ 'asset': msg_data['asset'] }) + asset_info = config.mongo_db.tracked_assets.find_one({'asset': msg_data['asset']}) if asset_info is None: logger.warn("Credit/debit of %s where asset ('%s') does not exist. Ignoring..." % (msg_data['quantity'], msg_data['asset'])) return 'ABORT_THIS_MESSAGE_PROCESSING' quantity = msg_data['quantity'] if msg['category'] == 'credits' else -msg_data['quantity'] quantity_normalized = blockchain.normalize_quantity(quantity, asset_info['divisible']) - #look up the previous balance to go off of + # look up the previous balance to go off of last_bal_change = config.mongo_db.balance_changes.find_one({ 'address': address, 'asset': asset_info['asset'] }, sort=[("block_index", pymongo.DESCENDING), ("_id", pymongo.DESCENDING)]) - + if last_bal_change \ and last_bal_change['block_index'] == config.state['cur_block']['block_index']: - #modify this record, as we want at most one entry per block index for each (address, asset) pair + # modify this record, as we want at most one entry per block index for each (address, asset) pair last_bal_change['quantity'] += quantity last_bal_change['quantity_normalized'] += quantity_normalized last_bal_change['new_balance'] += quantity @@ -587,9 +618,9 @@ def parse_balance_change(msg, msg_data): 'from' if actionName == 'debit' else 'to', last_bal_change['address'], ('%f' % last_bal_change['new_balance_normalized']).rstrip('0').rstrip('.'), msg['message_index'],)) bal_change = last_bal_change - else: #new balance change record for this block + else: # new balance change record for this block bal_change = { - 'address': address, + 'address': address, 'asset': asset_info['asset'], 'block_index': config.state['cur_block']['block_index'], 'block_time': config.state['cur_block']['block_time_obj'], @@ -607,11 +638,11 @@ def parse_balance_change(msg, msg_data): @StartUpProcessor.subscribe() def init(): - #init db and indexes - #asset_extended_info + # init db and indexes + # asset_extended_info config.mongo_db.asset_extended_info.ensure_index('asset', unique=True) config.mongo_db.asset_extended_info.ensure_index('info_status') - #balance_changes + # balance_changes config.mongo_db.balance_changes.ensure_index('block_index') config.mongo_db.balance_changes.ensure_index([ ("address", pymongo.ASCENDING), @@ -619,35 +650,37 @@ def init(): 
("block_index", pymongo.DESCENDING), ("_id", pymongo.DESCENDING) ]) - try: #drop unnecessary indexes if they exist + try: # drop unnecessary indexes if they exist config.mongo_db.balance_changes.drop_index('address_1_asset_1_block_time_1') except: pass - - #tracked_assets + + # tracked_assets config.mongo_db.tracked_assets.ensure_index('asset', unique=True) - config.mongo_db.tracked_assets.ensure_index('_at_block') #for tracked asset pruning + config.mongo_db.tracked_assets.ensure_index('_at_block') # for tracked asset pruning config.mongo_db.tracked_assets.ensure_index([ ("owner", pymongo.ASCENDING), ("asset", pymongo.ASCENDING), ]) - #feeds (also init in betting module) + # feeds (also init in betting module) config.mongo_db.feeds.ensure_index('source') config.mongo_db.feeds.ensure_index('owner') config.mongo_db.feeds.ensure_index('category') config.mongo_db.feeds.ensure_index('info_url') + @CaughtUpProcessor.subscribe() -def start_tasks(): +def start_tasks(): start_task(task_compile_extended_asset_info) + @RollbackProcessor.subscribe() def process_rollback(max_block_index): - if not max_block_index: #full reparse + if not max_block_index: # full reparse config.mongo_db.balance_changes.drop() config.mongo_db.tracked_assets.drop() config.mongo_db.asset_extended_info.drop() - #create XCP and BTC assets in tracked_assets + # create XCP and BTC assets in tracked_assets for asset in [config.XCP, config.BTC]: base_asset = { 'asset': asset, @@ -655,14 +688,14 @@ def process_rollback(max_block_index): 'divisible': True, 'locked': False, 'total_issued': None, - '_at_block': config.BLOCK_FIRST, #the block ID this asset is current for - '_history': [] #to allow for block rollbacks + '_at_block': config.BLOCK_FIRST, # the block ID this asset is current for + '_history': [] # to allow for block rollbacks } config.mongo_db.tracked_assets.insert(base_asset) - else: #rollback + else: # rollback config.mongo_db.balance_changes.remove({"block_index": {"$gt": max_block_index}}) - - #to roll back the state of the tracked asset, dive into the history object for each asset that has + + # to roll back the state of the tracked asset, dive into the history object for each asset that has # been updated on or after the block that we are pruning back to assets_to_prune = config.mongo_db.tracked_assets.find({'_at_block': {"$gt": max_block_index}}) for asset in assets_to_prune: @@ -672,13 +705,13 @@ def process_rollback(max_block_index): if prev_ver['_at_block'] <= max_block_index: break if not prev_ver or prev_ver['_at_block'] > max_block_index: - #even the first history version is newer than max_block_index. - #in this case, just remove the asset tracking record itself + # even the first history version is newer than max_block_index. + # in this case, just remove the asset tracking record itself logger.info("Pruning asset %s (last modified @ block %i, removing as no older state available that is <= block %i)" % ( asset['asset'], asset['_at_block'], max_block_index)) config.mongo_db.tracked_assets.remove({'asset': asset['asset']}) else: - #if here, we were able to find a previous version that was saved at or before max_block_index + # if here, we were able to find a previous version that was saved at or before max_block_index # (which should be prev_ver ... 
restore asset's values to its values logger.info("Pruning asset %s (last modified @ block %i, pruning to state at block %i)" % ( asset['asset'], asset['_at_block'], max_block_index)) diff --git a/counterblock/lib/modules/betting.py b/counterblock/lib/modules/betting.py index 7ed974ac..e616689a 100644 --- a/counterblock/lib/modules/betting.py +++ b/counterblock/lib/modules/betting.py @@ -9,7 +9,9 @@ import datetime import logging import decimal -import urllib.request, urllib.parse, urllib.error +import urllib.request +import urllib.parse +import urllib.error import json import configparser import base64 @@ -28,63 +30,73 @@ D = decimal.Decimal logger = logging.getLogger(__name__) + def sanitize_json_data(data): if 'operator' in data: data['operator']['name'] = util.sanitize_eliteness(data['operator']['name']) - if 'description' in data['operator']: data['operator']['description'] = util.sanitize_eliteness(data['operator']['description']) + if 'description' in data['operator']: + data['operator']['description'] = util.sanitize_eliteness(data['operator']['description']) data['title'] = util.sanitize_eliteness(data['title']) - if 'description' in data: data['description'] = util.sanitize_eliteness(data['description']) + if 'description' in data: + data['description'] = util.sanitize_eliteness(data['description']) if 'targets' in data: for i in range(len(data['targets'])): data['targets'][i]['text'] = util.sanitize_eliteness(data['targets'][i]['text']) - if 'description' in data['targets'][i]: data['targets'][i]['description'] = util.sanitize_eliteness(data['targets'][i]['description']) + if 'description' in data['targets'][i]: + data['targets'][i]['description'] = util.sanitize_eliteness(data['targets'][i]['description']) if 'labels' in data['targets'][i]: data['targets'][i]['labels']['equal'] = util.sanitize_eliteness(data['targets'][i]['labels']['equal']) data['targets'][i]['labels']['not_equal'] = util.sanitize_eliteness(data['targets'][i]['labels']['not_equal']) if 'customs' in data: for key in data['customs']: - if isinstance(data['customs'][key], str): data['customs'][key] = util.sanitize_eliteness(data['customs'][key]) + if isinstance(data['customs'][key], str): + data['customs'][key] = util.sanitize_eliteness(data['customs'][key]) return data + def get_feeds_by_source_addresses(addresses): - conditions = { 'source': { '$in': addresses }} + conditions = {'source': {'$in': addresses}} feeds = config.mongo_db.feeds.find(spec=conditions, projection={'_id': False}) feeds_by_source = {} - for feed in feeds: feeds_by_source[feed['source']] = feed + for feed in feeds: + feeds_by_source[feed['source']] = feed return feeds_by_source + def get_feed_counters(feed_address): - counters = {} - sql = 'SELECT COUNT(*) AS bet_count, SUM(wager_quantity) AS wager_quantity, SUM(wager_remaining) AS wager_remaining, status FROM bets ' + counters = {} + sql = 'SELECT COUNT(*) AS bet_count, SUM(wager_quantity) AS wager_quantity, SUM(wager_remaining) AS wager_remaining, status FROM bets ' sql += 'WHERE feed_address=? 
GROUP BY status ORDER BY status DESC' - bindings = [feed_address] + bindings = [feed_address] params = { 'query': sql, 'bindings': bindings - } + } counters['bets'] = util.call_jsonrpc_api('sql', params)['result'] - return counters; + return counters + @API.add_method def get_bets(bet_type, feed_address, deadline, target_value=None, leverage=5040): limit = 50 - bindings = [] - sql = 'SELECT * FROM bets WHERE counterwager_remaining>0 AND ' + bindings = [] + sql = 'SELECT * FROM bets WHERE counterwager_remaining>0 AND ' sql += 'bet_type=? AND feed_address=? AND leverage=? AND deadline=? ' bindings += [bet_type, feed_address, leverage, deadline] if target_value != None: sql += 'AND target_value=? ' bindings.append(target_value) - sql += 'ORDER BY ((counterwager_quantity+0.0)/(wager_quantity+0.0)) ASC LIMIT ?'; + sql += 'ORDER BY ((counterwager_quantity+0.0)/(wager_quantity+0.0)) ASC LIMIT ?' bindings.append(limit) params = { 'query': sql, 'bindings': bindings - } + } return util.call_jsonrpc_api('sql', params)['result'] + @API.add_method -def get_user_bets(addresses = [], status="open"): +def get_user_bets(addresses=[], status="open"): params = { 'filters': { 'field': 'source', @@ -101,14 +113,15 @@ def get_user_bets(addresses = [], status="open"): sources = {} for bet in bets: sources[bet['feed_address']] = True - + return { 'bets': bets, 'feeds': get_feeds_by_source_addresses(list(sources.keys())) } + @API.add_method -def get_feed(address_or_url = ''): +def get_feed(address_or_url=''): conditions = { '$or': [{'source': address_or_url}, {'info_url': address_or_url}], 'info_status': 'valid' @@ -121,7 +134,7 @@ def get_feed(address_or_url = ''): feed['info_data']['next_deadline'] = util.next_interval_date(feed['info_data']['deadline']) result = feed result['counters'] = get_feed_counters(feed['source']) - + if 'counters' not in result: params = { 'filters': { @@ -141,18 +154,20 @@ def get_feed(address_or_url = ''): } return result + @API.add_method -def get_feeds_by_source(addresses = []): +def get_feeds_by_source(addresses=[]): feed = get_feeds_by_source_addresses(addresses) return feed + @API.add_method def parse_base64_feed(base64_feed): decoded_feed = base64.b64decode(base64_feed) feed = json.loads(decoded_feed) if not isinstance(feed, dict) or 'feed' not in feed: return False - + errors = util.is_valid_json(feed['feed'], config.FEED_SCHEMA) if len(errors) > 0: raise Exception("Invalid json: {}".format(", ".join(errors))) @@ -177,25 +192,26 @@ def parse_base64_feed(base64_feed): complete_feed['locked'] = broadcasts[0]['locked'] complete_feed['counters'] = get_feed_counters(broadcasts[0]['source']) complete_feed['info_data'] = sanitize_json_data(feed['feed']) - + feed['feed'] = complete_feed return feed + @MessageProcessor.subscribe(priority=BETTING_PRIORITY_PARSE_BROADCAST) -def parse_broadcast(msg, msg_data): +def parse_broadcast(msg, msg_data): if msg['category'] != 'broadcasts': return save = False feed = config.mongo_db.feeds.find_one({'source': msg_data['source']}) - + if util.is_valid_url(msg_data['text'], allow_no_protocol=True) and msg_data['value'] == -1.0: - if feed is None: + if feed is None: feed = {} feed['source'] = msg_data['source'] feed['info_url'] = msg_data['text'] - feed['info_status'] = 'needfetch' #needfetch, valid (included in CW feed directory), invalid, error - feed['fetch_info_retry'] = 0 # retry FEED_MAX_RETRY times to fetch info from info_url + feed['info_status'] = 'needfetch' # needfetch, valid (included in CW feed directory), invalid, error + 
feed['fetch_info_retry'] = 0 # retry FEED_MAX_RETRY times to fetch info from info_url feed['info_data'] = {} feed['fee_fraction_int'] = msg_data['fee_fraction_int'] feed['locked'] = False @@ -212,14 +228,15 @@ def parse_broadcast(msg, msg_data): } feed['fee_fraction_int'] = msg_data['fee_fraction_int'] save = True - if save: + if save: config.mongo_db.feeds.save(feed) return save + def task_compile_extended_feed_info(): feeds = list(config.mongo_db.feeds.find({'info_status': 'needfetch'})) feed_info_urls = [] - + def inc_fetch_retry(feed, max_retry=FEED_MAX_RETRY, new_status='error', errors=[]): feed['fetch_info_retry'] += 1 feed['errors'] = errors @@ -231,28 +248,30 @@ def process_feed_info(feed, info_data): # sanity check assert feed['info_status'] == 'needfetch' assert 'info_url' in feed - assert util.is_valid_url(feed['info_url'], allow_no_protocol=True) #already validated in the fetch - + assert util.is_valid_url(feed['info_url'], allow_no_protocol=True) # already validated in the fetch + errors = util.is_valid_json(info_data, config.FEED_SCHEMA) - + if not isinstance(info_data, dict) or 'address' not in info_data: errors.append('Invalid data format') elif feed['source'] != info_data['address']: errors.append('Invalid address') - + if len(errors) > 0: inc_fetch_retry(feed, new_status='invalid', errors=errors) - return (False, errors) - + return (False, errors) + feed['info_status'] = 'valid' - - #fetch any associated images... - #TODO: parallelize this 2nd level feed image fetching ... (e.g. just compose a list here, and process it in later on) + + # fetch any associated images... + # TODO: parallelize this 2nd level feed image fetching ... (e.g. just compose a list here, and process it in later on) if 'image' in info_data: - info_data['valid_image'] = util.fetch_image(info_data['image'], + info_data['valid_image'] = util.fetch_image( + info_data['image'], config.SUBDIR_FEED_IMAGES, feed['source'] + '_topic', fetch_timeout=5) if 'operator' in info_data and 'image' in info_data['operator']: - info_data['operator']['valid_image'] = util.fetch_image(info_data['operator']['image'], + info_data['operator']['valid_image'] = util.fetch_image( + info_data['operator']['image'], config.SUBDIR_FEED_IMAGES, feed['source'] + '_owner', fetch_timeout=5) if 'targets' in info_data: for i in range(len(info_data['targets'])): @@ -260,11 +279,11 @@ def process_feed_info(feed, info_data): image_name = feed['source'] + '_tv_' + str(info_data['targets'][i]['value']) info_data['targets'][i]['valid_image'] = util.fetch_image( info_data['targets'][i]['image'], config.SUBDIR_FEED_IMAGES, image_name, fetch_timeout=5) - + feed['info_data'] = sanitize_json_data(info_data) config.mongo_db.feeds.save(feed) return (True, None) - + def feed_fetch_complete_hook(urls_data): logger.info("Enhanced feed info fetching complete. %s unique URLs fetched. Processing..." % len(urls_data)) feeds = config.mongo_db.feeds.find({'info_status': 'needfetch'}) @@ -277,7 +296,7 @@ def feed_fetch_complete_hook(urls_data): logger.warn("URL %s not properly fetched (not one of %i entries in urls_data), skipping..." 
% (info_url, len(urls_data))) continue assert info_url in urls_data - if not urls_data[info_url][0]: #request was not successful + if not urls_data[info_url][0]: # request was not successful inc_fetch_retry(feed, max_retry=FEED_MAX_RETRY, errors=[urls_data[info_url][1]]) logger.warn("Fetch for feed at %s not successful: %s (try %i of %i)" % ( info_url, urls_data[info_url][1], feed['fetch_info_retry'], FEED_MAX_RETRY)) @@ -287,38 +306,44 @@ def feed_fetch_complete_hook(urls_data): logger.info("Processing for feed at %s not successful: %s" % (info_url, result[1])) else: logger.info("Processing for feed at %s successful" % info_url) - - #compose and fetch all info URLs in all feeds with them + + # compose and fetch all info URLs in all feeds with them for feed in feeds: assert feed['info_url'] - feed_info_urls.append(('http://' + feed['info_url']) \ - if not feed['info_url'].startswith('http://') and not feed['info_url'].startswith('https://') else feed['info_url']) + feed_info_urls.append( + ('http://' + feed['info_url']) + if not feed['info_url'].startswith('http://') and not feed['info_url'].startswith('https://') + else feed['info_url']) feed_info_urls_str = ', '.join(feed_info_urls) - feed_info_urls_str = (feed_info_urls_str[:2000] + ' ...') if len(feed_info_urls_str) > 2000 else feed_info_urls_str #truncate if necessary + feed_info_urls_str = (feed_info_urls_str[:2000] + ' ...') if len(feed_info_urls_str) > 2000 else feed_info_urls_str # truncate if necessary if len(feed_info_urls): logger.info('Fetching enhanced feed info for %i feeds: %s' % (len(feed_info_urls), feed_info_urls_str)) - util.stream_fetch(feed_info_urls, feed_fetch_complete_hook, - fetch_timeout=10, max_fetch_size=4*1024, urls_group_size=20, urls_group_time_spacing=20, + util.stream_fetch( + feed_info_urls, feed_fetch_complete_hook, + fetch_timeout=10, max_fetch_size=4 * 1024, urls_group_size=20, urls_group_time_spacing=20, per_request_complete_callback=lambda url, data: logger.debug("Feed at %s retrieved, result: %s" % (url, data))) - start_task(task_compile_extended_feed_info, delay=60 * 5) #call again in 5 minutes + start_task(task_compile_extended_feed_info, delay=60 * 5) # call again in 5 minutes + @StartUpProcessor.subscribe() def init(): - #init db and indexes - #feeds (also init in enhanced_asset_info module) + # init db and indexes + # feeds (also init in enhanced_asset_info module) config.mongo_db.feeds.ensure_index('source') config.mongo_db.feeds.ensure_index('owner') config.mongo_db.feeds.ensure_index('category') config.mongo_db.feeds.ensure_index('info_url') - + + @CaughtUpProcessor.subscribe() -def start_tasks(): +def start_tasks(): start_task(task_compile_extended_feed_info) + @RollbackProcessor.subscribe() def process_rollback(max_block_index): - if not max_block_index: #full reparse + if not max_block_index: # full reparse pass - else: #rollback + else: # rollback pass diff --git a/counterblock/lib/modules/counterwallet.py b/counterblock/lib/modules/counterwallet.py index 1dfc89da..cba7c583 100644 --- a/counterblock/lib/modules/counterwallet.py +++ b/counterblock/lib/modules/counterwallet.py @@ -9,7 +9,9 @@ import datetime import logging import decimal -import urllib.request, urllib.parse, urllib.error +import urllib.request +import urllib.parse +import urllib.error import json import pymongo import flask @@ -22,7 +24,7 @@ from counterblock.lib.processor import MessageProcessor, MempoolMessageProcessor, BlockProcessor, StartUpProcessor, CaughtUpProcessor, RollbackProcessor, API, start_task from 
counterblock.lib.processor import startup -PREFERENCES_MAX_LENGTH = 100000 #in bytes, as expressed in JSON +PREFERENCES_MAX_LENGTH = 100000 # in bytes, as expressed in JSON ARMORY_UTXSVR_PORT_MAINNET = 6590 ARMORY_UTXSVR_PORT_TESTNET = 6591 @@ -30,6 +32,7 @@ logger = logging.getLogger(__name__) module_config = {} + def _read_config(): configfile = configparser.ConfigParser() config_path = os.path.join(config.config_dir, 'counterwallet%s.conf' % config.net_path_part) @@ -39,28 +42,28 @@ def _read_config(): assert configfile.has_section('Default') except: logging.warn("Could not find or parse counterwallet%s.conf config file!" % config.net_path_part) - - #email-related + + # email-related if configfile.has_option('Default', 'support-email'): module_config['SUPPORT_EMAIL'] = configfile.get('Default', 'support-email') else: - module_config['SUPPORT_EMAIL'] = None #disabled + module_config['SUPPORT_EMAIL'] = None # disabled if module_config['SUPPORT_EMAIL']: if not email.utils.parseaddr(module_config['SUPPORT_EMAIL'])[1]: raise Exception("Invalid support email address") - + if configfile.has_option('Default', 'email-server'): module_config['EMAIL_SERVER'] = configfile.get('Default', 'email-server') else: module_config['EMAIL_SERVER'] = "localhost" - + # pref pruning if configfile.has_option('Default', 'prefs-prune-enable'): module_config['PREFS_PRUNE_ENABLE'] = configfile.getboolean('Default', 'prefs-prune-enable') else: module_config['PREFS_PRUNE_ENABLE'] = False - - #vending machine integration + + # vending machine integration if configfile.has_option('Default', 'vending-machine-provider'): module_config['VENDING_MACHINE_PROVIDER'] = configfile.get('Default', 'vending-machine-provider') else: @@ -86,6 +89,7 @@ def is_ready(): 'quick_buy_enable': True if module_config['VENDING_MACHINE_PROVIDER'] is not None else False } + @API.add_method def get_reflected_host_info(): """Allows the requesting host to get some info about itself, such as its IP. 
Used for troubleshooting.""" @@ -97,26 +101,27 @@ def get_reflected_host_info(): 'country': country } + @API.add_method def get_wallet_stats(start_ts=None, end_ts=None): now_ts = time.mktime(datetime.datetime.utcnow().timetuple()) - if not end_ts: #default to current datetime + if not end_ts: # default to current datetime end_ts = now_ts - if not start_ts: #default to 360 days before the end date + if not start_ts: # default to 360 days before the end date start_ts = end_ts - (360 * 24 * 60 * 60) - + num_wallets_mainnet = config.mongo_db.preferences.find({'network': 'mainnet'}).count() num_wallets_testnet = config.mongo_db.preferences.find({'network': 'testnet'}).count() num_wallets_unknown = config.mongo_db.preferences.find({'network': None}).count() wallet_stats = [] - + for net in ['mainnet', 'testnet']: filters = { "when": { "$gte": datetime.datetime.utcfromtimestamp(start_ts) } if end_ts == now_ts else { "$gte": datetime.datetime.utcfromtimestamp(start_ts), - "$lte": datetime.datetime.utcfromtimestamp(end_ts) + "$lte": datetime.datetime.utcfromtimestamp(end_ts) }, 'network': net } @@ -126,21 +131,25 @@ def get_wallet_stats(start_ts=None, end_ts=None): distinct_login_counts = [] for e in stats: d = int(time.mktime(datetime.datetime(e['when'].year, e['when'].month, e['when'].day).timetuple()) * 1000) - - if 'distinct_login_count' in e: distinct_login_counts.append([ d, e['distinct_login_count'] ]) - if 'login_count' in e: login_counts.append([ d, e['login_count'] ]) - if 'new_count' in e: new_wallet_counts.append([ d, e['new_count'] ]) + + if 'distinct_login_count' in e: + distinct_login_counts.append([d, e['distinct_login_count']]) + if 'login_count' in e: + login_counts.append([d, e['login_count']]) + if 'new_count' in e: + new_wallet_counts.append([d, e['new_count']]) wallet_stats.append({'name': '%s: Logins' % net.capitalize(), 'data': login_counts}) wallet_stats.append({'name': '%s: Active Wallets' % net.capitalize(), 'data': distinct_login_counts}) wallet_stats.append({'name': '%s: New Wallets' % net.capitalize(), 'data': new_wallet_counts}) - + return { 'num_wallets_mainnet': num_wallets_mainnet, 'num_wallets_testnet': num_wallets_testnet, 'num_wallets_unknown': num_wallets_unknown, 'wallet_stats': wallet_stats} + @API.add_method def get_preferences(wallet_id, for_login=False, network=None): """Gets stored wallet preferences @@ -151,24 +160,26 @@ def get_preferences(wallet_id, for_login=False, network=None): if for_login and network is None: raise Exception("network parameter required if for_login is set") - result = config.mongo_db.preferences.find_one({"wallet_id": wallet_id}) - if not result: return False #doesn't exist - + result = config.mongo_db.preferences.find_one({"wallet_id": wallet_id}) + if not result: + return False # doesn't exist + last_touched_date = datetime.datetime.utcfromtimestamp(result['last_touched']).date() now = datetime.datetime.utcnow() - - if for_login: #record user login + + if for_login: # record user login ip = flask.request.headers.get('X-Real-Ip', flask.request.remote_addr) ua = flask.request.headers.get('User-Agent', '') config.mongo_db.login_history.insert({'wallet_id': wallet_id, 'when': now, 'network': network, 'action': 'login', 'ip': ip, 'ua': ua}) - + result['last_touched'] = time.mktime(time.gmtime()) config.mongo_db.preferences.save(result) return { 'preferences': json.loads(result['preferences']), 'last_updated': result.get('last_updated', None) - } + } + @API.add_method def store_preferences(wallet_id, preferences, for_login=False, 
network=None, referer=None): @@ -185,23 +196,25 @@ def store_preferences(wallet_id, preferences, for_login=False, network=None, ref preferences_json = json.dumps(preferences) except: raise Exception("Cannot dump preferences to JSON") - + now = datetime.datetime.utcnow() - - #sanity check around max size + + # sanity check around max size if len(preferences_json) >= PREFERENCES_MAX_LENGTH: raise Exception("Preferences object is too big.") - - if for_login: #mark this as a new signup IF the wallet doesn't exist already + + if for_login: # mark this as a new signup IF the wallet doesn't exist already existing_record = config.mongo_db.login_history.find({'wallet_id': wallet_id, 'network': network, 'action': 'create'}) if existing_record.count() == 0: ip = flask.request.headers.get('X-Real-Ip', flask.request.remote_addr) ua = flask.request.headers.get('User-Agent', '') - config.mongo_db.login_history.insert({'wallet_id': wallet_id, 'when': now, - 'network': network, 'action': 'create', 'referer': referer, 'ip': ip, 'ua': ua}) - config.mongo_db.login_history.insert({'wallet_id': wallet_id, 'when': now, - 'network': network, 'action': 'login', 'ip': ip, 'ua': ua}) #also log a wallet login - + config.mongo_db.login_history.insert( + {'wallet_id': wallet_id, 'when': now, + 'network': network, 'action': 'create', 'referer': referer, 'ip': ip, 'ua': ua}) + config.mongo_db.login_history.insert( + {'wallet_id': wallet_id, 'when': now, + 'network': network, 'action': 'login', 'ip': ip, 'ua': ua}) # also log a wallet login + now_ts = time.mktime(time.gmtime()) config.mongo_db.preferences.update( {'wallet_id': wallet_id}, @@ -209,12 +222,13 @@ def store_preferences(wallet_id, preferences, for_login=False, network=None, ref 'wallet_id': wallet_id, 'preferences': preferences_json, 'last_updated': now_ts, - 'last_touched': now_ts }, + 'last_touched': now_ts}, '$setOnInsert': {'when_created': now_ts, 'network': network} - }, upsert=True) + }, upsert=True) #^ last_updated MUST be in GMT, as it will be compaired again other servers return True + @API.add_method def create_armory_utx(unsigned_tx_hex, public_key_hex): endpoint = "http://127.0.0.1:%s/" % ( @@ -223,6 +237,7 @@ def create_armory_utx(unsigned_tx_hex, public_key_hex): utx_ascii = util.call_jsonrpc_api("serialize_unsigned_tx", params=params, endpoint=endpoint, abort_on_error=True)['result'] return utx_ascii + @API.add_method def convert_armory_signedtx_to_raw_hex(signed_tx_ascii): endpoint = "http://127.0.0.1:%s/" % ( @@ -231,6 +246,7 @@ def convert_armory_signedtx_to_raw_hex(signed_tx_ascii): raw_tx_hex = util.call_jsonrpc_api("convert_signed_tx_to_raw_hex", params=params, endpoint=endpoint, abort_on_error=True)['result'] return raw_tx_hex + @API.add_method def create_support_case(name, from_email, problem, screenshot=None, addtl_info=''): """create an email with the information received @@ -244,45 +260,46 @@ def create_support_case(name, from_email, problem, screenshot=None, addtl_info=' from email.MIMEBase import MIMEBase from email.MIMEText import MIMEText from email.mime.image import MIMEImage - + if not module_config['SUPPORT_EMAIL']: raise Exception("Sending of support emails are disabled on the server: no SUPPORT_EMAIL address set") - - if not email.utils.parseaddr(from_email)[1]: #should have been validated in the form + + if not email.utils.parseaddr(from_email)[1]: # should have been validated in the form raise Exception("Invalid support email address") - + try: if screenshot: screenshot_data = screenshot.split(',', 1)[1] 
screenshot_data_decoded = base64.b64decode(screenshot_data) except: raise Exception("screenshot data format unexpected") - + try: addtl_info = json.loads(addtl_info) addtl_info = json.dumps(addtl_info, indent=1, sort_keys=False) except: raise Exception("addtl_info data format unexpected") - + from_email_formatted = email.utils.formataddr((name, from_email)) msg = MIMEMultipart() - msg['Subject'] = Header((problem[:75] + '...') if len(problem) > 75 else problem, 'utf-8') + msg['Subject'] = Header((problem[:75] + '...') if len(problem) > 75 else problem, 'utf-8') msg['From'] = from_email_formatted msg['Reply-to'] = from_email_formatted msg['To'] = module_config['SUPPORT_EMAIL'] msg['Date'] = email.utils.formatdate(localtime=True) - + msg_text = MIMEText("""Problem: %s\n\nAdditional Info:\n%s""" % (problem, addtl_info)) msg.attach(msg_text) - + if screenshot: image = MIMEImage(screenshot_data_decoded, name="screenshot.png") msg.attach(image) - + server = smtplib.SMTP(module_config['EMAIL_SERVER']) server.sendmail(from_email, module_config['SUPPORT_EMAIL'], msg.as_string()) return True + @API.add_method def get_vennd_machine(): if module_config['VENDING_MACHINE_PROVIDER'] is not None: @@ -296,12 +313,14 @@ def task_expire_stale_prefs(): Every day, clear out preferences objects that haven't been touched in > 30 days, in order to reduce abuse risk/space consumed """ min_last_updated = time.mktime((datetime.datetime.utcnow() - datetime.timedelta(days=30)).timetuple()) - + num_stale_records = config.mongo_db.preferences.find({'last_touched': {'$lt': min_last_updated}}).count() config.mongo_db.preferences.remove({'last_touched': {'$lt': min_last_updated}}) - if num_stale_records: logger.warn("REMOVED %i stale preferences objects" % num_stale_records) - - start_task(task_expire_stale_prefs, delay=86400) #call again in 1 day + if num_stale_records: + logger.warn("REMOVED %i stale preferences objects" % num_stale_records) + + start_task(task_expire_stale_prefs, delay=86400) # call again in 1 day + def task_generate_wallet_stats(): """ @@ -309,18 +328,18 @@ def task_generate_wallet_stats(): """ def gen_stats_for_network(network): assert network in ('mainnet', 'testnet') - #get the latest date in the stats table present + # get the latest date in the stats table present now = datetime.datetime.utcnow() latest_stat = config.mongo_db.wallet_stats.find({'network': network}).sort('when', pymongo.DESCENDING).limit(1) latest_stat = latest_stat[0] if latest_stat.count() else None new_entries = {} - - #the queries below work with data that happened on or after the date of the latest stat present - #aggregate over the same period for new logins, adding the referrers to a set + + # the queries below work with data that happened on or after the date of the latest stat present + # aggregate over the same period for new logins, adding the referrers to a set match_criteria = {'when': {"$gte": latest_stat['when']}, 'network': network, 'action': 'create'} \ if latest_stat else {'when': {"$lte": now}, 'network': network, 'action': 'create'} new_wallets = config.mongo_db.login_history.aggregate([ - {"$match": match_criteria }, + {"$match": match_criteria}, {"$project": { "year": {"$year": "$when"}, "month": {"$month": "$when"}, @@ -333,14 +352,14 @@ def gen_stats_for_network(network): ]) for e in new_wallets: ts = time.mktime(datetime.datetime(e['_id']['year'], e['_id']['month'], e['_id']['day']).timetuple()) - new_entries[ts] = { #a future wallet_stats entry + new_entries[ts] = { # a future wallet_stats entry 'when': 
datetime.datetime(e['_id']['year'], e['_id']['month'], e['_id']['day']), 'network': network, 'new_count': e['new_count'], } - + referer_counts = config.mongo_db.login_history.aggregate([ - {"$match": match_criteria }, + {"$match": match_criteria}, {"$project": { "year": {"$year": "$when"}, "month": {"$month": "$when"}, @@ -356,17 +375,20 @@ def gen_stats_for_network(network): for e in referer_counts: ts = time.mktime(datetime.datetime(e['_id']['year'], e['_id']['month'], e['_id']['day']).timetuple()) assert ts in new_entries - if e['_id']['referer'] is None: continue + if e['_id']['referer'] is None: + continue referer_key = urllib.parse.quote(e['_id']['referer']).replace('.', '%2E') - if 'referers' not in new_entries[ts]: new_entries[ts]['referers'] = {} - if e['_id']['referer'] not in new_entries[ts]['referers']: new_entries[ts]['referers'][referer_key] = 0 + if 'referers' not in new_entries[ts]: + new_entries[ts]['referers'] = {} + if e['_id']['referer'] not in new_entries[ts]['referers']: + new_entries[ts]['referers'][referer_key] = 0 new_entries[ts]['referers'][referer_key] += 1 - - #logins (not new wallets) - generate stats + + # logins (not new wallets) - generate stats match_criteria = {'when': {"$gte": latest_stat['when']}, 'network': network, 'action': 'login'} \ if latest_stat else {'when': {"$lte": now}, 'network': network, 'action': 'login'} logins = config.mongo_db.login_history.aggregate([ - {"$match": match_criteria }, + {"$match": match_criteria}, {"$project": { "year": {"$year": "$when"}, "month": {"$month": "$when"}, @@ -382,7 +404,7 @@ def gen_stats_for_network(network): for e in logins: ts = time.mktime(datetime.datetime(e['_id']['year'], e['_id']['month'], e['_id']['day']).timetuple()) if ts not in new_entries: - new_entries[ts] = { #a future wallet_stats entry + new_entries[ts] = { # a future wallet_stats entry 'when': datetime.datetime(e['_id']['year'], e['_id']['month'], e['_id']['day']), 'network': network, 'new_count': 0, @@ -390,8 +412,8 @@ def gen_stats_for_network(network): } new_entries[ts]['login_count'] = e['login_count'] new_entries[ts]['distinct_login_count'] = len(e['distinct_wallets']) - - #add/replace the wallet_stats data + + # add/replace the wallet_stats data if latest_stat: updated_entry_ts = time.mktime(datetime.datetime( latest_stat['when'].year, latest_stat['when'].month, latest_stat['when'].day).timetuple()) @@ -399,52 +421,56 @@ def gen_stats_for_network(network): updated_entry = new_entries[updated_entry_ts] del new_entries[updated_entry_ts] assert updated_entry['when'] == latest_stat['when'] - del updated_entry['when'] #not required for the upsert - logger.info("Revised wallet statistics for partial day %s-%s-%s: %s" % ( - latest_stat['when'].year, latest_stat['when'].month, latest_stat['when'].day, updated_entry)) - config.mongo_db.wallet_stats.update({'when': latest_stat['when']}, + del updated_entry['when'] # not required for the upsert + logger.info( + "Revised wallet statistics for partial day %s-%s-%s: %s" + % (latest_stat['when'].year, latest_stat['when'].month, latest_stat['when'].day, updated_entry)) + config.mongo_db.wallet_stats.update( + {'when': latest_stat['when']}, {"$set": updated_entry}, upsert=True) - - if new_entries: #insert the rest + + if new_entries: # insert the rest #logger.info("Stats, new entries: %s" % new_entries.values()) config.mongo_db.wallet_stats.insert(list(new_entries.values())) logger.info("Added wallet statistics for %i full days" % len(list(new_entries.values()))) - + gen_stats_for_network('mainnet') 
gen_stats_for_network('testnet') - start_task(task_generate_wallet_stats, delay=30 * 60) #call again in 30 minutes + start_task(task_generate_wallet_stats, delay=30 * 60) # call again in 30 minutes + @CaughtUpProcessor.subscribe() -def start_tasks(): +def start_tasks(): start_task(task_expire_stale_prefs) start_task(task_generate_wallet_stats) + @StartUpProcessor.subscribe() def init(): _read_config() - - #init db and indexes - ##COLLECTIONS THAT *ARE* PURGED AS A RESULT OF A REPARSE - #wallet_stats + + # init db and indexes + # COLLECTIONS THAT *ARE* PURGED AS A RESULT OF A REPARSE + # wallet_stats config.mongo_db.wallet_stats.ensure_index([ ("when", pymongo.ASCENDING), ("network", pymongo.ASCENDING), ]) - - ##COLLECTIONS THAT ARE *NOT* PURGED AS A RESULT OF A REPARSE - #preferences + + # COLLECTIONS THAT ARE *NOT* PURGED AS A RESULT OF A REPARSE + # preferences config.mongo_db.preferences.ensure_index('wallet_id', unique=True) config.mongo_db.preferences.ensure_index('network') config.mongo_db.preferences.ensure_index('last_touched') - #login_history + # login_history config.mongo_db.login_history.ensure_index('wallet_id') config.mongo_db.login_history.ensure_index([ ("when", pymongo.DESCENDING), ("network", pymongo.ASCENDING), ("action", pymongo.ASCENDING), ]) - - #load counterwallet json config + + # load counterwallet json config counterwallet_config_path = os.path.join('/home/xcp/counterwallet/counterwallet.conf.json') if os.path.exists(counterwallet_config_path): logger.info("Loading counterwallet client-side config at '%s'" % counterwallet_config_path) @@ -458,39 +484,39 @@ def init(): except Exception as e: logger.error("Exception loading counterwallet client-side config: %s" % e) - #init GEOIP + # init GEOIP import pygeoip geoip_data_path = os.path.join(config.data_dir, 'GeoIP.dat') + def download_geoip_data(): logger.info("Checking/updating GeoIP.dat ...") - download = False; - + download = False + if not os.path.isfile(geoip_data_path): download = True else: - one_week_ago = time.time() - 60*60*24*7 + one_week_ago = time.time() - 60 * 60 * 24 * 7 file_stat = os.stat(geoip_data_path) if file_stat.st_ctime < one_week_ago: download = True - + if download: logger.info("Downloading GeoIP.dat") - ##TODO: replace with pythonic way to do this! + # TODO: replace with pythonic way to do this! cmd = "cd '{}'; wget -N -q http://geolite.maxmind.com/download/geoip/database/GeoLiteCountry/GeoIP.dat.gz; gzip -dfq GeoIP.dat.gz".format(config.data_dir) util.subprocess_cmd(cmd) else: logger.info("GeoIP.dat database up to date. 
Not downloading.") download_geoip_data() - module_config['GEOIP'] = pygeoip.GeoIP(geoip_data_path) - + module_config['GEOIP'] = pygeoip.GeoIP(geoip_data_path) + if not module_config['SUPPORT_EMAIL']: logger.warn("Support email setting not set: To enable, please specify an email for the 'support-email' setting in your counterblockd.conf") @RollbackProcessor.subscribe() def process_rollback(max_block_index): - if not max_block_index: #full reparse + if not max_block_index: # full reparse config.mongo_db.wallet_stats.drop() - else: #rollback + else: # rollback pass - diff --git a/counterblock/lib/modules/counterwallet_iofeeds.py b/counterblock/lib/modules/counterwallet_iofeeds.py index cb0d0031..30bce142 100644 --- a/counterblock/lib/modules/counterwallet_iofeeds.py +++ b/counterblock/lib/modules/counterwallet_iofeeds.py @@ -26,10 +26,11 @@ from counterblock.lib.processor import MessageProcessor, MempoolMessageProcessor, BlockProcessor, StartUpProcessor, CaughtUpProcessor, RollbackProcessor, API, CORE_FIRST_PRIORITY logger = logging.getLogger(__name__) -online_clients = {} #key = walletID, value = datetime when connected +online_clients = {} # key = walletID, value = datetime when connected #^ tracks "online status" via the chat feed module_config = {} -zmq_publisher_eventfeed = None #set on init +zmq_publisher_eventfeed = None # set on init + def _read_config(): configfile = configparser.ConfigParser() @@ -40,12 +41,12 @@ def _read_config(): assert configfile.has_section('Default') except: logging.warn("Could not find or parse counterwallet_iofeeds.conf config file!") - + if configfile.has_option('Default', 'socketio-host'): module_config['SOCKETIO_HOST'] = configfile.get('Default', 'socketio-host') else: module_config['SOCKETIO_HOST'] = "localhost" - + if configfile.has_option('Default', 'socketio-port'): module_config['SOCKETIO_PORT'] = configfile.get('Default', 'socketio-port') else: @@ -55,12 +56,12 @@ def _read_config(): assert int(module_config['SOCKETIO_PORT']) > 1 and int(module_config['SOCKETIO_PORT']) < 65535 except: raise Exception("Please specific a valid port number socketio-port configuration parameter") - + if configfile.has_option('Default', 'socketio-chat-host'): module_config['SOCKETIO_CHAT_HOST'] = configfile.get('Default', 'socketio-chat-host') else: module_config['SOCKETIO_CHAT_HOST'] = "localhost" - + if configfile.has_option('Default', 'socketio-chat-port'): module_config['SOCKETIO_CHAT_PORT'] = configfile.get('Default', 'socketio-chat-port') else: @@ -74,33 +75,37 @@ def _read_config(): @API.add_method def get_num_users_online(): - #gets the current number of users attached to the server's chat feed - return len(online_clients) + # gets the current number of users attached to the server's chat feed + return len(online_clients) + @API.add_method def is_chat_handle_in_use(handle): - #DEPRECATED 1.5 - results = config.mongo_db.chat_handles.find({ 'handle': { '$regex': '^%s$' % handle, '$options': 'i' } }) - return True if results.count() else False + # DEPRECATED 1.5 + results = config.mongo_db.chat_handles.find({'handle': {'$regex': '^%s$' % handle, '$options': 'i'}}) + return True if results.count() else False + @API.add_method def get_chat_handle(wallet_id): result = config.mongo_db.chat_handles.find_one({"wallet_id": wallet_id}) - if not result: return False #doesn't exist + if not result: + return False # doesn't exist result['last_touched'] = time.mktime(time.gmtime()) config.mongo_db.chat_handles.save(result) data = { 'handle': re.sub('[^\sA-Za-z0-9_-]', "", 
result['handle']), 'is_op': result.get('is_op', False), 'last_updated': result.get('last_updated', None) - } if result else {} - banned_until = result.get('banned_until', None) + } if result else {} + banned_until = result.get('banned_until', None) if banned_until != -1 and banned_until is not None: - data['banned_until'] = int(time.mktime(banned_until.timetuple())) * 1000 #convert to epoch ts in ms + data['banned_until'] = int(time.mktime(banned_until.timetuple())) * 1000 # convert to epoch ts in ms else: - data['banned_until'] = banned_until #-1 or None + data['banned_until'] = banned_until # -1 or None return data + @API.add_method def store_chat_handle(wallet_id, handle): """Set or update a chat handle""" @@ -108,12 +113,12 @@ def store_chat_handle(wallet_id, handle): raise Exception("Invalid chat handle: bad data type") if not re.match(r'^[\sA-Za-z0-9_-]{4,12}$', handle): raise Exception("Invalid chat handle: bad syntax/length") - - #see if this handle already exists (case insensitive) - results = config.mongo_db.chat_handles.find({ 'handle': { '$regex': '^%s$' % handle, '$options': 'i' } }) + + # see if this handle already exists (case insensitive) + results = config.mongo_db.chat_handles.find({'handle': {'$regex': '^%s$' % handle, '$options': 'i'}}) if results.count(): if results[0]['wallet_id'] == wallet_id: - return True #handle already saved for this wallet ID + return True # handle already saved for this wallet ID else: raise Exception("Chat handle already is in use") @@ -123,25 +128,25 @@ def store_chat_handle(wallet_id, handle): 'wallet_id': wallet_id, 'handle': handle, 'last_updated': time.mktime(time.gmtime()), - 'last_touched': time.mktime(time.gmtime()) - } + 'last_touched': time.mktime(time.gmtime()) + } }, upsert=True) #^ last_updated MUST be in UTC, as it will be compaired again other servers return True + @API.add_method def get_chat_history(start_ts=None, end_ts=None, handle=None, limit=1000): - #DEPRECATED 1.5 + # DEPRECATED 1.5 now_ts = time.mktime(datetime.datetime.utcnow().timetuple()) - if not end_ts: #default to current datetime + if not end_ts: # default to current datetime end_ts = now_ts - if not start_ts: #default to 5 days before the end date + if not start_ts: # default to 5 days before the end date start_ts = end_ts - (30 * 24 * 60 * 60) - + if limit >= 5000: raise Exception("Requesting too many lines (limit too high") - - + filters = { "when": { "$gte": datetime.datetime.utcfromtimestamp(start_ts) @@ -154,9 +159,10 @@ def get_chat_history(start_ts=None, end_ts=None, handle=None, limit=1000): filters['handle'] = handle chat_history = config.mongo_db.chat_history.find(filters, {'_id': 0}).sort("when", pymongo.DESCENDING).limit(limit) if not chat_history.count(): - return False #no suitable trade data to form a market price + return False # no suitable trade data to form a market price chat_history = list(chat_history) - return chat_history + return chat_history + @API.add_method def is_wallet_online(wallet_id): @@ -164,53 +170,55 @@ def is_wallet_online(wallet_id): class MessagesFeedServerNamespace(BaseNamespace): + def __init__(self, *args, **kwargs): super(MessagesFeedServerNamespace, self).__init__(*args, **kwargs) self._running = True - + def listener(self): - #subscribe to the zmq queue + # subscribe to the zmq queue sock = self.request['zmq_context'].socket(zmq.SUB) sock.setsockopt(zmq.SUBSCRIBE, "") sock.connect('inproc://queue_eventfeed') - + poller = zmq.Poller() poller.register(sock, zmq.POLLIN) - - #as we receive messages, send them out to the 
socket.io listener - #do this in a way that doesn't block indefinitely, and gracefully falls through if/when a client disconnects + + # as we receive messages, send them out to the socket.io listener + # do this in a way that doesn't block indefinitely, and gracefully falls through if/when a client disconnects while self._running: - socks = poller.poll(2500) #wait *up to* 2.5 seconds for events to arrive + socks = poller.poll(2500) # wait *up to* 2.5 seconds for events to arrive if socks: - event = socks[0][0].recv_json() #only one sock we're polling - #logger.info("socket.io: Sending message ID %s -- %s:%s" % ( + event = socks[0][0].recv_json() # only one sock we're polling + # logger.info("socket.io: Sending message ID %s -- %s:%s" % ( # event['_message_index'], event['_category'], event['_command'])) self.emit(event['_category'], event) - #sock.shutdown(socket.SHUT_RDWR) + # sock.shutdown(socket.SHUT_RDWR) sock.close() def on_subscribe(self): if 'listening' not in self.socket.session: self.socket.session['listening'] = True self.spawn(self.listener) - + def disconnect(self, silent=False): """Triggered when the client disconnects (e.g. client closes their browser)""" self._running = False return super(MessagesFeedServerNamespace, self).disconnect(silent=silent) - + class SocketIOMessagesFeedServer(object): """ Funnel messages coming from counterpartyd polls to socket.io clients """ + def __init__(self, zmq_context): # Dummy request object to maintain state between Namespace initialization. self.request = { 'zmq_context': zmq_context, - } - + } + def __call__(self, environ, start_response): if not environ['PATH_INFO'].startswith('/socket.io'): start_response('401 UNAUTHORIZED', []) @@ -220,43 +228,43 @@ def __call__(self, environ, start_response): class ChatFeedServerNamespace(BaseNamespace, BroadcastMixin): MAX_TEXT_LEN = 500 - TIME_BETWEEN_MESSAGES = 10 #in seconds (auto-adjust this in the future based on chat speed/volume) + TIME_BETWEEN_MESSAGES = 10 # in seconds (auto-adjust this in the future based on chat speed/volume) NUM_HISTORY_LINES_ON_JOIN = 100 - NUM_HISTORY_LINES_NO_REPEAT = 3 #max number of lines to go back ensuring the user is not repeating him/herself - + NUM_HISTORY_LINES_NO_REPEAT = 3 # max number of lines to go back ensuring the user is not repeating him/herself + def disconnect(self, silent=False): """Triggered when the client disconnects (e.g. client closes their browser)""" - #record the client as offline + # record the client as offline if 'wallet_id' not in self.socket.session: logger.warn("wallet_id not found in socket session: %s" % socket.session) return super(ChatFeedServerNamespace, self).disconnect(silent=silent) if self.socket.session['wallet_id'] in online_clients: del online_clients[self.socket.session['wallet_id']] return super(ChatFeedServerNamespace, self).disconnect(silent=silent) - + def on_ping(self, wallet_id): - """used to force a triggering of the connection tracking""" - #record the client as online + """used to force a triggering of the connection tracking""" + # record the client as online self.socket.session['wallet_id'] = wallet_id online_clients[wallet_id] = {'when': datetime.datetime.utcnow(), 'state': self} return True - + def on_start_chatting(self, wallet_id, is_primary_server): """this must be the first message sent after connecting to the chat server. Based on the passed wallet ID, it will retrieve the chat handle the user initially registered with. 
-        
+
         If is_primary_server is specified as True, the user is designating this server as its primary chat server. This means that it will be this server that will rebroadcast chat lines to the user (other, non-primary servers will not)
         """
-        #normally, wallet ID should be set from on_ping, however if the server goes down and comes back up, this will
+        # normally, wallet ID should be set from on_ping, however if the server goes down and comes back up, this will
         # not be the case for clients already logged in and chatting
-        if 'wallet_id' not in self.socket.session: #we specify wallet
+        if 'wallet_id' not in self.socket.session:  # we specify wallet
             self.socket.session['wallet_id'] = wallet_id
         else:
             assert self.socket.session['wallet_id'] == wallet_id
-        #lookup the walletid and ensure that it has a handle match for chat
-        chat_profile = config.mongo_db.chat_handles.find_one({"wallet_id": self.socket.session['wallet_id']})
+        # lookup the walletid and ensure that it has a handle match for chat
+        chat_profile = config.mongo_db.chat_handles.find_one({"wallet_id": self.socket.session['wallet_id']})
         handle = chat_profile['handle'] if chat_profile else None
         if not handle:
             return self.error('invalid_id', "No handle is defined for wallet ID %s" % self.socket.session['wallet_id'])
@@ -269,187 +277,200 @@ def on_start_chatting(self, wallet_id, is_primary_server):
     def on_get_lastlines(self):
         return list(config.mongo_db.chat_history.find({}, {'_id': 0}).sort(
             "when", pymongo.DESCENDING).limit(self.NUM_HISTORY_LINES_ON_JOIN))
-    
+
     def on_command(self, command, args):
         """command is the command to run, args is a list of arguments to the command"""
         if 'is_op' not in self.socket.session:
-            return self.error('invalid_state', "Invalid state") #this will trigger the client to auto re-establish state
+            return self.error('invalid_state', "Invalid state")  # this will trigger the client to auto re-establish state
         if command not in ['online', 'msg', 'op', 'unop', 'ban', 'unban', 'handle', 'help', 'disextinfo', 'enextinfo']:
             return self.error('invalid_command', "Unknown command: %s. Try /help for help." % command)
         if command not in ['online', 'msg', 'help'] and not self.socket.session['is_op']:
             return self.error('invalid_access', "Must be an op to use this command")
-        
-        if command == 'online': #/online
-            if not self.socket.session['is_primary_server']: return
+
+        if command == 'online':  # /online
+            if not self.socket.session['is_primary_server']:
+                return
             if len(args) != 1:
                 return self.error('invalid_args', "USAGE: /online {handle=} -- Desc: Determines whether a specific user is online")
             handle = args[0]
-            p = config.mongo_db.chat_handles.find_one({ 'handle': { '$regex': '^%s$' % handle, '$options': 'i' } })
+            p = config.mongo_db.chat_handles.find_one({'handle': {'$regex': '^%s$' % handle, '$options': 'i'}})
             if not p:
                 return self.error('invalid_args', "Handle '%s' not found" % handle)
             return self.emit("online_status", p['handle'], p['wallet_id'] in online_clients)
-        elif command == 'msg': #/msg
-            if not self.socket.session['is_primary_server']: return
+        elif command == 'msg':  # /msg
+            if not self.socket.session['is_primary_server']:
+                return
             if len(args) < 2:
                 return self.error('invalid_args', "USAGE: /msg {handle} {private message to send} -- Desc: Sends a private message to a specific user")
             handle = args[0]
             message = ' '.join(args[1:])
             if handle.lower() == self.socket.session['handle'].lower():
                 return self.error('invalid_args', "Don't be cray cray and try to message yourself, %s" % handle)
-            
+
             now = datetime.datetime.utcnow()
             if self.socket.session['banned_until'] == -1:
                 return self.error('banned', "Your handle is banned from chat indefinitely.")
             if self.socket.session['banned_until'] and self.socket.session['banned_until'] >= now:
-                return self.error('banned', "Your handle is still banned from chat for %s more seconds."
+                return self.error(
+                    'banned', "Your handle is still banned from chat for %s more seconds."
                     % int((self.socket.session['banned_until'] - now).total_seconds()))
-            
-            p = config.mongo_db.chat_handles.find_one({ 'handle': { '$regex': '^%s$' % handle, '$options': 'i' } })
+
+            p = config.mongo_db.chat_handles.find_one({'handle': {'$regex': '^%s$' % handle, '$options': 'i'}})
             if not p:
                 return self.error('invalid_args', "Handle '%s' not found" % handle)
             if p['wallet_id'] not in online_clients:
                 return self.error('invalid_args', "Handle '%s' is not online" % p['handle'])
-            
-            message = util.sanitize_eliteness(message[:self.MAX_TEXT_LEN]) #truncate to max allowed and sanitize
-            online_clients[p['wallet_id']]['state'].emit("emote", self.socket.session['handle'],
-                message, self.socket.session['is_op'], True, False) #isPrivate = True, viaCommand = False
+
+            message = util.sanitize_eliteness(message[:self.MAX_TEXT_LEN])  # truncate to max allowed and sanitize
+            online_clients[p['wallet_id']]['state'].emit(
+                "emote", self.socket.session['handle'],
+                message, self.socket.session['is_op'], True, False)  # isPrivate = True, viaCommand = False
         elif command in ['op', 'unop']:  # /op|unop
             if len(args) != 1:
                 return self.error('invalid_args', "USAGE: /op|unop {handle to op/unop} -- Desc: Gives/removes operator priveledges from a specific user")
             handle = args[0]
-            p = config.mongo_db.chat_handles.find_one({ 'handle': { '$regex': '^%s$' % handle, '$options': 'i' } })
+            p = config.mongo_db.chat_handles.find_one({'handle': {'$regex': '^%s$' % handle, '$options': 'i'}})
             if not p:
                 return self.error('invalid_args', "Handle '%s' not found" % handle)
             p['is_op'] = command == 'op'
             config.mongo_db.chat_handles.save(p)
-            #make the change active immediately
+            # make the change active immediately
             handle_lower = handle.lower()
             for sessid, socket in self.socket.server.sockets.items():
                 if socket.session.get('handle', None).lower() == handle_lower:
                     socket.session['is_op'] = p['is_op']
-            if self.socket.session['is_primary_server']: #let all users know
+            if self.socket.session['is_primary_server']:  # let all users know
                 self.broadcast_event("oped" if command == "op" else "unoped", self.socket.session['handle'], p['handle'])
-        elif command == 'ban': #/ban