diff --git a/.env b/.env index 43e1ee5..bf874a3 100644 --- a/.env +++ b/.env @@ -1,5 +1,8 @@ -# .env file -# +# === APPLICATION PORTS === +# you also need to update run_rest.sh and run_rpc.sh to these values if you change :) +REST_PORT=5000 +RPC_PORT=5001 + # === REDIS === # change 127.0.0.1 to the address or name of docker service REDIS_URL=redis://127.0.0.1:6379/0 @@ -13,35 +16,20 @@ INCREASE_COUNTER_EVERY=25 # ==================================== # = RPCS & REST ENDPOINTS (TO QUERY) = # ==================================== -RPC_URL="http://localhost:26657" +# Note: These can be localhost if you wish to run on the node itself +RPC_URL="http://15.204.143.232:26657" BACKUP_RPC_URL="https://rpc.juno.strange.love" -# REST_URL="http://15.204.143.232:1317" -REST_URL="http://localhost:1317" -BACKUP_REST_URL="https://api.juno.strange.love" # TODO - -# ==================================== -# = RPCS & REST ENDPOINTS (COSMETIC) = -# ==================================== -# Basically remove non required ports (80/443) & no http(s):// -# This repalces the RPCs HTML to match out endpoints on click -BASE_RPC="localhost:26657" -BACKUP_BASE_RPC="rpc.juno.strange.love" +REST_URL="http://15.204.143.232:1317" +BACKUP_REST_URL="https://api.juno.strange.love" -# What your A record + domain is ( where the user goes without and ports or http(s):// ) -RPC_DOMAIN="juno-rpc.reece.sh" +# === WEBSOCKET === +RPC_WEBSOCKET="15.204.143.232:26657" -# === WEBSOCKETS === -WEBSOCKET_ADDR="15.204.143.232:26657" -# BACKUP_WEBSOCKET_ADDR="rpc.juno.strange.love:443" ?? -# === APPLICATION PORTS === -# you also need to update run_rest.sh and run_rpc.sh to these values if you change :) -REST_PORT=5000 -RPC_PORT=5001 # === Cosmetic === -API_TITLE="Juno Network API" RPC_TITLE="Juno Network RPC" +API_TITLE="Juno Network REST API" RPC_CUSTOM_TEXT='Custom caching solution active for {RPC_DOMAIN}
My Juno REST API
' \ No newline at end of file diff --git a/CONFIG.py b/CONFIG.py new file mode 100644 index 0000000..169fa7b --- /dev/null +++ b/CONFIG.py @@ -0,0 +1,77 @@ +import json +import os +import re +from os import getenv + +import redis +from dotenv import load_dotenv + +CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) + +load_dotenv(os.path.join(CURRENT_DIR, ".env")) + +# ============= +# === REDIS === +# ============= +REDIS_URL = getenv("REDIS_URL", "redis://127.0.0.1:6379/0") +REDIS_DB = redis.Redis.from_url(REDIS_URL) + +ENABLE_COUNTER = getenv("ENABLE_COUNTER", "true").lower().startswith("t") +INC_EVERY = int(getenv("INCREASE_COUNTER_EVERY", 10)) + +# =========== +# === RPC === +# =========== +RPC_PORT = int(getenv("RPC_PORT", 5001)) +RPC_PREFIX = getenv("REDIS_RPC_PREFIX", "junorpc") +RPC_URL = getenv("RPC_URL", "https://juno-rpc.reece.sh:443") + +BACKUP_RPC_URL = getenv("BACKUP_RPC_URL", "https://rpc.juno.strange.love:443") + +RPC_WEBSOCKET = f'ws://{getenv("WEBSOCKET_ADDR", "15.204.143.232:26657")}/websocket' + +RPC_DOMAIN = getenv("RPC_DOMAIN", "localhost:5001") + +# ============ +# === REST === +# ============ +REST_PORT = int(getenv("REST_PORT", 5000)) + +API_TITLE = getenv("API_TITLE", "Swagger API") +REST_PREFIX = getenv("REDIS_REST_PREFIX", "junorest") + +REST_URL = getenv("REST_URL", "https://juno-rest.reece.sh") +BACKUP_REST_URL = getenv("BACKUP_REST_URL", f"https://api.juno.strange.love") + +OPEN_API = f"{REST_URL}/static/openapi.yml" + +# === Cache Times === +cache_times: dict = {} +DEFAULT_CACHE_SECONDS: int = 6 +RPC_ENDPOINTS: dict = {} +REST_ENDPOINTS: dict = {} + +# === CACHE HELPER === +def update_cache_times(): + """ + Updates any config variables which can be changed without restarting the server. + Useful for the /cache_info endpoint & actually applying said cache changes at any time + """ + global cache_times, DEFAULT_CACHE_SECONDS, RPC_ENDPOINTS, REST_ENDPOINTS + + with open(os.path.join(CURRENT_DIR, "cache_times.json"), "r") as f: + cache_times = json.loads(f.read()) + + DEFAULT_CACHE_SECONDS = cache_times.get("DEFAULT", 6) + RPC_ENDPOINTS = cache_times.get("rpc", {}) + REST_ENDPOINTS = cache_times.get("rest", {}) + + +def get_cache_time_seconds(path: str, is_rpc: bool) -> int: + endpoints = RPC_ENDPOINTS if is_rpc else REST_ENDPOINTS + + for k, seconds in endpoints.items(): + if re.match(k, path): + return seconds + + return DEFAULT_CACHE_SECONDS diff --git a/HELPERS.py b/HELPERS.py new file mode 100644 index 0000000..4f09b3a --- /dev/null +++ b/HELPERS.py @@ -0,0 +1,93 @@ +import re +from os import getenv + +import requests + +import CONFIG +from CONFIG import REDIS_DB + +total_calls = { + # RPC: + "total_cache;get_rpc_endpoint": 0, + "total_outbound;get_rpc_endpoint": 0, + # RPC Cache: + "total_cache;post_endpoint": 0, + "total_outbound;post_endpoint": 0, + # REST: + "total_cache;get_all_rest": 0, + "total_outbound;get_all_rest": 0, +} + + +def increment_call_value(key): + global total_calls + + if CONFIG.ENABLE_COUNTER == False: + return + + if key not in total_calls: + total_calls[key] = 0 + + if total_calls[key] >= CONFIG.INC_EVERY: + REDIS_DB.incr(f"{CONFIG.RPC_PREFIX};{key}", amount=total_calls[key]) + total_calls[key] = 0 + else: + total_calls[key] += 1 + + +def download_openapi_locally(): + r = requests.get(CONFIG.OPEN_API) + file_loc = f"{CONFIG.CURRENT_DIR}/static/openapi.yml" + with open(file_loc, "w") as f: + f.write(r.text) + + +def get_swagger_code_from_source(): + req = requests.get(f"{CONFIG.REST_URL}") + + html = req.text.replace( + 
"//unpkg.com/swagger-ui-dist@3.40.0/favicon-16x16.png", + "/static/rest-favicon.png", + ) + html = re.sub(r".*", f"{CONFIG.API_TITLE}", html) + return html + + +def replace_rpc_text() -> str: + # we replace after on requests of the user, then repalce this text to our cache endpoint at time of requests to root endpoint + try: + RPC_ROOT_HTML = requests.get(f"{CONFIG.RPC_URL}/").text + except: + RPC_ROOT_HTML = requests.get(f"{CONFIG.BACKUP_RPC_URL}/").text + + RPC_TITLE = getenv("RPC_TITLE", "") + if len(RPC_TITLE) > 0: + RPC_ROOT_HTML = RPC_ROOT_HTML.replace( + "", + f"{RPC_TITLE}", + ) + + # Puts text at the bottom, maybe put at the top in the future? + RPC_CUSTOM_TEXT = getenv("RPC_CUSTOM_TEXT", "").replace( + "{RPC_DOMAIN}", f"{CONFIG.RPC_DOMAIN}" + ) + if len(RPC_CUSTOM_TEXT) > 0: + RPC_ROOT_HTML = RPC_ROOT_HTML.replace( + "Available endpoints:
<br><br>", + f"{RPC_CUSTOM_TEXT}<br>Available endpoints:<br><br>", + ) + + # add cache_info endpoint. THIS REMOVES BLANK 'Available endpoints:<br><br>' + RPC_ROOT_HTML = RPC_ROOT_HTML.replace( + "Available endpoints:<br><br>", + f'//{{BASE_URL}}/cache_info<br><br>
', + # we replace the BASE_URL on the call to the root endpoint + ) + + # Set RPC favicon to nothing + RPC_ROOT_HTML = RPC_ROOT_HTML.replace( + "", + f'', + ) + + return RPC_ROOT_HTML diff --git a/README.md b/README.md index 6c623a9..80680f9 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,12 @@ docs here... ### Variable Length Cache In the `cache_times.json` file, you can specify specific endpoints and how long said queries should persist in the cache. -This is useful for large queries such as /validators which may return 100+ validators. This data does not change all the often, making it useful for caching for longer periods of time. +This is useful for large queries such as /validators which may return 100+ validators. This data does not change often, making it useful for caching for longer periods of time. + +If you wish to disable the cache, you can set the value to 0 for said endpoint. If you wish to disable the endpoint query entirely, set to a value less than 0 (such as -1). +By default the cosmos/auth/v1beta1/accounts endpoint is disabled, as it temporarily halts the node. This file uses regex pattern matching as keys, with values as the number of seconds to cache once it has been called. For python strings, you must prefix any `*` you find with a `.`. So to match "random" in "my 8 random11 string", you would do `.*random.*` to match all before and after. + +This is ONLY the path, which means it does not start with a `/`. diff --git a/cache_times.json b/cache_times.json index 5c6d0b5..2ba04dd 100644 --- a/cache_times.json +++ b/cache_times.json @@ -1,23 +1,26 @@ { "DEFAULT": 6, "rpc": { - ".*genesis.*": 3600, - ".*block?height=.*": 3600, - ".*block_results?height=.*": 3600, - ".*unconfirmed_txs.*": 1 + "genesis": 86400, + "genesis.*": 86400, + "block?height=.*": 21600, + "block_results?height=.*": 21600, + "unconfirmed_txs": 1 }, "rest": { - ".*/delegations": 300, - ".*bank/v1beta1/supply.*": 300, - ".*/params": 300, - ".*/slashes": 30, - ".*/commission": 30, - ".*/outstanding_rewards": 30, - ".*/proposals": 60, - ".*/historical_info/.*": 3600, - ".*cosmos/staking/v1beta1/pool": 30, - ".*cosmos/staking/v1beta1/validators": 120, - ".*ibc/apps/transfer/v1/denom_traces": 30, - ".*cosmos/base/tendermint/v1beta1/node_info": 60 + "cosmos\/auth\/v1beta1\/accounts": -1, + + ".*\/params": 300, + ".*delegations": 300, + ".*slashes": 30, + ".*commission": 30, + ".*outstanding_rewards": 30, + "cosmos\/gov\/v1beta1\/proposals.*": 60, + "cosmos\/staking\/v1beta1\/historical_info.*": 3600, + "cosmos\/bank\/v1beta1\/supply": 60, + "cosmos\/staking\/v1beta1\/pool": 30, + "cosmos\/staking\/v1beta1\/validators": 120, + "ibc\/apps\/transfer\/v1\/denom_traces": 30, + "tendermint\/v1beta1\/node_info": 60 } } \ No newline at end of file diff --git a/rest.py b/rest.py new file mode 100644 index 0000000..ae278e7 --- /dev/null +++ b/rest.py @@ -0,0 +1,86 @@ +# Reece Williams | https://reece.sh | Jan 2023 +# ---------------------------------------------- +# pip install Flask redis flask_caching requests +# pip install --upgrade urllib3 +# ---------------------------------------------- + +import json +import re + +import requests +from flask import Flask, jsonify, request +from flask_cors import CORS, cross_origin + +import CONFIG +from CONFIG import REDIS_DB +from HELPERS import ( + download_openapi_locally, + get_swagger_code_from_source, + increment_call_value, +) + +app = Flask(__name__) +cors = CORS(app, resources={r"/*": {"origins": "*"}}) + + +REST_SWAGGER_HTML = "" + + +@app.before_first_request 
+def before_first_request(): + CONFIG.update_cache_times() + download_openapi_locally() + + +# if route is just /, return the openapi swagger ui +@app.route("/", methods=["GET"]) +@cross_origin() +def root(): + global REST_SWAGGER_HTML + + if len(REST_SWAGGER_HTML) > 0: + return REST_SWAGGER_HTML + + REST_SWAGGER_HTML = get_swagger_code_from_source() + return REST_SWAGGER_HTML + + +# return all RPC queries +@app.route("/", methods=["GET"]) +@cross_origin() +def get_all_rest(path): + url = f"{CONFIG.REST_URL}/{path}" + args = request.args + + cache_seconds = CONFIG.get_cache_time_seconds(path, is_rpc=False) + if cache_seconds < 0: + return jsonify( + { + "error": f"cosmos endpoint cache: The path '{path}' is disabled on this node..." + } + ) + + key = f"{CONFIG.REST_PREFIX};{url};{args}" + + v = REDIS_DB.get(key) + if v: + increment_call_value("total_cache;get_all_rest") + return jsonify(json.loads(v)) + + try: + req = requests.get(url, params=args) + except: + req = requests.get(f"{CONFIG.BACKUP_REST_URL}/{path}", params=args) + + if req.status_code != 200: + return jsonify(req.json()) + + REDIS_DB.setex(key, cache_seconds, json.dumps(req.json())) + increment_call_value("total_outbound;get_all_rest") + + return req.json() + + +if __name__ == "__main__": + before_first_request() + app.run(debug=True, host="0.0.0.0", port=CONFIG.REST_PORT) diff --git a/rest/rest.py b/rest/rest.py deleted file mode 100644 index 22c820c..0000000 --- a/rest/rest.py +++ /dev/null @@ -1,136 +0,0 @@ -# Reece Williams | https://reece.sh | Jan 2023 -# ---------------------------------------------- -# pip install Flask redis flask_caching requests -# pip install --upgrade urllib3 -# ---------------------------------------------- - -# https://flask.palletsprojects.com/en/2.0.x/deploying/wsgi-standalone/#proxy-setups - -import json -import os -import re -from os import getenv - -import redis -import requests -from dotenv import load_dotenv -from flask import Flask, jsonify, request -from flask_cors import CORS, cross_origin - -current_dir = os.path.dirname(os.path.realpath(__file__)) -parent_dir = os.path.dirname(current_dir) - -# Load specific cache times (regex supported) -with open(f"{parent_dir}/cache_times.json", "r") as f: - cache_times: dict = json.loads(f.read()) - -DEFAULT_CACHE_SECONDS = cache_times.get("DEFAULT", 6) -ENDPOINTS = cache_times.get("rest", {}) - -load_dotenv(os.path.join(parent_dir, ".env")) - -API_TITLE = getenv("API_TITLE", "Swagger API") - -port = int(getenv("REST_PORT", 5000)) - -# Multiple in the future to iterate over? 
-# REST_URL = "https://juno-rest.reece.sh" -REST_URL = getenv("REST_URL", "https://juno-rest.reece.sh") -OPEN_API = f"{REST_URL}/static/openapi.yml" - -ENABLE_COUNTER = getenv("ENABLE_COUNTER", "true").lower().startswith("t") - -PREFIX = getenv("REDIS_REST_PREFIX", "junorest") - -app = Flask(__name__) -cors = CORS(app, resources={r"/*": {"origins": "*"}}) - - -def download_openapi_locally(): - r = requests.get(OPEN_API) - file_loc = f"{current_dir}/static/openapi.yml" - with open(file_loc, "w") as f: - f.write(r.text) - - -REDIS_URL = getenv("REDIS_URL", "redis://127.0.0.1:6379/0") -rDB = redis.Redis.from_url(REDIS_URL) - - -total_calls = { - "total_cache;get_all_rest": 0, - "total_outbound;get_all_rest": 0, -} - -INC_EVERY = int(getenv("INCREASE_COUNTER_EVERY", 10)) - - -def inc_value(key): - global total_calls - - if ENABLE_COUNTER == False: - return - - if key not in total_calls: - total_calls[key] = 0 - - if total_calls[key] >= INC_EVERY: - rDB.incr(f"{PREFIX};{key}", amount=total_calls[key]) - total_calls[key] = 0 - else: - total_calls[key] += 1 - - -HTML = "" - -# if route is just /, return the openapi swagger ui -@app.route("/", methods=["GET"]) -@cross_origin() -def root(): - global HTML - - if len(HTML) > 0: - return HTML - - # sets HTML if not set - req = requests.get(f"{REST_URL}") - - HTML = req.text.replace( - "//unpkg.com/swagger-ui-dist@3.40.0/favicon-16x16.png", "/static/favicon.png" - ) - HTML = re.sub(r".*", f"{API_TITLE}", HTML) - - return HTML - - -# return any RPC queries -@app.route("/", methods=["GET"]) -@cross_origin() -def get_all_rest(path): - url = f"{REST_URL}/{path}" - args = request.args - - key = f"{PREFIX};{url};{args}" - v = rDB.get(key) - if v: - inc_value("total_cache;get_all_rest") - return jsonify(json.loads(v.decode("utf-8"))) - - try: - req = requests.get(url, params=args) - except: - return {"error": "error"} - - cache_seconds = next( - (v for k, v in ENDPOINTS.items() if re.match(k, path)), DEFAULT_CACHE_SECONDS - ) - - rDB.setex(key, cache_seconds, json.dumps(req.json())) - inc_value("total_outbound;get_all_rest") - - return req.json() - - -if __name__ == "__main__": - download_openapi_locally() - app.run(debug=True, host="0.0.0.0", port=port) diff --git a/rest/run_rest.sh b/rest/run_rest.sh deleted file mode 100755 index 5842a11..0000000 --- a/rest/run_rest.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/sh -# https://www.awright.io/posts/7/running-multiple-instances-of-gunicorn-with-systemd -# -# chmod +x rest/run_rest.sh -# -# code /lib/systemd/system/juno_rest.service -# -# [Unit] -# Description=gunicorn rest -# After=network.target -# PartOf=gunicorn.target -# # Since systemd 235 reloading target can pass through -# ReloadPropagatedFrom=gunicorn.target -# [Service] -# User=root -# Group=root -# WorkingDirectory=/root/python-rpc-cache/rest/%i -# ExecStart=/root/python-rpc-cache/run_rest.sh -# [Install] -# WantedBy=gunicorn.target -# -# sudo systemctl daemon-reload -# sudo systemctl restart juno_rest.service -# sudo systemctl start juno_rest.service -# sudo systemctl stop juno_rest.service -# sudo systemctl enable juno_rest.service -# sudo systemctl status juno_rest.service - -WORKERS=${WORKERS:-8} -THREADS=${THREADS:-4} - -cd /root/python-rpc-cache/rest -gunicorn --workers $WORKERS --threads $THREADS --preload --bind 0.0.0.0:5000 rest:app \ No newline at end of file diff --git a/rpc.py b/rpc.py index 053033c..b4ea6a6 100644 --- a/rpc.py +++ b/rpc.py @@ -6,116 +6,69 @@ import asyncio import json -import os import re -from os import getenv -import 
redis import requests import websockets -from dotenv import load_dotenv from flask import Flask, jsonify, request from flask_cors import CORS, cross_origin from flask_sock import Sock from flask_socketio import emit -current_dir = os.path.dirname(os.path.realpath(__file__)) - -load_dotenv(os.path.join(current_dir, ".env")) - -port = int(getenv("RPC_PORT", 5001)) - -PREFIX = getenv("REDIS_RPC_PREFIX", "junorpc") - -RPC_URL = getenv("RPC_URL", "https://juno-rpc.reece.sh:443") -BASE_RPC = getenv("BASE_RPC", "15.204.143.232:26657") - -BACKUP_RPC_URL = getenv("BACKUP_RPC_URL", "https://rpc.juno.strange.love:443") -BACKUP_BASE_RPC = getenv("BACKUP_BASE_RPC", "rpc.juno.strange.love") - -ENABLE_COUNTER = getenv("ENABLE_COUNTER", "true").lower().startswith("t") - -data_websocket = f'ws://{getenv("WEBSOCKET_ADDR", "15.204.143.232:26657")}/websocket' - -RPC_DOMAIN = getenv("RPC_DOMAIN", "localhost:5001") - -# Load specific cache times (regex supported) -with open(f"{current_dir}/cache_times.json", "r") as f: - cache_times: dict = json.loads(f.read()) - -DEFAULT_CACHE_SECONDS = cache_times.get("DEFAULT", 6) -ENDPOINTS = cache_times.get("rest", {}) - -# replace RPC text to the updated domain -try: - RPC_ROOT_HTML = requests.get(f"{RPC_URL}/").text.replace(BASE_RPC, RPC_DOMAIN) -except: - RPC_ROOT_HTML = requests.get(f"{BACKUP_RPC_URL}/").text.replace( - BACKUP_BASE_RPC, RPC_DOMAIN - ) - -RPC_TITLE = getenv("RPC_TITLE", "") -if len(RPC_TITLE) > 0: - RPC_ROOT_HTML = RPC_ROOT_HTML.replace( - "", - f"{RPC_TITLE}", - ) - -# Puts text at the bottom, maybe put at the top in the future? -RPC_CUSTOM_TEXT = getenv("RPC_CUSTOM_TEXT", "").replace("{RPC_DOMAIN}", f"{RPC_DOMAIN}") -if len(RPC_CUSTOM_TEXT) > 0: - RPC_ROOT_HTML = RPC_ROOT_HTML.replace( - "Available endpoints:
<br><br>", - f"{RPC_CUSTOM_TEXT}<br>Available endpoints:<br><br>
", - ) - +import CONFIG +from CONFIG import REDIS_DB +from HELPERS import increment_call_value, replace_rpc_text +# === FLASK === rpc_app = Flask(__name__) sock = Sock(rpc_app) cors = CORS(rpc_app, resources={r"/*": {"origins": "*"}}) -REDIS_URL = getenv("REDIS_URL", "redis://127.0.0.1:6379/0") -rDB = redis.Redis.from_url(REDIS_URL) - - -@rpc_app.route("/", methods=["GET"]) -@cross_origin() -def get_all_rpc(): - return RPC_ROOT_HTML +RPC_ROOT_HTML: str -total_calls = { - "total_cache;get_rpc_endpoint": 0, - "total_outbound;get_rpc_endpoint": 0, - # - "total_cache;post_endpoint": 0, - "total_outbound;post_endpoint": 0, -} +@rpc_app.before_first_request +def before_first_request(): + global RPC_ROOT_HTML + CONFIG.update_cache_times() + RPC_ROOT_HTML = replace_rpc_text() -INC_EVERY = int(getenv("INCREASE_COUNTER_EVERY", 10)) +# === ROUTES === +@rpc_app.route("/", methods=["GET"]) +@cross_origin() +def root(): + # get the data between :// and the final / + base = re.search(r"\/\/.*\/", request.base_url).group(0) + # remove any /'s + base = base.replace("/", "") -def inc_value(key): - global total_calls + #

Endpoints that require arguments:
)', RPC_ROOT_HTML + ).group(0) - if ENABLE_COUNTER == False: - return + return RPC_ROOT_HTML.replace(rpc_url, base).replace("{BASE_URL}", base) - if key not in total_calls: - total_calls[key] = 0 - if total_calls[key] >= INC_EVERY: - rDB.incr(f"{PREFIX};{key}", amount=total_calls[key]) - total_calls[key] = 0 - else: - total_calls[key] += 1 +@rpc_app.route("/cache_info", methods=["GET"]) +@cross_origin() +def cache_info(): + """ + Updates viewable cache times (seconds) at DOMAIN/cache_info. + Auto updates for this program on update/change automatically without restart. + + We only store the data so any time its requested every X minutes, we regenerate the data. + """ + key = f"{CONFIG.RPC_PREFIX};cache_times" + v = REDIS_DB.get(key) + if v: + return jsonify(json.loads(v)) + CONFIG.update_cache_times() -def get_cache_time_seconds(path: str) -> int: - cache_seconds = next( - (v for k, v in ENDPOINTS.items() if re.match(k, path)), DEFAULT_CACHE_SECONDS - ) - return cache_seconds + REDIS_DB.setex(key, 15 * 60, json.dumps(CONFIG.cache_times)) + return jsonify(CONFIG.cache_times) @rpc_app.route("/", methods=["GET"]) @@ -123,53 +76,69 @@ def get_cache_time_seconds(path: str) -> int: def get_rpc_endpoint(path): global total_calls - url = f"{RPC_URL}/{path}" + url = f"{CONFIG.RPC_URL}/{path}" args = request.args - key = f"{PREFIX};{url};{args}" + key = f"{CONFIG.RPC_PREFIX};{url};{args}" + + cache_seconds = CONFIG.get_cache_time_seconds(path, is_rpc=True) + if cache_seconds < 0: + return jsonify( + { + "error": f"cosmos endpoint cache: The path '{path}' is disabled on this node..." + } + ) - v = rDB.get(key) + v = REDIS_DB.get(key) if v: - inc_value("total_cache;get_rpc_endpoint") - return jsonify(json.loads(v.decode("utf-8"))) + increment_call_value("total_cache;get_rpc_endpoint") + return jsonify(json.loads(v)) try: req = requests.get(url, params=args) except Exception as e: - print(e) - req = requests.get(f"{BACKUP_RPC_URL}/{path}", params=args) + req = requests.get(f"{CONFIG.BACKUP_RPC_URL}/{path}", params=args) - cache_seconds = get_cache_time_seconds(path) + if req.status_code != 200: + return jsonify(req.json()) - rDB.setex(key, cache_seconds, json.dumps(req.json())) - inc_value("total_outbound;get_rpc_endpoint") + REDIS_DB.setex(key, cache_seconds, json.dumps(req.json())) + increment_call_value("total_outbound;get_rpc_endpoint") return req.json() @rpc_app.route("/", methods=["POST"]) @cross_origin() -def post_endpoint(): +def post_rpc_endpoint(): REQ_DATA: dict = request.get_json() method, params = REQ_DATA.get("method", None), REQ_DATA.get("params", None) - key = f"{PREFIX};{method};{params}" + key = f"{CONFIG.RPC_PREFIX};{method};{params}" + + cache_seconds = CONFIG.get_cache_time_seconds(method, is_rpc=True) + if cache_seconds < 0: + return jsonify( + { + "error": f"cosmos endpoint cache: The RPC method '{method}' is disabled on this node..." 
+ } + ) - v = rDB.get(key) + v = REDIS_DB.get(key) if v: - inc_value("total_cache;post_endpoint") - return jsonify(json.loads(v.decode("utf-8"))) + increment_call_value("total_cache;post_endpoint") + return jsonify(json.loads(v)) - # make req try: - req = requests.post(f"{RPC_URL}", data=json.dumps(REQ_DATA)) + req = requests.post(f"{CONFIG.RPC_URL}", data=json.dumps(REQ_DATA)) except: - req = requests.post(f"{BACKUP_RPC_URL}", data=json.dumps(REQ_DATA)) + req = requests.post(f"{CONFIG.BACKUP_RPC_URL}", data=json.dumps(REQ_DATA)) - cache_seconds = get_cache_time_seconds(method) + if req.status_code != 200: + return jsonify(req.json()) - rDB.setex(key, cache_seconds, json.dumps(req.json())) - inc_value("total_outbound;post_endpoint") + REDIS_DB.setex(key, cache_seconds, json.dumps(req.json())) + increment_call_value("total_outbound;post_endpoint") return req.json() @@ -180,7 +149,6 @@ def post_endpoint(): # JSONRPC requests can be also made via websocket. The websocket endpoint is at /websocket, e.g. localhost:26657/websocket. Asynchronous RPC functions like event subscribe and unsubscribe are only available via websockets. # https://github.com/hashrocket/ws # grpcurl -plaintext -d "{\"address\":\"juno10r39fueph9fq7a6lgswu4zdsg8t3gxlq670lt0\"}" wss://juno-rpc.reece.sh/websocket cosmos.bank.v1beta1.Query/AllBalances -# flask jsonrpc_websocket /websocket endpoint, connect to data_websocket # grpcurl -plaintext -d "{\"address\":\"juno10r39fueph9fq7a6lgswu4zdsg8t3gxlq670lt0\"}" 15.204.143.232:9090 cosmos.bank.v1beta1.Query/AllBalances # curl -X GET -H "Content-Type: application/json" -H "x-cosmos-block-height: 6619410" http://15.204.143.232:1317/cosmos/bank/v1beta1/balances/juno10r39fueph9fq7a6lgswu4zdsg8t3gxlq670lt0 @@ -190,7 +158,7 @@ def websocket(ws): print("websocket connected") async def handle_subscribe(): - async with websockets.connect(data_websocket) as websocket: + async with websockets.connect(CONFIG.RPC_WEBSOCKET) as websocket: while True: # receive data from the websocket data = await websocket.recv() @@ -204,4 +172,5 @@ async def handle_subscribe(): if __name__ == "__main__": - rpc_app.run(debug=True, host="0.0.0.0", port=port) + before_first_request() + rpc_app.run(debug=True, host="0.0.0.0", port=CONFIG.RPC_PORT) diff --git a/run_rest.sh b/run_rest.sh new file mode 100755 index 0000000..a387d3a --- /dev/null +++ b/run_rest.sh @@ -0,0 +1,40 @@ +#!/bin/sh +# +# chmod +x run_rest.sh +# +# sudo nano /lib/systemd/system/juno_rest.service +# +# If you are running as root, `sudo python -m pip install -r requirements.txt` +# +# [Unit] +# Description=gunicorn rest +# After=network.target +# PartOf=gunicorn.target +# # Since systemd 235 reloading target can pass through +# ReloadPropagatedFrom=gunicorn.target +# [Service] +# User=root +# Group=root +# WorkingDirectory=/root/cosmos-endpoint-cache/%i +# ExecStart=/root/cosmos-endpoint-cache/run_rest.sh +# [Install] +# WantedBy=gunicorn.target +# +# sudo systemctl daemon-reload +# sudo systemctl status juno_rest.service +# sudo systemctl start juno_rest.service +# sudo systemctl stop juno_rest.service +# sudo systemctl restart juno_rest.service +# sudo systemctl enable juno_rest.service + +PORT=${PORT:-5000} + +WORKERS=${WORKERS:-8} +THREADS=${THREADS:-2} +W_CONN=${W_CONN:-2} +BACKLOG=${BACKLOG:-2048} + +THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +cd $THIS_DIR + +gunicorn --workers $WORKERS --threads $THREADS --worker-connections $W_CONN --backlog $BACKLOG --bind 0.0.0.0:$PORT --preload rest:app \ 
No newline at end of file diff --git a/run_rpc.sh b/run_rpc.sh index 7c1fffa..0b5912c 100755 --- a/run_rpc.sh +++ b/run_rpc.sh @@ -1,22 +1,40 @@ #!/bin/sh +# # chmod +x run_rpc.sh -# code /lib/systemd/system/juno_rpc.service +# +# sudo nano /lib/systemd/system/juno_rpc.service +# +# If you are running as root, `sudo python -m pip install -r requirements.txt` +# # [Unit] -# Description=My Shell Script +# Description=gunicorn rpc +# After=network.target +# PartOf=gunicorn.target +# # Since systemd 235 reloading target can pass through +# ReloadPropagatedFrom=gunicorn.target # [Service] -# ExecStart=/root/python-rpc-cache/run_rpc.sh +# User=root +# Group=root +# WorkingDirectory=/root/cosmos-endpoint-cache/%i +# ExecStart=/root/cosmos-endpoint-cache/run_rpc.sh # [Install] -# WantedBy=multi-user.target +# WantedBy=gunicorn.target # # sudo systemctl daemon-reload -# sudo systemctl restart juno_rpc.service +# sudo systemctl status juno_rpc.service # sudo systemctl start juno_rpc.service # sudo systemctl stop juno_rpc.service +# sudo systemctl restart juno_rpc.service # sudo systemctl enable juno_rpc.service -# sudo systemctl status juno_rpc.service + +PORT=${PORT:-5001} WORKERS=${WORKERS:-20} THREADS=${THREADS:-2} +W_CONN=${W_CONN:-2} +BACKLOG=${BACKLOG:-2048} + +THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +cd $THIS_DIR -cd /root/python-rpc-cache -gunicorn --workers $WORKERS --threads $THREADS --backlog 80000 --preload --worker-connections 80000 --bind 0.0.0.0:5001 rpc:rpc_app \ No newline at end of file +gunicorn --workers $WORKERS --threads $THREADS --worker-connections $W_CONN --backlog $BACKLOG --bind 0.0.0.0:$PORT --preload rpc:rpc_app \ No newline at end of file diff --git a/scripts/pain.sh b/scripts/pain.sh deleted file mode 100644 index e0ea599..0000000 --- a/scripts/pain.sh +++ /dev/null @@ -1,8 +0,0 @@ -x=1 -while [ $x -le 5 ] -do - curl https://juno-rpc-cache.reece.sh/abci_info? & -done - - -while true; do curl -d '{"jsonrpc":"2.0","id":542337771993,"method":"status","params":{}}' -H "Content-Type: application/json" -X POST https://juno-rpc-cache.reece.sh/; sleep 0.1; done \ No newline at end of file diff --git a/rest/static/openapi.yml b/static/openapi.yml similarity index 100% rename from rest/static/openapi.yml rename to static/openapi.yml diff --git a/rest/static/favicon.png b/static/rest-favicon.png similarity index 100% rename from rest/static/favicon.png rename to static/rest-favicon.png diff --git a/todo.txt b/todo.txt index 8640b37..17321de 100644 --- a/todo.txt +++ b/todo.txt @@ -4,4 +4,4 @@ List of endpoints in the .env file. Change to a JSON? 3) Add docker / Akash support + instructions -4) Example nginx config +4) Example nginx config \ No newline at end of file
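
For reference, here is a minimal sketch of how the regex keys in `cache_times.json` resolve to a cache TTL, in the spirit of the `get_cache_time_seconds` helper this PR adds to `CONFIG.py`. The inline dictionary is a hypothetical subset of the shipped config, kept local so the snippet runs on its own:

```python
import re

# Hypothetical subset of cache_times.json; the real values are loaded by
# CONFIG.update_cache_times() and can be changed without restarting.
DEFAULT_CACHE_SECONDS = 6
REST_ENDPOINTS = {
    "cosmos/auth/v1beta1/accounts": -1,        # negative value: endpoint disabled
    ".*/params": 300,                          # any */params path
    "cosmos/staking/v1beta1/validators": 120,
}


def get_cache_time_seconds(path: str) -> int:
    # Paths are matched without a leading "/" (e.g. "cosmos/staking/v1beta1/pool").
    for pattern, seconds in REST_ENDPOINTS.items():
        if re.match(pattern, path):
            return seconds
    return DEFAULT_CACHE_SECONDS


print(get_cache_time_seconds("cosmos/staking/v1beta1/validators"))   # 120
print(get_cache_time_seconds("cosmos/distribution/v1beta1/params"))  # 300
print(get_cache_time_seconds("cosmos/auth/v1beta1/accounts"))        # -1 (request refused)
print(get_cache_time_seconds("cosmos/bank/v1beta1/balances/juno1x")) # 6 (default)
```

The first matching key wins, so narrower patterns (like the disabled accounts endpoint) should sit above broad catch-alls such as `.*/params`.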
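
Below is a compact sketch of the read-through pattern that `rest.py` and `rpc.py` both follow: build a cache key from prefix, URL and query args, try Redis, fetch from the upstream node on a miss, then `SETEX` the JSON body. The Redis URL, prefix and endpoint shown are placeholders, not the production settings:

```python
import json

import redis
import requests

REDIS_DB = redis.Redis.from_url("redis://127.0.0.1:6379/0")  # assumes a local Redis


def cached_get(prefix: str, base_url: str, path: str, params: dict, cache_seconds: int) -> dict:
    """Serve from Redis when possible, otherwise hit the node and cache the JSON body."""
    url = f"{base_url}/{path}"
    key = f"{prefix};{url};{params}"

    cached = REDIS_DB.get(key)
    if cached:
        return json.loads(cached)

    resp = requests.get(url, params=params)
    data = resp.json()

    # Mirror the PR: only successful responses are written back to the cache.
    if resp.status_code == 200 and cache_seconds > 0:
        REDIS_DB.setex(key, cache_seconds, json.dumps(data))

    return data


# Example call (placeholder REST endpoint):
# cached_get("junorest", "http://127.0.0.1:1317", "cosmos/staking/v1beta1/pool", {}, 30)
```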
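
Finally, a stripped-down sketch of the batched hit counters in `HELPERS.increment_call_value`: counts accumulate in process memory and are flushed to Redis with a single `INCRBY` once `INCREASE_COUNTER_EVERY` calls have been seen. The key prefix and threshold here are illustrative:

```python
import redis

REDIS_DB = redis.Redis.from_url("redis://127.0.0.1:6379/0")  # assumes a local Redis
INC_EVERY = 25       # INCREASE_COUNTER_EVERY in .env
_local_counts = {}   # in-process buffer, one entry per counter key


def increment(key: str) -> None:
    """Count locally and push to Redis in batches to avoid one round trip per request."""
    _local_counts.setdefault(key, 0)

    if _local_counts[key] >= INC_EVERY:
        # Flush the buffered count with one INCRBY, then start over.
        REDIS_DB.incr(f"junorpc;{key}", amount=_local_counts[key])
        _local_counts[key] = 0
    else:
        _local_counts[key] += 1
```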