Migrate to Py KVStore (from Redis) (#33)
* Migrates away from Redis to the built-in KV cache

* rm openapi yml

* Add Debug, only use hset if args > 0 for search & block (cosmos.directory fix)

* version bump

* Final fixes

* bump requirements

* migration steps for v0.0.9

* RPC debug

* v0.0.9 hopefully final

* minor fix to akash deploy
Reecepbcups authored Apr 24, 2023
1 parent 9583580 commit c9cf163
Showing 22 changed files with 197 additions and 55,968 deletions.
1 change: 0 additions & 1 deletion .dockerignore
@@ -3,5 +3,4 @@

/.env
/cache_times.json
/redis.json
/README.md
7 changes: 5 additions & 2 deletions .gitignore
@@ -3,5 +3,8 @@ __pycache__/

# ignores project root directory configs
/.env
/redis.json
/cache_times.json
/cache_times.json

kvstores/*.json

static/openapi.yml
10 changes: 5 additions & 5 deletions COINGECKO.py
@@ -4,7 +4,7 @@
from pycoingecko import CoinGeckoAPI

import CONFIG
from CONFIG import REDIS_DB
from CONFIG import KV_STORE
from HELPERS import ttl_block_only
from HELPERS_TYPES import Mode

@@ -24,7 +24,7 @@ def get_symbols(self):
ids = CONFIG.COINGECKO_IDS

key = f"coingecko_symbols;{ids}"
values = REDIS_DB.get(key)
values = KV_STORE.get(key)
if values is not None:
return json.loads(values)

@@ -34,7 +34,7 @@ def get_symbols(self):
symbol = data.get("symbol", "")
values[_id] = symbol

REDIS_DB.set(key, json.dumps(values), ex=86400)
KV_STORE.set(key, json.dumps(values), timeout=86400)
return values

def get_price(self):
@@ -44,7 +44,7 @@ def get_price(self):
cache_seconds = int(CONFIG.COINGECKO_CACHE.get("seconds", 7))
key = f"coingecko;{ttl_block_only(cache_seconds)};{ids};{vs_currencies}"

value = REDIS_DB.get(key)
value = KV_STORE.get(key)
if value is not None:
return json.loads(value)

@@ -65,7 +65,7 @@ def get_price(self):
if cache_seconds == Mode.FOR_BLOCK_TIME.value: # -2
cache_seconds = int(CONFIG.DEFAULT_CACHE_SECONDS)

REDIS_DB.set(key, json.dumps(data), ex=int(cache_seconds))
KV_STORE.set(key, json.dumps(data), timeout=int(cache_seconds))
return data


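For reference (not part of this commit's diff): the hunks above keep the same read-through-cache pattern and only swap Redis's `ex=` expiry keyword for the KV store's `timeout=`. A minimal sketch of that pattern, using only the `KV_STORE.get`/`KV_STORE.set` calls seen in this commit; the `cached_json` helper and its arguments are hypothetical.

```python
import json

from CONFIG import KV_STORE  # file-backed py_kvstore store set up in CONFIG.py


def cached_json(key: str, fetch, cache_seconds: int):
    """Hypothetical helper mirroring get_symbols / get_price above.

    Returns the cached JSON value for `key`; otherwise calls `fetch` (any
    zero-argument callable returning a JSON-serializable object), caches the
    result, and returns it.
    """
    value = KV_STORE.get(key)
    if value is not None:
        return json.loads(value)

    data = fetch()
    # `timeout=` is the KV store's expiry argument, replacing Redis's `ex=`.
    KV_STORE.set(key, json.dumps(data), timeout=cache_seconds)
    return data
```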
42 changes: 13 additions & 29 deletions CONFIG.py
@@ -3,9 +3,9 @@
import re
from os import getenv

import redis
import requests
from dotenv import load_dotenv
from py_kvstore import KVStore

HEADERS = {
"accept": "application/json",
@@ -14,6 +14,9 @@

PROJECT_DIR = os.path.dirname(os.path.realpath(__file__))

KV_DIR = os.path.join(PROJECT_DIR, "kvstores")
os.makedirs(KV_DIR, exist_ok=True)

env_file = os.path.join(PROJECT_DIR, ".env")


@@ -52,31 +55,12 @@ def get_config_file(filename: str):
return os.path.join(PROJECT_DIR, "configs", filename) # default


# =============
# === REDIS ===
# =============
REDIS_URL = getenv("REDIS_URL", "redis://127.0.0.1:6379/0")

if "http://" in REDIS_URL or "https://" in REDIS_URL:
# remove that http from the url
REDIS_URL = REDIS_URL.replace("http://", "redis://").replace("https://", "redis://")
print(
"WARNING: Found http(s):// in your URL. It has been removed but you should ensure this is correct."
)

REDIS_DB = redis.Redis.from_url(REDIS_URL)
try:
REDIS_DB.ping()
except redis.ConnectionError:
print("Error connecting to Redis. Please check if Redis is running and the REDIS_URL is set correctly.")
exit(1)

redis_config = get_config_file("redis.json")
values = json.loads(open(redis_config, "r").read()).items()
if len(values) > 0:
for k, v in values:
REDIS_DB.config_set(k, v)
DEBUGGING = getenv("DEBUGGING", "false").lower().startswith("t")

# KVStore
KV_STORE_NAME = getenv("STORE_NAME", "node_store")
KV_STORE = KVStore(name=KV_STORE_NAME, dump_dir=KV_DIR)
KV_STORE.load()

ENABLE_COUNTER = getenv("ENABLE_COUNTER", "true").lower().startswith("t")
INC_EVERY = int(getenv("INCREASE_COUNTER_EVERY", 250))
@@ -92,10 +76,9 @@ def get_config_file(filename: str):
# === RPC ===
# ===========
RPC_PORT = int(getenv("RPC_PORT", 5001))
RPC_PREFIX = getenv("REDIS_RPC_PREFIX", "junorpc")


RPC_URL = getenv("RPC_URL", "https://juno-rpc.reece.sh:443")
RPC_URL = getenv("RPC_URL", "https://juno-rpc.polkachu.com:443")
BACKUP_RPC_URL = getenv("BACKUP_RPC_URL", "https://rpc.juno.strange.love:443")
if USE_BACKUP_AS_PRIMARY:
RPC_URL = BACKUP_RPC_URL
@@ -113,9 +96,8 @@ def get_config_file(filename: str):
REST_PORT = int(getenv("REST_PORT", 5000))

API_TITLE = getenv("API_TITLE", "Swagger API")
REST_PREFIX = getenv("REDIS_REST_PREFIX", "junorest")

REST_URL = getenv("REST_URL", "https://juno-rest.reece.sh")
REST_URL = getenv("REST_URL", "https://juno-api.polkachu.com")
BACKUP_REST_URL = getenv("BACKUP_REST_URL", f"https://api.juno.strange.love")
if USE_BACKUP_AS_PRIMARY:
REST_URL = BACKUP_REST_URL
@@ -137,6 +119,7 @@ def get_config_file(filename: str):
REST_ENDPOINTS: dict = {}
COINGECKO_CACHE: dict = {}


# === CACHE HELPER ===
def update_cache_times():
"""
@@ -162,6 +145,7 @@ def get_cache_time_seconds(path: str, is_rpc: bool) -> int:

cache_seconds = DEFAULT_CACHE_SECONDS
for k, seconds in endpoints.items():
k = k.replace("*", ".+")
if re.match(k, path):
cache_seconds = seconds
break
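For reference (not part of this commit's diff): with the Redis block removed, CONFIG.py now builds one file-backed store at import time. A condensed sketch of that startup path, pieced together from the hunks above; the `atexit` hook at the end is an illustrative assumption based on the `dump()`/`load()` pair used in this commit, not something the commit itself wires up.

```python
import atexit
import os
from os import getenv

from py_kvstore import KVStore

PROJECT_DIR = os.path.dirname(os.path.realpath(__file__))

# JSON dumps of the store live next to the project in ./kvstores/
KV_DIR = os.path.join(PROJECT_DIR, "kvstores")
os.makedirs(KV_DIR, exist_ok=True)

# One named store per deployment; STORE_NAME should be unique per node.
KV_STORE_NAME = getenv("STORE_NAME", "node_store")
KV_STORE = KVStore(name=KV_STORE_NAME, dump_dir=KV_DIR)
KV_STORE.load()  # restore any previously dumped values on startup

# Assumption for illustration only: persist the store to KV_DIR on exit.
atexit.register(KV_STORE.dump)
```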
15 changes: 9 additions & 6 deletions CONNECT_WEBSOCKET.py
@@ -4,14 +4,16 @@
import rel
import websocket

from CONFIG import REDIS_DB, RPC_WEBSOCKET
from CONFIG import KV_STORE, RPC_WEBSOCKET

SUBSCRIBE_MSG = '{"jsonrpc": "2.0", "method": "subscribe", "params": ["tm.event=\'NewBlock\'"], "id": 1}'

logger = logging.getLogger(__name__)

CONNECTED = False

# on a new block message, we will clear redis of any values which the config set to -2

# on a new block message, we clear any values in the KV Store which the config set to -2
# Use this for an indexer in the future?? :D
def on_message(ws, message):
msg = json.loads(message)
@@ -36,11 +38,12 @@ def on_message(ws, message):

logger.debug(f"""New Block: {block_height}""")

# resets all blockOnly keys (balances for example)
del_keys = REDIS_DB.keys("*;IsBlockOnly;*")
del_keys = KV_STORE.get_keys("*;IsBlockOnly;*")
if len(del_keys) > 0:
logger.debug(f"Deleting {len(del_keys)} keys...")
REDIS_DB.delete(*del_keys)
res: bool = KV_STORE.delete(del_keys)
if res:
logger.debug(f"Deleting {len(del_keys)} keys...")
# KV_STORE.dump()


def on_error(ws, error):
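For reference (not part of this commit's diff): endpoints configured with a TTL of -2 are cached under keys tagged `;IsBlockOnly;`, so the websocket handler above can flush them whenever a new block arrives. A minimal sketch of that flush, using the `get_keys`/`delete` calls from this commit; the exact key layout around the tag is an assumption.

```python
from CONFIG import KV_STORE


def clear_block_only_keys() -> int:
    """Delete every cached value tagged as block-scoped; return how many were removed.

    Mirrors the logic in on_message above. The ';IsBlockOnly;' wildcard is the
    same one used there; anything else about the key layout is assumed.
    """
    del_keys = KV_STORE.get_keys("*;IsBlockOnly;*")
    if len(del_keys) > 0 and KV_STORE.delete(del_keys):
        return len(del_keys)
    return 0
```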
14 changes: 9 additions & 5 deletions Dockerfile
@@ -1,7 +1,7 @@
# docker build . -t reecepbcups/rpc-cache:latest
# docker run --name rpc-cache -p 5001:5001 reecepbcups/rpc-cache:latest
# docker build -t reecepbcups/rpc-cache:latest .
# docker run -e RPC_WORKER_THREADS=2 -e REMOTE_CONFIG_TIME_FILE=https://raw.githubusercontent.com/Reecepbcups/cosmos-endpoint-cache/main/configs/cache_times.json -p 5001:5001 reecepbcups/rpc-cache:latest

FROM python:3.6-slim
FROM python:3.11-slim

RUN apt-get clean \
&& apt-get -y update
@@ -17,5 +17,9 @@ COPY . /srv/flask_app
WORKDIR /srv/flask_app

EXPOSE 5001
# ["gunicorn", "-w","3", "-b", "0.0.0.0:5000", "app"]
CMD ["gunicorn", "-b", "0.0.0.0:5001", "rpc:rpc_app"]

# You can set this at run time with -e
ENV RPC_WORKER_THREADS=1

# CMD ["gunicorn", "-w", "echo ${WORKER_THREADS}", "-b", "0.0.0.0:5001", "rpc:rpc_app"]
CMD gunicorn -w ${RPC_WORKER_THREADS} -b 0.0.0.0:5001 rpc:rpc_app
10 changes: 7 additions & 3 deletions Dockerfile.rest
@@ -1,7 +1,7 @@
# docker build . -f Dockerfile.rest -t reecepbcups/api-cache:latest
# docker run --name rpc-cache -p 5000:5000 reecepbcups/api-cache:latest
# docker run -e REST_URL=http://15.204.143.232:1317 -e DISABLE_SWAGGER_UI=false -e REST_WORKER_THREADS=1 -e REMOTE_CONFIG_TIME_FILE=https://raw.githubusercontent.com/Reecepbcups/cosmos-endpoint-cache/main/configs/cache_times.json -p 5000:5000 reecepbcups/api-cache:latest

FROM python:3.6-slim
FROM python:3.11

RUN apt-get clean \
&& apt-get -y update
@@ -17,4 +17,8 @@ COPY . /srv/flask_app
WORKDIR /srv/flask_app

EXPOSE 5000
CMD ["gunicorn", "-b", "0.0.0.0:5000", "rest:app"]

# You can set this at run time with -e
ENV REST_WORKER_THREADS=1

CMD gunicorn -w ${REST_WORKER_THREADS} -b 0.0.0.0:5000 rest:app
27 changes: 18 additions & 9 deletions HELPERS.py
@@ -5,7 +5,7 @@
import httpx

import CONFIG
from CONFIG import REDIS_DB
from CONFIG import KV_STORE
from HELPERS_TYPES import CallType, Mode


@@ -41,15 +41,24 @@ def increment_call_value(key: str, amount: int = 1):
total_calls[str(key)] = 0

if total_calls[key] >= CONFIG.INC_EVERY:
REDIS_DB.incr(f"{key}", amount=total_calls[key])
KV_STORE.incr(f"{key}", amount=total_calls[key])
total_calls[key] = 0
else:
total_calls[key] += amount

if CONFIG.DEBUGGING:
print(f"incremented {key} to {total_calls[key]}")

# NOTE: testing only
# print("testing only dump here")
# KV_STORE.dump()


def download_openapi_locally():
# TODO: What if there is no swagger API?
r = httpx.get(CONFIG.OPEN_API)
if r.status_code != 200:
return
file_loc = f"{CONFIG.PROJECT_DIR}/static/openapi.yml"
with open(file_loc, "w") as f:
f.write(r.text)
@@ -129,15 +138,15 @@ def get_config_values():
def get_stats_html():
updates_every = CONFIG.INC_EVERY

# gets information about the redis
rpc_get_cache = REDIS_DB.get(CallType.RPC_GET_CACHE.value)
rpc_get_outbound = REDIS_DB.get(CallType.RPC_GET_OUTBOUND.value)
# gets information about the kv store
rpc_get_cache = KV_STORE.get(CallType.RPC_GET_CACHE.value)
rpc_get_outbound = KV_STORE.get(CallType.RPC_GET_OUTBOUND.value)

rpc_post_cache = REDIS_DB.get(CallType.RPC_POST_CACHE.value)
rpc_post_outbound = REDIS_DB.get(CallType.RPC_POST_OUTBOUND.value)
rpc_post_cache = KV_STORE.get(CallType.RPC_POST_CACHE.value)
rpc_post_outbound = KV_STORE.get(CallType.RPC_POST_OUTBOUND.value)

rest_cache = REDIS_DB.get(CallType.REST_GET_CACHE.value)
rest_outbound = REDIS_DB.get(CallType.REST_GET_OUTBOUND.value)
rest_cache = KV_STORE.get(CallType.REST_GET_CACHE.value)
rest_outbound = KV_STORE.get(CallType.REST_GET_OUTBOUND.value)
# no rest post yet, not added.

# converts (1 so no div / 0 errors)
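For reference (not part of this commit's diff): `increment_call_value` above no longer writes to the store on every request; counts build up in an in-process dict and are flushed with `KV_STORE.incr` once they reach `CONFIG.INC_EVERY`. A minimal sketch of that batching idea, with hypothetical names (`_pending`, `count_call`):

```python
import CONFIG
from CONFIG import KV_STORE

# In-process buffer: key -> calls counted since the last flush to the store.
_pending: dict[str, int] = {}


def count_call(key: str, amount: int = 1) -> None:
    """Hypothetical counter mirroring increment_call_value above.

    Batching trades a little durability (a partial batch is lost on crash)
    for far fewer writes to the KV store.
    """
    _pending[key] = _pending.get(key, 0) + amount
    if _pending[key] >= CONFIG.INC_EVERY:
        KV_STORE.incr(key, amount=_pending[key])
        _pending[key] = 0
```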
16 changes: 8 additions & 8 deletions HELPERS_TYPES.py
@@ -1,7 +1,7 @@
from enum import Enum

import CONFIG
from CONFIG import REDIS_DB
from CONFIG import KV_STORE


class Mode(Enum):
@@ -12,16 +12,16 @@ class Mode(Enum):

class CallType(Enum):
# RPC
RPC_GET_CACHE = f"{CONFIG.RPC_PREFIX};amt;cache;rpc_get"
RPC_GET_OUTBOUND = f"{CONFIG.RPC_PREFIX};amt;outbound;rpc_get"
RPC_GET_CACHE = f"rpc;amt;cache;rpc_get"
RPC_GET_OUTBOUND = f"rpc;amt;outbound;rpc_get"

# RPC POST
RPC_POST_CACHE = f"{CONFIG.RPC_PREFIX};amt;cache;rpc_post"
RPC_POST_OUTBOUND = f"{CONFIG.RPC_PREFIX};amt;outbound;rpc_post"
RPC_POST_CACHE = f"rpc;amt;cache;rpc_post"
RPC_POST_OUTBOUND = f"rpc;amt;outbound;rpc_post"

# REST GET
REST_GET_CACHE = f"{CONFIG.REST_PREFIX};amt;cache;rest_get"
REST_GET_OUTBOUND = f"{CONFIG.REST_PREFIX};amt;outbound;rest_get"
REST_GET_CACHE = f"rest;amt;cache;rest_get"
REST_GET_OUTBOUND = f"rest;amt;outbound;rest_get"


if __name__ == "__main__":
@@ -34,5 +34,5 @@ class CallType(Enum):
print(CallType.REST_GET_CACHE)
print(CallType.REST_GET_OUTBOUND)

v = REDIS_DB.get(CallType.RPC_GET_CACHE.value)
v = KV_STORE.get(CallType.RPC_GET_CACHE.value)
print(1 if v == None else int(v.decode("utf-8")))
43 changes: 43 additions & 0 deletions MIGRATIONS/v0.0.9.md
@@ -0,0 +1,43 @@
# v0.0.8 -> v0.0.9

This upgrade introduces new .env variables & removes the Redis dependency. Please modify your .env file for the changes to take effect.

```sh
# Install the latest dependencies.
python3 -m pip install -r requirements/requirements.txt --upgrade
```

## Config Changes

```toml
# Remove
REDIS_URL=...
REDIS_RPC_PREFIX=...
REDIS_REST_PREFIX=...

# Add
DEBUGGING=false
# The KV values are saved to a file in this directory on close and loaded on open.
# Set this to any unique name
STORE_NAME="reeces_juno-1"
```

## The same changes apply to the Akash / compose image env files, if you use them.

---

## (Docker) Worker Threads

You can now set the number of worker threads in Docker. This is useful for Akash deployments with multiple cores.

By default, only 1 thread is used. To use more threads, set the following:

```env
RPC_WORKER_THREADS=2
REST_WORKER_THREADS=2
```

Where "2" launches 2 threads for each process with its cache
2 changes: 1 addition & 1 deletion Makefile
@@ -1,4 +1,4 @@
VERSION=0.0.8
VERSION=0.0.9

run:
docker-compose up
Expand Down