# burntensor / app.py
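"""Burntensor: a Gradio dashboard listing the burn percentage set by subnet owners
for miners, derived from the incentive assigned to each owner's key on the subnet's
metagraph (see fetch_incentive_data below)."""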
import bittensor as bt
import gradio as gr
import pandas as pd
import time

# Simple module-level cache: the chain is queried at most once per TTL (see get_cached_data).
g_cached_data: pd.DataFrame | None = None
g_last_fetch_time = 0.0


def fetch_incentive_data() -> pd.DataFrame:
    """Collect, for every subnet, the incentive ("burn") earned by the subnet owner's own key."""
    data = []
    subtensor = bt.subtensor(network="finney")
    print("connected to subtensor")
    subnets = subtensor.all_subnets()
    print("fetched all subnets")
    metagraphs = subtensor.get_all_metagraphs_info()
    print("fetched all metagraphs")
    assert subnets, "subtensor.all_subnets() returned no data"
    assert metagraphs, "subtensor.get_all_metagraphs_info() returned no data"

    # Skip netuid 0 (root) and iterate over every other subnet instead of a hard-coded count.
    for sn in range(1, len(subnets)):
        subnet = subnets[sn]
        metagraph = metagraphs[sn]
        address_to_uid = {hk: i for i, hk in enumerate(metagraph.hotkeys)}
        addresses = [("coldkey", subnet.owner_coldkey), ("hotkey", subnet.owner_hotkey)]
        for key_type, address in addresses:
            uid = address_to_uid.get(address, None)
            if uid is None:
                continue
            incentive = metagraph.incentives[uid]
            if incentive <= 0:
                continue
            data.append([
                f"[{sn}](https://taostats.io/subnets/{sn})",
                f"{incentive * 100:.2f}%",
                uid,
                key_type,
                f"[{address}](https://taostats.io/{key_type}/{address})",
            ])
            # Record the owner only once per subnet, even if both keys match.
            break

    # Prepend a 1-based row index.
    data = [(i + 1, *d) for i, d in enumerate(data)]
    df = pd.DataFrame(data, columns=["#", "Subnet", "Burn", "UID", "Key", "Address"])  # type: ignore
    print(f"found {len(data)} burning subnets")
    return df


def get_cached_data() -> tuple[str, pd.DataFrame]:
    """Return the next scheduled refresh time and the (possibly cached) burn table."""
    global g_cached_data, g_last_fetch_time
    # Re-fetch from the chain at most once every 20 minutes.
    if g_cached_data is None or (time.time() - g_last_fetch_time) > 1200:
        g_last_fetch_time = time.time()
        g_cached_data = fetch_incentive_data()
    time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(g_last_fetch_time + 1200))
    return time_str, g_cached_data


with gr.Blocks(title="Bittensor Subnet Incentives") as demo:
    gr.Markdown(
        """
# Burntensor
### Bittensor Subnets Burn Dashboard
<div>
<img src="https://huggingface.co/spaces/pawkanarek/burntensor/resolve/main/assets/burn.gif" alt="Burn GIF" width="250">
</div>
This dashboard displays the burn percentage set by subnet owners for miners.
"""
    )
    next_process_text = gr.Textbox(label="Next refresh time", interactive=False)
    output_df = gr.DataFrame(
        datatype=["number", "markdown", "str", "number", "str", "markdown"],
        label="Subnet Burn Data",
        interactive=False,
        max_height=1000000,  # effectively unlimited, so the whole table is visible
    )
    # Populate both outputs when the page loads (served from the cache while it is fresh).
    demo.load(get_cached_data, None, [next_process_text, output_df])

demo.launch()