import bittensor as bt
from substrateinterface import Keypair
import gradio as gr
import pandas as pd
import time
# Simple module-level cache so chain data is refetched at most every 20 minutes.
g_cached_data: pd.DataFrame | None = None
g_last_fetch_time = 0.0
def fetch_incentive_data() -> pd.DataFrame:
    """Query the finney chain and build the owner-burn table for all subnets."""
    data = []
    subtensor = bt.subtensor(network="finney")
    print("connected to subtensor")
    subnets = subtensor.all_subnets()
    print("fetched all subnets")
    metagraphs = subtensor.get_all_metagraphs_info()
    print("fetched all metagraphs")
    assert subnets, "subtensor returned no subnets"
    assert metagraphs, "subtensor returned no metagraph info"
    # Both lists are indexed by netuid; netuid 0 (root) is skipped.
    for sn in range(1, min(len(subnets), len(metagraphs))):
        subnet = subnets[sn]
        metagraph = metagraphs[sn]
        address_to_uid = {hk: i for i, hk in enumerate(metagraph.hotkeys)}
        # Check whether the owner's coldkey or hotkey appears among the subnet's
        # registered hotkeys and is receiving incentive.
        addresses = [("coldkey", subnet.owner_coldkey), ("hotkey", subnet.owner_hotkey)]
        for key_type, address in addresses:
            uid = address_to_uid.get(address, None)
            if uid is None:
                continue
            incentive = metagraph.incentives[uid]
            if incentive <= 0:
                continue
            data.append([
                f"[{sn}](https://taostats.io/subnets/{sn})",
                f"{incentive*100:.2f}%",
                uid,
                key_type,
                f"[{address}](https://taostats.io/{key_type}/{address})"
            ])
            break  # record each subnet at most once
    # Prepend a 1-based row index.
    data = [(i + 1, *d) for i, d in enumerate(data)]
    df = pd.DataFrame(data, columns=["#", "Subnet", "Burn", "UID", "Key", "Address"])  # type: ignore
    print(f"{len(data)} subnets with owner burn")
    return df
def get_cached_data() -> tuple[str, pd.DataFrame]:
    """Return the next scheduled refresh time and the (possibly cached) burn table."""
    global g_cached_data, g_last_fetch_time
    if g_cached_data is None or (time.time() - g_last_fetch_time) > 1200:  # 20 min
        g_last_fetch_time = time.time()
        g_cached_data = fetch_incentive_data()
    time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(g_last_fetch_time + 1200))
    return time_str, g_cached_data
with gr.Blocks(title="Bittensor Subnet Incentives") as demo:
    gr.Markdown(
        """
        # Burntensor
        ### Bittensor Subnets Burn Dashboard
        <div>
        <img src="https://huggingface.co/spaces/pawkanarek/burntensor/resolve/main/assets/burn.gif" alt="Burn GIF" width="250">
        </div>
        This dashboard displays the burn percentage set by subnet owners for miners.
        """
    )
    next_process_text = gr.Textbox(label="Next refresh time", interactive=False)
    output_df = gr.DataFrame(
        datatype=["number", "markdown", "str", "number", "str", "markdown"],
        label="Subnet Burn Data",
        interactive=False,
        max_height=1000000
    )
    demo.load(get_cached_data, None, [next_process_text, output_df])

demo.launch()
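# Local usage note (a sketch, assuming the Hugging Face Spaces convention that this
# file is saved as app.py): running `python app.py` starts the Gradio server locally,
# and passing share=True to demo.launch() would also expose a temporary public URL.
# Chain data is refetched at most once every 20 minutes via get_cached_data();
# page reloads in between are served from the module-level cache.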