File size: 2,724 Bytes
ac6bb7e
 
 
 
a0ea5d6
 
 
 
 
ac6bb7e
 
 
 
 
81a9ded
 
 
 
 
 
 
 
 
 
 
 
 
703d626
 
 
ac6bb7e
81a9ded
 
 
 
ac6bb7e
81a9ded
 
 
 
 
703d626
 
81a9ded
 
 
 
a0ea5d6
 
703d626
81a9ded
ac6bb7e
 
 
44c0f9f
a0ea5d6
 
 
 
81a9ded
 
 
a0ea5d6
 
ac6bb7e
 
 
bc8476e
703d626
ac6bb7e
 
81a9ded
703d626
ac6bb7e
703d626
a0ea5d6
ac6bb7e
81a9ded
ac6bb7e
81a9ded
ac6bb7e
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
import bittensor as bt
from substrateinterface import Keypair
import gradio as gr
import pandas as pd
import time

# Simple module-level cache: repeated Gradio page loads reuse one fetched
# table instead of re-querying the chain (a fetch takes ~1 min).
g_cached_data: pd.DataFrame | None = None  # last fetched burn table, None until first fetch
g_last_fetch_time: float = 0.0  # epoch seconds of the last fetch attempt


def fetch_incentive_data() -> pd.DataFrame:
    """Fetch the owner-hotkey burn percentage for every Bittensor subnet.

    Connects to the "finney" network, pulls all subnet records and metagraphs,
    and builds one row per subnet whose owner hotkey receives a positive
    incentive (i.e. whose emissions are effectively burned).

    Returns:
        DataFrame with columns ["#", "Subnet", "Burn (%)", "UID", "Address"],
        where "Subnet" and "Address" are markdown links to taostats.io.

    Raises:
        RuntimeError: if the chain returns no subnets or no metagraphs.
    """
    subtensor = bt.subtensor(network="finney")
    print("connected to subtensor")
    subnets = subtensor.all_subnets()
    print("fetched all subnets")
    metagraphs = subtensor.get_all_metagraphs_info()
    print("fetched all metagraphs")

    # Explicit errors instead of `assert`: asserts are stripped under -O and
    # an empty chain response is a runtime condition, not a programmer bug.
    if not subnets:
        raise RuntimeError("all_subnets() returned no data")
    if not metagraphs:
        raise RuntimeError("get_all_metagraphs_info() returned no data")

    data = []
    # Skip netuid 0 (root) and iterate whatever subnets actually exist rather
    # than a hard-coded 128 — assumes list index == netuid (TODO confirm).
    for sn in range(1, min(len(subnets), len(metagraphs))):
        subnet = subnets[sn]
        metagraph = metagraphs[sn]
        address_to_uid = {hk: i for i, hk in enumerate(metagraph.hotkeys)}
        # The incentives that are assigned to the owner hotkey are being burned/not given out 
        #  Maciej Kula [Bo𝞃, Bo𝞃]
        addresses = [("hotkey", subnet.owner_hotkey)] # So don't include coldkey.

        for key_type, address in addresses:
            uid = address_to_uid.get(address, None)
            if uid is None:
                continue

            incentive = metagraph.incentives[uid]
            if incentive <= 0:
                continue

            data.append([
                f"[netuid: {sn} / {subnet.subnet_name}](https://taostats.io/subnets/{sn})",
                incentive*100,  # fraction -> percent
                uid,
                f"[{address}](https://taostats.io/{key_type}/{address})"
            ])
            break  # one row per subnet: stop at the first matching address

    # Prepend a 1-based rank column.
    data = [(i+1, *d) for i, d in enumerate(data)] 
    df = pd.DataFrame(data, columns=["#", "Subnet", "Burn (%)", "UID", "Address"]) # type: ignore
    print(f"{len(data)} subnets burn")
    return df


def get_cached_data() -> tuple[str, pd.DataFrame]:
    """Return the burn table, re-fetching at most once per TTL window.

    Returns:
        (next_refresh, df): a "YYYY-MM-DD HH:MM:SS" local-time string for when
        the cache next expires, and the cached burn DataFrame.
    """
    global g_cached_data, g_last_fetch_time
    # Single definition of the cache lifetime — previously the literal 1200
    # appeared twice and could silently drift apart.
    ttl_seconds = 1200  # 20 min
    if g_cached_data is None or (time.time() - g_last_fetch_time) > ttl_seconds:
        # Stamp the time before fetching; if the fetch raises, g_cached_data
        # stays None so the next call retries immediately.
        g_last_fetch_time = time.time()
        g_cached_data = fetch_incentive_data()

    next_refresh = time.strftime(
        "%Y-%m-%d %H:%M:%S", time.localtime(g_last_fetch_time + ttl_seconds)
    )
    return next_refresh, g_cached_data


# Build and launch the Gradio dashboard. `demo.load` re-runs get_cached_data
# on every page load; the module-level cache keeps that cheap between refreshes.
with gr.Blocks(title="Bittensor Subnet Incentives") as demo:
    gr.Markdown(
        """
        # Burntensor
        ### This dashboard displays the burn percentage set by subnet owners for miners. Fetching data takes ~1min. 
        """
    )
    # Shows when the cache next expires (filled in by demo.load below).
    next_process_text = gr.Textbox(label="Next refresh time", interactive=False)
    # NOTE(review): absolute path — assumes the app serves /assets; confirm
    # the gif resolves when run outside its original deployment.
    gr.Image("/assets/burn.gif")
    output_df = gr.DataFrame(
        # One datatype per column: #, Subnet (md link), Burn (%), UID, Address (md link).
        datatype=["number", "markdown", "number", "number", "markdown"],
        label="Subnet Burn Data",
        interactive=False,
        max_height=1000000  # effectively unbounded: show all rows without inner scroll
    )
    # inputs=None: get_cached_data takes no args; outputs fill the two widgets.
    demo.load(get_cached_data, None, [next_process_text, output_df])
    demo.launch()