Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feat: Added normalized hyperparams #1891

Merged
merged 4 commits into from
Jun 6, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 17 additions & 6 deletions bittensor/commands/network.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,12 @@
from rich.prompt import Prompt
from rich.table import Table
from typing import List, Optional, Dict
from .utils import get_delegates_details, DelegatesDetails, check_netuid_set
from .utils import (
get_delegates_details,
DelegatesDetails,
check_netuid_set,
normalize_hyperparameters,
)
from .identity import SetIdentityCommand

console = bittensor.__console__
Expand Down Expand Up @@ -489,11 +494,14 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"):
table.title = "[white]Subnet Hyperparameters - NETUID: {} - {}".format(
cli.config.netuid, subtensor.network
)
table.add_column("[overline white]HYPERPARAMETER", style="bold white")
table.add_column("[overline white]HYPERPARAMETER", style="white")
table.add_column("[overline white]VALUE", style="green")
table.add_column("[overline white]NORMALIZED", style="cyan")

normalized_values = normalize_hyperparameters(subnet)

for param in subnet.__dict__:
table.add_row(" " + param, str(subnet.__dict__[param]))
for param, value, norm_value in normalized_values:
table.add_row(" " + param, value, norm_value)

bittensor.__console__.print(table)

Expand Down Expand Up @@ -595,9 +603,12 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"):
)
table.add_column("[overline white]HYPERPARAMETER", style="white")
table.add_column("[overline white]VALUE", style="green")
table.add_column("[overline white]NORMALIZED", style="cyan")

normalized_values = normalize_hyperparameters(subnet)

for param in subnet.__dict__:
table.add_row(param, str(subnet.__dict__[param]))
for param, value, norm_value in normalized_values:
table.add_row(" " + param, value, norm_value)

bittensor.__console__.print(table)

Expand Down
48 changes: 47 additions & 1 deletion bittensor/commands/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,9 @@
import bittensor
import requests
from bittensor.utils.registration import torch
from typing import List, Dict, Any, Optional
from bittensor.utils.balance import Balance
from bittensor.utils import U64_NORMALIZED_FLOAT, U16_NORMALIZED_FLOAT
from typing import List, Dict, Any, Optional, Tuple
from rich.prompt import Confirm, PromptBase
from dataclasses import dataclass
from . import defaults
Expand Down Expand Up @@ -194,6 +196,50 @@ def filter_netuids_by_registered_hotkeys(
return list(set(netuids))


def normalize_hyperparameters(
    subnet: bittensor.SubnetHyperparameters,
) -> List[Tuple[str, str, str]]:
    """
    Normalizes the hyperparameters of a subnet for human-readable display.

    Args:
        subnet: The subnet hyperparameters object.

    Returns:
        A list of tuples ``(param_name, raw_value, normalized_value)`` where
        both values are rendered as strings. If normalization of a parameter
        fails, its normalized value is ``"-"``.
    """
    # Maps each parameter name to the callable that converts its raw on-chain
    # integer representation into a human-readable value. Parameters not
    # listed here are displayed as-is.
    param_mappings: Dict[str, Any] = {
        "adjustment_alpha": U64_NORMALIZED_FLOAT,
        "min_difficulty": U64_NORMALIZED_FLOAT,
        "max_difficulty": U64_NORMALIZED_FLOAT,
        "difficulty": U64_NORMALIZED_FLOAT,
        "bonds_moving_avg": U64_NORMALIZED_FLOAT,
        "max_weight_limit": U16_NORMALIZED_FLOAT,
        "kappa": U16_NORMALIZED_FLOAT,
        "min_burn": Balance.from_rao,  # rao -> tao Balance
        "max_burn": Balance.from_rao,
    }

    normalized_values: List[Tuple[str, str, str]] = []

    for param, value in subnet.__dict__.items():
        try:
            if param in param_mappings:
                norm_value = param_mappings[param](value)
                # Compact float rendering; ":.10g" replaces the needlessly
                # dynamic ":.{10}g" nested-field form (identical output).
                if isinstance(norm_value, float):
                    norm_value = f"{norm_value:.10g}"
            else:
                norm_value = value
        except Exception as e:
            # Best-effort: a single bad parameter must not abort the whole
            # table, so log and show a placeholder instead of raising.
            bittensor.logging.warning(f"Error normalizing parameter '{param}': {e}")
            norm_value = "-"

        normalized_values.append((param, str(value), str(norm_value)))

    return normalized_values


@dataclass
class DelegatesDetails:
name: str
Expand Down
91 changes: 91 additions & 0 deletions tests/unit_tests/test_subtensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,13 @@
_logger,
Balance,
)
from bittensor.chain_data import SubnetHyperparameters
from bittensor.commands.utils import normalize_hyperparameters
from bittensor import subtensor_module
from bittensor.utils.balance import Balance

# Maximum representable u16 / u64 chain integers (2**16 - 1 and 2**64 - 1),
# used as the upper bounds when checking hyperparameter normalization.
U16_MAX = 65535
U64_MAX = 18446744073709551615


def test_serve_axon_with_external_ip_set():
Expand Down Expand Up @@ -475,6 +481,91 @@ def test_weights_rate_limit_success_calls(subtensor, mocker):
assert isinstance(result, int)


@pytest.fixture
def sample_hyperparameters():
    """Provide a MagicMock spec'd to SubnetHyperparameters for normalization tests."""
    mock_params = MagicMock(spec=SubnetHyperparameters)
    return mock_params


def get_normalized_value(normalized_data, param_name):
    """Return the normalized value recorded for ``param_name``, or ``None``.

    Args:
        normalized_data: Iterable of ``(name, raw_value, normalized_value)``
            tuples as produced by ``normalize_hyperparameters``.
        param_name: Hyperparameter name to look up.
    """
    for name, _raw, normalized in normalized_data:
        if name == param_name:
            return normalized
    return None


@pytest.mark.parametrize(
    "param_name, max_value, mid_value, zero_value, is_balance",
    [
        ("adjustment_alpha", U64_MAX, U64_MAX / 2, 0, False),
        ("max_weight_limit", U16_MAX, U16_MAX / 2, 0, False),
        ("difficulty", U64_MAX, U64_MAX / 2, 0, False),
        ("min_difficulty", U64_MAX, U64_MAX / 2, 0, False),
        ("max_difficulty", U64_MAX, U64_MAX / 2, 0, False),
        ("bonds_moving_avg", U64_MAX, U64_MAX / 2, 0, False),
        ("min_burn", 10000000000, 5000000000, 0, True),  # These are in rao
        ("max_burn", 20000000000, 10000000000, 0, True),
    ],
    ids=[
        "adjustment-alpha",
        "max_weight_limit",
        "difficulty",
        "min_difficulty",
        "max_difficulty",
        "bonds_moving_avg",
        "min_burn",
        "max_burn",
    ],
)
def test_hyperparameter_normalization(
    sample_hyperparameters, param_name, max_value, mid_value, zero_value, is_balance
):
    """Check that a hyperparameter normalizes correctly at its mid, max, and
    zero raw values.

    Non-balance params must map onto [0.0, 1.0]; balance params (rao) must
    render as the equivalent tao amount (raw / 1e9).
    """

    def _check(raw_value, expected_fraction):
        # Set one param on the mock, normalize, and verify the rendered value.
        setattr(sample_hyperparameters, param_name, raw_value)
        normalized = normalize_hyperparameters(sample_hyperparameters)
        norm_value = get_normalized_value(normalized, param_name)

        if is_balance:
            # Balance values render with the tao symbol prefix; strip it to
            # recover the numeric tao amount.
            numeric_value = float(str(norm_value).lstrip(bittensor.__tao_symbol__))
            expected_tao = raw_value / 1e9
            assert (
                numeric_value == expected_tao
            ), f"Mismatch in tao value for {param_name} at raw value {raw_value}"
        else:
            assert (
                float(norm_value) == expected_fraction
            ), f"Failed normalization test for {param_name} at raw value {raw_value}"

    # Previously three copy-pasted stanzas; one helper keeps the checks in sync.
    _check(mid_value, 0.5)
    _check(max_value, 1.0)
    _check(zero_value, 0.0)


###########################
# Account functions tests #
###########################
Expand Down
Loading