replace torch with numpy
andreea-popescu-reef committed Apr 9, 2024
1 parent de0bd31 commit 5747b9b
Showing 32 changed files with 503 additions and 476 deletions.
10 changes: 2 additions & 8 deletions Dockerfile
@@ -1,5 +1,5 @@
# syntax=docker/dockerfile:1
FROM pytorch/pytorch:2.0.1-cuda11.7-cudnn8-devel
FROM python:3.11.8-bookworm

LABEL bittensor.image.authors="bittensor.com" \
bittensor.image.vendor="Bittensor" \
@@ -9,19 +9,13 @@ LABEL bittensor.image.authors="bittensor.com" \
bittensor.image.revision="${VCS_REF}" \
bittensor.image.created="${BUILD_DATE}" \
bittensor.image.documentation="https://app.gitbook.com/@opentensor/s/bittensor/"
LABEL bittensor.dependencies.versions.torch="2.0.1"
LABEL bittensor.dependencies.versions.cuda="11.7"
ARG DEBIAN_FRONTEND=noninteractive

#nvidia key migration
RUN apt-key del 7fa2af80
RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/3bf863cc.pub
RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu2004/x86_64/7fa2af80.pub
# Update the base image
RUN apt update && apt upgrade -y
# Install bittensor
## Install dependencies
RUN apt install -y curl sudo nano git htop netcat wget unzip tmux apt-utils cmake build-essential
RUN apt install -y curl sudo nano git htop netcat-openbsd wget unzip tmux apt-utils cmake build-essential
## Upgrade pip
RUN pip3 install --upgrade pip

4 changes: 2 additions & 2 deletions README.md
@@ -329,8 +329,8 @@ my_axon.attach(
).start()
```

Dendrite: Inheriting from PyTorch's Module class, represents the abstracted implementation of a network client module designed
to send requests to those endpoints to receive inputs.
Dendrite: Represents the abstracted implementation of a network client module
designed to send requests to those endpoints to receive inputs.

Example:
```python
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
6.9.3
7.0.0
2 changes: 1 addition & 1 deletion bittensor/__init__.py
@@ -27,7 +27,7 @@
nest_asyncio.apply()

# Bittensor code and protocol version.
__version__ = "6.9.3"
__version__ = "7.0.0"

version_split = __version__.split(".")
__version_as_int__: int = (
55 changes: 24 additions & 31 deletions bittensor/chain_data.py
@@ -14,7 +14,6 @@
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import torch
import bittensor

import json
@@ -265,16 +264,14 @@ def from_neuron_info(cls, neuron_info: dict) -> "AxonInfo":
coldkey=neuron_info["coldkey"],
)

def to_parameter_dict(self) -> "torch.nn.ParameterDict":
r"""Returns a torch tensor of the subnet info."""
return torch.nn.ParameterDict(self.__dict__)
def to_parameter_dict(self) -> dict[str, Union[int, str]]:
r"""Returns a dict of the subnet info."""
return self.__dict__

@classmethod
def from_parameter_dict(
cls, parameter_dict: "torch.nn.ParameterDict"
) -> "AxonInfo":
r"""Returns an axon_info object from a torch parameter_dict."""
return cls(**dict(parameter_dict))
def from_parameter_dict(cls, parameter_dict: dict[str, Any]) -> "AxonInfo":
r"""Returns an axon_info object from a parameter_dict."""
return cls(**parameter_dict)


class ChainDataType(Enum):
@@ -942,16 +939,14 @@ def fix_decoded_values(cls, decoded: Dict) -> "SubnetInfo":
owner_ss58=ss58_encode(decoded["owner"], bittensor.__ss58_format__),
)

def to_parameter_dict(self) -> "torch.nn.ParameterDict":
r"""Returns a torch tensor of the subnet info."""
return torch.nn.ParameterDict(self.__dict__)
def to_parameter_dict(self) -> dict[str, Any]:
r"""Returns a dict of the subnet info."""
return self.__dict__

@classmethod
def from_parameter_dict(
cls, parameter_dict: "torch.nn.ParameterDict"
) -> "SubnetInfo":
r"""Returns a SubnetInfo object from a torch parameter_dict."""
return cls(**dict(parameter_dict))
def from_parameter_dict(cls, parameter_dict: dict[str, Any]) -> "SubnetInfo":
r"""Returns a SubnetInfo object from a parameter_dict."""
return cls(**parameter_dict)


@dataclass
@@ -1038,16 +1033,14 @@ def fix_decoded_values(cls, decoded: Dict) -> "SubnetHyperparameters":
difficulty=decoded["difficulty"],
)

def to_parameter_dict(self) -> "torch.nn.ParameterDict":
r"""Returns a torch tensor of the subnet hyperparameters."""
return torch.nn.ParameterDict(self.__dict__)
def to_parameter_dict(self) -> dict[str, Union[int, float, bool]]:
r"""Returns a dict of the subnet hyperparameters."""
return self.__dict__

@classmethod
def from_parameter_dict(
cls, parameter_dict: "torch.nn.ParameterDict"
) -> "SubnetInfo":
r"""Returns a SubnetHyperparameters object from a torch parameter_dict."""
return cls(**dict(parameter_dict))
def from_parameter_dict(cls, parameter_dict: dict[str, Any]) -> "SubnetInfo":
r"""Returns a SubnetHyperparameters object from a parameter_dict."""
return cls(**parameter_dict)


@dataclass
@@ -1103,14 +1096,14 @@ def fix_decoded_values(cls, decoded: Dict) -> "IPInfo":
protocol=decoded["ip_type_and_protocol"] & 0xF,
)

def to_parameter_dict(self) -> "torch.nn.ParameterDict":
r"""Returns a torch tensor of the subnet info."""
return torch.nn.ParameterDict(self.__dict__)
def to_parameter_dict(self) -> dict[str, str | int]:
r"""Returns a dict of the subnet ip info."""
return self.__dict__

@classmethod
def from_parameter_dict(cls, parameter_dict: "torch.nn.ParameterDict") -> "IPInfo":
r"""Returns a IPInfo object from a torch parameter_dict."""
return cls(**dict(parameter_dict))
def from_parameter_dict(cls, parameter_dict: dict[str, Any]) -> "IPInfo":
r"""Returns a IPInfo object from a parameter_dict."""
return cls(**parameter_dict)


# Senate / Proposal data
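For illustration, the torch.nn.ParameterDict round trip above becomes a plain-dict round trip. A minimal sketch of the new pattern, using a simplified stand-in dataclass (the fields below are illustrative only, not the real AxonInfo/SubnetInfo attributes):

```python
from dataclasses import dataclass
from typing import Any


@dataclass
class InfoSketch:
    # Illustrative fields only; the real AxonInfo/SubnetInfo classes carry
    # chain-specific attributes.
    hotkey: str
    coldkey: str
    version: int

    def to_parameter_dict(self) -> dict[str, Any]:
        # A dataclass instance's __dict__ is already a plain field mapping.
        return self.__dict__

    @classmethod
    def from_parameter_dict(cls, parameter_dict: dict[str, Any]) -> "InfoSketch":
        return cls(**parameter_dict)


info = InfoSketch(hotkey="5F...", coldkey="5G...", version=1)
assert InfoSketch.from_parameter_dict(info.to_parameter_dict()) == info
```

Note that returning self.__dict__ hands callers the live attribute dictionary rather than a copy, so mutating the returned dict mutates the object, whereas the old torch.nn.ParameterDict was a separate container.
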
9 changes: 7 additions & 2 deletions bittensor/commands/register.py
@@ -216,7 +216,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"):
)
sys.exit(1)

subtensor.register(
registered = subtensor.register(
wallet=wallet,
netuid=cli.config.netuid,
prompt=not cli.config.no_prompt,
@@ -234,6 +234,8 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"):
"verbose", defaults.pow_register.verbose
),
)
if not registered:
sys.exit(1)

@staticmethod
def add_args(parser: argparse.ArgumentParser):
@@ -408,7 +410,7 @@ def run(cli: "bittensor.cli"):
def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"):
r"""Register neuron."""
wallet = bittensor.wallet(config=cli.config)
subtensor.run_faucet(
success = subtensor.run_faucet(
wallet=wallet,
prompt=not cli.config.no_prompt,
tpb=cli.config.pow_register.cuda.get("tpb", None),
@@ -425,6 +427,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"):
"verbose", defaults.pow_register.verbose
),
)
if not success:
bittensor.logging.error("Faucet run failed.")
sys.exit(1)

@staticmethod
def add_args(parser: argparse.ArgumentParser):
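For context, a small sketch of the new failure handling in this file: the command now inspects the value returned by the registration call and exits non-zero so shell scripts and CI can detect the failure (register_neuron below is a hypothetical stand-in for subtensor.register / subtensor.run_faucet):

```python
import sys


def run_registration(register_neuron) -> None:
    # register_neuron stands in for subtensor.register(...) or
    # subtensor.run_faucet(...), whose return value is now treated as a
    # success flag.
    registered = register_neuron()
    if not registered:
        # Exit with a non-zero status so callers can tell the attempt failed.
        sys.exit(1)
```
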
14 changes: 7 additions & 7 deletions bittensor/commands/root.py
@@ -16,7 +16,7 @@
# DEALINGS IN THE SOFTWARE.

import re
import torch
import numpy as np
import typing
import argparse
import numpy as np
@@ -301,7 +301,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"):
f"Boosting weight for netuid {cli.config.netuid} from {prev_weight} -> {new_weight}"
)
my_weights[cli.config.netuid] = new_weight
all_netuids = torch.tensor(list(range(len(my_weights))))
all_netuids = np.arange(len(my_weights))

bittensor.__console__.print("Setting root weights...")
subtensor.root_set_weights(
@@ -419,7 +419,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"):
my_weights = root.weights[my_uid]
my_weights[cli.config.netuid] -= cli.config.amount
my_weights[my_weights < 0] = 0 # Ensure weights don't go negative
all_netuids = torch.tensor(list(range(len(my_weights))))
all_netuids = np.arange(len(my_weights))

subtensor.root_set_weights(
wallet=wallet,
@@ -520,12 +520,12 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"):
cli.config.weights = Prompt.ask(f"Enter weights (e.g. {example})")

# Parse from string
netuids = torch.tensor(
list(map(int, re.split(r"[ ,]+", cli.config.netuids))), dtype=torch.long
netuids = np.array(
list(map(int, re.split(r"[ ,]+", cli.config.netuids))), dtype=np.int64
)
weights = torch.tensor(
weights = np.array(
list(map(float, re.split(r"[ ,]+", cli.config.weights))),
dtype=torch.float32,
dtype=np.float32,
)

# Run the set weights operation.
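A self-contained sketch of the torch-to-numpy swap in this parsing step, with made-up input strings (the real values come from the CLI prompts above):

```python
import re

import numpy as np

netuids_input = "1, 2, 4"      # e.g. cli.config.netuids
weights_input = "0.3 0.3 0.4"  # e.g. cli.config.weights

# Same split-and-convert logic as before, but into numpy arrays.
netuids = np.array(list(map(int, re.split(r"[ ,]+", netuids_input))), dtype=np.int64)
weights = np.array(
    list(map(float, re.split(r"[ ,]+", weights_input))), dtype=np.float32
)

# np.arange(n) replaces torch.tensor(list(range(n))) in the boost/remove paths.
all_netuids = np.arange(len(weights))
print(netuids, weights, all_netuids)
```
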
8 changes: 5 additions & 3 deletions bittensor/commands/utils.py
@@ -17,11 +17,11 @@

import sys
import os
import torch
import bittensor
import requests
from bittensor.utils.registration import maybe_get_torch
from typing import List, Dict, Any, Optional
from rich.prompt import Confirm, PromptBase
import requests
from dataclasses import dataclass
from . import defaults

@@ -78,7 +78,9 @@ def check_netuid_set(

def check_for_cuda_reg_config(config: "bittensor.config") -> None:
"""Checks, when CUDA is available, if the user would like to register with their CUDA device."""
if torch.cuda.is_available():

torch = maybe_get_torch()
if torch is not None and torch.cuda.is_available():
if not config.no_prompt:
if config.pow_register.cuda.get("use_cuda") == None: # flag not set
# Ask about cuda registration only if a CUDA device is available.
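maybe_get_torch() is imported from bittensor.utils.registration and its body is not shown in this diff. A plausible sketch, assuming it simply returns the torch module when it is importable and None otherwise:

```python
import importlib
from types import ModuleType
from typing import Optional


def maybe_get_torch() -> Optional[ModuleType]:
    # Sketch only: the real helper lives in bittensor/utils/registration.py
    # and may add logging or a user-facing warning when torch is missing.
    try:
        return importlib.import_module("torch")
    except ImportError:
        return None


torch = maybe_get_torch()
if torch is not None and torch.cuda.is_available():
    print("CUDA device available; offer CUDA registration.")
```
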
8 changes: 5 additions & 3 deletions bittensor/dendrite.py
@@ -22,15 +22,14 @@
import asyncio
import uuid
import time
import torch
import aiohttp
import bittensor
from typing import Union, Optional, List, Union, AsyncGenerator, Any


class dendrite(torch.nn.Module):
class dendrite:
"""
The Dendrite class, inheriting from PyTorch's Module class, represents the abstracted implementation of a network client module.
The Dendrite class represents the abstracted implementation of a network client module.
In the brain analogy, dendrites receive signals
from other neurons (in this case, network servers or axons), and the Dendrite class here is designed
@@ -122,6 +121,9 @@ def __init__(

self._session: Optional[aiohttp.ClientSession] = None

async def __call__(self, *args, **kwargs):
return await self.forward(*args, **kwargs)

@property
async def session(self) -> aiohttp.ClientSession:
"""
11 changes: 9 additions & 2 deletions bittensor/extrinsics/registration.py
@@ -18,11 +18,10 @@

import bittensor

import torch
import time
from rich.prompt import Confirm
from typing import List, Union, Optional, Tuple
from bittensor.utils.registration import POWSolution, create_pow
from bittensor.utils.registration import POWSolution, create_pow, maybe_get_torch


def register_extrinsic(
@@ -102,6 +101,10 @@ def register_extrinsic(
):
return False

torch = maybe_get_torch()
if torch is None:
return False

# Attempt rolling registration.
attempts = 1
while True:
@@ -378,6 +381,10 @@ def run_faucet_extrinsic(
):
return False

torch = maybe_get_torch()
if torch is None:
return False

# Unlock coldkey
wallet.coldkey

19 changes: 10 additions & 9 deletions bittensor/extrinsics/root.py
@@ -19,8 +19,9 @@
import bittensor

import time
import torch
import logging
import numpy as np
from numpy.typing import NDArray
from rich.prompt import Confirm
from typing import Union
import bittensor.utils.weight_utils as weight_utils
@@ -101,8 +102,8 @@ def root_register_extrinsic(
def set_root_weights_extrinsic(
subtensor: "bittensor.subtensor",
wallet: "bittensor.wallet",
netuids: Union[torch.LongTensor, list],
weights: Union[torch.FloatTensor, list],
netuids: Union[NDArray[np.int64], list],
weights: Union[NDArray[np.float32], list],
version_key: int = 0,
wait_for_inclusion: bool = False,
wait_for_finalization: bool = False,
@@ -115,7 +116,7 @@ def set_root_weights_extrinsic(
Bittensor wallet object.
netuids (List[int]):
The ``netuid`` of the subnet to set weights for.
weights ( Union[torch.FloatTensor, list]):
weights (Union[NDArray[np.float32], list]):
Weights to set. These must be ``float`` s and must correspond to the passed ``netuid`` s.
version_key (int):
The version key of the validator.
@@ -131,22 +132,22 @@
"""
# First convert types.
if isinstance(netuids, list):
netuids = torch.tensor(netuids, dtype=torch.int64)
netuids = np.array(netuids, dtype=np.int64)
if isinstance(weights, list):
weights = torch.tensor(weights, dtype=torch.float32)
weights = np.array(weights, dtype=np.float32)

# Get weight restrictions.
min_allowed_weights = subtensor.min_allowed_weights(netuid=0)
max_weight_limit = subtensor.max_weight_limit(netuid=0)

# Get non zero values.
non_zero_weight_idx = torch.argwhere(weights > 0).squeeze(dim=1)
non_zero_weight_idx = np.argwhere(weights > 0).squeeze(axis=1)
non_zero_weight_uids = netuids[non_zero_weight_idx]
non_zero_weights = weights[non_zero_weight_idx]
if non_zero_weights.numel() < min_allowed_weights:
if non_zero_weights.size < min_allowed_weights:
raise ValueError(
"The minimum number of weights required to set weights is {}, got {}".format(
min_allowed_weights, non_zero_weights.numel()
min_allowed_weights, non_zero_weights.size
)
)
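A self-contained sketch of the filtering step above with made-up values, showing the numpy equivalents of torch.argwhere(...).squeeze(dim=1) and .numel():

```python
import numpy as np

netuids = np.array([0, 1, 2, 3], dtype=np.int64)
weights = np.array([0.0, 0.5, 0.0, 0.5], dtype=np.float32)

# np.argwhere returns an (n, 1) column of indices; squeezing axis 1 mirrors
# the old torch .squeeze(dim=1).
non_zero_weight_idx = np.argwhere(weights > 0).squeeze(axis=1)
non_zero_weight_uids = netuids[non_zero_weight_idx]
non_zero_weights = weights[non_zero_weight_idx]

min_allowed_weights = 1  # placeholder for subtensor.min_allowed_weights(netuid=0)
if non_zero_weights.size < min_allowed_weights:  # .size replaces torch's .numel()
    raise ValueError("Not enough non-zero weights.")

print(non_zero_weight_uids, non_zero_weights)  # [1 3] [0.5 0.5]
```
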
