V7 bittensor: commit/reveal weights #75

Merged
merged 29 commits on Jun 26, 2024
Changes from all commits
6 changes: 6 additions & 0 deletions image_generation_subnet/base/neuron.py
@@ -61,6 +61,12 @@ def __init__(self, config=None):
self.config.merge(base_config)
self.check_config(self.config)

bt.logging.enable_default()
if self.config.logging.trace:
bt.logging.enable_trace()
if self.config.logging.debug:
bt.logging.enable_debug()

# Set up logging with the provided configuration and directory.
bt.logging(config=self.config, logging_dir=self.config.full_path)

154 changes: 147 additions & 7 deletions image_generation_subnet/base/validator.py
@@ -17,12 +17,13 @@
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.


import random
import copy
import torch
import asyncio
import threading
import bittensor as bt
import numpy as np

from typing import List
from traceback import print_exception
@@ -38,6 +39,10 @@ class BaseValidatorNeuron(BaseNeuron):
def __init__(self, config=None):
super().__init__(config=config)

# Initialize commit/reveal weights state: pretend the last commit happened long ago.
self.last_commit_weights_block = self.block - 1000
self.need_reveal = False # nothing committed yet, so commit first

# Save a copy of the hotkeys to local memory.
self.hotkeys = copy.deepcopy(self.metagraph.hotkeys)

@@ -47,7 +52,7 @@ def __init__(self, config=None):

# Set up initial scoring weights for validation
bt.logging.info("Building validation weights.")
self.scores = torch.zeros_like(self.metagraph.S, dtype=torch.float32)
self.scores = torch.zeros_like(torch.from_numpy(self.metagraph.S), dtype=torch.float32)

# Init sync with the network. Updates the metagraph.
self.resync_metagraph()
@@ -207,6 +212,27 @@ def __exit__(self, exc_type, exc_value, traceback):
self.is_running = False
bt.logging.debug("Stopped")

def sync(self):
"""
Wrapper for synchronizing the state of the network for the given miner or validator.
"""
# Ensure miner or validator hotkey is still registered on the network.
self.check_registered()

if self.should_sync_metagraph():
self.resync_metagraph()

commit_reveal_weights_enabled = self.subtensor.get_subnet_hyperparameters(self.config.netuid).commit_reveal_weights_enabled
bt.logging.info(f"[Weights] commit_reveal_weights: {commit_reveal_weights_enabled}")
if commit_reveal_weights_enabled:
if self.should_reveal_last_weights():
self.reveal_weights()
if self.should_commit_new_weights():
self.commit_weights()
else:
if self.should_set_weights():
self.set_weights()

def set_weights(self):
"""
Sets the validator weights to the metagraph hotkeys based on the scores it has received from the miners. The weights determine the trust and incentive level the validator assigns to miner nodes on the network.
@@ -224,14 +250,14 @@ def set_weights(self):
bt.logging.trace("raw_weights", raw_weights)
bt.logging.trace("top10 values", raw_weights.sort()[0])
bt.logging.trace("top10 uids", raw_weights.sort()[1])

raw_weights = np.array(raw_weights).astype(np.float32)
# Process the raw weights to final_weights via subtensor limitations.
(
processed_weight_uids,
processed_weights,
) = bt.utils.weight_utils.process_weights_for_netuid(
uids=self.metagraph.uids.to("cpu"),
weights=raw_weights.to("cpu"),
uids=self.metagraph.uids,
weights=raw_weights,
netuid=self.config.netuid,
subtensor=self.subtensor,
metagraph=self.metagraph,
@@ -240,16 +266,130 @@ def set_weights(self):
bt.logging.trace("processed_weight_uids", processed_weight_uids)

# Set the weights on chain via our subtensor connection.
self.subtensor.set_weights(
success, message = self.subtensor.set_weights(
wallet=self.wallet,
netuid=self.config.netuid,
uids=processed_weight_uids,
weights=processed_weights,
wait_for_finalization=False,
version_key=self.spec_version,
)
if success:
bt.logging.success(f"[Weights] Set weights: {processed_weights}")
else:
bt.logging.error(f"[Weights] Set weights failed")

def reveal_weights(self):
success, message = self.subtensor.reveal_weights(
**self.last_commit_weights_info,
wait_for_finalization=False,
version_key=self.spec_version,
)
if success:
bt.logging.success(f"[Reveal Weights] Reveal weights successfully, salt: {self.last_commit_weights_info['salt']}, block: {self.block}")
self.need_reveal = False
else:
bt.logging.error(f"[Reveal Weights] Failed to reveal weights: {message}")
self.need_reveal = True

def should_set_weights(self) -> bool:
# Check if enough epoch blocks have elapsed since the last epoch.
if self.config.neuron.disable_set_weights:
return False

# Only set weights after enough blocks have elapsed since this validator's last update.
if not (
self.block - self.metagraph.last_update[self.uid]
) > self.config.neuron.epoch_length:
bt.logging.debug("[Weights] Not the time to set weights")
return False
bt.logging.info("[Weights] Allowed to set weights")
return True

def should_reveal_last_weights(self):
"""
Check that:
1. there is a committed set of weights that has not been revealed yet
2. the commit/reveal interval has elapsed since that commit
"""
if self.config.neuron.disable_set_weights:
return False
if not self.need_reveal:
bt.logging.warning("[Reveal Weights] Haven't set new weights since last time")
return False
commit_reveal_weights_interval = self.subtensor.get_subnet_hyperparameters(self.config.netuid).commit_reveal_weights_interval
if self.block - self.last_commit_weights_block < commit_reveal_weights_interval:
bt.logging.warning(f"[Reveal Weights] Too soon to REVEAL. Current block is {self.block}, commited at {self.last_commit_weights_block}, tempo is {commit_reveal_weights_interval}")
return False
return True

def should_commit_new_weights(self):
if self.config.neuron.disable_set_weights:
return False
commit_reveal_weights_interval = self.subtensor.get_subnet_hyperparameters(self.config.netuid).commit_reveal_weights_interval
if self.need_reveal:
bt.logging.warning(f"[Set Weights] - Need reveal lastest commited weights first!")
return False
if self.block - self.last_commit_weights_block < commit_reveal_weights_interval:
bt.logging.warning(f"[Set Weights] - Maybe too soon to reveal. Current block is {self.block}, commited at {self.last_commit_weights_block}, tempo is {commit_reveal_weights_interval}")
return False
return True

def commit_weights(self):
"""
Commit a salted hash of the current weights on chain (the commit phase of commit/reveal).
"""

# Check if self.scores contains any NaN values and log a warning if it does.
if torch.isnan(self.scores).any():
bt.logging.warning(
"Scores contain NaN values. This may be due to a lack of responses from miners, or a bug in your reward functions."
)

# Calculate the average reward for each uid across non-zero values.
# Replace any NaN values with 0.
raw_weights = torch.nn.functional.normalize(self.scores, p=1, dim=0)
bt.logging.trace("raw_weights", raw_weights)
bt.logging.trace("top10 values", raw_weights.sort()[0])
bt.logging.trace("top10 uids", raw_weights.sort()[1])
raw_weights = np.array(raw_weights).astype(np.float32)
uids = np.array(self.metagraph.uids).astype(np.int64)
# Process the raw weights to final_weights via subtensor limitations.
(
processed_weight_uids,
processed_weights
) = bt.utils.weight_utils.convert_weights_and_uids_for_emit(
uids=uids,
weights=raw_weights,
)
salt = [random.randint(0, 1000) for _ in range(4)]

bt.logging.trace("processed_weights", processed_weights)
bt.logging.trace("processed_weight_uids", processed_weight_uids)
bt.logging.trace("salt", salt)


commit_data = {
"wallet": self.wallet,
"netuid": self.config.netuid,
"uids": processed_weight_uids,
"salt": salt,
"weights": processed_weights
}

# Set the weights on chain via our subtensor connection.
success, message = self.subtensor.commit_weights(
**commit_data,
wait_for_finalization=False,
version_key=self.spec_version,
)

bt.logging.info(f"Set weights: {processed_weights}")
if success:
bt.logging.success(f"[Set Weights] Committed new weights! Salt: {salt}, Block: {self.block}")
self.need_reveal = True # committed successfully; reveal once the interval has elapsed
self.last_commit_weights_info = copy.deepcopy(commit_data)
self.last_commit_weights_block = self.block
else:
bt.logging.error(f"[Set Weights] Failed to commit weights: {message}")

def resync_metagraph(self):
"""Resyncs the metagraph and updates the hotkeys and moving averages based on the new metagraph."""
9 changes: 6 additions & 3 deletions image_generation_subnet/protocol.py
@@ -16,7 +16,7 @@
class Information(bt.Synapse):
request_dict: dict = {}
response_dict: dict = {}

computed_body_hash: str = pydantic.Field("", title="Computed Body Hash", frozen=False)

class ImageGenerating(bt.Synapse):
prompt: str = pydantic.Field(
@@ -59,11 +59,13 @@ class ImageGenerating(bt.Synapse):
title="Dictionary contains response",
description="Dict contains arbitary information",
)
image = pydantic.Field(
image: str = pydantic.Field(
default="",
title="Base64 Image",
description="Base64 encoded image",
)
# avoid warnings
computed_body_hash: str = pydantic.Field("", title="Computed Body Hash", frozen=False)

def miner_update(self, update: dict):
return self.copy(update=update)
@@ -143,7 +145,8 @@ class TextGenerating(bt.Synapse):
model_name: str = ""
prompt_output: typing.Optional[dict] = {}
pipeline_params: dict = {}

# avoid warnings
computed_body_hash: str = pydantic.Field("", title="Computed Body Hash", frozen=False)
def miner_update(self, update: dict):
self.prompt_output = update

2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,4 +1,4 @@
bittensor==6.9.3
bittensor==7.2.0
diffusers==0.28.0
transformers==4.40.0
accelerate==0.27.2