-
Notifications
You must be signed in to change notification settings - Fork 301
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #1917 from backend-developers-ltd/improve_no_torch
fix PoW and other functions not working with USE_TORCH=0 despite torch being available
- Loading branch information
Showing
11 changed files
with
193 additions
and
89 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,6 +1,5 @@ | ||
# To use Torch functionality in bittensor, you must set the USE_TORCH flag to 1: | ||
USE_TORCH=1 | ||
|
||
# If set to 0 (or anything else), you will use the numpy functions. | ||
# This is generally what you want unless you have a specific reason for using torch | ||
# such as POW registration or legacy interoperability. | ||
# To use the legacy Torch-based interface of bittensor, you must set USE_TORCH=1 | ||
USE_TORCH=0 | ||
# If set to 0 (or anything other than 1), it will use the current, numpy-based bittensor interface. | ||
# This is generally what you want unless you want legacy interoperability. | ||
# Please note that the legacy interface is deprecated, and is not tested nearly as much. |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,45 @@ | ||
import pytest | ||
|
||
from bittensor.utils.registration import LazyLoadedTorch | ||
|
||
|
||
class MockBittensorLogging:
    """Stand-in for bittensor's logger that records error messages.

    The tests inspect :attr:`messages` to verify what would have been
    logged at error level.
    """

    def __init__(self):
        # Error-level messages captured in call order.
        self.messages = []

    def error(self, message):
        """Record *message* instead of emitting it anywhere."""
        self.messages.append(message)
|
||
|
||
@pytest.fixture
def mock_bittensor_logging(monkeypatch):
    """Swap ``bittensor.btlogging`` for a message-recording mock logger.

    Returns the mock so tests can assert on its captured ``messages``.
    """
    logger = MockBittensorLogging()
    monkeypatch.setattr("bittensor.btlogging", logger)
    return logger
|
||
|
||
def test_lazy_loaded_torch__torch_installed(monkeypatch, mock_bittensor_logging):
    """With real torch importable, the lazy proxy forwards attribute access."""
    import torch

    proxy = LazyLoadedTorch()

    # The real module is truthy, and known attributes resolve to the
    # originals on the underlying torch module.
    assert bool(torch) is True
    assert proxy.nn is torch.nn

    # Unknown attributes still surface the usual AttributeError.
    with pytest.raises(AttributeError):
        proxy.no_such_thing
|
||
|
||
def test_lazy_loaded_torch__no_torch(monkeypatch, mock_bittensor_logging):
    """When torch is unavailable, the proxy is falsy and raises ImportError."""
    # Simulate torch not being importable.
    monkeypatch.setattr("bittensor.utils.registration._get_real_torch", lambda: None)

    proxy = LazyLoadedTorch()
    assert not proxy

    # Any attribute access must fail loudly...
    with pytest.raises(ImportError):
        proxy.some_attribute

    # ...and exactly one explanatory error must be logged.
    assert len(mock_bittensor_logging.messages) == 1
    assert "This command requires torch." in mock_bittensor_logging.messages[0]
Oops, something went wrong.