v2.5.1 #281

Merged
merged 13 commits on Jun 25, 2024
9 changes: 9 additions & 0 deletions install.sh
@@ -5,3 +5,12 @@ pip uninstall mathgenerator -y

# Installing package from the current directory
pip install -e .

+# Updating the package list and installing jq and npm
+apt update && apt install -y jq npm
+
+# Installing PM2 globally
+npm install pm2 -g
+
+# Updating PM2
+pm2 update
2 changes: 1 addition & 1 deletion prompting/__init__.py
@@ -16,7 +16,7 @@
# DEALINGS IN THE SOFTWARE.

# Define the version of the template module.
__version__ = "2.5.0"
__version__ = "2.5.1"
version_split = __version__.split(".")
__spec_version__ = (
(10000 * int(version_split[0]))
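
For context on the version bump: __spec_version__ is derived from the __version__ string, and the expression is truncated above by the diff view. A minimal sketch of that derivation, assuming (this is a guess, not shown in the diff) that the remaining terms use the common Bittensor template weighting of 10000/100/1:

# Assumed continuation of the truncated expression above; the 100/1 weights are
# taken from the common subnet template, not from this diff.
version_split = "2.5.1".split(".")
spec_version = (
    (10000 * int(version_split[0]))
    + (100 * int(version_split[1]))
    + (1 * int(version_split[2]))
)
print(spec_version)  # 20501 under this assumed weighting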
34 changes: 17 additions & 17 deletions prompting/utils/config.py
@@ -20,9 +20,13 @@
import torch
import argparse
import bittensor as bt
-from loguru import logger
+import logging
from prompting.tasks import TASKS

+from bittensor.btlogging.defines import BITTENSOR_LOGGER_NAME
+
+logger = logging.getLogger(BITTENSOR_LOGGER_NAME)


def check_config(cls, config: "bt.Config"):
r"""Checks/validates the config namespace object."""
@@ -42,20 +46,16 @@ def check_config(cls, config: "bt.Config"):
    if not os.path.exists(config.neuron.full_path):
        os.makedirs(config.neuron.full_path, exist_ok=True)

-    log_level_exists = "EVENTS" in logger._core.levels
-    if not config.neuron.dont_save_events and not log_level_exists:
+    if not config.neuron.dont_save_events:
        # Add custom event logger for the events.
-        logger.level("EVENTS", no=38, icon="📝")
-        logger.add(
-            os.path.join(config.neuron.full_path, "events.log"),
-            rotation=config.neuron.events_retention_size,
-            serialize=True,
-            enqueue=True,
-            backtrace=False,
-            diagnose=False,
-            level="EVENTS",
-            format="{time:YYYY-MM-DD at HH:mm:ss} | {level} | {message}",
+        event_handler = logging.FileHandler(
+            os.path.join(config.neuron.full_path, "events.log")
        )
+        event_handler.setLevel(38)  # Custom level
+        formatter = logging.Formatter("{asctime} | {levelname} | {message}", style="{")
+        event_handler.setFormatter(formatter)
+        logger.addHandler(event_handler)
+        logging.addLevelName(38, "EVENTS")


def add_args(cls, parser):
@@ -71,14 +71,14 @@ def add_args(cls, parser):
help="Device to run on.",
default="cuda" if torch.cuda.is_available() else "cpu",
)

parser.add_argument(
"--neuron.gpus",
type=int,
help="The number of visible GPUs to be considered in the llm initialization. This parameter currently reflects on the property `tensor_parallel_size` of vllm",
default=1,
)

parser.add_argument(
"--neuron.llm_max_allowed_memory_in_gb",
type=int,
@@ -307,7 +307,7 @@ def add_validator_args(cls, parser):
"--neuron.timeout",
type=float,
help="The timeout for each forward call in seconds.",
default=17,
default=15,
)

    parser.add_argument(
@@ -396,7 +396,7 @@ def add_validator_args(cls, parser):
help="Only query a single hotkey per ip.",
default=False,
)

parser.add_argument(
"--neuron.forward_max_time",
type=int,
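
The check_config hunk above re-creates the custom EVENTS level (numeric value 38) on top of the standard library instead of loguru. A minimal, self-contained sketch of that pattern, using a throwaway logger name and file path in place of the project's BITTENSOR_LOGGER_NAME and config.neuron.full_path:

import logging

EVENTS = 38  # custom level between WARNING (30) and ERROR (40)
logging.addLevelName(EVENTS, "EVENTS")

logger = logging.getLogger("example")        # stand-in for BITTENSOR_LOGGER_NAME
logger.setLevel(logging.DEBUG)               # let the handler do the filtering

handler = logging.FileHandler("events.log")  # stand-in for <neuron.full_path>/events.log
handler.setLevel(EVENTS)                     # only records at level 38 or above reach the file
handler.setFormatter(logging.Formatter("{asctime} | {levelname} | {message}", style="{"))
logger.addHandler(handler)

logger.log(EVENTS, {"task": "example", "reward": 1.0})  # written to events.log as EVENTS
logger.info("progress message")                          # below 38, filtered out by the handler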
7 changes: 5 additions & 2 deletions prompting/utils/logging.py
@@ -6,8 +6,11 @@
from dataclasses import asdict, dataclass
from datetime import datetime
from typing import List
-from loguru import logger
+import logging
import prompting
+from bittensor.btlogging.defines import BITTENSOR_LOGGER_NAME
+
+logger = logging.getLogger(BITTENSOR_LOGGER_NAME)


@dataclass
@@ -103,7 +106,7 @@ def reinit_wandb(self):

    def log_event(self, event):
        if not self.config.neuron.dont_save_events:
-            logger.log("EVENTS", "events", **event)
+            logger.log(38, event)

        if self.config.wandb.off:
            return
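
The log_event change also switches call signatures: loguru's logger.log took a level name plus a message and extra keyword fields (logger.log("EVENTS", "events", **event)), while the standard library's Logger.log(level, msg) takes a numeric level and a single message object, so the event dict is now passed as the message itself. A short sketch of the new call shape, with a hypothetical payload:

import logging

logging.addLevelName(38, "EVENTS")
logging.basicConfig(level=38, format="{asctime} | {levelname} | {message}", style="{")
logger = logging.getLogger("example")   # stand-in for BITTENSOR_LOGGER_NAME

event = {"task": "qa", "reward": 0.87}  # hypothetical payload; the real dict comes from the forward pass
logger.log(38, event)                   # the dict is rendered via str() into the formatted record

One side effect, assuming nothing else serializes the event, is that events.log entries change from loguru's serialize=True JSON records to plain str(dict) text.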
1 change: 0 additions & 1 deletion requirements.txt
@@ -28,7 +28,6 @@ argostranslate==1.9.6
python-dotenv
wikipedia_sections
vllm
-loguru
argostranslate
transformers==4.41.2
autoawq==0.2.5