style: Apply Ruff 0.4's latest formatter
- Apply formatting rules inside f-string placeholders
- Convert single quotes in f-string placeholders to double quotes
- WARNING: When you backport these changes to releases that support Python 3.11
  or older, you may have to edit the code to make it compliant with the legacy
  f-string placeholder mini-language.
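
For illustration (an editor's sketch, not part of the commit; the variable names are invented but mirror lines changed in the diff below), this is the kind of rewrite the formatter pass produces and why it requires the Python 3.12 f-string grammar (PEP 701):

    # Before this formatter pass (valid on any Python 3.6+):
    config = {"agent": {"scaling-group": "default"}}
    index = 0
    old_prefix = f"sgroup/{config['agent']['scaling-group']}"  # single quotes inside the placeholder
    old_name = f"hostport{index+1}"                            # no spaces around "+"

    # After this formatter pass (requires Python 3.12+, PEP 701, because the
    # placeholder reuses the enclosing double quotes):
    new_prefix = f"sgroup/{config["agent"]["scaling-group"]}"
    new_name = f"hostport{index + 1}"

    assert old_prefix == new_prefix and old_name == new_name  # the resulting strings are identical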
achimnol committed Apr 21, 2024
1 parent d167ac5 commit 39219ae
Showing 53 changed files with 134 additions and 134 deletions.
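
To make the backport warning concrete (again an editor's sketch with hypothetical values, not part of the commit): reusing the enclosing quote character inside a placeholder is a syntax error on Python 3.11 and older, so a formatted line such as the agent_sock.bind(...) call in this diff would need one of the rewrites below when backported.

    # Hypothetical stand-in for the real config object:
    local_config = {"agent": {"agent-sock-port": 6007}}

    # Option 1: switch the placeholder back to single quotes (the pre-3.12 style).
    addr_a = f"tcp://127.0.0.1:{local_config['agent']['agent-sock-port']}"

    # Option 2: hoist the subscript out of the f-string entirely.
    port = local_config["agent"]["agent-sock-port"]
    addr_b = f"tcp://127.0.0.1:{port}"

    assert addr_a == addr_b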
2 changes: 1 addition & 1 deletion src/ai/backend/accelerator/cuda_open/plugin.py
@@ -169,7 +169,7 @@ async def list_devices(self) -> Collection[CUDADevice]:
if dev_id in self.device_mask:
continue
raw_info = libcudart.get_device_props(int(dev_id))
- sysfs_node_path = f"/sys/bus/pci/devices/{raw_info['pciBusID_str'].lower()}/numa_node"
+ sysfs_node_path = f"/sys/bus/pci/devices/{raw_info["pciBusID_str"].lower()}/numa_node"
node: Optional[int]
try:
node = int(Path(sysfs_node_path).read_text().strip())
4 changes: 2 additions & 2 deletions src/ai/backend/accelerator/mock/plugin.py
@@ -297,7 +297,7 @@ async def list_devices(self) -> Collection[MockDevice]:
init_kwargs["is_mig_device"] = dev_info["is_mig_device"]
if dev_info["is_mig_device"]:
init_kwargs["device_id"] = DeviceId(
- f"MIG-{dev_info['mother_uuid']}/{idx}/0"
+ f"MIG-{dev_info["mother_uuid"]}/{idx}/0"
)
device_cls = CUDADevice
case _:
@@ -810,7 +810,7 @@ def get_metadata(self) -> AcceleratorMetadata:

device_format = self.device_formats[format_key]
return {
- "slot_name": f"{self.mock_config['slot_name']}.{format_key}",
+ "slot_name": f"{self.mock_config["slot_name"]}.{format_key}",
"human_readable_name": device_format["human_readable_name"],
"description": device_format["description"],
"display_unit": device_format["display_unit"],
10 changes: 5 additions & 5 deletions src/ai/backend/agent/agent.py
@@ -1871,11 +1871,11 @@ async def create_kernel(
]
if len(overlapping_services) > 0:
raise AgentError(
- f"Port {service['port']} overlaps with built-in service"
- f" {overlapping_services[0]['name']}"
+ f"Port {service["port"]} overlaps with built-in service"
+ f" {overlapping_services[0]["name"]}"
)
service_ports.append({
- "name": f"{model['name']}-{service['port']}",
+ "name": f"{model["name"]}-{service["port"]}",
"protocol": ServicePortProtocols.PREOPEN,
"container_ports": (service["port"],),
"host_ports": (None,),
@@ -1902,7 +1902,7 @@ async def create_kernel(
if len(overlapping_services) > 0:
raise AgentError(
f"Port {port_no} overlaps with built-in service"
- f" {overlapping_services[0]['name']}"
+ f" {overlapping_services[0]["name"]}"
)

preopen_sport: ServicePort = {
@@ -1921,7 +1921,7 @@ async def create_kernel(
exposed_ports.append(cport)
for index, port in enumerate(ctx.kernel_config["allocated_host_ports"]):
service_ports.append({
- "name": f"hostport{index+1}",
+ "name": f"hostport{index + 1}",
"protocol": ServicePortProtocols.INTERNAL,
"container_ports": (port,),
"host_ports": (port,),
6 changes: 3 additions & 3 deletions src/ai/backend/agent/docker/agent.py
@@ -829,7 +829,7 @@ async def start_container(

container_config["Labels"]["ai.backend.service-ports"] = ",".join(service_ports_label)
update_nested_dict(container_config, self.computer_docker_args)
- kernel_name = f"kernel.{self.image_ref.name.split('/')[-1]}.{self.kernel_id}"
+ kernel_name = f"kernel.{self.image_ref.name.split("/")[-1]}.{self.kernel_id}"

# optional local override of docker config
extra_container_opts_name = "agent-docker-container-opts.json"
@@ -1036,7 +1036,7 @@ async def __ainit__(self) -> None:
{
"Cmd": [
f"UNIX-LISTEN:/ipc/{self.agent_sockpath.name},unlink-early,fork,mode=777",
- f"TCP-CONNECT:127.0.0.1:{self.local_config['agent']['agent-sock-port']}",
+ f"TCP-CONNECT:127.0.0.1:{self.local_config["agent"]["agent-sock-port"]}",
],
"HostConfig": {
"Mounts": [
@@ -1200,7 +1200,7 @@ async def handle_agent_socket(self):
while True:
agent_sock = zmq_ctx.socket(zmq.REP)
try:
- agent_sock.bind(f"tcp://127.0.0.1:{self.local_config['agent']['agent-sock-port']}")
+ agent_sock.bind(f"tcp://127.0.0.1:{self.local_config["agent"]["agent-sock-port"]}")
while True:
msg = await agent_sock.recv_multipart()
if not msg:
2 changes: 1 addition & 1 deletion src/ai/backend/agent/docker/utils.py
@@ -60,7 +60,7 @@ async def get_container_version_and_status(self) -> Tuple[int, bool]:
raise
if c["Config"].get("Labels", {}).get("ai.backend.system", "0") != "1":
raise RuntimeError(
- f"An existing container named \"{c['Name'].lstrip('/')}\" is not a system container"
+ f"An existing container named \"{c["Name"].lstrip("/")}\" is not a system container"
" spawned by Backend.AI. Please check and remove it."
)
return (
2 changes: 1 addition & 1 deletion src/ai/backend/agent/kubernetes/utils.py
@@ -59,7 +59,7 @@ async def get_container_version_and_status(self) -> Tuple[int, bool]:
raise
if c["Config"].get("Labels", {}).get("ai.backend.system", "0") != "1":
raise RuntimeError(
- f"An existing container named \"{c['Name'].lstrip('/')}\" is not a system container"
+ f"An existing container named \"{c["Name"].lstrip("/")}\" is not a system container"
" spawned by Backend.AI. Please check and remove it."
)
return (
6 changes: 3 additions & 3 deletions src/ai/backend/agent/server.py
@@ -790,7 +790,7 @@ async def server_main(

log.info("Preparing kernel runner environments...")
kernel_mod = importlib.import_module(
- f"ai.backend.agent.{local_config['agent']['backend'].value}.kernel",
+ f"ai.backend.agent.{local_config["agent"]["backend"].value}.kernel",
)
krunner_volumes = await kernel_mod.prepare_krunner_env(local_config) # type: ignore
# TODO: merge k8s branch: nfs_mount_path = local_config['baistatic']['mounted-at']
@@ -810,8 +810,8 @@ async def server_main(
}
scope_prefix_map = {
ConfigScopes.GLOBAL: "",
- ConfigScopes.SGROUP: f"sgroup/{local_config['agent']['scaling-group']}",
- ConfigScopes.NODE: f"nodes/agents/{local_config['agent']['id']}",
+ ConfigScopes.SGROUP: f"sgroup/{local_config["agent"]["scaling-group"]}",
+ ConfigScopes.NODE: f"nodes/agents/{local_config["agent"]["id"]}",
}
etcd = AsyncEtcd(
local_config["etcd"]["addr"],
2 changes: 1 addition & 1 deletion src/ai/backend/agent/watcher.py
@@ -401,7 +401,7 @@ def main(
fn = Path(cfg["logging"]["file"]["filename"])
cfg["logging"]["file"]["filename"] = f"{fn.stem}-watcher{fn.suffix}"

- setproctitle(f"backend.ai: watcher {cfg['etcd']['namespace']}")
+ setproctitle(f"backend.ai: watcher {cfg["etcd"]["namespace"]}")
with logger:
log.info("Backend.AI Agent Watcher {0}", VERSION)
log.info("runtime: {0}", utils.env_info())
6 changes: 3 additions & 3 deletions src/ai/backend/cli/interaction.py
@@ -73,12 +73,12 @@ def ask_string_in_array(prompt: str, choices: list, default: str) -> Optional[st

if default:
question = (
- f"{prompt} (choices: {'/'.join(choices)}, "
+ f"{prompt} (choices: {"/".join(choices)}, "
f"if left empty, this will use default value: {default}): "
)
else:
question = (
- f"{prompt} (choices: {'/'.join(choices)}, if left empty, this will remove this key): "
+ f"{prompt} (choices: {"/".join(choices)}, if left empty, this will remove this key): "
)

while True:
@@ -92,7 +92,7 @@ def ask_string_in_array(prompt: str, choices: list, default: str) -> Optional[st
elif user_reply.lower() in choices:
break
else:
- print(f"Please answer in {'/'.join(choices)}.")
+ print(f"Please answer in {"/".join(choices)}.")
return user_reply
2 changes: 1 addition & 1 deletion src/ai/backend/client/cli/admin/image.py
@@ -61,7 +61,7 @@ async def rescan_images_impl(registry: str) -> None:
print_error(e)
sys.exit(ExitCode.FAILURE)
if not result["ok"]:
- print_fail(f"Failed to begin registry scanning: {result['msg']}")
+ print_fail(f"Failed to begin registry scanning: {result["msg"]}")
sys.exit(ExitCode.FAILURE)
print_done("Started updating the image metadata from the configured registries.")
bgtask_id = result["task_id"]
4 changes: 2 additions & 2 deletions src/ai/backend/client/cli/pretty.py
@@ -126,7 +126,7 @@ def format_error(exc: Exception):
if matches:
yield "\nCandidates (up to 10 recent entries):\n"
for item in matches:
- yield f"- {item['id']} ({item['name']}, {item['status']})\n"
+ yield f"- {item["id"]} ({item["name"]}, {item["status"]})\n"
elif exc.data["type"].endswith("/session-already-exists"):
existing_session_id = exc.data["data"].get("existingSessionId", None)
if existing_session_id is not None:
@@ -145,7 +145,7 @@ def format_error(exc: Exception):
if exc.data["type"].endswith("/graphql-error"):
yield "\n\u279c Message:\n"
for err_item in exc.data.get("data", []):
- yield f"{err_item['message']}"
+ yield f"{err_item["message"]}"
if err_path := err_item.get("path"):
yield f" (path: {_format_gql_path(err_path)})"
yield "\n"
4 changes: 2 additions & 2 deletions src/ai/backend/client/cli/service.py
@@ -109,7 +109,7 @@ def info(ctx: CLIContext, service_name_or_id: str):
)
print()
for route in routes:
- print(f"Route {route['routing_id']}: ")
+ print(f"Route {route["routing_id"]}: ")
ctx.output.print_item(
route,
_default_routing_fields,
@@ -610,7 +610,7 @@ def generate_token(ctx: CLIContext, service_name_or_id: str, duration: str, quie
if quiet:
print(resp["token"])
else:
- print_done(f"Generated API token {resp['token']}")
+ print_done(f"Generated API token {resp["token"]}")
except Exception as e:
ctx.output.print_error(e)
sys.exit(ExitCode.FAILURE)
2 changes: 1 addition & 1 deletion src/ai/backend/client/func/vfolder.py
@@ -494,7 +494,7 @@ async def _upload_files(
input_file = open(base_path / file_path, "rb")
else:
input_file = open(str(Path(file_path).relative_to(base_path)), "rb")
- print(f"Uploading {base_path / file_path} via {upload_info['url']} ...")
+ print(f"Uploading {base_path / file_path} via {upload_info["url"]} ...")
# TODO: refactor out the progress bar
uploader = tus_client.async_uploader(
file_stream=input_file,
12 changes: 6 additions & 6 deletions src/ai/backend/client/output/formatters.py
@@ -17,7 +17,7 @@ def format_stats(raw_stats: Optional[str], indent="") -> str:
if raw_stats is None:
return "(unavailable)"
stats = json.loads(raw_stats)
- text = "\n".join(f"- {k + ': ':18s}{v}" for k, v in stats.items())
+ text = "\n".join(f"- {k + ": ":18s}{v}" for k, v in stats.items())
return "\n" + textwrap.indent(text, indent)

@@ -239,7 +239,7 @@ def format_value(metric: MetricValue, binary: bool) -> str:
if metric["pct"] is None:
node_metric_bufs.append(f"{stat_key}: (calculating...) % ({num_cores} cores)")
else:
- node_metric_bufs.append(f"{stat_key}: {metric['pct']} % ({num_cores} cores)")
+ node_metric_bufs.append(f"{stat_key}: {metric["pct"]} % ({num_cores} cores)")
else:
binary = stat_key == "mem"
node_metric_bufs.append(f"{stat_key}: {format_value(metric, binary)}")
@@ -322,10 +322,10 @@ def format_console(self, value: Any, field: FieldSpec, indent="") -> str:
else:
text = ""
for item in value:
- text += f"+ {item['id']}\n"
+ text += f"+ {item["id"]}\n"
text += "\n".join(
f" - {f.humanized_name}: "
- f"{_fit_multiline_in_cell(f.formatter.format_console(item[f.field_name], f), ' ')}" # noqa
+ f"{_fit_multiline_in_cell(f.formatter.format_console(item[f.field_name], f), " ")}" # noqa
for f in field.subfields.values()
if f.field_name != "id"
)
@@ -340,10 +340,10 @@ def format_console(self, value: Any, field: FieldSpec, indent="") -> str:
else:
text = ""
for item in value:
- text += f"+ {item['name']} ({item['id']})\n"
+ text += f"+ {item["name"]} ({item["id"]})\n"
text += "\n".join(
f" - {f.humanized_name}: "
- f"{_fit_multiline_in_cell(f.formatter.format_console(item[f.field_name], f), ' ')}" # noqa
+ f"{_fit_multiline_in_cell(f.formatter.format_console(item[f.field_name], f), " ")}" # noqa
for f in field.subfields.values()
if f.field_name not in ("id", "name")
)
2 changes: 1 addition & 1 deletion src/ai/backend/common/docker.py
@@ -499,7 +499,7 @@ def generate_aliases(self) -> Mapping[str, "ImageRef"]:
for name in possible_names:
ret[name] = self
for name, ptags in itertools.product(possible_names, itertools.product(*possible_ptags)):
- ret[f"{name}:{'-'.join(t for t in ptags if t)}"] = self
+ ret[f"{name}:{"-".join(t for t in ptags if t)}"] = self
return ret

@staticmethod
2 changes: 1 addition & 1 deletion src/ai/backend/common/types.py
@@ -726,7 +726,7 @@ def normalize_slots(self, *, ignore_unknown: bool) -> ResourceSlot:
known_slots = current_resource_slots.get()
unset_slots = known_slots.keys() - self.data.keys()
if not ignore_unknown and (unknown_slots := self.data.keys() - known_slots.keys()):
- raise ValueError(f"Unknown slots: {', '.join(map(repr, unknown_slots))}")
+ raise ValueError(f"Unknown slots: {", ".join(map(repr, unknown_slots))}")
data = {k: v for k, v in self.data.items() if k in known_slots}
for k in unset_slots:
data[k] = Decimal(0)
8 changes: 4 additions & 4 deletions src/ai/backend/install/context.py
@@ -574,8 +574,8 @@ async def configure_client(self) -> None:
file=fp,
)
print("export BACKEND_ENDPOINT_TYPE=api", file=fp)
- print(f"export BACKEND_ACCESS_KEY={keypair['access_key']}", file=fp)
- print(f"export BACKEND_SECRET_KEY={keypair['secret_key']}", file=fp)
+ print(f"export BACKEND_ACCESS_KEY={keypair["access_key"]}", file=fp)
+ print(f"export BACKEND_SECRET_KEY={keypair["secret_key"]}", file=fp)
with self.resource_path("ai.backend.install.fixtures", "example-users.json") as user_path:
current_shell = os.environ.get("SHELL", "sh")
user_data = json.loads(Path(user_path).read_bytes())
@@ -604,8 +604,8 @@ async def configure_client(self) -> None:
f"""echo 'Run `./{client_executable} login` to activate a login session.'""",
file=fp,
)
- print(f"""echo 'Your email: {user['email']}'""", file=fp)
- print(f"""echo 'Your password: {user['password']}'""", file=fp)
+ print(f"""echo 'Your email: {user["email"]}'""", file=fp)
+ print(f"""echo 'Your password: {user["password"]}'""", file=fp)

async def dump_install_info(self) -> None:
self.log_header("Dumping the installation configs...")
6 changes: 3 additions & 3 deletions src/ai/backend/kernel/base.py
@@ -660,7 +660,7 @@ async def start_model_service(self, model_info):
if model_service_info is None:
result = {"status": "failed", "error": "service info not provided"}
return
- service_name = f"{model_info['name']}-{model_service_info['port']}"
+ service_name = f"{model_info["name"]}-{model_service_info["port"]}"
self.service_parser.add_model_service(service_name, model_service_info)
service_info = {
"name": service_name,
@@ -696,7 +696,7 @@ async def start_model_service(self, model_info):
async def check_model_health(self, model_name, model_service_info):
health_check_info = model_service_info.get("health_check")
health_check_endpoint = (
- f"http://localhost:{model_service_info['port']}{health_check_info['path']}"
+ f"http://localhost:{model_service_info["port"]}{health_check_info["path"]}"
)
retries = 0
current_health_status = HealthStatus.UNDETERMINED
@@ -837,7 +837,7 @@ async def _start_service(self, service_info, *, cwd: Optional[str] = None, do_no
await terminate_and_wait(proc, timeout=10.0)
self.services_running.pop(service_info["name"], None)
error_reason = (
- f"opening the service port timed out: {service_info['name']}"
+ f"opening the service port timed out: {service_info["name"]}"
)
else:
error_reason = "TimeoutError (unknown)"
6 changes: 3 additions & 3 deletions src/ai/backend/kernel/intrinsic.py
@@ -32,7 +32,7 @@ async def init_sshd_service(child_env):
)
stdout, stderr = await proc.communicate()
if proc.returncode != 0:
- raise RuntimeError(f"sshd init error: {stderr.decode('utf8')}")
+ raise RuntimeError(f"sshd init error: {stderr.decode("utf8")}")
pub_key = stdout.splitlines()[1]
auth_path.write_bytes(pub_key)
auth_path.chmod(0o600)
@@ -52,7 +52,7 @@ async def init_sshd_service(child_env):
)
stdout, stderr = await proc.communicate()
if proc.returncode != 0:
- raise RuntimeError(f"sshd init error: {stderr.decode('utf8')}")
+ raise RuntimeError(f"sshd init error: {stderr.decode("utf8")}")
else:
try:
if (auth_path.parent.stat().st_mode & 0o077) != 0:
@@ -77,7 +77,7 @@ async def init_sshd_service(child_env):
)
stdout, stderr = await proc.communicate()
if proc.returncode != 0:
- raise RuntimeError(f"sshd init error: {stderr.decode('utf8')}")
+ raise RuntimeError(f"sshd init error: {stderr.decode("utf8")}")

cluster_privkey_src_path = Path("/home/config/ssh/id_cluster")
cluster_ssh_port_mapping_path = Path("/home/config/ssh/port-mapping.json")
2 changes: 1 addition & 1 deletion src/ai/backend/kernel/service.py
@@ -106,7 +106,7 @@ async def start_service(
action_impl = getattr(service_actions, action["action"])
except AttributeError:
raise InvalidServiceDefinition(
- f"Service-def for {service_name} used invalid action: {action['action']}"
+ f"Service-def for {service_name} used invalid action: {action["action"]}"
)
ret = await action_impl(self.variables, **action["args"])
if (ref := action.get("ref")) is not None:
4 changes: 2 additions & 2 deletions src/ai/backend/manager/api/auth.py
@@ -1112,8 +1112,8 @@ async def generate_ssh_keypair(request: web.Request) -> web.Response:
async def upload_ssh_keypair(request: web.Request, params: Any) -> web.Response:
domain_name = request["user"]["domain_name"]
access_key = request["keypair"]["access_key"]
- pubkey = f"{params['pubkey'].rstrip()}\n"
- privkey = f"{params['privkey'].rstrip()}\n"
+ pubkey = f"{params["pubkey"].rstrip()}\n"
+ privkey = f"{params["privkey"].rstrip()}\n"
log_fmt = "AUTH.SAVE_SSH_KEYPAIR(d:{}, ak:{})"
log_args = (domain_name, access_key)
log.info(log_fmt, *log_args)
(The remaining changed files in this commit are not shown here.)