From e47c02d8abb1230a03147b2c66003dae33daff6e Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Mon, 22 Jul 2024 15:12:39 -0400 Subject: [PATCH 01/63] basic log streaming --- examples/slurm_poc.py | 8 +++- src/torchrunx/agent.py | 88 ++++++++++++++++++++++++--------------- src/torchrunx/launcher.py | 78 ++++++++++++++++++---------------- src/torchrunx/utils.py | 79 ++++++++++++++++++++++++++++++++++- 4 files changed, 179 insertions(+), 74 deletions(-) diff --git a/examples/slurm_poc.py b/examples/slurm_poc.py index 3a11ea75..c5c63779 100644 --- a/examples/slurm_poc.py +++ b/examples/slurm_poc.py @@ -1,9 +1,13 @@ +import logging import os +import sys import torch import torch.distributed as dist -import torchrunx +sys.path.append("../src") + +import torchrunx # noqa: I001 # this is not a pytest test, but a functional test designed to be run on a slurm allocation @@ -38,7 +42,7 @@ def simple_matmul(): o = torch.matmul(i, w) dist.all_reduce(o, op=dist.ReduceOp.SUM) - print(i) + logging.info(i) return o.detach().cpu() diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 57f41d80..bfeb4e25 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -1,6 +1,8 @@ from __future__ import annotations import datetime +import logging +import logging.handlers import os import socket import sys @@ -33,7 +35,8 @@ class WorkerArgs: local_rank: int local_world_size: int world_size: int - log_file: os.PathLike + log_name: str + log_host: str timeout: int def to_bytes(self) -> bytes: @@ -70,37 +73,46 @@ def flush(self): def entrypoint(serialized_worker_args: bytes): worker_args = WorkerArgs.from_bytes(serialized_worker_args) + logger = logging.getLogger(worker_args.log_name) + # TODO: set logging level? maybe argument to launch? + socketHandler = logging.handlers.SocketHandler(worker_args.log_host, + logging.handlers.DEFAULT_TCP_LOGGING_PORT) + logger.setLevel(logging.DEBUG) + logger.addHandler(socketHandler) + logger.debug("creating TCPStore for worker group.") + store = dist.TCPStore( # pyright: ignore[reportPrivateImportUsage] + host_name=worker_args.master_hostname, + port=worker_args.master_port, + world_size=worker_args.world_size, + is_master=(worker_args.rank == 0), + ) + + backend = worker_args.backend + if backend is None: + backend = "nccl" if torch.cuda.is_available() else "gloo" + logger.debug("initializing worker process group.") + dist.init_process_group( + backend=backend, + world_size=worker_args.world_size, + rank=worker_args.rank, + store=store, + timeout=datetime.timedelta(seconds=worker_args.timeout), + ) + + os.environ["RANK"] = str(worker_args.rank) + os.environ["LOCAL_RANK"] = str(worker_args.local_rank) + os.environ["LOCAL_WORLD_SIZE"] = str(worker_args.local_world_size) + os.environ["WORLD_SIZE"] = str(worker_args.world_size) + os.environ["MASTER_ADDR"] = worker_args.master_hostname + os.environ["MASTER_PORT"] = str(worker_args.master_port) + logger.debug("calling user function") - with WorkerTee(worker_args.log_file, "w"): - store = dist.TCPStore( # pyright: ignore[reportPrivateImportUsage] - host_name=worker_args.master_hostname, - port=worker_args.master_port, - world_size=worker_args.world_size, - is_master=(worker_args.rank == 0), - ) - - backend = worker_args.backend - if backend is None: - backend = "nccl" if torch.cuda.is_available() else "gloo" - dist.init_process_group( - backend=backend, - world_size=worker_args.world_size, - rank=worker_args.rank, - store=store, - timeout=datetime.timedelta(seconds=worker_args.timeout), - ) - - os.environ["RANK"] = 
str(worker_args.rank) - os.environ["LOCAL_RANK"] = str(worker_args.local_rank) - os.environ["LOCAL_WORLD_SIZE"] = str(worker_args.local_world_size) - os.environ["WORLD_SIZE"] = str(worker_args.world_size) - os.environ["MASTER_ADDR"] = worker_args.master_hostname - os.environ["MASTER_PORT"] = str(worker_args.master_port) - - return worker_args.function() + logging.root = logger + return worker_args.function() def main(launcher_agent_group: LauncherAgentGroup): + agent_rank = launcher_agent_group.rank - 1 payload = AgentPayload( @@ -113,10 +125,16 @@ def main(launcher_agent_group: LauncherAgentGroup): launcher_payload: LauncherPayload = all_payloads[0] # pyright: ignore[reportAssignmentType] main_agent_payload: AgentPayload = all_payloads[1] # pyright: ignore[reportAssignmentType] + logger = logging.getLogger(f"torchrunx.agent-{agent_rank}") + logger.setLevel(logging.DEBUG) + socketHandler = logging.handlers.SocketHandler(launcher_payload.log_host, + logging.handlers.DEFAULT_TCP_LOGGING_PORT) + logger.addHandler(socketHandler) + hostname = launcher_payload.hostnames[agent_rank] worker_world_size = launcher_payload.worker_world_size worker_global_ranks = launcher_payload.worker_global_ranks[agent_rank] - worker_log_files = launcher_payload.worker_log_files[agent_rank] + worker_log_names = launcher_payload.worker_log_names[agent_rank] num_workers = len(worker_global_ranks) # spawn workers @@ -135,17 +153,18 @@ def main(launcher_agent_group: LauncherAgentGroup): local_rank=i, local_world_size=num_workers, world_size=worker_world_size, - log_file=worker_log_files[i], + log_name=worker_log_names[i], + log_host=launcher_payload.log_host, timeout=launcher_payload.timeout, ).to_bytes(), ) for i in range(num_workers) }, envs={i: {} for i in range(num_workers)}, - logs_specs=DefaultLogsSpecs(log_dir=None, tee=Std.ALL, local_ranks_filter={0}), + logs_specs=DefaultLogsSpecs(log_dir="/dev/null"), start_method="spawn", ) - + logger.debug("starting processes") try: ctx.start() @@ -162,9 +181,10 @@ def main(launcher_agent_group: LauncherAgentGroup): break if any(s.is_failed() for s in agent_statuses): - raise RuntimeError() + raise RuntimeError("worker failure") - except: + except Exception as e: + logger.error(f"encountered error: {e}") raise finally: ctx.close() diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 346f303b..5ea52731 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -2,14 +2,15 @@ import datetime import fnmatch -import io import ipaddress import itertools +import logging +import logging.config +import logging.handlers import os import socket import subprocess import sys -import time from collections import ChainMap from dataclasses import dataclass, field from functools import partial @@ -25,6 +26,7 @@ AgentStatus, LauncherAgentGroup, LauncherPayload, + LogRecordSocketReceiver, get_open_port, ) @@ -47,34 +49,20 @@ def execute_command( command: str, hostname: str, ssh_config_file: str | os.PathLike | None = None, - outfile: str | os.PathLike | None = None, ) -> None: # TODO: permit different stderr / stdout if is_localhost(hostname): - _outfile = subprocess.DEVNULL - if outfile is not None: - _outfile = open(outfile, "w") - subprocess.Popen(command, shell=True, stdout=_outfile, stderr=_outfile) + subprocess.Popen(command, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) else: with fabric.Connection( host=hostname, config=fabric.Config(runtime_ssh_path=ssh_config_file) ) as conn: - if outfile is None: - outfile = "/dev/null" - 
conn.run(f"{command} >> {outfile} 2>&1 &", asynchronous=True) + conn.run(f"{command} >> /dev/null 2>&1 &", asynchronous=True) -def monitor_log(log_file: Path): - log_file.touch() - f = open(log_file, "r") - print(f.read()) - f.seek(0, io.SEEK_END) - while True: - new = f.read() - if len(new) != 0: - print(new) - time.sleep(0.1) - +def monitor_log(): + tcpserver = LogRecordSocketReceiver(host=socket.getfqdn()) + tcpserver.serve_until_stopped() @dataclass class Launcher: @@ -83,6 +71,7 @@ class Launcher: ssh_config_file: str | os.PathLike | None = None backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None log_dir: os.PathLike | str = "./logs" + propagate_logs: bool = True env_vars: list[str] = field( default_factory=lambda: [ "PATH", @@ -114,6 +103,23 @@ def run( :return: A dictionary mapping worker ranks to their output :rtype: dict[int, Any] """ + + logger = logging.getLogger("torchrunx") + logger.setLevel(logging.DEBUG) + logger.propagate = self.propagate_logs + + log_dir = Path(self.log_dir) + log_dir.mkdir(parents=True, exist_ok=True) + timestamp = datetime.datetime.now().isoformat(timespec="seconds") + + log_file_formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") + log_file_handler = logging.FileHandler(f"{log_dir}/{timestamp}.log") + log_file_handler.setFormatter(log_file_formatter) + logger.addHandler(log_file_handler) + + log_process = Process(target=monitor_log, args=(), daemon=True) + log_process.start() + if not dist.is_available(): raise RuntimeError("The torch.distributed package is not available.") @@ -156,22 +162,13 @@ def run( # rank set in the loop below ) - log_dir = Path(self.log_dir) - log_dir.mkdir(parents=True, exist_ok=True) - timestamp = datetime.datetime.now().isoformat(timespec="seconds") - agent_log_files = [log_dir / f"{timestamp}_{hostname}.log" for hostname in self.hostnames] - - # start process to read from agent 0 log - print_process = Process(target=monitor_log, args=(agent_log_files[0],), daemon=True) - print_process.start() - + logger.debug("starting agents") # start agents on each node for i, hostname in enumerate(self.hostnames): execute_command( command=f"{command} --rank {i+1}", hostname=hostname, ssh_config_file=self.ssh_config_file, - outfile=agent_log_files[i], ) # initialize launcher–agent process group @@ -195,12 +192,12 @@ def run( host_ranks = range(_cumulative_workers[n], _cumulative_workers[n + 1]) worker_global_ranks.append(list(host_ranks)) - worker_log_files = [ + worker_log_names = [ [ - log_dir / f"{timestamp}_{hostname}_{local_rank}.log" + f"torchrunx.agent-{i}.worker-{local_rank}" for local_rank in range(workers_per_host[i]) # type: ignore ] - for i, hostname in enumerate(self.hostnames) + for i in range(len(self.hostnames)) ] payload = LauncherPayload( @@ -208,7 +205,8 @@ def run( hostnames=self.hostnames, worker_world_size=worker_world_size, worker_global_ranks=worker_global_ranks, - worker_log_files=worker_log_files, + worker_log_names=worker_log_names, + log_host=launcher_hostname, backend=self.backend, timeout=self.timeout, ) @@ -236,9 +234,11 @@ def run( else: e += f"{v.message['message']}\n" e += f"{v.message['extraInfo']['py_callstack']}\n\n" + logger.error(f"workers threw:\n {e}") raise RuntimeError(e) except: # cleanup: SIGTERM all agents + logger.warn("agents encountered error, manually killing") for agent_pid, agent_hostname in zip(agent_pids, self.hostnames): execute_command( command=f"kill {agent_pid}", @@ -247,8 +247,10 @@ def run( ) raise finally: - print_process.kill() + 
logger.debug("killing log process") + log_process.kill() + logger.debug("returning") return_values: dict[int, Any] = dict(ChainMap(*[s.return_values for s in agent_statuses])) return return_values @@ -261,6 +263,7 @@ def launch( ssh_config_file: str | os.PathLike | None = None, backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None, log_dir: os.PathLike | str = "./logs", + propagate_logs: bool = True, env_vars: list[str] = [ "PATH", "LD_LIBRARY", @@ -291,6 +294,8 @@ def launch( :type backend: Literal['mpi', 'gloo', 'nccl', 'ucc', None], optional :param log_dir: A directory in which logs should be written, defaults to "./logs" :type log_dir: os.PathLike | str, optional + :param log_level: The logging level, defaults to logging.WARN + :type log_level: logging._Level, optional :param env_vars: A list of environmental variables to be copied from the launcher environment to workers. Allows for bash pattern matching syntax, defaults to ["PATH", "LD_LIBRARY", "LIBRARY_PATH", "PYTHON*", "CUDA*", "TORCH*", "PYTORCH*", "NCCL*"] :type env_vars: list[str], optional :param env_file: An additional environment file that will be sourced prior to executing ``func``, defaults to None @@ -307,6 +312,7 @@ def launch( ssh_config_file=ssh_config_file, backend=backend, log_dir=log_dir, + propagate_logs=propagate_logs, env_vars=env_vars, env_file=env_file, timeout=timeout, diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index 09bd2466..ae63eaf4 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -1,10 +1,15 @@ from __future__ import annotations import datetime +import logging +import logging.handlers +import pickle +import select import socket +import socketserver +import struct from contextlib import closing from dataclasses import dataclass, field -from pathlib import Path from typing import Any, Callable, Literal import cloudpickle @@ -27,7 +32,8 @@ class LauncherPayload: hostnames: list[str] worker_world_size: int worker_global_ranks: list[list[int]] - worker_log_files: list[list[Path]] + worker_log_names: list[list[str]] + log_host: str backend: Literal["mpi", "gloo", "nccl", "ucc", None] timeout: int @@ -112,3 +118,72 @@ def sync_payloads( def sync_agent_statuses(self, status: AgentStatus) -> list[AgentStatus]: return self._all_gather(object=status)[1:] + +class LogRecordStreamHandler(socketserver.StreamRequestHandler): + """Handler for a streaming logging request. + + This basically logs the record using whatever logging policy is + configured locally. + """ + + def handle(self): + """ + Handle multiple requests - each expected to be a 4-byte length, + followed by the LogRecord in pickle format. Logs the record + according to whatever policy is configured locally. + """ + while True: + chunk = self.connection.recv(4) + if len(chunk) < 4: + break + slen = struct.unpack('>L', chunk)[0] + chunk = self.connection.recv(slen) + while len(chunk) < slen: + chunk = chunk + self.connection.recv(slen - len(chunk)) + obj = self.unPickle(chunk) + record = logging.makeLogRecord(obj) + self.handleLogRecord(record) + + def unPickle(self, data): + return pickle.loads(data) + + def handleLogRecord(self, record): + # if a name is specified, we use the named logger rather than the one + # implied by the record. + if self.server.logname is not None: # type: ignore + name = self.server.logname # type: ignore + else: + name = record.name + logger = logging.getLogger(name) + # N.B. EVERY record gets logged. This is because Logger.handle + # is normally called AFTER logger-level filtering. 
If you want + # to do filtering, do it at the client end to save wasting + # cycles and network bandwidth! + if logger.getEffectiveLevel() <= record.levelno: + logger.handle(record) + +class LogRecordSocketReceiver(socketserver.ThreadingTCPServer): + """ + Simple TCP socket-based logging receiver suitable for testing. + """ + + allow_reuse_address = 1 # type: ignore + + def __init__(self, host='localhost', + port=logging.handlers.DEFAULT_TCP_LOGGING_PORT, + handler=LogRecordStreamHandler): + socketserver.ThreadingTCPServer.__init__(self, (host, port), handler) + self.abort = 0 + self.timeout = 1 + self.logname = None + + + def serve_until_stopped(self): + abort = 0 + while not abort: + rd, wr, ex = select.select([self.socket.fileno()], + [], [], + self.timeout) + if rd: + self.handle_request() + abort = self.abort From ce8480c69116df33b7da00c2e2e1a32e8b433e21 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Tue, 23 Jul 2024 17:08:47 -0400 Subject: [PATCH 02/63] change logging structure --- src/torchrunx/agent.py | 1 + src/torchrunx/launcher.py | 37 ++++++++++++++++--------- src/torchrunx/utils.py | 57 +++++++++++++++++++++++++++++++-------- 3 files changed, 71 insertions(+), 24 deletions(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index bfeb4e25..77f3bd1d 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -80,6 +80,7 @@ def entrypoint(serialized_worker_args: bytes): logger.setLevel(logging.DEBUG) logger.addHandler(socketHandler) logger.debug("creating TCPStore for worker group.") + store = dist.TCPStore( # pyright: ignore[reportPrivateImportUsage] host_name=worker_args.master_hostname, port=worker_args.master_port, diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 5ea52731..d80575b6 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -27,6 +27,7 @@ LauncherAgentGroup, LauncherPayload, LogRecordSocketReceiver, + default_logging, get_open_port, ) @@ -71,7 +72,7 @@ class Launcher: ssh_config_file: str | os.PathLike | None = None backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None log_dir: os.PathLike | str = "./logs" - propagate_logs: bool = True + log_spec: dict[str, list[logging.Handler]] | None = None env_vars: list[str] = field( default_factory=lambda: [ "PATH", @@ -106,16 +107,26 @@ def run( logger = logging.getLogger("torchrunx") logger.setLevel(logging.DEBUG) - logger.propagate = self.propagate_logs + logger.propagate = False log_dir = Path(self.log_dir) log_dir.mkdir(parents=True, exist_ok=True) - timestamp = datetime.datetime.now().isoformat(timespec="seconds") - - log_file_formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") - log_file_handler = logging.FileHandler(f"{log_dir}/{timestamp}.log") - log_file_handler.setFormatter(log_file_formatter) - logger.addHandler(log_file_handler) + #timestamp = datetime.datetime.now().isoformat(timespec="seconds") + + if self.log_spec is None: + # TODO: this assumes the type of workers_per_host is simply int. 
We should consider + # again whether it's worth supporting inhomogeneous allocations (list[int]) + self.log_spec = default_logging(num_agents=len(self.hostnames), + num_workers=self.workers_per_host, # type: ignore + log_dir=os.fspath(log_dir)) + + log_formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") + + for lname, handlers in self.log_spec.items(): # type: ignore + _logger = logging.getLogger(f"torchrunx.{lname}") + for handler in handlers: + handler.setFormatter(log_formatter) + _logger.addHandler(handler) log_process = Process(target=monitor_log, args=(), daemon=True) log_process.start() @@ -194,7 +205,7 @@ def run( worker_log_names = [ [ - f"torchrunx.agent-{i}.worker-{local_rank}" + f"torchrunx.agent-{i}-worker-{local_rank}" for local_rank in range(workers_per_host[i]) # type: ignore ] for i in range(len(self.hostnames)) @@ -263,7 +274,7 @@ def launch( ssh_config_file: str | os.PathLike | None = None, backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None, log_dir: os.PathLike | str = "./logs", - propagate_logs: bool = True, + log_spec: dict[str, list[logging.Handler]] | None = None, env_vars: list[str] = [ "PATH", "LD_LIBRARY", @@ -294,8 +305,8 @@ def launch( :type backend: Literal['mpi', 'gloo', 'nccl', 'ucc', None], optional :param log_dir: A directory in which logs should be written, defaults to "./logs" :type log_dir: os.PathLike | str, optional - :param log_level: The logging level, defaults to logging.WARN - :type log_level: logging._Level, optional + :param log_spec: TODO + :type log_spec: TODO :param env_vars: A list of environmental variables to be copied from the launcher environment to workers. Allows for bash pattern matching syntax, defaults to ["PATH", "LD_LIBRARY", "LIBRARY_PATH", "PYTHON*", "CUDA*", "TORCH*", "PYTORCH*", "NCCL*"] :type env_vars: list[str], optional :param env_file: An additional environment file that will be sourced prior to executing ``func``, defaults to None @@ -312,7 +323,7 @@ def launch( ssh_config_file=ssh_config_file, backend=backend, log_dir=log_dir, - propagate_logs=propagate_logs, + log_spec=log_spec, env_vars=env_vars, env_file=env_file, timeout=timeout, diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index ae63eaf4..744f3684 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -118,7 +118,8 @@ def sync_payloads( def sync_agent_statuses(self, status: AgentStatus) -> list[AgentStatus]: return self._all_gather(object=status)[1:] - + + class LogRecordStreamHandler(socketserver.StreamRequestHandler): """Handler for a streaming logging request. @@ -136,7 +137,7 @@ def handle(self): chunk = self.connection.recv(4) if len(chunk) < 4: break - slen = struct.unpack('>L', chunk)[0] + slen = struct.unpack(">L", chunk)[0] chunk = self.connection.recv(slen) while len(chunk) < slen: chunk = chunk + self.connection.recv(slen - len(chunk)) @@ -150,7 +151,7 @@ def unPickle(self, data): def handleLogRecord(self, record): # if a name is specified, we use the named logger rather than the one # implied by the record. - if self.server.logname is not None: # type: ignore + if self.server.logname is not None: # type: ignore name = self.server.logname # type: ignore else: name = record.name @@ -162,28 +163,62 @@ def handleLogRecord(self, record): if logger.getEffectiveLevel() <= record.levelno: logger.handle(record) + class LogRecordSocketReceiver(socketserver.ThreadingTCPServer): """ Simple TCP socket-based logging receiver suitable for testing. 
""" - allow_reuse_address = 1 # type: ignore + allow_reuse_address = 1 # type: ignore - def __init__(self, host='localhost', - port=logging.handlers.DEFAULT_TCP_LOGGING_PORT, - handler=LogRecordStreamHandler): + def __init__( + self, + host="localhost", + port=logging.handlers.DEFAULT_TCP_LOGGING_PORT, + handler=LogRecordStreamHandler, + ): socketserver.ThreadingTCPServer.__init__(self, (host, port), handler) self.abort = 0 self.timeout = 1 self.logname = None - def serve_until_stopped(self): abort = 0 while not abort: - rd, wr, ex = select.select([self.socket.fileno()], - [], [], - self.timeout) + rd, wr, ex = select.select([self.socket.fileno()], [], [], self.timeout) if rd: self.handle_request() abort = self.abort + + +def default_logging( + num_agents: int, num_workers: int, log_dir: str +) -> dict[str, list[logging.Handler]]: + """ + Generates torchrunx's default + + :param num_agents: Number of agents in work group + :type num_agents: int + :param num_workers: Number of workers per agent + :type num_workers: int + :return: A logging structure to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument + :rtype: dict[str, list[logging.Handler]] + """ + + timestamp = datetime.datetime.now().isoformat(timespec="seconds") + + agents: dict[str, list[logging.Handler]] = { + f"agent-{i}": [logging.FileHandler(f"{log_dir}/{timestamp}-agent-{i}.log")] + for i in range(num_agents) + } + workers: dict[str, list[logging.Handler]] = { + f"agent-{i}-worker-{j}": [ + logging.FileHandler(f"{log_dir}/{timestamp}-agent-{i}.worker-{j}.log") + ] + for j in range(num_workers) + for i in range(num_agents) + } + + workers["agent-0-worker-0"].append(logging.StreamHandler()) + + return {**agents, **workers} From 9c5e80495aaef3e6c78d0ea067bd6e30034fff32 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Wed, 24 Jul 2024 13:38:10 -0400 Subject: [PATCH 03/63] improvements to worker logging --- examples/slurm_poc.py | 2 -- src/torchrunx/agent.py | 23 +++++++++++++---------- src/torchrunx/utils.py | 13 +++++++++++++ 3 files changed, 26 insertions(+), 12 deletions(-) diff --git a/examples/slurm_poc.py b/examples/slurm_poc.py index c5c63779..811ed36c 100644 --- a/examples/slurm_poc.py +++ b/examples/slurm_poc.py @@ -22,8 +22,6 @@ def test_launch(): for i in range(len(result)): assert torch.all(result[i] == result[0]), "Not all tensors equal" - print(result[0]) - print("PASS") def simple_matmul(): diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 77f3bd1d..d6ecf28d 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -6,6 +6,7 @@ import os import socket import sys +import tempfile from dataclasses import dataclass from typing import Callable, Literal @@ -13,7 +14,7 @@ import torch import torch.distributed as dist from torch.distributed.elastic.multiprocessing import DefaultLogsSpecs -from torch.distributed.elastic.multiprocessing.api import MultiprocessContext, Std +from torch.distributed.elastic.multiprocessing.api import MultiprocessContext from typing_extensions import Self from .utils import ( @@ -21,6 +22,7 @@ AgentStatus, LauncherAgentGroup, LauncherPayload, + RenamingSocketHandler, get_open_port, ) @@ -73,13 +75,13 @@ def flush(self): def entrypoint(serialized_worker_args: bytes): worker_args = WorkerArgs.from_bytes(serialized_worker_args) - logger = logging.getLogger(worker_args.log_name) - # TODO: set logging level? maybe argument to launch? 
- socketHandler = logging.handlers.SocketHandler(worker_args.log_host, - logging.handlers.DEFAULT_TCP_LOGGING_PORT) + logger = logging.getLogger() logger.setLevel(logging.DEBUG) + logger.name = worker_args.log_name # overwrite root logger name + socketHandler = RenamingSocketHandler(worker_args.log_host, + logging.handlers.DEFAULT_TCP_LOGGING_PORT, + worker_args.log_name) logger.addHandler(socketHandler) - logger.debug("creating TCPStore for worker group.") store = dist.TCPStore( # pyright: ignore[reportPrivateImportUsage] host_name=worker_args.master_hostname, @@ -91,7 +93,9 @@ def entrypoint(serialized_worker_args: bytes): backend = worker_args.backend if backend is None: backend = "nccl" if torch.cuda.is_available() else "gloo" - logger.debug("initializing worker process group.") + + logging.debug(f"using backend: {backend}") + dist.init_process_group( backend=backend, world_size=worker_args.world_size, @@ -106,9 +110,8 @@ def entrypoint(serialized_worker_args: bytes): os.environ["WORLD_SIZE"] = str(worker_args.world_size) os.environ["MASTER_ADDR"] = worker_args.master_hostname os.environ["MASTER_PORT"] = str(worker_args.master_port) - logger.debug("calling user function") - logging.root = logger + logging.debug(f"executing function: {worker_args.function}") return worker_args.function() @@ -162,7 +165,7 @@ def main(launcher_agent_group: LauncherAgentGroup): for i in range(num_workers) }, envs={i: {} for i in range(num_workers)}, - logs_specs=DefaultLogsSpecs(log_dir="/dev/null"), + logs_specs=DefaultLogsSpecs(log_dir=tempfile.mkdtemp()), start_method="spawn", ) logger.debug("starting processes") diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index 744f3684..73f44a05 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -222,3 +222,16 @@ def default_logging( workers["agent-0-worker-0"].append(logging.StreamHandler()) return {**agents, **workers} + +class RenamingSocketHandler(logging.handlers.SocketHandler): + + def __init__(self, host, port, root_name): + + super().__init__(host, port) + + self.root_name = root_name + + def emit(self, record): + if not record.name.startswith(self.root_name): + record.name = f"{self.root_name}.{record.name}" + super().emit(record) \ No newline at end of file From c794bbd0df2abd6220e9207974826f23e25a1b58 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Wed, 24 Jul 2024 13:48:30 -0400 Subject: [PATCH 04/63] hostnames instead of agent-i --- src/torchrunx/agent.py | 2 +- src/torchrunx/launcher.py | 6 +++--- src/torchrunx/utils.py | 14 +++++++------- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index d6ecf28d..c571bdfb 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -129,7 +129,7 @@ def main(launcher_agent_group: LauncherAgentGroup): launcher_payload: LauncherPayload = all_payloads[0] # pyright: ignore[reportAssignmentType] main_agent_payload: AgentPayload = all_payloads[1] # pyright: ignore[reportAssignmentType] - logger = logging.getLogger(f"torchrunx.agent-{agent_rank}") + logger = logging.getLogger(f"torchrunx.{launcher_payload.hostnames[agent_rank]}") logger.setLevel(logging.DEBUG) socketHandler = logging.handlers.SocketHandler(launcher_payload.log_host, logging.handlers.DEFAULT_TCP_LOGGING_PORT) diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index d80575b6..36108e57 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -116,7 +116,7 @@ def run( if self.log_spec is None: # TODO: this assumes the type 
of workers_per_host is simply int. We should consider # again whether it's worth supporting inhomogeneous allocations (list[int]) - self.log_spec = default_logging(num_agents=len(self.hostnames), + self.log_spec = default_logging(hostnames=self.hostnames, num_workers=self.workers_per_host, # type: ignore log_dir=os.fspath(log_dir)) @@ -205,10 +205,10 @@ def run( worker_log_names = [ [ - f"torchrunx.agent-{i}-worker-{local_rank}" + f"torchrunx.{hostname}.worker-{local_rank}" for local_rank in range(workers_per_host[i]) # type: ignore ] - for i in range(len(self.hostnames)) + for i, hostname in enumerate(self.hostnames) ] payload = LauncherPayload( diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index 73f44a05..d3b38f7c 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -192,7 +192,7 @@ def serve_until_stopped(self): def default_logging( - num_agents: int, num_workers: int, log_dir: str + hostnames: list[str], num_workers: int, log_dir: str ) -> dict[str, list[logging.Handler]]: """ Generates torchrunx's default @@ -208,18 +208,18 @@ def default_logging( timestamp = datetime.datetime.now().isoformat(timespec="seconds") agents: dict[str, list[logging.Handler]] = { - f"agent-{i}": [logging.FileHandler(f"{log_dir}/{timestamp}-agent-{i}.log")] - for i in range(num_agents) + hostname: [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}.log")] + for hostname in hostnames } workers: dict[str, list[logging.Handler]] = { - f"agent-{i}-worker-{j}": [ - logging.FileHandler(f"{log_dir}/{timestamp}-agent-{i}.worker-{j}.log") + f"{hostname}.worker-{j}": [ + logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}.worker-{j}.log") ] for j in range(num_workers) - for i in range(num_agents) + for hostname in hostnames } - workers["agent-0-worker-0"].append(logging.StreamHandler()) + workers[f"{hostnames[0]}.worker-0"].append(logging.StreamHandler()) return {**agents, **workers} From 6f109ed8d00ec1f90c3c09f56d79528acfe9cad8 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Wed, 24 Jul 2024 13:50:16 -0400 Subject: [PATCH 05/63] types and formatting --- src/torchrunx/agent.py | 16 ++++++++-------- src/torchrunx/launcher.py | 16 +++++++++------- src/torchrunx/utils.py | 9 ++++----- 3 files changed, 21 insertions(+), 20 deletions(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index c571bdfb..e90d55c1 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -77,10 +77,10 @@ def entrypoint(serialized_worker_args: bytes): worker_args = WorkerArgs.from_bytes(serialized_worker_args) logger = logging.getLogger() logger.setLevel(logging.DEBUG) - logger.name = worker_args.log_name # overwrite root logger name - socketHandler = RenamingSocketHandler(worker_args.log_host, - logging.handlers.DEFAULT_TCP_LOGGING_PORT, - worker_args.log_name) + logger.name = worker_args.log_name # overwrite root logger name + socketHandler = RenamingSocketHandler( + worker_args.log_host, logging.handlers.DEFAULT_TCP_LOGGING_PORT, worker_args.log_name + ) logger.addHandler(socketHandler) store = dist.TCPStore( # pyright: ignore[reportPrivateImportUsage] @@ -93,7 +93,7 @@ def entrypoint(serialized_worker_args: bytes): backend = worker_args.backend if backend is None: backend = "nccl" if torch.cuda.is_available() else "gloo" - + logging.debug(f"using backend: {backend}") dist.init_process_group( @@ -116,7 +116,6 @@ def entrypoint(serialized_worker_args: bytes): def main(launcher_agent_group: LauncherAgentGroup): - agent_rank = launcher_agent_group.rank - 1 payload = AgentPayload( @@ 
-131,8 +130,9 @@ def main(launcher_agent_group: LauncherAgentGroup): logger = logging.getLogger(f"torchrunx.{launcher_payload.hostnames[agent_rank]}") logger.setLevel(logging.DEBUG) - socketHandler = logging.handlers.SocketHandler(launcher_payload.log_host, - logging.handlers.DEFAULT_TCP_LOGGING_PORT) + socketHandler = logging.handlers.SocketHandler( + launcher_payload.log_host, logging.handlers.DEFAULT_TCP_LOGGING_PORT + ) logger.addHandler(socketHandler) hostname = launcher_payload.hostnames[agent_rank] diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 36108e57..5af868b8 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -1,6 +1,5 @@ from __future__ import annotations -import datetime import fnmatch import ipaddress import itertools @@ -65,6 +64,7 @@ def monitor_log(): tcpserver = LogRecordSocketReceiver(host=socket.getfqdn()) tcpserver.serve_until_stopped() + @dataclass class Launcher: hostnames: list[str] = field(default_factory=lambda: ["localhost"]) @@ -111,23 +111,25 @@ def run( log_dir = Path(self.log_dir) log_dir.mkdir(parents=True, exist_ok=True) - #timestamp = datetime.datetime.now().isoformat(timespec="seconds") + # timestamp = datetime.datetime.now().isoformat(timespec="seconds") if self.log_spec is None: # TODO: this assumes the type of workers_per_host is simply int. We should consider # again whether it's worth supporting inhomogeneous allocations (list[int]) - self.log_spec = default_logging(hostnames=self.hostnames, - num_workers=self.workers_per_host, # type: ignore - log_dir=os.fspath(log_dir)) + self.log_spec = default_logging( + hostnames=self.hostnames, + num_workers=self.workers_per_host, # type: ignore + log_dir=os.fspath(log_dir), + ) log_formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") - for lname, handlers in self.log_spec.items(): # type: ignore + for lname, handlers in self.log_spec.items(): # type: ignore _logger = logging.getLogger(f"torchrunx.{lname}") for handler in handlers: handler.setFormatter(log_formatter) _logger.addHandler(handler) - + log_process = Process(target=monitor_log, args=(), daemon=True) log_process.start() diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index d3b38f7c..62afc914 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -201,9 +201,9 @@ def default_logging( :type num_agents: int :param num_workers: Number of workers per agent :type num_workers: int - :return: A logging structure to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument + :return: A logging structure to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument :rtype: dict[str, list[logging.Handler]] - """ + """ # noqa: E501 timestamp = datetime.datetime.now().isoformat(timespec="seconds") @@ -223,10 +223,9 @@ def default_logging( return {**agents, **workers} -class RenamingSocketHandler(logging.handlers.SocketHandler): +class RenamingSocketHandler(logging.handlers.SocketHandler): def __init__(self, host, port, root_name): - super().__init__(host, port) self.root_name = root_name @@ -234,4 +233,4 @@ def __init__(self, host, port, root_name): def emit(self, record): if not record.name.startswith(self.root_name): record.name = f"{self.root_name}.{record.name}" - super().emit(record) \ No newline at end of file + super().emit(record) From 5bb1933f7ae92393122c720a330e0cf69aab20bf Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Wed, 24 Jul 2024 13:51:11 -0400 Subject: [PATCH 06/63] format, again... 
--- src/torchrunx/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index 62afc914..0016f280 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -201,7 +201,7 @@ def default_logging( :type num_agents: int :param num_workers: Number of workers per agent :type num_workers: int - :return: A logging structure to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument + :return: A logging structure to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument :rtype: dict[str, list[logging.Handler]] """ # noqa: E501 From 930dfa2ba2da2d47c4d0f8d1c37d2e9bba76fdd2 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Wed, 24 Jul 2024 14:05:38 -0400 Subject: [PATCH 07/63] fix ci test --- src/torchrunx/utils.py | 7 ++++--- tests/test_CI.py | 11 ++++++----- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index 0016f280..dafbafef 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -192,7 +192,8 @@ def serve_until_stopped(self): def default_logging( - hostnames: list[str], num_workers: int, log_dir: str + hostnames: list[str], num_workers: int, log_dir: str, + stream: bool = True ) -> dict[str, list[logging.Handler]]: """ Generates torchrunx's default @@ -219,11 +220,11 @@ def default_logging( for hostname in hostnames } - workers[f"{hostnames[0]}.worker-0"].append(logging.StreamHandler()) + if stream: + workers[f"{hostnames[0]}.worker-0"].append(logging.StreamHandler()) return {**agents, **workers} - class RenamingSocketHandler(logging.handlers.SocketHandler): def __init__(self, host, port, root_name): super().__init__(host, port) diff --git a/tests/test_CI.py b/tests/test_CI.py index 6751eed7..593d1756 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -1,3 +1,4 @@ +import logging import os import shutil import sys @@ -45,7 +46,7 @@ def dist_func(): def test_logging(): def dist_func(): rank = int(os.environ["RANK"]) - print(f"worker rank: {rank}") + logging.info(f"worker rank: {rank}") try: shutil.rmtree("./test_logs") @@ -63,15 +64,15 @@ def dist_func(): for file in log_files: with open("./test_logs/" + file, "r") as f: if file.endswith("0.log"): - assert f.read() == "worker rank: 0\n" + assert "worker rank: 0\n" in f.read() elif file.endswith("1.log"): - assert f.read() == "worker rank: 1\n" + assert "worker rank: 1\n" in f.read() else: contents = f.read() assert "worker rank: 0" in contents - assert "worker rank: 1" not in contents + assert "worker rank: 1" in contents # clean up - shutil.rmtree("./test_logs") + shutil.rmtree("./test_logs", ignore_errors=True) dist.destroy_process_group() From 46e93cf5e19b618787d5d74ec7c6e2d3a5691925 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Wed, 24 Jul 2024 14:06:17 -0400 Subject: [PATCH 08/63] format --- src/torchrunx/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index dafbafef..9565eaa3 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -192,8 +192,7 @@ def serve_until_stopped(self): def default_logging( - hostnames: list[str], num_workers: int, log_dir: str, - stream: bool = True + hostnames: list[str], num_workers: int, log_dir: str, stream: bool = True ) -> dict[str, list[logging.Handler]]: """ Generates torchrunx's default @@ -225,6 +224,7 @@ def default_logging( return {**agents, **workers} + class RenamingSocketHandler(logging.handlers.SocketHandler): def 
__init__(self, host, port, root_name): super().__init__(host, port) From c466254ab85b172a883db0b8e355002a7e2f22f7 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Wed, 31 Jul 2024 12:40:39 -0400 Subject: [PATCH 09/63] refactoring --- examples/slurm_poc.py | 5 +- src/torchrunx/__init__.py | 3 +- src/torchrunx/agent.py | 28 +++--- src/torchrunx/launcher.py | 55 +++++------- src/torchrunx/log_utils.py | 170 +++++++++++++++++++++++++++++++++++++ src/torchrunx/utils.py | 125 +-------------------------- 6 files changed, 209 insertions(+), 177 deletions(-) create mode 100644 src/torchrunx/log_utils.py diff --git a/examples/slurm_poc.py b/examples/slurm_poc.py index 811ed36c..b7081051 100644 --- a/examples/slurm_poc.py +++ b/examples/slurm_poc.py @@ -1,13 +1,10 @@ import logging import os -import sys import torch import torch.distributed as dist -sys.path.append("../src") - -import torchrunx # noqa: I001 +import torchrunx # this is not a pytest test, but a functional test designed to be run on a slurm allocation diff --git a/src/torchrunx/__init__.py b/src/torchrunx/__init__.py index 8f407e3b..3548d402 100644 --- a/src/torchrunx/__init__.py +++ b/src/torchrunx/__init__.py @@ -1,4 +1,5 @@ from .launcher import Launcher, launch +from .log_utils import DefaultLogSpec, LogSpec from .slurm import slurm_hosts, slurm_workers -__all__ = ["Launcher", "launch", "slurm_hosts", "slurm_workers"] +__all__ = ["Launcher", "launch", "slurm_hosts", "slurm_workers", "LogSpec", "DefaultLogSpec"] diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index e90d55c1..2ab5e33a 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -17,12 +17,12 @@ from torch.distributed.elastic.multiprocessing.api import MultiprocessContext from typing_extensions import Self +from .log_utils import RenamingSocketHandler from .utils import ( AgentPayload, AgentStatus, LauncherAgentGroup, LauncherPayload, - RenamingSocketHandler, get_open_port, ) @@ -37,8 +37,9 @@ class WorkerArgs: local_rank: int local_world_size: int world_size: int - log_name: str + hostname: str log_host: str + log_port: int timeout: int def to_bytes(self) -> bytes: @@ -77,10 +78,10 @@ def entrypoint(serialized_worker_args: bytes): worker_args = WorkerArgs.from_bytes(serialized_worker_args) logger = logging.getLogger() logger.setLevel(logging.DEBUG) - logger.name = worker_args.log_name # overwrite root logger name - socketHandler = RenamingSocketHandler( - worker_args.log_host, logging.handlers.DEFAULT_TCP_LOGGING_PORT, worker_args.log_name + logger.name = ( + f"torchrunx.{worker_args.hostname}.{worker_args.local_rank}" # overwrite root logger name ) + socketHandler = RenamingSocketHandler(worker_args.log_host, worker_args.log_port, logger.name) logger.addHandler(socketHandler) store = dist.TCPStore( # pyright: ignore[reportPrivateImportUsage] @@ -128,19 +129,19 @@ def main(launcher_agent_group: LauncherAgentGroup): launcher_payload: LauncherPayload = all_payloads[0] # pyright: ignore[reportAssignmentType] main_agent_payload: AgentPayload = all_payloads[1] # pyright: ignore[reportAssignmentType] + hostname = launcher_payload.hostnames[agent_rank] + worker_world_size = launcher_payload.worker_world_size + worker_global_ranks = launcher_payload.worker_global_ranks[agent_rank] + num_workers = len(worker_global_ranks) + logger = logging.getLogger(f"torchrunx.{launcher_payload.hostnames[agent_rank]}") logger.setLevel(logging.DEBUG) socketHandler = logging.handlers.SocketHandler( - launcher_payload.log_host, logging.handlers.DEFAULT_TCP_LOGGING_PORT + 
launcher_payload.log_host, + launcher_payload.log_port, ) logger.addHandler(socketHandler) - hostname = launcher_payload.hostnames[agent_rank] - worker_world_size = launcher_payload.worker_world_size - worker_global_ranks = launcher_payload.worker_global_ranks[agent_rank] - worker_log_names = launcher_payload.worker_log_names[agent_rank] - num_workers = len(worker_global_ranks) - # spawn workers ctx = MultiprocessContext( @@ -157,8 +158,9 @@ def main(launcher_agent_group: LauncherAgentGroup): local_rank=i, local_world_size=num_workers, world_size=worker_world_size, - log_name=worker_log_names[i], + hostname=launcher_payload.hostnames[agent_rank], log_host=launcher_payload.log_host, + log_port=launcher_payload.log_port, timeout=launcher_payload.timeout, ).to_bytes(), ) diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 5af868b8..78f32c8e 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -14,19 +14,17 @@ from dataclasses import dataclass, field from functools import partial from multiprocessing import Process -from pathlib import Path from typing import Any, Callable, Literal import fabric import torch.distributed as dist +from .log_utils import DefaultLogSpec, LogRecordSocketReceiver, LogSpec from .utils import ( AgentPayload, AgentStatus, LauncherAgentGroup, LauncherPayload, - LogRecordSocketReceiver, - default_logging, get_open_port, ) @@ -60,9 +58,15 @@ def execute_command( conn.run(f"{command} >> /dev/null 2>&1 &", asynchronous=True) -def monitor_log(): - tcpserver = LogRecordSocketReceiver(host=socket.getfqdn()) - tcpserver.serve_until_stopped() +def monitor_log(log_spec: LogSpec, port: int): + log_formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") + for lname, handlers in log_spec.get_map().items(): # type: ignore + _logger = logging.getLogger(f"torchrunx.{lname}") + for handler in handlers: + handler.setFormatter(log_formatter) + _logger.addHandler(handler) + + LogRecordSocketReceiver(host=socket.getfqdn(), port=port).serve_until_stopped() @dataclass @@ -71,8 +75,7 @@ class Launcher: workers_per_host: int | list[int] = 1 ssh_config_file: str | os.PathLike | None = None backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None - log_dir: os.PathLike | str = "./logs" - log_spec: dict[str, list[logging.Handler]] | None = None + log_spec: LogSpec | None = None env_vars: list[str] = field( default_factory=lambda: [ "PATH", @@ -109,28 +112,19 @@ def run( logger.setLevel(logging.DEBUG) logger.propagate = False - log_dir = Path(self.log_dir) - log_dir.mkdir(parents=True, exist_ok=True) + # log_dir = Path(self.log_dir) + # log_dir.mkdir(parents=True, exist_ok=True) # timestamp = datetime.datetime.now().isoformat(timespec="seconds") if self.log_spec is None: # TODO: this assumes the type of workers_per_host is simply int. 
We should consider # again whether it's worth supporting inhomogeneous allocations (list[int]) - self.log_spec = default_logging( - hostnames=self.hostnames, - num_workers=self.workers_per_host, # type: ignore - log_dir=os.fspath(log_dir), - ) - - log_formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") + self.log_spec = DefaultLogSpec.basic( + hostnames=self.hostnames, num_workers=self.workers_per_host # type: ignore + ) - for lname, handlers in self.log_spec.items(): # type: ignore - _logger = logging.getLogger(f"torchrunx.{lname}") - for handler in handlers: - handler.setFormatter(log_formatter) - _logger.addHandler(handler) - - log_process = Process(target=monitor_log, args=(), daemon=True) + log_port = get_open_port() + log_process = Process(target=monitor_log, args=(self.log_spec, log_port), daemon=True) log_process.start() if not dist.is_available(): @@ -205,21 +199,13 @@ def run( host_ranks = range(_cumulative_workers[n], _cumulative_workers[n + 1]) worker_global_ranks.append(list(host_ranks)) - worker_log_names = [ - [ - f"torchrunx.{hostname}.worker-{local_rank}" - for local_rank in range(workers_per_host[i]) # type: ignore - ] - for i, hostname in enumerate(self.hostnames) - ] - payload = LauncherPayload( fn=partial(func, **func_kwargs), hostnames=self.hostnames, worker_world_size=worker_world_size, worker_global_ranks=worker_global_ranks, - worker_log_names=worker_log_names, log_host=launcher_hostname, + log_port=log_port, backend=self.backend, timeout=self.timeout, ) @@ -276,7 +262,7 @@ def launch( ssh_config_file: str | os.PathLike | None = None, backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None, log_dir: os.PathLike | str = "./logs", - log_spec: dict[str, list[logging.Handler]] | None = None, + log_spec: LogSpec | None = None, env_vars: list[str] = [ "PATH", "LD_LIBRARY", @@ -324,7 +310,6 @@ def launch( workers_per_host=workers_per_host, ssh_config_file=ssh_config_file, backend=backend, - log_dir=log_dir, log_spec=log_spec, env_vars=env_vars, env_file=env_file, diff --git a/src/torchrunx/log_utils.py b/src/torchrunx/log_utils.py new file mode 100644 index 00000000..bc36983a --- /dev/null +++ b/src/torchrunx/log_utils.py @@ -0,0 +1,170 @@ +from __future__ import annotations + +import datetime +import logging +import logging.handlers +import pickle +import select +import socketserver +import struct +from abc import ABC, abstractmethod +from collections import defaultdict + + +class LogRecordStreamHandler(socketserver.StreamRequestHandler): + """Handler for a streaming logging request. + + This basically logs the record using whatever logging policy is + configured locally. + """ + + def handle(self): + """ + Handle multiple requests - each expected to be a 4-byte length, + followed by the LogRecord in pickle format. Logs the record + according to whatever policy is configured locally. + """ + while True: + chunk = self.connection.recv(4) + if len(chunk) < 4: + break + slen = struct.unpack(">L", chunk)[0] + chunk = self.connection.recv(slen) + while len(chunk) < slen: + chunk = chunk + self.connection.recv(slen - len(chunk)) + obj = self.unPickle(chunk) + record = logging.makeLogRecord(obj) + self.handleLogRecord(record) + + def unPickle(self, data): + return pickle.loads(data) + + def handleLogRecord(self, record): + # if a name is specified, we use the named logger rather than the one + # implied by the record. 
+ if self.server.logname is not None: # type: ignore + name = self.server.logname # type: ignore + else: + name = record.name + logger = logging.getLogger(name) + # N.B. EVERY record gets logged. This is because Logger.handle + # is normally called AFTER logger-level filtering. If you want + # to do filtering, do it at the client end to save wasting + # cycles and network bandwidth! + if logger.getEffectiveLevel() <= record.levelno: + logger.handle(record) + + +class LogRecordSocketReceiver(socketserver.ThreadingTCPServer): + """ + Simple TCP socket-based logging receiver suitable for testing. + """ + + allow_reuse_address = 1 # type: ignore + + def __init__( + self, + host="localhost", + port=logging.handlers.DEFAULT_TCP_LOGGING_PORT, + handler=LogRecordStreamHandler, + ): + socketserver.ThreadingTCPServer.__init__(self, (host, port), handler) + self.abort = 0 + self.timeout = 1 + self.logname = None + + def serve_until_stopped(self): + abort = 0 + while not abort: + rd, wr, ex = select.select([self.socket.fileno()], [], [], self.timeout) + if rd: + self.handle_request() + abort = self.abort + + +class RenamingSocketHandler(logging.handlers.SocketHandler): + def __init__(self, host, port, root_name): + super().__init__(host, port) + + self.root_name = root_name + + def emit(self, record): + if not record.name.startswith(self.root_name): + record.name = f"{self.root_name}.{record.name}" + super().emit(record) + + +class LogSpec(ABC): + @abstractmethod + def get_map(self) -> dict[str, list[logging.Handler]]: + """ + Called by torchrunx.launch on the log_spec argument. + """ + raise NotImplementedError + + +class DefaultLogSpec(LogSpec): + def __init__(self, log_spec_dict: dict[str, list[logging.Handler]]): + self.log_spec_dict = log_spec_dict + + @classmethod + def basic( + cls, hostnames: list[str], num_workers: int, log_dir: str = "./logs", stream: bool = True + ) -> DefaultLogSpec: + """ + Generates torchrunx's default LogSpec + + :param hostnames: The node hostnames + :type hostnames: list[str] + :param num_agents: Number of agents in work group + :type num_agents: int + :param num_workers: Number of workers per agent + :type num_workers: int + :return: A logging structure to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument + :rtype: dict[str, list[logging.Handler]] + """ # noqa: E501 + + timestamp = datetime.datetime.now().isoformat(timespec="seconds") + + agents: dict[str, list[logging.Handler]] = { + hostname: [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}.log")] + for hostname in hostnames + } + workers: dict[str, list[logging.Handler]] = { + f"{hostname}.{j}": [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}.{j}.log")] + for j in range(num_workers) + for hostname in hostnames + } + + if stream: + workers[f"{hostnames[0]}.0"].append(logging.StreamHandler()) + + return cls({**agents, **workers}) + + @classmethod + def from_file_map( + cls, file_map: dict[str, list[str]], log_dir: str = "./logs" + ) -> DefaultLogSpec: + """ + Generates DefaultLogSpec from a mapping of filenames to worker/agent names that should be logged there. + + :param file_map: A dictionary mapping file suffixes (filenames will be prefixed with a timestamp) to worker and agent names. 
+ :type file_map: dict[str, str] + :return: Returns an accordingly constructed DefaultLogSpec + :rtype: DefaultLogSpec + """ # noqa: E501 + + reverse_map: defaultdict[str, list[logging.Handler]] = defaultdict(lambda: []) + + timestamp = datetime.datetime.now().isoformat(timespec="seconds") + + for file_suffix, loggers in file_map.items(): + for logger in loggers: + reverse_map[logger].append( + logging.FileHandler(f"{log_dir}/{timestamp}-{file_suffix}") + ) + + return DefaultLogSpec(reverse_map) # re-typing + + def get_map(self): + return self.log_spec_dict diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index 9565eaa3..30ae812f 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -1,13 +1,7 @@ from __future__ import annotations import datetime -import logging -import logging.handlers -import pickle -import select import socket -import socketserver -import struct from contextlib import closing from dataclasses import dataclass, field from typing import Any, Callable, Literal @@ -32,8 +26,8 @@ class LauncherPayload: hostnames: list[str] worker_world_size: int worker_global_ranks: list[list[int]] - worker_log_names: list[list[str]] log_host: str + log_port: int backend: Literal["mpi", "gloo", "nccl", "ucc", None] timeout: int @@ -118,120 +112,3 @@ def sync_payloads( def sync_agent_statuses(self, status: AgentStatus) -> list[AgentStatus]: return self._all_gather(object=status)[1:] - - -class LogRecordStreamHandler(socketserver.StreamRequestHandler): - """Handler for a streaming logging request. - - This basically logs the record using whatever logging policy is - configured locally. - """ - - def handle(self): - """ - Handle multiple requests - each expected to be a 4-byte length, - followed by the LogRecord in pickle format. Logs the record - according to whatever policy is configured locally. - """ - while True: - chunk = self.connection.recv(4) - if len(chunk) < 4: - break - slen = struct.unpack(">L", chunk)[0] - chunk = self.connection.recv(slen) - while len(chunk) < slen: - chunk = chunk + self.connection.recv(slen - len(chunk)) - obj = self.unPickle(chunk) - record = logging.makeLogRecord(obj) - self.handleLogRecord(record) - - def unPickle(self, data): - return pickle.loads(data) - - def handleLogRecord(self, record): - # if a name is specified, we use the named logger rather than the one - # implied by the record. - if self.server.logname is not None: # type: ignore - name = self.server.logname # type: ignore - else: - name = record.name - logger = logging.getLogger(name) - # N.B. EVERY record gets logged. This is because Logger.handle - # is normally called AFTER logger-level filtering. If you want - # to do filtering, do it at the client end to save wasting - # cycles and network bandwidth! - if logger.getEffectiveLevel() <= record.levelno: - logger.handle(record) - - -class LogRecordSocketReceiver(socketserver.ThreadingTCPServer): - """ - Simple TCP socket-based logging receiver suitable for testing. 
- """ - - allow_reuse_address = 1 # type: ignore - - def __init__( - self, - host="localhost", - port=logging.handlers.DEFAULT_TCP_LOGGING_PORT, - handler=LogRecordStreamHandler, - ): - socketserver.ThreadingTCPServer.__init__(self, (host, port), handler) - self.abort = 0 - self.timeout = 1 - self.logname = None - - def serve_until_stopped(self): - abort = 0 - while not abort: - rd, wr, ex = select.select([self.socket.fileno()], [], [], self.timeout) - if rd: - self.handle_request() - abort = self.abort - - -def default_logging( - hostnames: list[str], num_workers: int, log_dir: str, stream: bool = True -) -> dict[str, list[logging.Handler]]: - """ - Generates torchrunx's default - - :param num_agents: Number of agents in work group - :type num_agents: int - :param num_workers: Number of workers per agent - :type num_workers: int - :return: A logging structure to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument - :rtype: dict[str, list[logging.Handler]] - """ # noqa: E501 - - timestamp = datetime.datetime.now().isoformat(timespec="seconds") - - agents: dict[str, list[logging.Handler]] = { - hostname: [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}.log")] - for hostname in hostnames - } - workers: dict[str, list[logging.Handler]] = { - f"{hostname}.worker-{j}": [ - logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}.worker-{j}.log") - ] - for j in range(num_workers) - for hostname in hostnames - } - - if stream: - workers[f"{hostnames[0]}.worker-0"].append(logging.StreamHandler()) - - return {**agents, **workers} - - -class RenamingSocketHandler(logging.handlers.SocketHandler): - def __init__(self, host, port, root_name): - super().__init__(host, port) - - self.root_name = root_name - - def emit(self, record): - if not record.name.startswith(self.root_name): - record.name = f"{self.root_name}.{record.name}" - super().emit(record) From b366b32729c1971c4f540341df92b41047c74df8 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Wed, 31 Jul 2024 13:34:10 -0400 Subject: [PATCH 10/63] stdout logging --- examples/slurm_poc.py | 3 +-- src/torchrunx/agent.py | 5 ++++- src/torchrunx/launcher.py | 23 ++++++++++++----------- src/torchrunx/log_utils.py | 20 ++++++++++++++++++++ 4 files changed, 37 insertions(+), 14 deletions(-) diff --git a/examples/slurm_poc.py b/examples/slurm_poc.py index b7081051..aa00cb2a 100644 --- a/examples/slurm_poc.py +++ b/examples/slurm_poc.py @@ -1,4 +1,3 @@ -import logging import os import torch @@ -37,7 +36,7 @@ def simple_matmul(): o = torch.matmul(i, w) dist.all_reduce(o, op=dist.ReduceOp.SUM) - logging.info(i) + print(i) return o.detach().cpu() diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 2ab5e33a..dc03bab4 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -17,7 +17,7 @@ from torch.distributed.elastic.multiprocessing.api import MultiprocessContext from typing_extensions import Self -from .log_utils import RenamingSocketHandler +from .log_utils import RenamingSocketHandler, StreamLogger from .utils import ( AgentPayload, AgentStatus, @@ -84,6 +84,9 @@ def entrypoint(serialized_worker_args: bytes): socketHandler = RenamingSocketHandler(worker_args.log_host, worker_args.log_port, logger.name) logger.addHandler(socketHandler) + sys.stdout = StreamLogger(logger, sys.__stdout__) + sys.stderr = StreamLogger(logger, sys.__stderr__) + store = dist.TCPStore( # pyright: ignore[reportPrivateImportUsage] host_name=worker_args.master_hostname, port=worker_args.master_port, diff --git a/src/torchrunx/launcher.py 
b/src/torchrunx/launcher.py index 78f32c8e..930631df 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -58,12 +58,11 @@ def execute_command( conn.run(f"{command} >> /dev/null 2>&1 &", asynchronous=True) -def monitor_log(log_spec: LogSpec, port: int): - log_formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") +def monitor_log(log_spec: LogSpec, port: int, formatter: logging.Formatter): for lname, handlers in log_spec.get_map().items(): # type: ignore _logger = logging.getLogger(f"torchrunx.{lname}") for handler in handlers: - handler.setFormatter(log_formatter) + handler.setFormatter(formatter) _logger.addHandler(handler) LogRecordSocketReceiver(host=socket.getfqdn(), port=port).serve_until_stopped() @@ -111,6 +110,10 @@ def run( logger = logging.getLogger("torchrunx") logger.setLevel(logging.DEBUG) logger.propagate = False + logger.parent = None + + formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") + # logger. # log_dir = Path(self.log_dir) # log_dir.mkdir(parents=True, exist_ok=True) @@ -120,11 +123,14 @@ def run( # TODO: this assumes the type of workers_per_host is simply int. We should consider # again whether it's worth supporting inhomogeneous allocations (list[int]) self.log_spec = DefaultLogSpec.basic( - hostnames=self.hostnames, num_workers=self.workers_per_host # type: ignore - ) + hostnames=self.hostnames, + num_workers=self.workers_per_host, # type: ignore + ) log_port = get_open_port() - log_process = Process(target=monitor_log, args=(self.log_spec, log_port), daemon=True) + log_process = Process( + target=monitor_log, args=(self.log_spec, log_port, formatter), daemon=True + ) log_process.start() if not dist.is_available(): @@ -169,7 +175,6 @@ def run( # rank set in the loop below ) - logger.debug("starting agents") # start agents on each node for i, hostname in enumerate(self.hostnames): execute_command( @@ -233,11 +238,9 @@ def run( else: e += f"{v.message['message']}\n" e += f"{v.message['extraInfo']['py_callstack']}\n\n" - logger.error(f"workers threw:\n {e}") raise RuntimeError(e) except: # cleanup: SIGTERM all agents - logger.warn("agents encountered error, manually killing") for agent_pid, agent_hostname in zip(agent_pids, self.hostnames): execute_command( command=f"kill {agent_pid}", @@ -246,10 +249,8 @@ def run( ) raise finally: - logger.debug("killing log process") log_process.kill() - logger.debug("returning") return_values: dict[int, Any] = dict(ChainMap(*[s.return_values for s in agent_statuses])) return return_values diff --git a/src/torchrunx/log_utils.py b/src/torchrunx/log_utils.py index bc36983a..7a8e150c 100644 --- a/src/torchrunx/log_utils.py +++ b/src/torchrunx/log_utils.py @@ -9,6 +9,7 @@ import struct from abc import ABC, abstractmethod from collections import defaultdict +from io import StringIO, TextIOWrapper class LogRecordStreamHandler(socketserver.StreamRequestHandler): @@ -168,3 +169,22 @@ def from_file_map( def get_map(self): return self.log_spec_dict + +class StreamLogger: + def __init__(self, logger: logging.Logger, stream: TextIOWrapper | None): + self.logger = logger + self._string_io = StringIO() + if stream is None: + raise ValueError("stream cannot be None") + self.stream: TextIOWrapper = stream # type: ignore + + def write(self, data: str): + self._string_io.write(data) + self.stream.write(data) + + def flush(self): + value = self._string_io.getvalue() + if value != "": + self.logger.info(f"\n{value}") + self._string_io = StringIO() # "create a new one, it's 
faster" - someone online + self.stream.flush() \ No newline at end of file From 1c3f8f92300139283f9826c5b0b53ef992856eb6 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Mon, 12 Aug 2024 12:50:56 -0400 Subject: [PATCH 11/63] create log directory, fix CI test --- src/torchrunx/launcher.py | 3 --- src/torchrunx/log_utils.py | 20 +++++++++++++------- tests/test_CI.py | 12 ++++++++---- 3 files changed, 21 insertions(+), 14 deletions(-) diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 930631df..52ec6a2e 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -262,7 +262,6 @@ def launch( workers_per_host: int | list[int] = 1, ssh_config_file: str | os.PathLike | None = None, backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None, - log_dir: os.PathLike | str = "./logs", log_spec: LogSpec | None = None, env_vars: list[str] = [ "PATH", @@ -292,8 +291,6 @@ def launch( :type ssh_config_file: str | os.PathLike | None, optional :param backend: A ``torch.distributed`` `backend string `_, defaults to None :type backend: Literal['mpi', 'gloo', 'nccl', 'ucc', None], optional - :param log_dir: A directory in which logs should be written, defaults to "./logs" - :type log_dir: os.PathLike | str, optional :param log_spec: TODO :type log_spec: TODO :param env_vars: A list of environmental variables to be copied from the launcher environment to workers. Allows for bash pattern matching syntax, defaults to ["PATH", "LD_LIBRARY", "LIBRARY_PATH", "PYTHON*", "CUDA*", "TORCH*", "PYTORCH*", "NCCL*"] diff --git a/src/torchrunx/log_utils.py b/src/torchrunx/log_utils.py index 7a8e150c..4943c5f1 100644 --- a/src/torchrunx/log_utils.py +++ b/src/torchrunx/log_utils.py @@ -10,6 +10,7 @@ from abc import ABC, abstractmethod from collections import defaultdict from io import StringIO, TextIOWrapper +from pathlib import Path class LogRecordStreamHandler(socketserver.StreamRequestHandler): @@ -123,10 +124,12 @@ def basic( :type num_workers: int :return: A logging structure to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument :rtype: dict[str, list[logging.Handler]] - """ # noqa: E501 + """ # noqa: E501 timestamp = datetime.datetime.now().isoformat(timespec="seconds") + Path(log_dir).mkdir(parents=True, exist_ok=True) + agents: dict[str, list[logging.Handler]] = { hostname: [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}.log")] for hostname in hostnames @@ -153,12 +156,14 @@ def from_file_map( :type file_map: dict[str, str] :return: Returns an accordingly constructed DefaultLogSpec :rtype: DefaultLogSpec - """ # noqa: E501 + """ # noqa: E501 reverse_map: defaultdict[str, list[logging.Handler]] = defaultdict(lambda: []) timestamp = datetime.datetime.now().isoformat(timespec="seconds") + Path(log_dir).mkdir(parents=True, exist_ok=True) + for file_suffix, loggers in file_map.items(): for logger in loggers: reverse_map[logger].append( @@ -169,15 +174,16 @@ def from_file_map( def get_map(self): return self.log_spec_dict - + + class StreamLogger: def __init__(self, logger: logging.Logger, stream: TextIOWrapper | None): self.logger = logger self._string_io = StringIO() if stream is None: raise ValueError("stream cannot be None") - self.stream: TextIOWrapper = stream # type: ignore - + self.stream: TextIOWrapper = stream # type: ignore + def write(self, data: str): self._string_io.write(data) self.stream.write(data) @@ -186,5 +192,5 @@ def flush(self): value = self._string_io.getvalue() if value != "": self.logger.info(f"\n{value}") - self._string_io = StringIO() # 
"create a new one, it's faster" - someone online - self.stream.flush() \ No newline at end of file + self._string_io = StringIO() # "create a new one, it's faster" - someone online + self.stream.flush() diff --git a/tests/test_CI.py b/tests/test_CI.py index 593d1756..cb95c521 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -1,14 +1,12 @@ import logging import os import shutil -import sys import torch import torch.distributed as dist -sys.path.append("../src") - import torchrunx # noqa: I001 +from torchrunx.log_utils import DefaultLogSpec def test_simple_localhost(): @@ -54,7 +52,13 @@ def dist_func(): pass torchrunx.launch( - func=dist_func, func_kwargs={}, workers_per_host=2, backend="gloo", log_dir="./test_logs" + func=dist_func, + func_kwargs={}, + workers_per_host=2, + backend="gloo", + log_spec=DefaultLogSpec.basic( + hostnames=["localhost"], num_workers=2, log_dir="./test_logs" + ), ) log_files = next(os.walk("./test_logs"), (None, None, []))[2] From 5c8442848a5e4df337237177d2ca3ad1686441c2 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Tue, 13 Aug 2024 14:08:12 -0400 Subject: [PATCH 12/63] docstring, switch log naming --- src/torchrunx/agent.py | 2 +- src/torchrunx/launcher.py | 4 ++-- src/torchrunx/log_utils.py | 7 +++++-- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index dc03bab4..b9aef265 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -79,7 +79,7 @@ def entrypoint(serialized_worker_args: bytes): logger = logging.getLogger() logger.setLevel(logging.DEBUG) logger.name = ( - f"torchrunx.{worker_args.hostname}.{worker_args.local_rank}" # overwrite root logger name + f"torchrunx.{worker_args.hostname}[{worker_args.local_rank}]" # overwrite root logger name ) socketHandler = RenamingSocketHandler(worker_args.log_host, worker_args.log_port, logger.name) logger.addHandler(socketHandler) diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 52ec6a2e..9e545d5a 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -291,8 +291,8 @@ def launch( :type ssh_config_file: str | os.PathLike | None, optional :param backend: A ``torch.distributed`` `backend string `_, defaults to None :type backend: Literal['mpi', 'gloo', 'nccl', 'ucc', None], optional - :param log_spec: TODO - :type log_spec: TODO + :param log_spec: A :mod:torchrunx.LogSpec object specifying how to log the run. + :type log_spec: torchrunx.LogSpec :param env_vars: A list of environmental variables to be copied from the launcher environment to workers. 
Allows for bash pattern matching syntax, defaults to ["PATH", "LD_LIBRARY", "LIBRARY_PATH", "PYTHON*", "CUDA*", "TORCH*", "PYTORCH*", "NCCL*"] :type env_vars: list[str], optional :param env_file: An additional environment file that will be sourced prior to executing ``func``, defaults to None diff --git a/src/torchrunx/log_utils.py b/src/torchrunx/log_utils.py index 4943c5f1..7f4769d7 100644 --- a/src/torchrunx/log_utils.py +++ b/src/torchrunx/log_utils.py @@ -135,13 +135,13 @@ def basic( for hostname in hostnames } workers: dict[str, list[logging.Handler]] = { - f"{hostname}.{j}": [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}.{j}.log")] + f"{hostname}[{j}]": [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}[{j}].log")] for j in range(num_workers) for hostname in hostnames } if stream: - workers[f"{hostnames[0]}.0"].append(logging.StreamHandler()) + workers[f"{hostnames[0]}[0]"].append(logging.StreamHandler()) return cls({**agents, **workers}) @@ -177,6 +177,9 @@ def get_map(self): class StreamLogger: + """ + For logging write calls to streams such as stdout and stdin in the worker processes. + """ def __init__(self, logger: logging.Logger, stream: TextIOWrapper | None): self.logger = logger self._string_io = StringIO() From 621c116f46574915475ab1e6c74bab45f4a8bcb6 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Tue, 13 Aug 2024 14:10:48 -0400 Subject: [PATCH 13/63] fix docstring --- src/torchrunx/launcher.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 9e545d5a..14d7829b 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -291,7 +291,7 @@ def launch( :type ssh_config_file: str | os.PathLike | None, optional :param backend: A ``torch.distributed`` `backend string `_, defaults to None :type backend: Literal['mpi', 'gloo', 'nccl', 'ucc', None], optional - :param log_spec: A :mod:torchrunx.LogSpec object specifying how to log the run. + :param log_spec: A :mod:`torchrunx.LogSpec` object specifying how to log the run. :type log_spec: torchrunx.LogSpec :param env_vars: A list of environmental variables to be copied from the launcher environment to workers. Allows for bash pattern matching syntax, defaults to ["PATH", "LD_LIBRARY", "LIBRARY_PATH", "PYTHON*", "CUDA*", "TORCH*", "PYTORCH*", "NCCL*"] :type env_vars: list[str], optional From 777917e15003939809fe12f0c2f3b9c38b18d592 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Fri, 16 Aug 2024 14:58:26 -0400 Subject: [PATCH 14/63] docstrings and docs --- docs/source/advanced.rst | 7 ++++++- src/torchrunx/launcher.py | 2 +- src/torchrunx/log_utils.py | 6 +++--- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/docs/source/advanced.rst b/docs/source/advanced.rst index 45f08a74..5b43f312 100644 --- a/docs/source/advanced.rst +++ b/docs/source/advanced.rst @@ -22,8 +22,13 @@ In addition to ``torchrunx.launch``, we provide the ``torchrunx.Launcher`` datac Logging ------- -All logs are generated in the folder provided as the ``logs`` argument to :mod:`torchrunx.launch`. Each worker agent generates a log, named based on the current date and time, followed by the agent hostname. Each worker also has a log, named identically to their agent's log file except for the addition of the worker's local rank at the end of the name. Each agent includes the output from local worker 0 in its log. The launcher renders agent 0's log to ``stdout`` in real time. 
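+In short, every agent and worker writes through a standard :mod:`logging` logger, and a *log specification* decides which :mod:`logging.Handler` objects receive each logger's records. The sketch below is illustrative only (the subclass name and handler choices are invented for this example, and it assumes ``LogSpec`` is importable from the top-level package as the autoclass directives below suggest); the paragraphs that follow describe the actual interface and defaults.
+
+.. code-block:: python
+
+    import logging
+    import torchrunx
+
+    class StreamEverythingLogSpec(torchrunx.LogSpec):  # hypothetical subclass
+        def __init__(self, hostnames, workers_per_host):
+            # agent loggers are named "{hostname}", worker loggers "{hostname}[{local rank}]"
+            self.names = list(hostnames) + [
+                f"{host}[{rank}]" for host in hostnames for rank in range(workers_per_host)
+            ]
+
+        def get_map(self) -> dict[str, list[logging.Handler]]:
+            # send every agent's and worker's records to the launcher's stdout
+            return {name: [logging.StreamHandler()] for name in self.names}
+
+    # e.g. torchrunx.launch(func=main, func_kwargs={}, workers_per_host=2,
+    #                       log_spec=StreamEverythingLogSpec(["localhost"], 2))
+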
+Logs are generated at the worker and agent level, and are specified to :mod:`torchrunx.launch` via the ``log_spec`` argument. By default, a :mod:`torchrunx.DefaultLogSpec` is instantiated, causing logs at the worker and agent levels to be logged to files under ``'./logs'``, and the rank 0 worker's output streams are streamed to the launcher ``stdout``. Logs are prefixed with a timestamp by default. Agent logs have the format ``{timestamp}-{agent hostname}.log`` and workers have the format ``{timestamp}-{agent hostname}[{worker local rank}].log``. +Custom logging classes can be subclassed from the :mod:`torchrunx.LogSpec` class. Any subclass must have a ``get_map`` method returning a dictionary mapping logger names to lists of :mod:`logging.Handler` objects, in order to be passed to :mod:`torchrunx.launch`. The logger names are of the format ``{agent hostname}`` for agents and ``{agent hostname}[{worker local rank}]`` for workers. The :mod:`torchrunx.DefaultLogSpec` maps all the loggers to :mod:`logging.Filehandler` object pointing to the files mentioned in the previous paragraph. It additionally maps the global rank 0 worker to a :mod:`logging.StreamHandler`, which writes logs the launcher's ``stdout`` stream. + +Check out the interface of the :mod:`torchrunx.DefaultLogSpec` object below: + +.. autoclass:: torchrunx.DefaultLogSpec .. TODO: example log structure diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 14d7829b..43e6cdf1 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -291,7 +291,7 @@ def launch( :type ssh_config_file: str | os.PathLike | None, optional :param backend: A ``torch.distributed`` `backend string `_, defaults to None :type backend: Literal['mpi', 'gloo', 'nccl', 'ucc', None], optional - :param log_spec: A :mod:`torchrunx.LogSpec` object specifying how to log the run. + :param log_spec: A :mod:`torchrunx.LogSpec` object specifying how to log the run. When left empty, a :mod:`torchrunx.DefaultLogSpec` is constructed, defaults to None :type log_spec: torchrunx.LogSpec :param env_vars: A list of environmental variables to be copied from the launcher environment to workers. Allows for bash pattern matching syntax, defaults to ["PATH", "LD_LIBRARY", "LIBRARY_PATH", "PYTHON*", "CUDA*", "TORCH*", "PYTORCH*", "NCCL*"] :type env_vars: list[str], optional diff --git a/src/torchrunx/log_utils.py b/src/torchrunx/log_utils.py index 7f4769d7..450d31c2 100644 --- a/src/torchrunx/log_utils.py +++ b/src/torchrunx/log_utils.py @@ -122,8 +122,8 @@ def basic( :type num_agents: int :param num_workers: Number of workers per agent :type num_workers: int - :return: A logging structure to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument - :rtype: dict[str, list[logging.Handler]] + :return: A DefaultLogSpec object to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument + :rtype: DefaultLogSpec """ # noqa: E501 timestamp = datetime.datetime.now().isoformat(timespec="seconds") @@ -154,7 +154,7 @@ def from_file_map( :param file_map: A dictionary mapping file suffixes (filenames will be prefixed with a timestamp) to worker and agent names. 
:type file_map: dict[str, str] - :return: Returns an accordingly constructed DefaultLogSpec + :return: A DefaultLogSpec object to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument :rtype: DefaultLogSpec """ # noqa: E501 From 83231b6ee4bfb35e595ef15dc520aa34403fc5fc Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Fri, 16 Aug 2024 15:11:08 -0400 Subject: [PATCH 15/63] use autodoc --- docs/source/advanced.rst | 11 ++++++++--- src/torchrunx/log_utils.py | 9 +++++++++ 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/docs/source/advanced.rst b/docs/source/advanced.rst index 5b43f312..022c0533 100644 --- a/docs/source/advanced.rst +++ b/docs/source/advanced.rst @@ -16,8 +16,8 @@ In addition to ``torchrunx.launch``, we provide the ``torchrunx.Launcher`` datac launcher.run(distributed_function, {}) .. autoclass:: torchrunx.Launcher - -.. autofunction:: torchrunx.Launcher.run + :members: +.. .. autofunction:: torchrunx.Launcher.run Logging ------- @@ -29,13 +29,18 @@ Custom logging classes can be subclassed from the :mod:`torchrunx.LogSpec` class Check out the interface of the :mod:`torchrunx.DefaultLogSpec` object below: .. autoclass:: torchrunx.DefaultLogSpec + :members: + +.. autoclass:: torchrunx.LogSpec + :members: + .. TODO: example log structure Worker environment ------------------ -The :mod:`torchrunx.launch` ``env_vars`` argument allows the user to specify which evnironmental variables should be copied to the agents from the launcher environment. By default, it attempts to copy variables related to Python and important packages/technologies that **torchrunx** uses such as PyTorch, NCCL, CUDA, and more. Strings provided are matched with the names of environmental variables using ``fnmatch`` - standard UNIX filename pattern matching. The variables are inserted into the agent environments, and then copied to workers' environments when they are spawned. +The :mod:`torchrunx.launch` ``env_vars`` argument allows the user to specify which environmental variables should be copied to the agents from the launcher environment. By default, it attempts to copy variables related to Python and important packages/technologies that **torchrunx** uses such as PyTorch, NCCL, CUDA, and more. Strings provided are matched with the names of environmental variables using ``fnmatch`` - standard UNIX filename pattern matching. The variables are inserted into the agent environments, and then copied to workers' environments when they are spawned. :mod:`torchrunx.launch` also accepts the ``env_file`` argument, which is designed to expose more advanced environmental configuration to the user. When a file is provided as this argument, the launcher will source the file on each node before executing the agent. This allows for custom bash scripts to be provided in the environmental variables, and allows for node-specific environmental variables to be set. diff --git a/src/torchrunx/log_utils.py b/src/torchrunx/log_utils.py index 450d31c2..60a618eb 100644 --- a/src/torchrunx/log_utils.py +++ b/src/torchrunx/log_utils.py @@ -101,12 +101,21 @@ class LogSpec(ABC): def get_map(self) -> dict[str, list[logging.Handler]]: """ Called by torchrunx.launch on the log_spec argument. + + :return: A mapping of logger names to lists of :mod:`logging.Handler` objects. + :rtype: dict[str, list[logging.Handler]] """ raise NotImplementedError class DefaultLogSpec(LogSpec): def __init__(self, log_spec_dict: dict[str, list[logging.Handler]]): + """ + Constructs a ``DefaultLogSpec``. 
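+
+        A minimal, illustrative example (the hostname and handler choices below are
+        arbitrary; the keys follow the ``{hostname}`` / ``{hostname}[{local rank}]``
+        naming used for agent and worker loggers)::
+
+            DefaultLogSpec({
+                "node1": [logging.FileHandler("node1.log")],   # agent logger
+                "node1[0]": [logging.StreamHandler()],         # worker 0 logger
+            })
+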
+ + :param log_spec_dict: A mapping of logger names to lists of :mod:`logging.Handler` objects. + :type log_spec_dict: dict[str, list[logging.Handler]] + """ self.log_spec_dict = log_spec_dict @classmethod From d2aec56db80d2bc3c8a9390cc39e46139b9e8759 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Fri, 16 Aug 2024 15:13:55 -0400 Subject: [PATCH 16/63] final docs fix for logging --- docs/source/advanced.rst | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/docs/source/advanced.rst b/docs/source/advanced.rst index 022c0533..21d1309e 100644 --- a/docs/source/advanced.rst +++ b/docs/source/advanced.rst @@ -26,12 +26,10 @@ Logs are generated at the worker and agent level, and are specified to :mod:`tor Custom logging classes can be subclassed from the :mod:`torchrunx.LogSpec` class. Any subclass must have a ``get_map`` method returning a dictionary mapping logger names to lists of :mod:`logging.Handler` objects, in order to be passed to :mod:`torchrunx.launch`. The logger names are of the format ``{agent hostname}`` for agents and ``{agent hostname}[{worker local rank}]`` for workers. The :mod:`torchrunx.DefaultLogSpec` maps all the loggers to :mod:`logging.Filehandler` object pointing to the files mentioned in the previous paragraph. It additionally maps the global rank 0 worker to a :mod:`logging.StreamHandler`, which writes logs the launcher's ``stdout`` stream. -Check out the interface of the :mod:`torchrunx.DefaultLogSpec` object below: - -.. autoclass:: torchrunx.DefaultLogSpec +.. autoclass:: torchrunx.LogSpec :members: -.. autoclass:: torchrunx.LogSpec +.. autoclass:: torchrunx.DefaultLogSpec :members: .. From acef4fb5c875fc2909ba2c33c9a114c11925f045 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Fri, 16 Aug 2024 23:35:23 -0400 Subject: [PATCH 17/63] logging cosmetics --- pixi.lock | 6120 +++++------------ src/torchrunx/__init__.py | 13 +- src/torchrunx/__main__.py | 7 +- src/torchrunx/agent.py | 49 +- src/torchrunx/launcher.py | 15 +- .../{log_utils.py => logging_utils.py} | 4 +- src/torchrunx/utils.py | 2 - tests/test_CI.py | 2 +- 8 files changed, 1606 insertions(+), 4606 deletions(-) rename src/torchrunx/{log_utils.py => logging_utils.py} (99%) diff --git a/pixi.lock b/pixi.lock index 5ce8376f..1781fc3a 100644 --- a/pixi.lock +++ b/pixi.lock @@ -77,13 +77,13 @@ environments: - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - conda: https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h4ab18f5_6.conda - pypi: https://files.pythonhosted.org/packages/b9/fa/123043af240e49752f1c4bd24da5053b6bd00cad78c2be53c0d1e8b975bc/backports.tarfile-1.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/2f/f6/9c0a6de7ef78d573e10d0b7de3ef82454e2e6eb6fada453ea6c2b8fb3f0a/bcrypt-4.1.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/4c/6a/ce950d4350c734bc5d9b7196a58fedbdc94f564c00b495a1222984431e03/bcrypt-4.1.3-cp37-abi3-manylinux_2_28_x86_64.whl - pypi: https://files.pythonhosted.org/packages/e2/03/f3c8ba0a6b6e30d7d18c40faab90807c9bb5e9a1e3b2fe2008af624a9c97/build-1.2.1-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/1c/d5/c84e1a17bf61d4df64ca866a1c9a913874b4e9bdc131ec689a0ad013fb36/certifi-2024.7.4-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/f1/c9/326611aa83e16b13b6db4dbb73b5455c668159a003c4c2f0c3bcb2ddabaf/cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - pypi: 
https://files.pythonhosted.org/packages/3d/09/d82fe4a34c5f0585f9ea1df090e2a71eb9bb1e469723053e1ee9f57c16f3/charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - pypi: https://files.pythonhosted.org/packages/96/43/dae06432d0c4b1dc9e9149ad37b4ca8384cf6eb7700cd9215b177b914f0a/cloudpickle-3.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/35/66/2d87e9ca95c82c7ee5f2c09716fc4c4242c1ae6647b9bd27e55e920e9f10/cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/07/40/d6f6819c62e808ea74639c3c640f7edd636b86cce62cb14943996a15df92/cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl - pypi: https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/26/87/f238c0670b94533ac0353a4e2a1a771a0cc73277b88bff23d3ae35a256c1/docutils-0.20.1-py3-none-any.whl @@ -181,7 +181,7 @@ environments: - pypi: https://files.pythonhosted.org/packages/1c/d5/c84e1a17bf61d4df64ca866a1c9a913874b4e9bdc131ec689a0ad013fb36/certifi-2024.7.4-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/f1/c9/326611aa83e16b13b6db4dbb73b5455c668159a003c4c2f0c3bcb2ddabaf/cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - pypi: https://files.pythonhosted.org/packages/3d/09/d82fe4a34c5f0585f9ea1df090e2a71eb9bb1e469723053e1ee9f57c16f3/charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/35/66/2d87e9ca95c82c7ee5f2c09716fc4c4242c1ae6647b9bd27e55e920e9f10/cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + - pypi: https://files.pythonhosted.org/packages/07/40/d6f6819c62e808ea74639c3c640f7edd636b86cce62cb14943996a15df92/cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl - pypi: https://files.pythonhosted.org/packages/26/87/f238c0670b94533ac0353a4e2a1a771a0cc73277b88bff23d3ae35a256c1/docutils-0.20.1-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/01/90/79fe92dd413a9cab314ef5c591b5aa9b9ba787ae4cadab75055b0ae00b33/exceptiongroup-1.2.1-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/e5/3e/741d8c82801c347547f8a2a06aa57dbb1992be9e948df2ea0eda2c8b79e8/idna-3.7-py3-none-any.whl @@ -218,4802 +218,1855 @@ environments: - pypi: https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/ca/1c/89ffc63a9605b583d5df2be791a27bc1a42b7c32bab68d3c8f2f73a98cd4/urllib3-2.2.2-py3-none-any.whl - pypi: https://files.pythonhosted.org/packages/20/38/f5c473fe9b90c8debdd29ea68d5add0289f1936d6f923b6b9cc0b931194c/zipp-3.19.2-py3-none-any.whl - py310-torch20: - channels: - - url: https://conda.anaconda.org/conda-forge/ - indexes: - - https://pypi.org/simple - packages: - linux-64: - - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4bc722e_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.14-hd12c33a_0_cpython.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/4b/3b/ad784eac415937c53da48983756105d267b91e56aa53ba8a1b2014b8d930/bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/7f/df/700aaf009dfbfa04acb1ed487586c03c788c6a312f0361ad5f298c5f5a7d/cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/96/43/dae06432d0c4b1dc9e9149ad37b4ca8384cf6eb7700cd9215b177b914f0a/cloudpickle-3.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/69/70/242937601f9ff9e6df4c0587b5a7702be4dbfd33420b409d80e2bccc276a/cmake-3.30.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/76/eb/ab783b47b3b9b55371b4361c7ec695144bde1a3343ff2b7a8c1d8fe617bb/cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/d6/1f/e99e23ee01847147fa194e8d41cfcf2535a2dbfcb51414c541cadb15c5d7/fabric-3.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ae/f0/48285f0262fe47103a4a45972ed2f9b93e4c80b8fd609fa98da78b2a5706/filelock-3.15.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/0a/66/7f8c48009c72d73bc6bbe6eb87ac838d6a526146f7dab14af671121eb379/invoke-2.2.0-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/96/06/b36f150fa7c5bcc96a31a4d19a20fddbd1d965b6f02510b57a3bb8d4b930/lit-18.1.8-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/7c/52/2b1b570f6b8b803cef5ac28fdf78c0da318916c7d2fe9402a84d591b394c/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/38/e9/5f72929373e1a0e8d142a130f3f97e6ff920070f87f91c4e13e40e0fba5a/networkx-3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/4b/d7/ecf66c1cd12dc28b4040b15ab4d17b773b87fa9d29ca16125de01adb36cd/numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/ce/41/fdeb62b5437996e841d83d7d2714ca75b886547ee8017ee2fe6ea409d983/nvidia_cublas_cu11-11.10.3.66-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/e6/9d/dd0cdcd800e642e3c82ee3b5987c751afd4f3fb9cc2752517f42c3bc6e49/nvidia_cuda_cupti_cu11-11.7.101-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/ef/25/922c5996aada6611b79b53985af7999fc629aee1d5d001b6a22431e18fec/nvidia_cuda_nvrtc_cu11-11.7.99-2-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/36/92/89cf558b514125d2ebd8344dd2f0533404b416486ff681d5434a5832a019/nvidia_cuda_runtime_cu11-11.7.99-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/dc/30/66d4347d6e864334da5bb1c7571305e501dcb11b9155971421bb7bb5315f/nvidia_cudnn_cu11-8.5.0.96-2-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/74/79/b912a77e38e41f15a0581a59f5c3548d1ddfdda3225936fb67c342719e7a/nvidia_cufft_cu11-10.9.0.58-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/8f/11/af78d54b2420e64a4dd19e704f5bb69dcb5a6a3138b4465d6a48cdf59a21/nvidia_curand_cu11-10.2.10.91-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/3e/77/66149e3153b19312fb782ea367f3f950123b93916a45538b573fe373570a/nvidia_cusolver_cu11-11.4.0.1-2-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/ea/6f/6d032cc1bb7db88a989ddce3f4968419a7edeafda362847f42f614b1f845/nvidia_cusparse_cu11-11.7.4.91-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/55/92/914cdb650b6a5d1478f83148597a25e90ea37d739bd563c5096b0e8a5f43/nvidia_nccl_cu11-2.14.3-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/23/d5/09493ff0e64fd77523afbbb075108f27a13790479efe86b9ffb4587671b5/nvidia_nvtx_cu11-11.7.91-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/96/6e/4a52a8923d840107024b844d83502dfa6a1e5399ad31cf9d1a4ddbaaa7e5/paramiko-3.4.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/0f/f9/cf155cf32ca7d6fa3601bc4c5dd19086af4b320b706919d48a4c79081cf9/pytest-8.3.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/e1/58/e0ef3b9974a04ce9cde2a7a33881ddcb2d68450803745804545cdd8d258f/setuptools-72.1.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/c1/f9/6845bf8fca0eaf847da21c5d5bc6cd92797364662824a11d3f836423a1a5/sympy-1.13.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/8c/4d/17e07377c9c3d1a0c4eb3fde1c7c16b5a0ce6133ddbabc08ceef6b7f2645/torch-2.0.1-cp310-cp310-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/ca/31/ff6be541195daf77aa5c72303b2354661a69e717967d44d91eb4f3fdce32/triton-2.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/1b/d1/9babe2ccaecff775992753d8686970b1e2755d21c8a63be73aba7a4e7d77/wheel-0.44.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/49/83/b40bc1ad04a868b5b5bcec86349f06c1ee1ea7afe51dc3e46131e4f39308/wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: . - py310-torch21: - channels: - - url: https://conda.anaconda.org/conda-forge/ - indexes: - - https://pypi.org/simple - packages: - linux-64: - - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4bc722e_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.14-hd12c33a_0_cpython.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - - conda: 
https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/4b/3b/ad784eac415937c53da48983756105d267b91e56aa53ba8a1b2014b8d930/bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/7f/df/700aaf009dfbfa04acb1ed487586c03c788c6a312f0361ad5f298c5f5a7d/cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/96/43/dae06432d0c4b1dc9e9149ad37b4ca8384cf6eb7700cd9215b177b914f0a/cloudpickle-3.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/76/eb/ab783b47b3b9b55371b4361c7ec695144bde1a3343ff2b7a8c1d8fe617bb/cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/d6/1f/e99e23ee01847147fa194e8d41cfcf2535a2dbfcb51414c541cadb15c5d7/fabric-3.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ae/f0/48285f0262fe47103a4a45972ed2f9b93e4c80b8fd609fa98da78b2a5706/filelock-3.15.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/5e/44/73bea497ac69bafde2ee4269292fa3b41f1198f4bb7bbaaabde30ad29d4a/fsspec-2024.6.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/0a/66/7f8c48009c72d73bc6bbe6eb87ac838d6a526146f7dab14af671121eb379/invoke-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/7c/52/2b1b570f6b8b803cef5ac28fdf78c0da318916c7d2fe9402a84d591b394c/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/38/e9/5f72929373e1a0e8d142a130f3f97e6ff920070f87f91c4e13e40e0fba5a/networkx-3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/4b/d7/ecf66c1cd12dc28b4040b15ab4d17b773b87fa9d29ca16125de01adb36cd/numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/37/6d/121efd7382d5b0284239f4ab1fc1590d86d34ed4a4a2fdb13b30ca8e5740/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/7e/00/6b218edd739ecfc60524e585ba8e6b00554dd908de2c9c66c1af3e44e18d/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/b6/9f/c64c03f49d6fbc56196664d05dba14e3a561038a81a638eeb47f4d4cfd48/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: 
https://files.pythonhosted.org/packages/eb/d5/c68b1d2cdfcc59e72e8a5949a37ddb22ae6cade80cd4a57a84d4c8b55472/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/ff/74/a2e2be7fb83aaedec84f391f082cf765dfb635e7caa9b49065f73e4835d8/nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/86/94/eb540db023ce1d162e7bea9f8f5aa781d57c65aed513c33ee9a5123ead4d/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/44/31/4890b1c9abc496303412947fc7dcea3d14861720642b49e8ceed89636705/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/a4/05/23f8f38eec3d28e4915725b233c24d8f1a33cb6540a882f7b54be1befa02/nvidia_nccl_cu12-2.18.1-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/59/65/7ff0569494fbaea45ad2814972cc88da843d53cc96eb8554fcd0908941d9/nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/da/d3/8057f0587683ed2fcd4dbfbdfdfa807b9160b809976099d36b8f60d08f03/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/96/6e/4a52a8923d840107024b844d83502dfa6a1e5399ad31cf9d1a4ddbaaa7e5/paramiko-3.4.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/0f/f9/cf155cf32ca7d6fa3601bc4c5dd19086af4b320b706919d48a4c79081cf9/pytest-8.3.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/c1/f9/6845bf8fca0eaf847da21c5d5bc6cd92797364662824a11d3f836423a1a5/sympy-1.13.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/03/f1/13137340776dd5d5bcfd2574c9c6dfcc7618285035cd77240496e5c1a79b/torch-2.1.2-cp310-cp310-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/4d/22/91a8af421c8a8902dde76e6ef3db01b258af16c53d81e8c0d0dc13900a9e/triton-2.1.0-0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/49/83/b40bc1ad04a868b5b5bcec86349f06c1ee1ea7afe51dc3e46131e4f39308/wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: . - py310-torch22: - channels: - - url: https://conda.anaconda.org/conda-forge/ - indexes: - - https://pypi.org/simple - packages: - linux-64: - - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4bc722e_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.14-hd12c33a_0_cpython.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/4b/3b/ad784eac415937c53da48983756105d267b91e56aa53ba8a1b2014b8d930/bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/7f/df/700aaf009dfbfa04acb1ed487586c03c788c6a312f0361ad5f298c5f5a7d/cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/96/43/dae06432d0c4b1dc9e9149ad37b4ca8384cf6eb7700cd9215b177b914f0a/cloudpickle-3.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/76/eb/ab783b47b3b9b55371b4361c7ec695144bde1a3343ff2b7a8c1d8fe617bb/cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/d6/1f/e99e23ee01847147fa194e8d41cfcf2535a2dbfcb51414c541cadb15c5d7/fabric-3.2.2-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/ae/f0/48285f0262fe47103a4a45972ed2f9b93e4c80b8fd609fa98da78b2a5706/filelock-3.15.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/5e/44/73bea497ac69bafde2ee4269292fa3b41f1198f4bb7bbaaabde30ad29d4a/fsspec-2024.6.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/0a/66/7f8c48009c72d73bc6bbe6eb87ac838d6a526146f7dab14af671121eb379/invoke-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/7c/52/2b1b570f6b8b803cef5ac28fdf78c0da318916c7d2fe9402a84d591b394c/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/38/e9/5f72929373e1a0e8d142a130f3f97e6ff920070f87f91c4e13e40e0fba5a/networkx-3.3-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/4b/d7/ecf66c1cd12dc28b4040b15ab4d17b773b87fa9d29ca16125de01adb36cd/numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/37/6d/121efd7382d5b0284239f4ab1fc1590d86d34ed4a4a2fdb13b30ca8e5740/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/7e/00/6b218edd739ecfc60524e585ba8e6b00554dd908de2c9c66c1af3e44e18d/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/b6/9f/c64c03f49d6fbc56196664d05dba14e3a561038a81a638eeb47f4d4cfd48/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/eb/d5/c68b1d2cdfcc59e72e8a5949a37ddb22ae6cade80cd4a57a84d4c8b55472/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/ff/74/a2e2be7fb83aaedec84f391f082cf765dfb635e7caa9b49065f73e4835d8/nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/86/94/eb540db023ce1d162e7bea9f8f5aa781d57c65aed513c33ee9a5123ead4d/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/44/31/4890b1c9abc496303412947fc7dcea3d14861720642b49e8ceed89636705/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/38/00/d0d4e48aef772ad5aebcf70b73028f88db6e5640b36c38e90445b7a57c45/nvidia_nccl_cu12-2.19.3-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/59/65/7ff0569494fbaea45ad2814972cc88da843d53cc96eb8554fcd0908941d9/nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/da/d3/8057f0587683ed2fcd4dbfbdfdfa807b9160b809976099d36b8f60d08f03/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: 
https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/96/6e/4a52a8923d840107024b844d83502dfa6a1e5399ad31cf9d1a4ddbaaa7e5/paramiko-3.4.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/0f/f9/cf155cf32ca7d6fa3601bc4c5dd19086af4b320b706919d48a4c79081cf9/pytest-8.3.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/c1/f9/6845bf8fca0eaf847da21c5d5bc6cd92797364662824a11d3f836423a1a5/sympy-1.13.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/33/b3/1fcc3bccfddadfd6845dcbfe26eb4b099f1dfea5aa0e5cfb92b3c98dba5b/torch-2.2.2-cp310-cp310-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/95/05/ed974ce87fe8c8843855daa2136b3409ee1c126707ab54a8b72815c08b49/triton-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/49/83/b40bc1ad04a868b5b5bcec86349f06c1ee1ea7afe51dc3e46131e4f39308/wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: . 
[Lock-file hunk condensed for readability: the original lines are a machine-generated, single-line dump of the locked environments. The hunk adds eight linux-64 environments, each resolved against the conda-forge channel and PyPI: py38-torch20, py310-torch23, py310-torch24, and py311-torch20 through py311-torch24. Every environment pins a conda-forge CPython build (3.8.19, 3.10.14, or 3.11.9), a shared PyPI set (fabric, paramiko, cloudpickle, pytest, numpy, sympy, networkx, typing-extensions, wrapt, the NVIDIA CUDA runtime wheels, and the local project "."), a few per-environment build helpers (cmake, lit, setuptools, wheel, tomli, exceptiongroup), and the matching torch/triton wheels:
  py38-torch20, py311-torch20:    torch 2.0.1 + triton 2.0.0 (CUDA 11 wheels)
  py311-torch21:                  torch 2.1.2 + triton 2.1.0
  py311-torch22:                  torch 2.2.2 + triton 2.2.0
  py310-torch23, py311-torch23:   torch 2.3.1 + triton 2.3.1
  py310-torch24, py311-torch24:   torch 2.4.0 + triton 3.0.0 (cuDNN 9.1.0)]
- py38-torch21: - channels: - - url: https://conda.anaconda.org/conda-forge/ - indexes: - - https://pypi.org/simple - packages: - linux-64: - - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4bc722e_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.8.19-hd12c33a_0_cpython.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/e3/96/7a654027638ad9b7589effb6db77eb63eba64319dfeaf9c0f4ca953e5f76/bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/60/9f/0b88c6ebc1b3a32917b396140a3505efdb115b4a64e7c1e80b12ee319c10/cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/96/43/dae06432d0c4b1dc9e9149ad37b4ca8384cf6eb7700cd9215b177b914f0a/cloudpickle-3.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/77/9d/0b98c73cebfd41e4fb0439fe9ce08022e8d059f51caa7afc8934fc1edcd9/cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/d6/1f/e99e23ee01847147fa194e8d41cfcf2535a2dbfcb51414c541cadb15c5d7/fabric-3.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ae/f0/48285f0262fe47103a4a45972ed2f9b93e4c80b8fd609fa98da78b2a5706/filelock-3.15.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/5e/44/73bea497ac69bafde2ee4269292fa3b41f1198f4bb7bbaaabde30ad29d4a/fsspec-2024.6.1-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/0a/66/7f8c48009c72d73bc6bbe6eb87ac838d6a526146f7dab14af671121eb379/invoke-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/c7/bd/50319665ce81bb10e90d1cf76f9e1aa269ea6f7fa30ab4521f14d122a3df/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/a8/05/9d4f9b78ead6b2661d6e8ea772e111fc4a9fbd866ad0c81906c11206b55e/networkx-3.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/98/5d/5738903efe0ecb73e51eb44feafba32bdba2081263d40c5043568ff60faf/numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/37/6d/121efd7382d5b0284239f4ab1fc1590d86d34ed4a4a2fdb13b30ca8e5740/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/7e/00/6b218edd739ecfc60524e585ba8e6b00554dd908de2c9c66c1af3e44e18d/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/b6/9f/c64c03f49d6fbc56196664d05dba14e3a561038a81a638eeb47f4d4cfd48/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/eb/d5/c68b1d2cdfcc59e72e8a5949a37ddb22ae6cade80cd4a57a84d4c8b55472/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/ff/74/a2e2be7fb83aaedec84f391f082cf765dfb635e7caa9b49065f73e4835d8/nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/86/94/eb540db023ce1d162e7bea9f8f5aa781d57c65aed513c33ee9a5123ead4d/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/44/31/4890b1c9abc496303412947fc7dcea3d14861720642b49e8ceed89636705/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/a4/05/23f8f38eec3d28e4915725b233c24d8f1a33cb6540a882f7b54be1befa02/nvidia_nccl_cu12-2.18.1-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/59/65/7ff0569494fbaea45ad2814972cc88da843d53cc96eb8554fcd0908941d9/nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/da/d3/8057f0587683ed2fcd4dbfbdfdfa807b9160b809976099d36b8f60d08f03/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/96/6e/4a52a8923d840107024b844d83502dfa6a1e5399ad31cf9d1a4ddbaaa7e5/paramiko-3.4.1-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/0f/f9/cf155cf32ca7d6fa3601bc4c5dd19086af4b320b706919d48a4c79081cf9/pytest-8.3.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/c1/f9/6845bf8fca0eaf847da21c5d5bc6cd92797364662824a11d3f836423a1a5/sympy-1.13.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/c0/6e856c0c745dffd7696ec514381befa83f3449cd914f02b0968e0ca5a244/torch-2.1.2-cp38-cp38-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/72/98/34f43ed68ee6455ea874f749a5515c0600243186301ecd83819d942ce08a/triton-2.1.0-0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/c6/56e718e2c58a4078518c14d97e531ef1e9e8a5c1ddafdc0d264a92be1a1a/wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: . - py38-torch22: - channels: - - url: https://conda.anaconda.org/conda-forge/ - indexes: - - https://pypi.org/simple - packages: - linux-64: - - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4bc722e_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.8.19-hd12c33a_0_cpython.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/e3/96/7a654027638ad9b7589effb6db77eb63eba64319dfeaf9c0f4ca953e5f76/bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/60/9f/0b88c6ebc1b3a32917b396140a3505efdb115b4a64e7c1e80b12ee319c10/cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/96/43/dae06432d0c4b1dc9e9149ad37b4ca8384cf6eb7700cd9215b177b914f0a/cloudpickle-3.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/77/9d/0b98c73cebfd41e4fb0439fe9ce08022e8d059f51caa7afc8934fc1edcd9/cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/d6/1f/e99e23ee01847147fa194e8d41cfcf2535a2dbfcb51414c541cadb15c5d7/fabric-3.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ae/f0/48285f0262fe47103a4a45972ed2f9b93e4c80b8fd609fa98da78b2a5706/filelock-3.15.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/5e/44/73bea497ac69bafde2ee4269292fa3b41f1198f4bb7bbaaabde30ad29d4a/fsspec-2024.6.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/0a/66/7f8c48009c72d73bc6bbe6eb87ac838d6a526146f7dab14af671121eb379/invoke-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/c7/bd/50319665ce81bb10e90d1cf76f9e1aa269ea6f7fa30ab4521f14d122a3df/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/a8/05/9d4f9b78ead6b2661d6e8ea772e111fc4a9fbd866ad0c81906c11206b55e/networkx-3.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/98/5d/5738903efe0ecb73e51eb44feafba32bdba2081263d40c5043568ff60faf/numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/37/6d/121efd7382d5b0284239f4ab1fc1590d86d34ed4a4a2fdb13b30ca8e5740/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/7e/00/6b218edd739ecfc60524e585ba8e6b00554dd908de2c9c66c1af3e44e18d/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/b6/9f/c64c03f49d6fbc56196664d05dba14e3a561038a81a638eeb47f4d4cfd48/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/eb/d5/c68b1d2cdfcc59e72e8a5949a37ddb22ae6cade80cd4a57a84d4c8b55472/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: 
https://files.pythonhosted.org/packages/ff/74/a2e2be7fb83aaedec84f391f082cf765dfb635e7caa9b49065f73e4835d8/nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/86/94/eb540db023ce1d162e7bea9f8f5aa781d57c65aed513c33ee9a5123ead4d/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/44/31/4890b1c9abc496303412947fc7dcea3d14861720642b49e8ceed89636705/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/38/00/d0d4e48aef772ad5aebcf70b73028f88db6e5640b36c38e90445b7a57c45/nvidia_nccl_cu12-2.19.3-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/59/65/7ff0569494fbaea45ad2814972cc88da843d53cc96eb8554fcd0908941d9/nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/da/d3/8057f0587683ed2fcd4dbfbdfdfa807b9160b809976099d36b8f60d08f03/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/96/6e/4a52a8923d840107024b844d83502dfa6a1e5399ad31cf9d1a4ddbaaa7e5/paramiko-3.4.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/0f/f9/cf155cf32ca7d6fa3601bc4c5dd19086af4b320b706919d48a4c79081cf9/pytest-8.3.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/c1/f9/6845bf8fca0eaf847da21c5d5bc6cd92797364662824a11d3f836423a1a5/sympy-1.13.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/99/bf/7f6c1a37ea7fdf6afbc05ac405faae6eba1c1450d9ed632e23535e6438e2/torch-2.2.2-cp38-cp38-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/7f/fc/1c97813debad858dde5b84b5a8d4ea4077044a7b26e1ad8de9689af93565/triton-2.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/c6/56e718e2c58a4078518c14d97e531ef1e9e8a5c1ddafdc0d264a92be1a1a/wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: . 
- py38-torch23: - channels: - - url: https://conda.anaconda.org/conda-forge/ - indexes: - - https://pypi.org/simple - packages: - linux-64: - - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4bc722e_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.8.19-hd12c33a_0_cpython.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/e3/96/7a654027638ad9b7589effb6db77eb63eba64319dfeaf9c0f4ca953e5f76/bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/60/9f/0b88c6ebc1b3a32917b396140a3505efdb115b4a64e7c1e80b12ee319c10/cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/96/43/dae06432d0c4b1dc9e9149ad37b4ca8384cf6eb7700cd9215b177b914f0a/cloudpickle-3.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/77/9d/0b98c73cebfd41e4fb0439fe9ce08022e8d059f51caa7afc8934fc1edcd9/cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/d6/1f/e99e23ee01847147fa194e8d41cfcf2535a2dbfcb51414c541cadb15c5d7/fabric-3.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ae/f0/48285f0262fe47103a4a45972ed2f9b93e4c80b8fd609fa98da78b2a5706/filelock-3.15.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/5e/44/73bea497ac69bafde2ee4269292fa3b41f1198f4bb7bbaaabde30ad29d4a/fsspec-2024.6.1-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/0a/66/7f8c48009c72d73bc6bbe6eb87ac838d6a526146f7dab14af671121eb379/invoke-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/c7/bd/50319665ce81bb10e90d1cf76f9e1aa269ea6f7fa30ab4521f14d122a3df/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/a8/05/9d4f9b78ead6b2661d6e8ea772e111fc4a9fbd866ad0c81906c11206b55e/networkx-3.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/98/5d/5738903efe0ecb73e51eb44feafba32bdba2081263d40c5043568ff60faf/numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/37/6d/121efd7382d5b0284239f4ab1fc1590d86d34ed4a4a2fdb13b30ca8e5740/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/7e/00/6b218edd739ecfc60524e585ba8e6b00554dd908de2c9c66c1af3e44e18d/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/b6/9f/c64c03f49d6fbc56196664d05dba14e3a561038a81a638eeb47f4d4cfd48/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/eb/d5/c68b1d2cdfcc59e72e8a5949a37ddb22ae6cade80cd4a57a84d4c8b55472/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/ff/74/a2e2be7fb83aaedec84f391f082cf765dfb635e7caa9b49065f73e4835d8/nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/86/94/eb540db023ce1d162e7bea9f8f5aa781d57c65aed513c33ee9a5123ead4d/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/44/31/4890b1c9abc496303412947fc7dcea3d14861720642b49e8ceed89636705/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/4b/2a/0a131f572aa09f741c30ccd45a8e56316e8be8dfc7bc19bf0ab7cfef7b19/nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/59/65/7ff0569494fbaea45ad2814972cc88da843d53cc96eb8554fcd0908941d9/nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/da/d3/8057f0587683ed2fcd4dbfbdfdfa807b9160b809976099d36b8f60d08f03/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/96/6e/4a52a8923d840107024b844d83502dfa6a1e5399ad31cf9d1a4ddbaaa7e5/paramiko-3.4.1-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/0f/f9/cf155cf32ca7d6fa3601bc4c5dd19086af4b320b706919d48a4c79081cf9/pytest-8.3.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/c1/f9/6845bf8fca0eaf847da21c5d5bc6cd92797364662824a11d3f836423a1a5/sympy-1.13.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/c0/7e/309d63c6330a0b821a6f55e06dcef6704a7ab8b707534a4923837570624e/torch-2.3.1-cp38-cp38-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/d3/55/45b3882019a8d69ad73b5b2bd1714cb2d6653b39e7376b7ac5accf745760/triton-2.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/c6/56e718e2c58a4078518c14d97e531ef1e9e8a5c1ddafdc0d264a92be1a1a/wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: . - py38-torch24: - channels: - - url: https://conda.anaconda.org/conda-forge/ - indexes: - - https://pypi.org/simple - packages: - linux-64: - - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4bc722e_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.8.19-hd12c33a_0_cpython.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - - conda: 
https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/e3/96/7a654027638ad9b7589effb6db77eb63eba64319dfeaf9c0f4ca953e5f76/bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/60/9f/0b88c6ebc1b3a32917b396140a3505efdb115b4a64e7c1e80b12ee319c10/cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/96/43/dae06432d0c4b1dc9e9149ad37b4ca8384cf6eb7700cd9215b177b914f0a/cloudpickle-3.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/77/9d/0b98c73cebfd41e4fb0439fe9ce08022e8d059f51caa7afc8934fc1edcd9/cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/d6/1f/e99e23ee01847147fa194e8d41cfcf2535a2dbfcb51414c541cadb15c5d7/fabric-3.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ae/f0/48285f0262fe47103a4a45972ed2f9b93e4c80b8fd609fa98da78b2a5706/filelock-3.15.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/5e/44/73bea497ac69bafde2ee4269292fa3b41f1198f4bb7bbaaabde30ad29d4a/fsspec-2024.6.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/0a/66/7f8c48009c72d73bc6bbe6eb87ac838d6a526146f7dab14af671121eb379/invoke-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/c7/bd/50319665ce81bb10e90d1cf76f9e1aa269ea6f7fa30ab4521f14d122a3df/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/a8/05/9d4f9b78ead6b2661d6e8ea772e111fc4a9fbd866ad0c81906c11206b55e/networkx-3.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/98/5d/5738903efe0ecb73e51eb44feafba32bdba2081263d40c5043568ff60faf/numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/37/6d/121efd7382d5b0284239f4ab1fc1590d86d34ed4a4a2fdb13b30ca8e5740/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/7e/00/6b218edd739ecfc60524e585ba8e6b00554dd908de2c9c66c1af3e44e18d/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/b6/9f/c64c03f49d6fbc56196664d05dba14e3a561038a81a638eeb47f4d4cfd48/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/eb/d5/c68b1d2cdfcc59e72e8a5949a37ddb22ae6cade80cd4a57a84d4c8b55472/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: 
https://files.pythonhosted.org/packages/9f/fd/713452cd72343f682b1c7b9321e23829f00b842ceaedcda96e742ea0b0b3/nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/86/94/eb540db023ce1d162e7bea9f8f5aa781d57c65aed513c33ee9a5123ead4d/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/44/31/4890b1c9abc496303412947fc7dcea3d14861720642b49e8ceed89636705/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/4b/2a/0a131f572aa09f741c30ccd45a8e56316e8be8dfc7bc19bf0ab7cfef7b19/nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/59/65/7ff0569494fbaea45ad2814972cc88da843d53cc96eb8554fcd0908941d9/nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/da/d3/8057f0587683ed2fcd4dbfbdfdfa807b9160b809976099d36b8f60d08f03/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/96/6e/4a52a8923d840107024b844d83502dfa6a1e5399ad31cf9d1a4ddbaaa7e5/paramiko-3.4.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/0f/f9/cf155cf32ca7d6fa3601bc4c5dd19086af4b320b706919d48a4c79081cf9/pytest-8.3.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/c1/f9/6845bf8fca0eaf847da21c5d5bc6cd92797364662824a11d3f836423a1a5/sympy-1.13.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/fc/58/f93bdce23c9ff568c3dfb5129db0c14e60f7c72ab4d1a6de8fedca6e3792/torch-2.4.0-cp38-cp38-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/4d/b4/c37e2776a1390bab7e78a6d52bd525441cb3cad7260a6a00b11b0b702e7c/triton-3.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/c6/56e718e2c58a4078518c14d97e531ef1e9e8a5c1ddafdc0d264a92be1a1a/wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: . 
- py39-torch20: - channels: - - url: https://conda.anaconda.org/conda-forge/ - indexes: - - https://pypi.org/simple - packages: - linux-64: - - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4bc722e_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.9.19-h0755675_0_cpython.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/4b/3b/ad784eac415937c53da48983756105d267b91e56aa53ba8a1b2014b8d930/bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/e1/d3/36e54b85f670400ff0440ab743fa0de66bdd89b8f54b7d2370708cdcb03f/cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/96/43/dae06432d0c4b1dc9e9149ad37b4ca8384cf6eb7700cd9215b177b914f0a/cloudpickle-3.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/69/70/242937601f9ff9e6df4c0587b5a7702be4dbfd33420b409d80e2bccc276a/cmake-3.30.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/76/eb/ab783b47b3b9b55371b4361c7ec695144bde1a3343ff2b7a8c1d8fe617bb/cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/d6/1f/e99e23ee01847147fa194e8d41cfcf2535a2dbfcb51414c541cadb15c5d7/fabric-3.2.2-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/ae/f0/48285f0262fe47103a4a45972ed2f9b93e4c80b8fd609fa98da78b2a5706/filelock-3.15.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/0a/66/7f8c48009c72d73bc6bbe6eb87ac838d6a526146f7dab14af671121eb379/invoke-2.2.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/96/06/b36f150fa7c5bcc96a31a4d19a20fddbd1d965b6f02510b57a3bb8d4b930/lit-18.1.8-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/5f/5a/360da85076688755ea0cceb92472923086993e86b5613bbae9fbc14136b0/MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/d5/f0/8fbc882ca80cf077f1b246c0e3c3465f7f415439bdea6b899f6b19f61f70/networkx-3.2.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/54/30/c2a907b9443cf42b90c17ad10c1e8fa801975f01cb9764f3f8eb8aea638b/numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/ce/41/fdeb62b5437996e841d83d7d2714ca75b886547ee8017ee2fe6ea409d983/nvidia_cublas_cu11-11.10.3.66-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/e6/9d/dd0cdcd800e642e3c82ee3b5987c751afd4f3fb9cc2752517f42c3bc6e49/nvidia_cuda_cupti_cu11-11.7.101-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/ef/25/922c5996aada6611b79b53985af7999fc629aee1d5d001b6a22431e18fec/nvidia_cuda_nvrtc_cu11-11.7.99-2-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/36/92/89cf558b514125d2ebd8344dd2f0533404b416486ff681d5434a5832a019/nvidia_cuda_runtime_cu11-11.7.99-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/dc/30/66d4347d6e864334da5bb1c7571305e501dcb11b9155971421bb7bb5315f/nvidia_cudnn_cu11-8.5.0.96-2-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/74/79/b912a77e38e41f15a0581a59f5c3548d1ddfdda3225936fb67c342719e7a/nvidia_cufft_cu11-10.9.0.58-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/8f/11/af78d54b2420e64a4dd19e704f5bb69dcb5a6a3138b4465d6a48cdf59a21/nvidia_curand_cu11-10.2.10.91-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/3e/77/66149e3153b19312fb782ea367f3f950123b93916a45538b573fe373570a/nvidia_cusolver_cu11-11.4.0.1-2-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/ea/6f/6d032cc1bb7db88a989ddce3f4968419a7edeafda362847f42f614b1f845/nvidia_cusparse_cu11-11.7.4.91-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/55/92/914cdb650b6a5d1478f83148597a25e90ea37d739bd563c5096b0e8a5f43/nvidia_nccl_cu11-2.14.3-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/23/d5/09493ff0e64fd77523afbbb075108f27a13790479efe86b9ffb4587671b5/nvidia_nvtx_cu11-11.7.91-py3-none-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl - - pypi: 
https://files.pythonhosted.org/packages/96/6e/4a52a8923d840107024b844d83502dfa6a1e5399ad31cf9d1a4ddbaaa7e5/paramiko-3.4.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/0f/f9/cf155cf32ca7d6fa3601bc4c5dd19086af4b320b706919d48a4c79081cf9/pytest-8.3.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/e1/58/e0ef3b9974a04ce9cde2a7a33881ddcb2d68450803745804545cdd8d258f/setuptools-72.1.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/c1/f9/6845bf8fca0eaf847da21c5d5bc6cd92797364662824a11d3f836423a1a5/sympy-1.13.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/e5/9a/ce0fe125f226ffce8deba6a18bd8d0b9f589aa236780a83a6d70b5525f56/torch-2.0.1-cp39-cp39-manylinux1_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/77/ac/28b74ec1177c730d0da8803eaff5e5025bd532bcf07cadb0fcf661abed97/triton-2.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/1b/d1/9babe2ccaecff775992753d8686970b1e2755d21c8a63be73aba7a4e7d77/wheel-0.44.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/b1/e7/459a8a4f40f2fa65eb73cb3f339e6d152957932516d18d0e996c7ae2d7ae/wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: . 
- py39-torch21: - channels: - - url: https://conda.anaconda.org/conda-forge/ - indexes: - - https://pypi.org/simple - packages: - linux-64: - - conda: https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4bc722e_2.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/python-3.9.19-h0755675_0_cpython.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - - conda: https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h0c530f3_0.conda - - conda: https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 - - pypi: https://files.pythonhosted.org/packages/4b/3b/ad784eac415937c53da48983756105d267b91e56aa53ba8a1b2014b8d930/bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/e1/d3/36e54b85f670400ff0440ab743fa0de66bdd89b8f54b7d2370708cdcb03f/cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/96/43/dae06432d0c4b1dc9e9149ad37b4ca8384cf6eb7700cd9215b177b914f0a/cloudpickle-3.0.0-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/76/eb/ab783b47b3b9b55371b4361c7ec695144bde1a3343ff2b7a8c1d8fe617bb/cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - - pypi: https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/d6/1f/e99e23ee01847147fa194e8d41cfcf2535a2dbfcb51414c541cadb15c5d7/fabric-3.2.2-py3-none-any.whl - - pypi: https://files.pythonhosted.org/packages/ae/f0/48285f0262fe47103a4a45972ed2f9b93e4c80b8fd609fa98da78b2a5706/filelock-3.15.4-py3-none-any.whl - - pypi: 
[pixi.lock diff, condensed: environment definitions for py39-torch22, py39-torch23, and py39-torch24, plus the tail of the preceding torch 2.1 environment. Each environment resolves the same conda-forge base (Python 3.9.19, openssl, libgcc-ng, etc.) and PyPI wheels for the fabric/paramiko/cryptography stack, pytest, numpy, the NVIDIA cu12 runtime libraries, the local project (`pypi: .`), and the matching torch/triton pair: torch 2.1.2 / triton 2.1.0, torch 2.2.2 / triton 2.2.0, torch 2.3.1 / triton 2.3.1, and torch 2.4.0 / triton 3.0.0.]
[pixi.lock diff, condensed: the top-level `packages:` metadata section. The old entries (conda-forge packages, the nvidia cuda-11.7.0 channel packages such as cuda-toolkit, cuda-nvcc, cuda-cudart, cuda-nsight, and gds-tools, and numerous PyPI wheels) appear on the removed side of the hunk, and a regenerated section beginning with the conda-forge entries is added, each package recorded with its url, sha256/md5, requires_dist, and requires_python metadata.]
extra == 'testing' - - pytest-cov ; extra == 'testing' - - pytest-mypy ; extra == 'testing' - - pytest-enabler>=2.2 ; extra == 'testing' - - pytest-ruff>=0.2.1 ; extra == 'testing' - requires_python: '>=3.8' -- kind: pypi - name: jaraco-context - version: 5.3.0 - url: https://files.pythonhosted.org/packages/d2/40/11b7bc1898cf1dcb87ccbe09b39f5088634ac78bb25f3383ff541c2b40aa/jaraco.context-5.3.0-py3-none-any.whl - sha256: 3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 - requires_dist: - - backports-tarfile ; python_version < '3.12' - - sphinx>=3.5 ; extra == 'docs' - - jaraco-packaging>=9.3 ; extra == 'docs' - - rst-linker>=1.9 ; extra == 'docs' - - furo ; extra == 'docs' - - sphinx-lint ; extra == 'docs' - - jaraco-tidelift>=1.4 ; extra == 'docs' - - pytest!=8.1.1,>=6 ; extra == 'testing' - - pytest-checkdocs>=2.4 ; extra == 'testing' - - pytest-cov ; extra == 'testing' - - pytest-mypy ; extra == 'testing' - - pytest-enabler>=2.2 ; extra == 'testing' - - pytest-ruff>=0.2.1 ; extra == 'testing' - - portend ; extra == 'testing' - requires_python: '>=3.8' -- kind: pypi - name: jaraco-functools - version: 4.0.1 - url: https://files.pythonhosted.org/packages/c3/ac/d0bf0d37a9f95f69a5efc5685d9166ee34a664d3cd29a9c139989512fe14/jaraco.functools-4.0.1-py3-none-any.whl - sha256: 3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 - requires_dist: - - more-itertools - - sphinx>=3.5 ; extra == 'docs' - - sphinx<7.2.5 ; extra == 'docs' - - jaraco-packaging>=9.3 ; extra == 'docs' - - rst-linker>=1.9 ; extra == 'docs' - - furo ; extra == 'docs' - - sphinx-lint ; extra == 'docs' - - jaraco-tidelift>=1.4 ; extra == 'docs' - - pytest>=6 ; extra == 'testing' - - pytest-checkdocs>=2.4 ; extra == 'testing' - - pytest-cov ; extra == 'testing' - - pytest-enabler>=2.2 ; extra == 'testing' - - pytest-ruff>=0.2.1 ; extra == 'testing' - - jaraco-classes ; extra == 'testing' - - pytest-mypy ; platform_python_implementation != 'PyPy' and extra == 'testing' - requires_python: '>=3.8' -- kind: pypi - name: jeepney - version: 0.8.0 - url: https://files.pythonhosted.org/packages/ae/72/2a1e2290f1ab1e06f71f3d0f1646c9e4634e70e1d37491535e19266e8dc9/jeepney-0.8.0-py3-none-any.whl - sha256: c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - requires_dist: - - pytest ; extra == 'test' - - pytest-trio ; extra == 'test' - - pytest-asyncio>=0.17 ; extra == 'test' - - testpath ; extra == 'test' - - trio ; extra == 'test' - - async-timeout ; extra == 'test' - - trio ; extra == 'trio' - - async-generator ; extra == 'trio' and python_version == '3.6' - requires_python: '>=3.7' -- kind: pypi - name: jinja2 - version: 3.1.4 - url: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl - sha256: bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d - requires_dist: - - markupsafe>=2.0 - - babel>=2.7 ; extra == 'i18n' - requires_python: '>=3.7' + size: 2562 + timestamp: 1578324546067 +- kind: conda + name: _openmp_mutex + version: '4.5' + build: 2_gnu + build_number: 16 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 + sha256: fbe2c5e56a653bebb982eda4876a9178aedfc2b545f25d0ce9c4c0b508253d22 + md5: 73aaf86a425cc6e73fcf236a5a46396d + depends: + - _libgcc_mutex 0.1 conda_forge + - libgomp >=7.5.0 + constrains: + - openmp_impl 9999 + license: BSD-3-Clause + license_family: BSD + purls: [] + size: 23621 + timestamp: 1650670423406 - kind: pypi - name: 
keyring - version: 25.2.1 - url: https://files.pythonhosted.org/packages/92/91/901f5cfeaaea04cf15f5ddf41ee053a5c9e389166477a3427fcfd055e1d9/keyring-25.2.1-py3-none-any.whl - sha256: 2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 + name: backports-tarfile + version: 1.2.0 + url: https://files.pythonhosted.org/packages/b9/fa/123043af240e49752f1c4bd24da5053b6bd00cad78c2be53c0d1e8b975bc/backports.tarfile-1.2.0-py3-none-any.whl + sha256: 77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 requires_dist: - - jaraco-classes - - jaraco-functools - - jaraco-context - - importlib-metadata>=4.11.4 ; python_version < '3.12' - - importlib-resources ; python_version < '3.9' - - secretstorage>=3.2 ; sys_platform == 'linux' - - jeepney>=0.4.2 ; sys_platform == 'linux' - - pywin32-ctypes>=0.2.0 ; sys_platform == 'win32' - - shtab>=1.1.0 ; extra == 'completion' - sphinx>=3.5 ; extra == 'docs' - jaraco-packaging>=9.3 ; extra == 'docs' - rst-linker>=1.9 ; extra == 'docs' - furo ; extra == 'docs' - sphinx-lint ; extra == 'docs' - - jaraco-tidelift>=1.4 ; extra == 'docs' - pytest!=8.1.*,>=6 ; extra == 'testing' - pytest-checkdocs>=2.4 ; extra == 'testing' - pytest-cov ; extra == 'testing' - - pytest-mypy ; extra == 'testing' - pytest-enabler>=2.2 ; extra == 'testing' - - pytest-ruff>=0.2.1 ; extra == 'testing' + - jaraco-test ; extra == 'testing' + - pytest!=8.0.* ; extra == 'testing' + requires_python: '>=3.8' +- kind: pypi + name: bcrypt + version: 4.1.3 + url: https://files.pythonhosted.org/packages/4c/6a/ce950d4350c734bc5d9b7196a58fedbdc94f564c00b495a1222984431e03/bcrypt-4.1.3-cp37-abi3-manylinux_2_28_x86_64.whl + sha256: 4fb253d65da30d9269e0a6f4b0de32bd657a0208a6f4e43d3e645774fb5457f3 + requires_dist: + - pytest!=3.3.0,>=3.2.1 ; extra == 'tests' + - mypy ; extra == 'typecheck' + requires_python: '>=3.7' +- kind: pypi + name: build + version: 1.2.1 + url: https://files.pythonhosted.org/packages/e2/03/f3c8ba0a6b6e30d7d18c40faab90807c9bb5e9a1e3b2fe2008af624a9c97/build-1.2.1-py3-none-any.whl + sha256: 75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4 + requires_dist: + - packaging>=19.1 + - pyproject-hooks + - colorama ; os_name == 'nt' + - importlib-metadata>=4.6 ; python_full_version < '3.10.2' + - tomli>=1.1.0 ; python_version < '3.11' + - furo>=2023.8.17 ; extra == 'docs' + - sphinx~=7.0 ; extra == 'docs' + - sphinx-argparse-cli>=1.5 ; extra == 'docs' + - sphinx-autodoc-typehints>=1.10 ; extra == 'docs' + - sphinx-issues>=3.0.0 ; extra == 'docs' + - build[uv,virtualenv] ; extra == 'test' + - filelock>=3 ; extra == 'test' + - pytest>=6.2.4 ; extra == 'test' + - pytest-cov>=2.12 ; extra == 'test' + - pytest-mock>=2 ; extra == 'test' + - pytest-rerunfailures>=9.1 ; extra == 'test' + - pytest-xdist>=1.34 ; extra == 'test' + - wheel>=0.36.0 ; extra == 'test' + - setuptools>=42.0.0 ; extra == 'test' and python_version < '3.10' + - setuptools>=56.0.0 ; extra == 'test' and python_version == '3.10' + - setuptools>=56.0.0 ; extra == 'test' and python_version == '3.11' + - setuptools>=67.8.0 ; extra == 'test' and python_version >= '3.12' + - build[uv] ; extra == 'typing' + - importlib-metadata>=5.1 ; extra == 'typing' + - mypy~=1.9.0 ; extra == 'typing' + - tomli ; extra == 'typing' + - typing-extensions>=3.7.4.3 ; extra == 'typing' + - uv>=0.1.18 ; extra == 'uv' + - virtualenv>=20.0.35 ; extra == 'virtualenv' requires_python: '>=3.8' - kind: conda - name: ld_impl_linux-64 - version: '2.40' - build: hf3520f5_7 - build_number: 7 + name: ca-certificates + version: 2024.7.4 
+ build: hbcca054_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda - sha256: 764b6950aceaaad0c67ef925417594dd14cd2e22fff864aeef455ac259263d15 - md5: b80f2f396ca2c28b8c14c437a4ed1e74 - constrains: - - binutils_impl_linux-64 2.40 - license: GPL-3.0-only - license_family: GPL + url: https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.7.4-hbcca054_0.conda + sha256: c1548a3235376f464f9931850b64b02492f379b2f2bb98bc786055329b080446 + md5: 23ab7665c5f63cfb9f1f6195256daac6 + license: ISC purls: [] - size: 707602 - timestamp: 1718625640445 + size: 154853 + timestamp: 1720077432978 +- kind: pypi + name: certifi + version: 2024.7.4 + url: https://files.pythonhosted.org/packages/1c/d5/c84e1a17bf61d4df64ca866a1c9a913874b4e9bdc131ec689a0ad013fb36/certifi-2024.7.4-py3-none-any.whl + sha256: c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 + requires_python: '>=3.6' +- kind: pypi + name: cffi + version: 1.16.0 + url: https://files.pythonhosted.org/packages/f1/c9/326611aa83e16b13b6db4dbb73b5455c668159a003c4c2f0c3bcb2ddabaf/cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + sha256: 6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 + requires_dist: + - pycparser + requires_python: '>=3.8' +- kind: pypi + name: charset-normalizer + version: 3.3.2 + url: https://files.pythonhosted.org/packages/3d/09/d82fe4a34c5f0585f9ea1df090e2a71eb9bb1e469723053e1ee9f57c16f3/charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + sha256: 45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 + requires_python: '>=3.7.0' +- kind: pypi + name: cloudpickle + version: 3.0.0 + url: https://files.pythonhosted.org/packages/96/43/dae06432d0c4b1dc9e9149ad37b4ca8384cf6eb7700cd9215b177b914f0a/cloudpickle-3.0.0-py3-none-any.whl + sha256: 246ee7d0c295602a036e86369c77fecda4ab17b506496730f2f576d9016fd9c7 + requires_python: '>=3.8' +- kind: pypi + name: cryptography + version: 42.0.8 + url: https://files.pythonhosted.org/packages/07/40/d6f6819c62e808ea74639c3c640f7edd636b86cce62cb14943996a15df92/cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl + sha256: 6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 + requires_dist: + - cffi>=1.12 ; platform_python_implementation != 'PyPy' + - sphinx>=5.3.0 ; extra == 'docs' + - sphinx-rtd-theme>=1.1.1 ; extra == 'docs' + - pyenchant>=1.6.11 ; extra == 'docstest' + - readme-renderer ; extra == 'docstest' + - sphinxcontrib-spelling>=4.0.1 ; extra == 'docstest' + - nox ; extra == 'nox' + - ruff ; extra == 'pep8test' + - mypy ; extra == 'pep8test' + - check-sdist ; extra == 'pep8test' + - click ; extra == 'pep8test' + - build ; extra == 'sdist' + - bcrypt>=3.1.5 ; extra == 'ssh' + - pytest>=6.2.0 ; extra == 'test' + - pytest-benchmark ; extra == 'test' + - pytest-cov ; extra == 'test' + - pytest-xdist ; extra == 'test' + - pretend ; extra == 'test' + - certifi ; extra == 'test' + - pytest-randomly ; extra == 'test-randomorder' + requires_python: '>=3.7' - kind: conda - name: libcublas - version: 11.10.1.25 - build: he442b6f_0 + name: cuda + version: 11.7.0 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcublas-11.10.1.25-he442b6f_0.tar.bz2 - md5: 12db71df5ad476eca4c1e5ba59e5bd82 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-11.7.0-0.tar.bz2 + md5: 1cf5addc0441778805f1e58a2c83e3cd + depends: + - cuda-demo-suite >=11.7.50 + - cuda-runtime 
>=11.7.0 + - cuda-toolkit >=11.7.0 arch: x86_64 platform: linux purls: [] - size: 314450443 - timestamp: 1649213079084 + size: 1450 + timestamp: 1656529602952 - kind: conda - name: libcublas-dev - version: 11.10.1.25 - build: h0c8ac2b_0 + name: cuda-cccl + version: 11.7.58 + build: hc415cf5_0 subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcublas-dev-11.10.1.25-h0c8ac2b_0.tar.bz2 - md5: 2ad42b6003cb3439a73c257b4bd356e0 - depends: - - libcublas >=11.10.1.25 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-cccl-11.7.58-hc415cf5_0.tar.bz2 + md5: 4dcb96960cea4c603b6ef5b33d062ff5 arch: x86_64 platform: linux purls: [] - size: 324267418 - timestamp: 1649213249394 + size: 1223929 + timestamp: 1650428244011 - kind: conda - name: libcufft - version: 10.7.2.50 - build: h80a1efe_0 + name: cuda-command-line-tools + version: 11.7.0 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcufft-10.7.2.50-h80a1efe_0.tar.bz2 - md5: 92c539005489bb48dac1173c09559fe6 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-command-line-tools-11.7.0-0.tar.bz2 + md5: 007751c1b543fc04fd34723884e495c0 + depends: + - cuda-cupti >=11.7.50 + - cuda-gdb >=11.7.50 + - cuda-memcheck >=11.7.50 + - cuda-nvdisasm >=11.7.50 + - cuda-nvprof >=11.7.50 + - cuda-nvtx >=11.7.50 + - cuda-sanitizer-api >=11.7.50 arch: x86_64 platform: linux purls: [] - size: 98107830 - timestamp: 1649215036926 + size: 1479 + timestamp: 1656529461635 - kind: conda - name: libcufft-dev - version: 10.7.2.50 - build: h59a5ac8_0 + name: cuda-compiler + version: 11.7.0 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcufft-dev-10.7.2.50-h59a5ac8_0.tar.bz2 - md5: 9a74839ee7d4e6b4a767fdb3c71f8a7a + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-compiler-11.7.0-0.tar.bz2 + md5: 8652316a9db9776f8fe37f61255bc10d depends: - - libcufft >=10.7.2.50 + - cuda-cuobjdump >=11.7.50 + - cuda-cuxxfilt >=11.7.50 + - cuda-nvcc >=11.7.64 + - cuda-nvprune >=11.7.50 arch: x86_64 platform: linux purls: [] - size: 205997732 - timestamp: 1649215083810 + size: 1462 + timestamp: 1656529473118 +- kind: conda + name: cuda-cudart + version: 11.7.60 + build: h9538e0e_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-cudart-11.7.60-h9538e0e_0.tar.bz2 + md5: 98cd5a3ad5ef4a928e89437608c364e4 + arch: x86_64 + platform: linux + purls: [] + size: 199483 + timestamp: 1650834099084 +- kind: conda + name: cuda-cudart-dev + version: 11.7.60 + build: h6a7c232_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-cudart-dev-11.7.60-h6a7c232_0.tar.bz2 + md5: 53ba3e794accfb5921ea4b90d32d1519 + depends: + - cuda-cccl + - cuda-cudart >=11.7.60 + arch: x86_64 + platform: linux + purls: [] + size: 1032258 + timestamp: 1650834100255 - kind: conda - name: libcufile - version: 1.3.0.44 - build: '0' + name: cuda-cuobjdump + version: 11.7.50 + build: h28cc80a_0 subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcufile-1.3.0.44-0.tar.bz2 - md5: bf3550b0ab35a211752c2018c9fd192a + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-cuobjdump-11.7.50-h28cc80a_0.tar.bz2 + md5: 7e35d1b055085ba22d44a04e4076d3a9 arch: x86_64 platform: linux purls: [] - size: 552448 - timestamp: 1656528513646 + size: 162802 + timestamp: 1649212986334 - kind: conda - name: libcufile-dev - 
version: 1.3.0.44 - build: '0' + name: cuda-cupti + version: 11.7.50 + build: hb6f9eaf_0 subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcufile-dev-1.3.0.44-0.tar.bz2 - md5: b765ba0ec97350706e0c88ed061096e7 - depends: - - libcufile >=1.3.0.44 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-cupti-11.7.50-hb6f9eaf_0.tar.bz2 + md5: 574fcb4cadd50dcd079fe25cfb670e12 arch: x86_64 platform: linux purls: [] - size: 12791788 - timestamp: 1656528532807 + size: 24044607 + timestamp: 1649213592867 - kind: conda - name: libcurand - version: 10.2.10.50 - build: heec50f7_0 + name: cuda-cuxxfilt + version: 11.7.50 + build: hb365495_0 subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcurand-10.2.10.50-heec50f7_0.tar.bz2 - md5: 00e467aec3d8e12f82bb994e434c685c + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-cuxxfilt-11.7.50-hb365495_0.tar.bz2 + md5: 1c55be2750344cf5a174992356279e20 arch: x86_64 platform: linux purls: [] - size: 52796544 - timestamp: 1649213456498 + size: 290614 + timestamp: 1649212827286 - kind: conda - name: libcurand-dev - version: 10.2.10.50 - build: hd49a9cd_0 + name: cuda-demo-suite + version: 11.7.50 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcurand-dev-10.2.10.50-hd49a9cd_0.tar.bz2 - md5: 37590eb41c2408c6af1801714a3ca279 - depends: - - libcurand >=10.2.10.50 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-demo-suite-11.7.50-0.tar.bz2 + md5: 1b154edaadb2c5bf27521c17508f729c arch: x86_64 platform: linux purls: [] - size: 53180004 - timestamp: 1649213486008 + size: 5188153 + timestamp: 1655213605854 - kind: conda - name: libcusolver - version: 11.3.5.50 - build: hcab339c_0 + name: cuda-documentation + version: 11.7.50 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcusolver-11.3.5.50-hcab339c_0.tar.bz2 - md5: 4448aa4e3d7464625d372b280c41728e + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-documentation-11.7.50-0.tar.bz2 + md5: 8087cac045651ecef5f4e35a5dc73042 arch: x86_64 platform: linux purls: [] - size: 93511094 - timestamp: 1649215013905 + size: 90906 + timestamp: 1655213446631 - kind: conda - name: libcusolver-dev - version: 11.3.5.50 - build: hc6eba6f_0 + name: cuda-driver-dev + version: 11.7.60 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcusolver-dev-11.3.5.50-hc6eba6f_0.tar.bz2 - md5: 30afc179a44e93bd9ec16185256b50c8 - depends: - - libcusolver >=11.3.5.50 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-driver-dev-11.7.60-0.tar.bz2 + md5: da85cb513904259eb327a0deff44c83e arch: x86_64 platform: linux purls: [] - size: 65246418 - timestamp: 1649215087514 + size: 17306 + timestamp: 1650834099778 - kind: conda - name: libcusparse - version: 11.7.3.50 - build: h6aaafad_0 + name: cuda-gdb + version: 11.7.50 + build: h4a0ac72_0 subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcusparse-11.7.3.50-h6aaafad_0.tar.bz2 - md5: d08e053b281fcd900a088fb2f2eaacb9 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-gdb-11.7.50-h4a0ac72_0.tar.bz2 + md5: 4a56a961da9312c45eef7a923867c09a arch: x86_64 platform: linux purls: [] - size: 155592022 - timestamp: 1649213972450 + size: 5031913 + timestamp: 1649217411560 - kind: conda - name: libcusparse-dev - version: 11.7.3.50 - build: 
hc644b96_0 + name: cuda-libraries + version: 11.7.0 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcusparse-dev-11.7.3.50-hc644b96_0.tar.bz2 - md5: 511128895df21b0f832e29865b56db35 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-libraries-11.7.0-0.tar.bz2 + md5: 0375db9ce1eb908878f210556f5eaf49 depends: + - cuda-cudart >=11.7.60 + - cuda-nvrtc >=11.7.50 + - libcublas >=11.10.1.25 + - libcufft >=10.7.2.50 + - libcufile >=1.3.0.44 + - libcurand >=10.2.10.50 + - libcusolver >=11.3.5.50 - libcusparse >=11.7.3.50 + - libnpp >=11.7.3.21 + - libnvjpeg >=11.7.2.34 arch: x86_64 platform: linux purls: [] - size: 315781272 - timestamp: 1649214045356 + size: 1532 + timestamp: 1656529485123 - kind: conda - name: libexpat - version: 2.6.2 - build: h59595ed_0 + name: cuda-libraries-dev + version: 11.7.0 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.2-h59595ed_0.conda - sha256: 331bb7c7c05025343ebd79f86ae612b9e1e74d2687b8f3179faec234f986ce19 - md5: e7ba12deb7020dd080c6c70e7b6f6a3d + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-libraries-dev-11.7.0-0.tar.bz2 + md5: 19ad25fcbe6c77d4c9b51b2900efce6e depends: - - libgcc-ng >=12 - constrains: - - expat 2.6.2.* - license: MIT - license_family: MIT + - cuda-cccl >=11.7.58 + - cuda-cudart-dev >=11.7.60 + - cuda-driver-dev >=11.7.60 + - cuda-nvrtc-dev >=11.7.50 + - libcublas-dev >=11.10.1.25 + - libcufft-dev >=10.7.2.50 + - libcufile-dev >=1.3.0.44 + - libcurand-dev >=10.2.10.50 + - libcusolver-dev >=11.3.5.50 + - libcusparse-dev >=11.7.3.50 + - libnpp-dev >=11.7.3.21 + - libnvjpeg-dev >=11.7.2.34 + arch: x86_64 + platform: linux purls: [] - size: 73730 - timestamp: 1710362120304 + size: 1555 + timestamp: 1656529497700 - kind: conda - name: libffi - version: 3.2.1 - build: he1b5a44_1007 - build_number: 1007 + name: cuda-memcheck + version: 11.7.50 + build: hc446b2b_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.2.1-he1b5a44_1007.tar.bz2 - sha256: 992246df63724484e9ee8652ce3ca0237f707961beab8b813096bbc647cf84f4 - md5: 11389072d7d6036fd811c3d9460475cd - depends: - - libgcc-ng >=7.3.0 - - libstdcxx-ng >=7.3.0 - license: Custom + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-memcheck-11.7.50-hc446b2b_0.tar.bz2 + md5: 0f93a278e1db683ceb5459d987c9dea1 + arch: x86_64 + platform: linux purls: [] - size: 48003 - timestamp: 1584559351227 + size: 172923 + timestamp: 1649213042047 - kind: conda - name: libffi - version: 3.4.2 - build: h7f98852_5 - build_number: 5 + name: cuda-nsight + version: 11.7.50 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 - sha256: ab6e9856c21709b7b517e940ae7028ae0737546122f83c2aa5d692860c3b149e - md5: d645c6d2ac96843a2bfaccd2d62b3ac3 - depends: - - libgcc-ng >=9.4.0 - license: MIT - license_family: MIT + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-nsight-11.7.50-0.tar.bz2 + md5: 20207c732b326a47481797775873ef5e + arch: x86_64 + platform: linux purls: [] - size: 58292 - timestamp: 1636488182923 + size: 119141042 + timestamp: 1655213189533 - kind: conda - name: libgcc-ng - version: 14.1.0 - build: h77fa898_0 + name: cuda-nsight-compute + version: 11.7.0 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda - sha256: 
b8e869ac96591cda2704bf7e77a301025e405227791a0bddf14a3dac65125538 - md5: ca0fad6a41ddaef54a153b78eccb5037 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-nsight-compute-11.7.0-0.tar.bz2 + md5: 8ba20f5b48a2f8c7872543bc48cac488 depends: - - _libgcc_mutex 0.1 conda_forge - - _openmp_mutex >=4.5 - constrains: - - libgomp 14.1.0 h77fa898_0 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL + - nsight-compute >=2022.2.0.13 + arch: x86_64 + platform: linux purls: [] - size: 842109 - timestamp: 1719538896937 + size: 1443 + timestamp: 1656529509384 - kind: conda - name: libgomp - version: 14.1.0 - build: h77fa898_0 + name: cuda-nvcc + version: 11.7.64 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda - sha256: 7699df61a1f6c644b3576a40f54791561f2845983120477a16116b951c9cdb05 - md5: ae061a5ed5f05818acdf9adab72c146d - depends: - - _libgcc_mutex 0.1 conda_forge - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-nvcc-11.7.64-0.tar.bz2 + md5: 5d6319b9cae2d0d2e54923cbe96df471 + arch: x86_64 + platform: linux purls: [] - size: 456925 - timestamp: 1719538796073 + size: 44771777 + timestamp: 1651657202137 - kind: conda - name: libnpp - version: 11.7.3.21 - build: h3effbd9_0 + name: cuda-nvdisasm + version: 11.7.50 + build: h5bd0695_0 subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libnpp-11.7.3.21-h3effbd9_0.tar.bz2 - md5: 0450f6bb03d24424334573ba05687130 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-nvdisasm-11.7.50-h5bd0695_0.tar.bz2 + md5: af154b6fb0f634db8b81440ef90ee562 arch: x86_64 platform: linux purls: [] - size: 124218053 - timestamp: 1647673432276 + size: 33039793 + timestamp: 1649213490405 - kind: conda - name: libnpp-dev - version: 11.7.3.21 - build: hb6476a9_0 - subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libnpp-dev-11.7.3.21-hb6476a9_0.tar.bz2 - md5: 88a90901316877a2873e88800a7c52d7 - depends: - - libnpp >=11.7.3.21 + name: cuda-nvml-dev + version: 11.7.50 + build: h3af1343_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-nvml-dev-11.7.50-h3af1343_0.tar.bz2 + md5: ba930fa5f7a29c3c5f987074d1cc495f arch: x86_64 platform: linux purls: [] - size: 121327534 - timestamp: 1647673495804 + size: 83313 + timestamp: 1649212178820 - kind: conda - name: libnsl - version: 2.0.1 - build: hd590300_0 + name: cuda-nvprof + version: 11.7.50 + build: h7a2404d_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda - sha256: 26d77a3bb4dceeedc2a41bd688564fe71bf2d149fdcf117049970bc02ff1add6 - md5: 30fd6e37fe21f86f4bd26d6ee73eeec7 - depends: - - libgcc-ng >=12 - license: LGPL-2.1-only - license_family: GPL + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-nvprof-11.7.50-h7a2404d_0.tar.bz2 + md5: ad7c62c4078f954e049255b8ee3291a6 + arch: x86_64 + platform: linux purls: [] - size: 33408 - timestamp: 1697359010159 + size: 4540250 + timestamp: 1649213856717 - kind: conda - name: libnvjpeg - version: 11.7.2.34 - build: hfe236c7_0 + name: cuda-nvprune + version: 11.7.50 + build: h7add7b4_0 subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libnvjpeg-11.7.2.34-hfe236c7_0.tar.bz2 - md5: f2b4df286504479597a82a83bc3458e0 + url: 
https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-nvprune-11.7.50-h7add7b4_0.tar.bz2 + md5: 18363ee0fcca6914b3510e98e2fa5860 arch: x86_64 platform: linux purls: [] - size: 2448233 - timestamp: 1649213785605 + size: 66461 + timestamp: 1649212794028 - kind: conda - name: libnvjpeg-dev - version: 11.7.2.34 - build: h2e48410_0 + name: cuda-nvrtc + version: 11.7.50 + build: hd0285e0_0 subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libnvjpeg-dev-11.7.2.34-h2e48410_0.tar.bz2 - md5: f87c91a0fab81a8abbea228fd8808277 - depends: - - libnvjpeg >=11.7.2.34 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-nvrtc-11.7.50-hd0285e0_0.tar.bz2 + md5: 6b2a357a01dc1a8f4191b0c38f36d5b3 arch: x86_64 platform: linux purls: [] - size: 2126625 - timestamp: 1649213787489 + size: 18144796 + timestamp: 1649211864503 - kind: conda - name: libsqlite - version: 3.46.0 - build: hde9e2c9_0 + name: cuda-nvrtc-dev + version: 11.7.50 + build: heada363_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda - sha256: daee3f68786231dad457d0dfde3f7f1f9a7f2018adabdbb864226775101341a8 - md5: 18aa975d2094c34aef978060ae7da7d8 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-nvrtc-dev-11.7.50-heada363_0.tar.bz2 + md5: c9e36803e6806ad39913b9e509b8a566 depends: - - libgcc-ng >=12 - - libzlib >=1.2.13,<2.0a0 - license: Unlicense + - cuda-nvrtc >=11.7.50 + arch: x86_64 + platform: linux purls: [] - size: 865346 - timestamp: 1718050628718 + size: 17755280 + timestamp: 1649211875853 - kind: conda - name: libstdcxx-ng - version: 14.1.0 - build: hc0a3c3a_0 + name: cuda-nvtx + version: 11.7.50 + build: h05b0816_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-hc0a3c3a_0.conda - sha256: 88c42b388202ffe16adaa337e36cf5022c63cf09b0405cf06fc6aeacccbe6146 - md5: 1cb187a157136398ddbaae90713e2498 - depends: - - libgcc-ng 14.1.0 h77fa898_0 - license: GPL-3.0-only WITH GCC-exception-3.1 - license_family: GPL + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-nvtx-11.7.50-h05b0816_0.tar.bz2 + md5: 6976dd2f242bbf3faf324a4889690c95 + arch: x86_64 + platform: linux purls: [] - size: 3881307 - timestamp: 1719538923443 + size: 59374 + timestamp: 1649212889078 - kind: conda - name: libuuid - version: 2.38.1 - build: h0b41bf4_0 + name: cuda-nvvp + version: 11.7.50 + build: hd2289d5_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda - sha256: 787eb542f055a2b3de553614b25f09eefb0a0931b0c87dbcce6efdfd92f04f18 - md5: 40b61aab5c7ba9ff276c41cfffe6b80b + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-nvvp-11.7.50-hd2289d5_0.tar.bz2 + md5: 425ca25095c33084d34ecf65a2db9a65 depends: - - libgcc-ng >=12 - license: BSD-3-Clause - license_family: BSD + - cuda-nvdisasm + - cuda-nvprof + arch: x86_64 + platform: linux purls: [] - size: 33601 - timestamp: 1680112270483 + size: 119873153 + timestamp: 1649214460122 - kind: conda - name: libxcrypt - version: 4.4.36 - build: hd590300_1 - build_number: 1 + name: cuda-runtime + version: 11.7.0 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda - sha256: 6ae68e0b86423ef188196fff6207ed0c8195dd84273cb5623b85aa08033a410c - md5: 5aa797f8787fe7a17d1b0821485b5adc + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-runtime-11.7.0-0.tar.bz2 + md5: 
3d2f825bd5eef4d8c12ef68d3e080de1 depends: - - libgcc-ng >=12 - license: LGPL-2.1-or-later + - cuda-libraries >=11.7.0 + arch: x86_64 + platform: linux purls: [] - size: 100393 - timestamp: 1702724383534 + size: 1430 + timestamp: 1656529544177 - kind: conda - name: libzlib - version: 1.2.13 - build: h4ab18f5_6 - build_number: 6 + name: cuda-sanitizer-api + version: 11.7.50 + build: hb424887_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h4ab18f5_6.conda - sha256: 8ced4afed6322172182af503f21725d072a589a6eb918f8a58135c1e00d35980 - md5: 27329162c0dc732bcf67a4e0cd488125 - depends: - - libgcc-ng >=12 - constrains: - - zlib 1.2.13 *_6 - license: Zlib - license_family: Other + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-sanitizer-api-11.7.50-hb424887_0.tar.bz2 + md5: 0fead82ef8c41e08d65843d3d74fe34e + arch: x86_64 + platform: linux purls: [] - size: 61571 - timestamp: 1716874066944 + size: 17556154 + timestamp: 1649214155195 - kind: conda - name: libzlib - version: 1.3.1 - build: h4ab18f5_1 - build_number: 1 + name: cuda-toolkit + version: 11.7.0 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda - sha256: adf6096f98b537a11ae3729eaa642b0811478f0ea0402ca67b5108fe2cb0010d - md5: 57d7dc60e9325e3de37ff8dffd18e814 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-toolkit-11.7.0-0.tar.bz2 + md5: d86b4e825ea4d17916a88541d0833d88 depends: - - libgcc-ng >=12 - constrains: - - zlib 1.3.1 *_1 - license: Zlib - license_family: Other + - cuda-compiler >=11.7.0 + - cuda-documentation >=11.7.50 + - cuda-libraries >=11.7.0 + - cuda-libraries-dev >=11.7.0 + - cuda-nvml-dev >=11.7.50 + - cuda-tools >=11.7.0 + arch: x86_64 + platform: linux purls: [] - size: 61574 - timestamp: 1716874187109 -- kind: pypi - name: lit - version: 18.1.8 - url: https://files.pythonhosted.org/packages/96/06/b36f150fa7c5bcc96a31a4d19a20fddbd1d965b6f02510b57a3bb8d4b930/lit-18.1.8-py3-none-any.whl - sha256: a873ff7acd76e746368da32eb7355625e2e55a2baaab884c9cc130f2ee0300f7 -- kind: pypi - name: markdown-it-py - version: 3.0.0 - url: https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl - sha256: 355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 - requires_dist: - - mdurl~=0.1 - - psutil ; extra == 'benchmarking' - - pytest ; extra == 'benchmarking' - - pytest-benchmark ; extra == 'benchmarking' - - pre-commit~=3.0 ; extra == 'code_style' - - commonmark~=0.9 ; extra == 'compare' - - markdown~=3.4 ; extra == 'compare' - - mistletoe~=1.0 ; extra == 'compare' - - mistune~=2.0 ; extra == 'compare' - - panflute~=2.3 ; extra == 'compare' - - linkify-it-py>=1,<3 ; extra == 'linkify' - - mdit-py-plugins ; extra == 'plugins' - - gprof2dot ; extra == 'profiling' - - mdit-py-plugins ; extra == 'rtd' - - myst-parser ; extra == 'rtd' - - pyyaml ; extra == 'rtd' - - sphinx ; extra == 'rtd' - - sphinx-copybutton ; extra == 'rtd' - - sphinx-design ; extra == 'rtd' - - sphinx-book-theme ; extra == 'rtd' - - jupyter-sphinx ; extra == 'rtd' - - coverage ; extra == 'testing' - - pytest ; extra == 'testing' - - pytest-cov ; extra == 'testing' - - pytest-regressions ; extra == 'testing' - requires_python: '>=3.8' -- kind: pypi - name: markupsafe - version: 2.1.5 - url: 
https://files.pythonhosted.org/packages/c7/bd/50319665ce81bb10e90d1cf76f9e1aa269ea6f7fa30ab4521f14d122a3df/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab - requires_python: '>=3.7' -- kind: pypi - name: markupsafe - version: 2.1.5 - url: https://files.pythonhosted.org/packages/5f/5a/360da85076688755ea0cceb92472923086993e86b5613bbae9fbc14136b0/MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: 17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 - requires_python: '>=3.7' -- kind: pypi - name: markupsafe - version: 2.1.5 - url: https://files.pythonhosted.org/packages/7c/52/2b1b570f6b8b803cef5ac28fdf78c0da318916c7d2fe9402a84d591b394c/MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: 2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f - requires_python: '>=3.7' -- kind: pypi - name: markupsafe - version: 2.1.5 - url: https://files.pythonhosted.org/packages/97/18/c30da5e7a0e7f4603abfc6780574131221d9148f323752c2755d48abad30/MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 - requires_python: '>=3.7' -- kind: pypi - name: mdurl - version: 0.1.2 - url: https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl - sha256: 84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 - requires_python: '>=3.7' -- kind: pypi - name: more-itertools - version: 10.3.0 - url: https://files.pythonhosted.org/packages/bb/23/2d1cdb0427aecb2b150dc2ac2d15400990c4f05585b3fbc1b5177d74d7fb/more_itertools-10.3.0-py3-none-any.whl - sha256: ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 - requires_python: '>=3.8' -- kind: pypi - name: mpmath - version: 1.3.0 - url: https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl - sha256: a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c - requires_dist: - - pytest>=4.6 ; extra == 'develop' - - pycodestyle ; extra == 'develop' - - pytest-cov ; extra == 'develop' - - codecov ; extra == 'develop' - - wheel ; extra == 'develop' - - sphinx ; extra == 'docs' - - gmpy2>=2.1.0a4 ; platform_python_implementation != 'PyPy' and extra == 'gmpy' - - pytest>=4.6 ; extra == 'tests' + size: 1469 + timestamp: 1656529579362 - kind: conda - name: ncurses - version: '6.5' - build: h59595ed_0 + name: cuda-tools + version: 11.7.0 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h59595ed_0.conda - sha256: 4fc3b384f4072b68853a0013ea83bdfd3d66b0126e2238e1d6e1560747aa7586 - md5: fcea371545eda051b6deafb24889fc69 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-tools-11.7.0-0.tar.bz2 + md5: 2012403f68f994e83c0a15eea7220d43 depends: - - libgcc-ng >=12 - license: X11 AND BSD-3-Clause + - cuda-command-line-tools >=11.7.0 + - cuda-visual-tools >=11.7.0 + - gds-tools >=1.3.0.44 + arch: x86_64 + platform: linux purls: [] - size: 887465 - timestamp: 1715194722503 -- kind: pypi - name: networkx - version: '3.1' - url: https://files.pythonhosted.org/packages/a8/05/9d4f9b78ead6b2661d6e8ea772e111fc4a9fbd866ad0c81906c11206b55e/networkx-3.1-py3-none-any.whl - sha256: 4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36 - requires_dist: - - 
numpy>=1.20 ; extra == 'default' - - scipy>=1.8 ; extra == 'default' - - matplotlib>=3.4 ; extra == 'default' - - pandas>=1.3 ; extra == 'default' - - pre-commit>=3.2 ; extra == 'developer' - - mypy>=1.1 ; extra == 'developer' - - sphinx>=6.1 ; extra == 'doc' - - pydata-sphinx-theme>=0.13 ; extra == 'doc' - - sphinx-gallery>=0.12 ; extra == 'doc' - - numpydoc>=1.5 ; extra == 'doc' - - pillow>=9.4 ; extra == 'doc' - - nb2plots>=0.6 ; extra == 'doc' - - texext>=0.6.7 ; extra == 'doc' - - lxml>=4.6 ; extra == 'extra' - - pygraphviz>=1.10 ; extra == 'extra' - - pydot>=1.4.2 ; extra == 'extra' - - sympy>=1.10 ; extra == 'extra' - - pytest>=7.2 ; extra == 'test' - - pytest-cov>=4.0 ; extra == 'test' - - codecov>=2.1 ; extra == 'test' - requires_python: '>=3.8' -- kind: pypi - name: networkx - version: 3.2.1 - url: https://files.pythonhosted.org/packages/d5/f0/8fbc882ca80cf077f1b246c0e3c3465f7f415439bdea6b899f6b19f61f70/networkx-3.2.1-py3-none-any.whl - sha256: f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2 - requires_dist: - - numpy>=1.22 ; extra == 'default' - - scipy!=1.11.0,!=1.11.1,>=1.9 ; extra == 'default' - - matplotlib>=3.5 ; extra == 'default' - - pandas>=1.4 ; extra == 'default' - - changelist==0.4 ; extra == 'developer' - - pre-commit>=3.2 ; extra == 'developer' - - mypy>=1.1 ; extra == 'developer' - - rtoml ; extra == 'developer' - - sphinx>=7 ; extra == 'doc' - - pydata-sphinx-theme>=0.14 ; extra == 'doc' - - sphinx-gallery>=0.14 ; extra == 'doc' - - numpydoc>=1.6 ; extra == 'doc' - - pillow>=9.4 ; extra == 'doc' - - nb2plots>=0.7 ; extra == 'doc' - - texext>=0.6.7 ; extra == 'doc' - - nbconvert<7.9 ; extra == 'doc' - - lxml>=4.6 ; extra == 'extra' - - pygraphviz>=1.11 ; extra == 'extra' - - pydot>=1.4.2 ; extra == 'extra' - - sympy>=1.10 ; extra == 'extra' - - pytest>=7.2 ; extra == 'test' - - pytest-cov>=4.0 ; extra == 'test' - requires_python: '>=3.9' -- kind: pypi - name: networkx - version: '3.3' - url: https://files.pythonhosted.org/packages/38/e9/5f72929373e1a0e8d142a130f3f97e6ff920070f87f91c4e13e40e0fba5a/networkx-3.3-py3-none-any.whl - sha256: 28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2 - requires_dist: - - numpy>=1.23 ; extra == 'default' - - scipy!=1.11.0,!=1.11.1,>=1.9 ; extra == 'default' - - matplotlib>=3.6 ; extra == 'default' - - pandas>=1.4 ; extra == 'default' - - changelist==0.5 ; extra == 'developer' - - pre-commit>=3.2 ; extra == 'developer' - - mypy>=1.1 ; extra == 'developer' - - rtoml ; extra == 'developer' - - sphinx>=7 ; extra == 'doc' - - pydata-sphinx-theme>=0.14 ; extra == 'doc' - - sphinx-gallery>=0.14 ; extra == 'doc' - - numpydoc>=1.7 ; extra == 'doc' - - pillow>=9.4 ; extra == 'doc' - - texext>=0.6.7 ; extra == 'doc' - - myst-nb>=1.0 ; extra == 'doc' - - lxml>=4.6 ; extra == 'extra' - - pygraphviz>=1.12 ; extra == 'extra' - - pydot>=2.0 ; extra == 'extra' - - sympy>=1.10 ; extra == 'extra' - - pytest>=7.2 ; extra == 'test' - - pytest-cov>=4.0 ; extra == 'test' - requires_python: '>=3.10' -- kind: pypi - name: nh3 - version: 0.2.17 - url: https://files.pythonhosted.org/packages/da/19/d52d9a0247007835df949f17abd904615248dc1b94d67cb8c99100330f08/nh3-0.2.17-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b -- kind: pypi - name: nodeenv - version: 1.9.1 - url: https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl - sha256: 
ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9 - requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*' + size: 1449 + timestamp: 1656529567537 - kind: conda - name: nsight-compute - version: 2022.2.0.13 + name: cuda-visual-tools + version: 11.7.0 build: '0' subdir: linux-64 - url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/nsight-compute-2022.2.0.13-0.tar.bz2 - md5: b82bd48611bbcdb30dba23710dad5d60 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/cuda-visual-tools-11.7.0-0.tar.bz2 + md5: 72c35e5ed115b7ffc0ec834a30d133e5 + depends: + - cuda-libraries-dev >=11.7.0 + - cuda-nsight >=11.7.50 + - cuda-nsight-compute >=11.7.0 + - cuda-nvml-dev >=11.7.50 + - cuda-nvvp >=11.7.50 arch: x86_64 platform: linux purls: [] - size: 485381969 - timestamp: 1655214757556 -- kind: pypi - name: numpy - version: 1.24.4 - url: https://files.pythonhosted.org/packages/98/5d/5738903efe0ecb73e51eb44feafba32bdba2081263d40c5043568ff60faf/numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc - requires_python: '>=3.8' -- kind: pypi - name: numpy - version: 1.26.4 - url: https://files.pythonhosted.org/packages/54/30/c2a907b9443cf42b90c17ad10c1e8fa801975f01cb9764f3f8eb8aea638b/numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3 - requires_python: '>=3.9' -- kind: pypi - name: numpy - version: 1.26.4 - url: https://files.pythonhosted.org/packages/4b/d7/ecf66c1cd12dc28b4040b15ab4d17b773b87fa9d29ca16125de01adb36cd/numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f - requires_python: '>=3.9' -- kind: pypi - name: numpy - version: 1.26.4 - url: https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: 666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5 - requires_python: '>=3.9' -- kind: pypi - name: nvidia-cublas-cu11 - version: 11.10.3.66 - url: https://files.pythonhosted.org/packages/ce/41/fdeb62b5437996e841d83d7d2714ca75b886547ee8017ee2fe6ea409d983/nvidia_cublas_cu11-11.10.3.66-py3-none-manylinux1_x86_64.whl - sha256: d32e4d75f94ddfb93ea0a5dda08389bcc65d8916a25cb9f37ac89edaeed3bded - requires_dist: - - setuptools - - wheel - requires_python: '>=3' -- kind: pypi - name: nvidia-cublas-cu12 - version: 12.1.3.1 - url: https://files.pythonhosted.org/packages/37/6d/121efd7382d5b0284239f4ab1fc1590d86d34ed4a4a2fdb13b30ca8e5740/nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl - sha256: ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728 - requires_python: '>=3' -- kind: pypi - name: nvidia-cuda-cupti-cu11 - version: 11.7.101 - url: https://files.pythonhosted.org/packages/e6/9d/dd0cdcd800e642e3c82ee3b5987c751afd4f3fb9cc2752517f42c3bc6e49/nvidia_cuda_cupti_cu11-11.7.101-py3-none-manylinux1_x86_64.whl - sha256: e0cfd9854e1f2edaa36ca20d21cd0bdd5dcfca4e3b9e130a082e05b33b6c5895 - requires_dist: - - setuptools - - wheel - requires_python: '>=3' -- kind: pypi - name: nvidia-cuda-cupti-cu12 - version: 12.1.105 - url: https://files.pythonhosted.org/packages/7e/00/6b218edd739ecfc60524e585ba8e6b00554dd908de2c9c66c1af3e44e18d/nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - sha256: 
e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e - requires_python: '>=3' -- kind: pypi - name: nvidia-cuda-nvrtc-cu11 - version: 11.7.99 - url: https://files.pythonhosted.org/packages/ef/25/922c5996aada6611b79b53985af7999fc629aee1d5d001b6a22431e18fec/nvidia_cuda_nvrtc_cu11-11.7.99-2-py3-none-manylinux1_x86_64.whl - sha256: 9f1562822ea264b7e34ed5930567e89242d266448e936b85bc97a3370feabb03 - requires_python: '>=3' -- kind: pypi - name: nvidia-cuda-nvrtc-cu12 - version: 12.1.105 - url: https://files.pythonhosted.org/packages/b6/9f/c64c03f49d6fbc56196664d05dba14e3a561038a81a638eeb47f4d4cfd48/nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - sha256: 339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2 - requires_python: '>=3' -- kind: pypi - name: nvidia-cuda-runtime-cu11 - version: 11.7.99 - url: https://files.pythonhosted.org/packages/36/92/89cf558b514125d2ebd8344dd2f0533404b416486ff681d5434a5832a019/nvidia_cuda_runtime_cu11-11.7.99-py3-none-manylinux1_x86_64.whl - sha256: cc768314ae58d2641f07eac350f40f99dcb35719c4faff4bc458a7cd2b119e31 - requires_dist: - - setuptools - - wheel - requires_python: '>=3' -- kind: pypi - name: nvidia-cuda-runtime-cu12 - version: 12.1.105 - url: https://files.pythonhosted.org/packages/eb/d5/c68b1d2cdfcc59e72e8a5949a37ddb22ae6cade80cd4a57a84d4c8b55472/nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - sha256: 6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40 - requires_python: '>=3' -- kind: pypi - name: nvidia-cudnn-cu11 - version: 8.5.0.96 - url: https://files.pythonhosted.org/packages/dc/30/66d4347d6e864334da5bb1c7571305e501dcb11b9155971421bb7bb5315f/nvidia_cudnn_cu11-8.5.0.96-2-py3-none-manylinux1_x86_64.whl - sha256: 402f40adfc6f418f9dae9ab402e773cfed9beae52333f6d86ae3107a1b9527e7 - requires_dist: - - nvidia-cublas-cu11 - requires_python: '>=3' -- kind: pypi - name: nvidia-cudnn-cu12 - version: 8.9.2.26 - url: https://files.pythonhosted.org/packages/ff/74/a2e2be7fb83aaedec84f391f082cf765dfb635e7caa9b49065f73e4835d8/nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl - sha256: 5ccb288774fdfb07a7e7025ffec286971c06d8d7b4fb162525334616d7629ff9 - requires_dist: - - nvidia-cublas-cu12 - requires_python: '>=3' -- kind: pypi - name: nvidia-cudnn-cu12 - version: 9.1.0.70 - url: https://files.pythonhosted.org/packages/9f/fd/713452cd72343f682b1c7b9321e23829f00b842ceaedcda96e742ea0b0b3/nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl - sha256: 165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f - requires_dist: - - nvidia-cublas-cu12 - requires_python: '>=3' -- kind: pypi - name: nvidia-cufft-cu11 - version: 10.9.0.58 - url: https://files.pythonhosted.org/packages/74/79/b912a77e38e41f15a0581a59f5c3548d1ddfdda3225936fb67c342719e7a/nvidia_cufft_cu11-10.9.0.58-py3-none-manylinux1_x86_64.whl - sha256: 222f9da70c80384632fd6035e4c3f16762d64ea7a843829cb278f98b3cb7dd81 - requires_python: '>=3' + size: 1476 + timestamp: 1656529555717 - kind: pypi - name: nvidia-cufft-cu12 - version: 11.0.2.54 - url: https://files.pythonhosted.org/packages/86/94/eb540db023ce1d162e7bea9f8f5aa781d57c65aed513c33ee9a5123ead4d/nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl - sha256: 794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56 - requires_python: '>=3' + name: decorator + version: 5.1.1 + url: https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl + sha256: 
b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186 + requires_python: '>=3.5' - kind: pypi - name: nvidia-curand-cu11 - version: 10.2.10.91 - url: https://files.pythonhosted.org/packages/8f/11/af78d54b2420e64a4dd19e704f5bb69dcb5a6a3138b4465d6a48cdf59a21/nvidia_curand_cu11-10.2.10.91-py3-none-manylinux1_x86_64.whl - sha256: eecb269c970fa599a2660c9232fa46aaccbf90d9170b96c462e13bcb4d129e2c + name: deprecated + version: 1.2.14 + url: https://files.pythonhosted.org/packages/20/8d/778b7d51b981a96554f29136cd59ca7880bf58094338085bcf2a979a0e6a/Deprecated-1.2.14-py2.py3-none-any.whl + sha256: 6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c requires_dist: - - setuptools - - wheel - requires_python: '>=3' -- kind: pypi - name: nvidia-curand-cu12 - version: 10.3.2.106 - url: https://files.pythonhosted.org/packages/44/31/4890b1c9abc496303412947fc7dcea3d14861720642b49e8ceed89636705/nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl - sha256: 9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0 - requires_python: '>=3' + - wrapt<2,>=1.10 + - tox ; extra == 'dev' + - pytest ; extra == 'dev' + - pytest-cov ; extra == 'dev' + - bump2version<1 ; extra == 'dev' + - sphinx<2 ; extra == 'dev' + requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*' - kind: pypi - name: nvidia-cusolver-cu11 - version: 11.4.0.1 - url: https://files.pythonhosted.org/packages/3e/77/66149e3153b19312fb782ea367f3f950123b93916a45538b573fe373570a/nvidia_cusolver_cu11-11.4.0.1-2-py3-none-manylinux1_x86_64.whl - sha256: 72fa7261d755ed55c0074960df5904b65e2326f7adce364cbe4945063c1be412 - requires_dist: - - nvidia-cublas-cu11 - requires_python: '>=3' + name: docutils + version: 0.20.1 + url: https://files.pythonhosted.org/packages/26/87/f238c0670b94533ac0353a4e2a1a771a0cc73277b88bff23d3ae35a256c1/docutils-0.20.1-py3-none-any.whl + sha256: 96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 + requires_python: '>=3.7' - kind: pypi - name: nvidia-cusolver-cu12 - version: 11.4.5.107 - url: https://files.pythonhosted.org/packages/bc/1d/8de1e5c67099015c834315e333911273a8c6aaba78923dd1d1e25fc5f217/nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl - sha256: 8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd + name: exceptiongroup + version: 1.2.1 + url: https://files.pythonhosted.org/packages/01/90/79fe92dd413a9cab314ef5c591b5aa9b9ba787ae4cadab75055b0ae00b33/exceptiongroup-1.2.1-py3-none-any.whl + sha256: 5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad requires_dist: - - nvidia-cublas-cu12 - - nvidia-nvjitlink-cu12 - - nvidia-cusparse-cu12 - requires_python: '>=3' + - pytest>=6 ; extra == 'test' + requires_python: '>=3.7' - kind: pypi - name: nvidia-cusparse-cu11 - version: 11.7.4.91 - url: https://files.pythonhosted.org/packages/ea/6f/6d032cc1bb7db88a989ddce3f4968419a7edeafda362847f42f614b1f845/nvidia_cusparse_cu11-11.7.4.91-py3-none-manylinux1_x86_64.whl - sha256: a3389de714db63321aa11fbec3919271f415ef19fda58aed7f2ede488c32733d + name: fabric + version: 3.2.2 + url: https://files.pythonhosted.org/packages/d6/1f/e99e23ee01847147fa194e8d41cfcf2535a2dbfcb51414c541cadb15c5d7/fabric-3.2.2-py3-none-any.whl + sha256: 91c47c0be68b14936c88b34da8a1f55e5710fd28397dac5d4ff2e21558113a6f requires_dist: - - setuptools - - wheel - requires_python: '>=3' + - invoke>=2.0 + - paramiko>=2.4 + - decorator>=5 + - deprecated>=1.2 + - pytest>=7 ; extra == 'pytest' - kind: pypi - name: nvidia-cusparse-cu12 - version: 12.1.0.106 - url: 
https://files.pythonhosted.org/packages/65/5b/cfaeebf25cd9fdec14338ccb16f6b2c4c7fa9163aefcf057d86b9cc248bb/nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl - sha256: f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c + name: filelock + version: 3.15.4 + url: https://files.pythonhosted.org/packages/ae/f0/48285f0262fe47103a4a45972ed2f9b93e4c80b8fd609fa98da78b2a5706/filelock-3.15.4-py3-none-any.whl + sha256: 6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 requires_dist: - - nvidia-nvjitlink-cu12 - requires_python: '>=3' -- kind: pypi - name: nvidia-nccl-cu11 - version: 2.14.3 - url: https://files.pythonhosted.org/packages/55/92/914cdb650b6a5d1478f83148597a25e90ea37d739bd563c5096b0e8a5f43/nvidia_nccl_cu11-2.14.3-py3-none-manylinux1_x86_64.whl - sha256: 5e5534257d1284b8e825bc3a182c6f06acd6eb405e9f89d49340e98cd8f136eb - requires_python: '>=3' -- kind: pypi - name: nvidia-nccl-cu12 - version: 2.18.1 - url: https://files.pythonhosted.org/packages/a4/05/23f8f38eec3d28e4915725b233c24d8f1a33cb6540a882f7b54be1befa02/nvidia_nccl_cu12-2.18.1-py3-none-manylinux1_x86_64.whl - sha256: 1a6c4acefcbebfa6de320f412bf7866de856e786e0462326ba1bac40de0b5e71 - requires_python: '>=3' -- kind: pypi - name: nvidia-nccl-cu12 - version: 2.19.3 - url: https://files.pythonhosted.org/packages/38/00/d0d4e48aef772ad5aebcf70b73028f88db6e5640b36c38e90445b7a57c45/nvidia_nccl_cu12-2.19.3-py3-none-manylinux1_x86_64.whl - sha256: a9734707a2c96443331c1e48c717024aa6678a0e2a4cb66b2c364d18cee6b48d - requires_python: '>=3' -- kind: pypi - name: nvidia-nccl-cu12 - version: 2.20.5 - url: https://files.pythonhosted.org/packages/4b/2a/0a131f572aa09f741c30ccd45a8e56316e8be8dfc7bc19bf0ab7cfef7b19/nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl - sha256: 057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56 - requires_python: '>=3' -- kind: pypi - name: nvidia-nvjitlink-cu12 - version: 12.5.82 - url: https://files.pythonhosted.org/packages/75/bc/e0d0dbb85246a086ab14839979039647bce501d8c661a159b8b019d987b7/nvidia_nvjitlink_cu12-12.5.82-py3-none-manylinux2014_x86_64.whl - sha256: f9b37bc5c8cf7509665cb6ada5aaa0ce65618f2332b7d3e78e9790511f111212 - requires_python: '>=3' -- kind: pypi - name: nvidia-nvjitlink-cu12 - version: 12.6.20 - url: https://files.pythonhosted.org/packages/59/65/7ff0569494fbaea45ad2814972cc88da843d53cc96eb8554fcd0908941d9/nvidia_nvjitlink_cu12-12.6.20-py3-none-manylinux2014_x86_64.whl - sha256: 562ab97ea2c23164823b2a89cb328d01d45cb99634b8c65fe7cd60d14562bd79 - requires_python: '>=3' + - furo>=2023.9.10 ; extra == 'docs' + - sphinx-autodoc-typehints!=1.23.4,>=1.25.2 ; extra == 'docs' + - sphinx>=7.2.6 ; extra == 'docs' + - covdefaults>=2.3 ; extra == 'testing' + - coverage>=7.3.2 ; extra == 'testing' + - diff-cover>=8.0.1 ; extra == 'testing' + - pytest-asyncio>=0.21 ; extra == 'testing' + - pytest-cov>=4.1 ; extra == 'testing' + - pytest-mock>=3.12 ; extra == 'testing' + - pytest-timeout>=2.2 ; extra == 'testing' + - pytest>=7.4.3 ; extra == 'testing' + - virtualenv>=20.26.2 ; extra == 'testing' + - typing-extensions>=4.8 ; python_version < '3.11' and extra == 'typing' + requires_python: '>=3.8' - kind: pypi - name: nvidia-nvtx-cu11 - version: 11.7.91 - url: https://files.pythonhosted.org/packages/23/d5/09493ff0e64fd77523afbbb075108f27a13790479efe86b9ffb4587671b5/nvidia_nvtx_cu11-11.7.91-py3-none-manylinux1_x86_64.whl - sha256: b22c64eee426a62fc00952b507d6d29cf62b4c9df7a480fcc417e540e05fd5ac + name: fsspec + version: 2024.6.1 + url: 
https://files.pythonhosted.org/packages/5e/44/73bea497ac69bafde2ee4269292fa3b41f1198f4bb7bbaaabde30ad29d4a/fsspec-2024.6.1-py3-none-any.whl + sha256: 3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e requires_dist: - - setuptools - - wheel - requires_python: '>=3' -- kind: pypi - name: nvidia-nvtx-cu12 - version: 12.1.105 - url: https://files.pythonhosted.org/packages/da/d3/8057f0587683ed2fcd4dbfbdfdfa807b9160b809976099d36b8f60d08f03/nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl - sha256: dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5 - requires_python: '>=3' -- kind: conda - name: openssl - version: 1.1.1w - build: hd590300_0 - subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1w-hd590300_0.conda - sha256: 4fe19885c77f0758084feb54954bd1977dfeeab7134fba0a1d9c0cfff821d6bd - md5: 301e70057a3bd399640bb16bbdf87995 - depends: - - ca-certificates - - libgcc-ng >=12 - license: OpenSSL - license_family: Apache - purls: [] - size: 1956010 - timestamp: 1694461292959 + - adlfs ; extra == 'abfs' + - adlfs ; extra == 'adl' + - pyarrow>=1 ; extra == 'arrow' + - dask ; extra == 'dask' + - distributed ; extra == 'dask' + - pre-commit ; extra == 'dev' + - ruff ; extra == 'dev' + - numpydoc ; extra == 'doc' + - sphinx ; extra == 'doc' + - sphinx-design ; extra == 'doc' + - sphinx-rtd-theme ; extra == 'doc' + - yarl ; extra == 'doc' + - dropbox ; extra == 'dropbox' + - dropboxdrivefs ; extra == 'dropbox' + - requests ; extra == 'dropbox' + - adlfs ; extra == 'full' + - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'full' + - dask ; extra == 'full' + - distributed ; extra == 'full' + - dropbox ; extra == 'full' + - dropboxdrivefs ; extra == 'full' + - fusepy ; extra == 'full' + - gcsfs ; extra == 'full' + - libarchive-c ; extra == 'full' + - ocifs ; extra == 'full' + - panel ; extra == 'full' + - paramiko ; extra == 'full' + - pyarrow>=1 ; extra == 'full' + - pygit2 ; extra == 'full' + - requests ; extra == 'full' + - s3fs ; extra == 'full' + - smbprotocol ; extra == 'full' + - tqdm ; extra == 'full' + - fusepy ; extra == 'fuse' + - gcsfs ; extra == 'gcs' + - pygit2 ; extra == 'git' + - requests ; extra == 'github' + - gcsfs ; extra == 'gs' + - panel ; extra == 'gui' + - pyarrow>=1 ; extra == 'hdfs' + - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'http' + - libarchive-c ; extra == 'libarchive' + - ocifs ; extra == 'oci' + - s3fs ; extra == 's3' + - paramiko ; extra == 'sftp' + - smbprotocol ; extra == 'smb' + - paramiko ; extra == 'ssh' + - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'test' + - numpy ; extra == 'test' + - pytest ; extra == 'test' + - pytest-asyncio!=0.22.0 ; extra == 'test' + - pytest-benchmark ; extra == 'test' + - pytest-cov ; extra == 'test' + - pytest-mock ; extra == 'test' + - pytest-recording ; extra == 'test' + - pytest-rerunfailures ; extra == 'test' + - requests ; extra == 'test' + - aiobotocore<3.0.0,>=2.5.4 ; extra == 'test-downstream' + - dask-expr ; extra == 'test-downstream' + - dask[dataframe,test] ; extra == 'test-downstream' + - moto[server]<5,>4 ; extra == 'test-downstream' + - pytest-timeout ; extra == 'test-downstream' + - xarray ; extra == 'test-downstream' + - adlfs ; extra == 'test-full' + - aiohttp!=4.0.0a0,!=4.0.0a1 ; extra == 'test-full' + - cloudpickle ; extra == 'test-full' + - dask ; extra == 'test-full' + - distributed ; extra == 'test-full' + - dropbox ; extra == 'test-full' + - dropboxdrivefs ; extra == 'test-full' + - fastparquet ; extra == 'test-full' + - fusepy ; extra == 'test-full' + - gcsfs ; extra 
== 'test-full' + - jinja2 ; extra == 'test-full' + - kerchunk ; extra == 'test-full' + - libarchive-c ; extra == 'test-full' + - lz4 ; extra == 'test-full' + - notebook ; extra == 'test-full' + - numpy ; extra == 'test-full' + - ocifs ; extra == 'test-full' + - pandas ; extra == 'test-full' + - panel ; extra == 'test-full' + - paramiko ; extra == 'test-full' + - pyarrow ; extra == 'test-full' + - pyarrow>=1 ; extra == 'test-full' + - pyftpdlib ; extra == 'test-full' + - pygit2 ; extra == 'test-full' + - pytest ; extra == 'test-full' + - pytest-asyncio!=0.22.0 ; extra == 'test-full' + - pytest-benchmark ; extra == 'test-full' + - pytest-cov ; extra == 'test-full' + - pytest-mock ; extra == 'test-full' + - pytest-recording ; extra == 'test-full' + - pytest-rerunfailures ; extra == 'test-full' + - python-snappy ; extra == 'test-full' + - requests ; extra == 'test-full' + - smbprotocol ; extra == 'test-full' + - tqdm ; extra == 'test-full' + - urllib3 ; extra == 'test-full' + - zarr ; extra == 'test-full' + - zstandard ; extra == 'test-full' + - tqdm ; extra == 'tqdm' + requires_python: '>=3.8' - kind: conda - name: openssl - version: 3.3.1 - build: h4bc722e_2 - build_number: 2 + name: gds-tools + version: 1.3.0.44 + build: '0' subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.1-h4bc722e_2.conda - sha256: b294b3cc706ad1048cdb514f0db3da9f37ae3fcc0c53a7104083dd0918adb200 - md5: e1b454497f9f7c1147fdde4b53f1b512 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/gds-tools-1.3.0.44-0.tar.bz2 + md5: 119fc5cfc6250cfb80c85baacd32779c depends: - - __glibc >=2.17,<3.0.a0 - - ca-certificates - - libgcc-ng >=12 - constrains: - - pyopenssl >=22.1 - license: Apache-2.0 - license_family: Apache + - libcufile >=1.3.0.44 + arch: x86_64 + platform: linux purls: [] - size: 2895213 - timestamp: 1721194688955 -- kind: pypi - name: packaging - version: '24.1' - url: https://files.pythonhosted.org/packages/08/aa/cc0199a5f0ad350994d660967a8efb233fe0416e4639146c089643407ce6/packaging-24.1-py3-none-any.whl - sha256: 5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 - requires_python: '>=3.8' + size: 41628234 + timestamp: 1656528515966 - kind: pypi - name: paramiko - version: 3.4.0 - url: https://files.pythonhosted.org/packages/ad/50/8792484502c8141c20c996b802fefa8435a9c018a2bb440a06b172782118/paramiko-3.4.0-py3-none-any.whl - sha256: 43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7 - requires_dist: - - bcrypt>=3.2 - - cryptography>=3.3 - - pynacl>=1.5 - - pyasn1>=0.1.7 ; extra == 'all' - - invoke>=2.0 ; extra == 'all' - - gssapi>=1.4.1 ; platform_system != 'Windows' and extra == 'all' - - pywin32>=2.1.8 ; platform_system == 'Windows' and extra == 'all' - - pyasn1>=0.1.7 ; extra == 'gssapi' - - gssapi>=1.4.1 ; platform_system != 'Windows' and extra == 'gssapi' - - pywin32>=2.1.8 ; platform_system == 'Windows' and extra == 'gssapi' - - invoke>=2.0 ; extra == 'invoke' - requires_python: '>=3.6' + name: idna + version: '3.7' + url: https://files.pythonhosted.org/packages/e5/3e/741d8c82801c347547f8a2a06aa57dbb1992be9e948df2ea0eda2c8b79e8/idna-3.7-py3-none-any.whl + sha256: 82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 + requires_python: '>=3.5' - kind: pypi - name: paramiko - version: 3.4.1 - url: https://files.pythonhosted.org/packages/96/6e/4a52a8923d840107024b844d83502dfa6a1e5399ad31cf9d1a4ddbaaa7e5/paramiko-3.4.1-py3-none-any.whl - sha256: 8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32 + name: 
importlib-metadata + version: 8.0.0 + url: https://files.pythonhosted.org/packages/dc/ef/38766b2edb096260d9b1b6ad35adaa0bce3b0567abb452b21eb074af88c4/importlib_metadata-8.0.0-py3-none-any.whl + sha256: 15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f requires_dist: - - bcrypt>=3.2 - - cryptography>=3.3 - - pynacl>=1.5 - - pyasn1>=0.1.7 ; extra == 'all' - - invoke>=2.0 ; extra == 'all' - - gssapi>=1.4.1 ; platform_system != 'Windows' and extra == 'all' - - pywin32>=2.1.8 ; platform_system == 'Windows' and extra == 'all' - - pyasn1>=0.1.7 ; extra == 'gssapi' - - gssapi>=1.4.1 ; platform_system != 'Windows' and extra == 'gssapi' - - pywin32>=2.1.8 ; platform_system == 'Windows' and extra == 'gssapi' - - invoke>=2.0 ; extra == 'invoke' - requires_python: '>=3.6' + - zipp>=0.5 + - typing-extensions>=3.6.4 ; python_version < '3.8' + - sphinx>=3.5 ; extra == 'doc' + - jaraco-packaging>=9.3 ; extra == 'doc' + - rst-linker>=1.9 ; extra == 'doc' + - furo ; extra == 'doc' + - sphinx-lint ; extra == 'doc' + - jaraco-tidelift>=1.4 ; extra == 'doc' + - ipython ; extra == 'perf' + - pytest!=8.1.*,>=6 ; extra == 'test' + - pytest-checkdocs>=2.4 ; extra == 'test' + - pytest-cov ; extra == 'test' + - pytest-mypy ; extra == 'test' + - pytest-enabler>=2.2 ; extra == 'test' + - pytest-ruff>=0.2.1 ; extra == 'test' + - packaging ; extra == 'test' + - pyfakefs ; extra == 'test' + - flufl-flake8 ; extra == 'test' + - pytest-perf>=0.9.2 ; extra == 'test' + - jaraco-test>=5.4 ; extra == 'test' + - importlib-resources>=1.3 ; python_version < '3.9' and extra == 'test' + requires_python: '>=3.8' - kind: pypi - name: pkginfo - version: 1.10.0 - url: https://files.pythonhosted.org/packages/56/09/054aea9b7534a15ad38a363a2bd974c20646ab1582a387a95b8df1bfea1c/pkginfo-1.10.0-py3-none-any.whl - sha256: 889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 + name: importlib-resources + version: 6.4.0 + url: https://files.pythonhosted.org/packages/75/06/4df55e1b7b112d183f65db9503bff189e97179b256e1ea450a3c365241e0/importlib_resources-6.4.0-py3-none-any.whl + sha256: 50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c requires_dist: - - pytest ; extra == 'testing' + - zipp>=3.1.0 ; python_version < '3.10' + - sphinx>=3.5 ; extra == 'docs' + - sphinx<7.2.5 ; extra == 'docs' + - jaraco-packaging>=9.3 ; extra == 'docs' + - rst-linker>=1.9 ; extra == 'docs' + - furo ; extra == 'docs' + - sphinx-lint ; extra == 'docs' + - jaraco-tidelift>=1.4 ; extra == 'docs' + - pytest>=6 ; extra == 'testing' + - pytest-checkdocs>=2.4 ; extra == 'testing' - pytest-cov ; extra == 'testing' - - wheel ; extra == 'testing' - requires_python: '>=3.6' + - pytest-enabler>=2.2 ; extra == 'testing' + - pytest-ruff>=0.2.1 ; extra == 'testing' + - zipp>=3.17 ; extra == 'testing' + - jaraco-test>=5.4 ; extra == 'testing' + - pytest-mypy ; platform_python_implementation != 'PyPy' and extra == 'testing' + requires_python: '>=3.8' - kind: pypi - name: pluggy - version: 1.5.0 - url: https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl - sha256: 44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 + name: iniconfig + version: 2.0.0 + url: https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl + sha256: b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 + requires_python: '>=3.7' +- kind: pypi + name: invoke + version: 2.2.0 + url: 
https://files.pythonhosted.org/packages/0a/66/7f8c48009c72d73bc6bbe6eb87ac838d6a526146f7dab14af671121eb379/invoke-2.2.0-py3-none-any.whl + sha256: 6ea924cc53d4f78e3d98bc436b08069a03077e6f85ad1ddaa8a116d7dad15820 + requires_python: '>=3.6' +- kind: pypi + name: jaraco-classes + version: 3.4.0 + url: https://files.pythonhosted.org/packages/7f/66/b15ce62552d84bbfcec9a4873ab79d993a1dd4edb922cbfccae192bd5b5f/jaraco.classes-3.4.0-py3-none-any.whl + sha256: f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 requires_dist: - - pre-commit ; extra == 'dev' - - tox ; extra == 'dev' - - pytest ; extra == 'testing' - - pytest-benchmark ; extra == 'testing' + - more-itertools + - sphinx>=3.5 ; extra == 'docs' + - jaraco-packaging>=9.3 ; extra == 'docs' + - rst-linker>=1.9 ; extra == 'docs' + - furo ; extra == 'docs' + - sphinx-lint ; extra == 'docs' + - jaraco-tidelift>=1.4 ; extra == 'docs' + - pytest>=6 ; extra == 'testing' + - pytest-checkdocs>=2.4 ; extra == 'testing' + - pytest-cov ; extra == 'testing' + - pytest-mypy ; extra == 'testing' + - pytest-enabler>=2.2 ; extra == 'testing' + - pytest-ruff>=0.2.1 ; extra == 'testing' requires_python: '>=3.8' - kind: pypi - name: pycparser - version: '2.22' - url: https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl - sha256: c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc + name: jaraco-context + version: 5.3.0 + url: https://files.pythonhosted.org/packages/d2/40/11b7bc1898cf1dcb87ccbe09b39f5088634ac78bb25f3383ff541c2b40aa/jaraco.context-5.3.0-py3-none-any.whl + sha256: 3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 + requires_dist: + - backports-tarfile ; python_version < '3.12' + - sphinx>=3.5 ; extra == 'docs' + - jaraco-packaging>=9.3 ; extra == 'docs' + - rst-linker>=1.9 ; extra == 'docs' + - furo ; extra == 'docs' + - sphinx-lint ; extra == 'docs' + - jaraco-tidelift>=1.4 ; extra == 'docs' + - pytest!=8.1.1,>=6 ; extra == 'testing' + - pytest-checkdocs>=2.4 ; extra == 'testing' + - pytest-cov ; extra == 'testing' + - pytest-mypy ; extra == 'testing' + - pytest-enabler>=2.2 ; extra == 'testing' + - pytest-ruff>=0.2.1 ; extra == 'testing' + - portend ; extra == 'testing' requires_python: '>=3.8' - kind: pypi - name: pygments - version: 2.18.0 - url: https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl - sha256: b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a + name: jaraco-functools + version: 4.0.1 + url: https://files.pythonhosted.org/packages/c3/ac/d0bf0d37a9f95f69a5efc5685d9166ee34a664d3cd29a9c139989512fe14/jaraco.functools-4.0.1-py3-none-any.whl + sha256: 3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 requires_dist: - - colorama>=0.4.6 ; extra == 'windows-terminal' + - more-itertools + - sphinx>=3.5 ; extra == 'docs' + - sphinx<7.2.5 ; extra == 'docs' + - jaraco-packaging>=9.3 ; extra == 'docs' + - rst-linker>=1.9 ; extra == 'docs' + - furo ; extra == 'docs' + - sphinx-lint ; extra == 'docs' + - jaraco-tidelift>=1.4 ; extra == 'docs' + - pytest>=6 ; extra == 'testing' + - pytest-checkdocs>=2.4 ; extra == 'testing' + - pytest-cov ; extra == 'testing' + - pytest-enabler>=2.2 ; extra == 'testing' + - pytest-ruff>=0.2.1 ; extra == 'testing' + - jaraco-classes ; extra == 'testing' + - pytest-mypy ; platform_python_implementation != 'PyPy' and extra == 'testing' requires_python: '>=3.8' - kind: pypi - 
name: pynacl - version: 1.5.0 - url: https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl - sha256: 0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d + name: jeepney + version: 0.8.0 + url: https://files.pythonhosted.org/packages/ae/72/2a1e2290f1ab1e06f71f3d0f1646c9e4634e70e1d37491535e19266e8dc9/jeepney-0.8.0-py3-none-any.whl + sha256: c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 requires_dist: - - cffi>=1.4.1 - - sphinx>=1.6.5 ; extra == 'docs' - - sphinx-rtd-theme ; extra == 'docs' - - pytest!=3.3.0,>=3.2.1 ; extra == 'tests' - - hypothesis>=3.27.0 ; extra == 'tests' - requires_python: '>=3.6' -- kind: pypi - name: pyproject-hooks - version: 1.1.0 - url: https://files.pythonhosted.org/packages/ae/f3/431b9d5fe7d14af7a32340792ef43b8a714e7726f1d7b69cc4e8e7a3f1d7/pyproject_hooks-1.1.0-py3-none-any.whl - sha256: 7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2 + - pytest ; extra == 'test' + - pytest-trio ; extra == 'test' + - pytest-asyncio>=0.17 ; extra == 'test' + - testpath ; extra == 'test' + - trio ; extra == 'test' + - async-timeout ; extra == 'test' + - trio ; extra == 'trio' + - async-generator ; extra == 'trio' and python_version == '3.6' requires_python: '>=3.7' - kind: pypi - name: pyright - version: 1.1.370 - url: https://files.pythonhosted.org/packages/0c/2b/3d70ea49041da4dfb64b71039d94f3b31843575edf1f29fe0370919c35aa/pyright-1.1.370-py3-none-any.whl - sha256: fc721601e480a69989775bfc210534a6ca0110ebd0c065244a8d3a151294fc61 + name: jinja2 + version: 3.1.4 + url: https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl + sha256: bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d requires_dist: - - nodeenv>=1.6.0 - - typing-extensions>=3.7 ; python_version < '3.8' - - twine>=3.4.1 ; extra == 'all' - - twine>=3.4.1 ; extra == 'dev' + - markupsafe>=2.0 + - babel>=2.7 ; extra == 'i18n' requires_python: '>=3.7' - kind: pypi - name: pytest - version: 8.2.2 - url: https://files.pythonhosted.org/packages/4e/e7/81ebdd666d3bff6670d27349b5053605d83d55548e6bd5711f3b0ae7dd23/pytest-8.2.2-py3-none-any.whl - sha256: c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343 - requires_dist: - - iniconfig - - packaging - - pluggy<2.0,>=1.5 - - exceptiongroup>=1.0.0rc8 ; python_version < '3.11' - - tomli>=1 ; python_version < '3.11' - - colorama ; sys_platform == 'win32' - - argcomplete ; extra == 'dev' - - attrs>=19.2 ; extra == 'dev' - - hypothesis>=3.56 ; extra == 'dev' - - mock ; extra == 'dev' - - pygments>=2.7.2 ; extra == 'dev' - - requests ; extra == 'dev' - - setuptools ; extra == 'dev' - - xmlschema ; extra == 'dev' - requires_python: '>=3.8' -- kind: pypi - name: pytest - version: 8.3.2 - url: https://files.pythonhosted.org/packages/0f/f9/cf155cf32ca7d6fa3601bc4c5dd19086af4b320b706919d48a4c79081cf9/pytest-8.3.2-py3-none-any.whl - sha256: 4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5 + name: keyring + version: 25.2.1 + url: https://files.pythonhosted.org/packages/92/91/901f5cfeaaea04cf15f5ddf41ee053a5c9e389166477a3427fcfd055e1d9/keyring-25.2.1-py3-none-any.whl + sha256: 2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 requires_dist: - - iniconfig - - packaging - - pluggy<2,>=1.5 - - exceptiongroup>=1.0.0rc8 ; python_version < '3.11' - - tomli>=1 ; python_version 
< '3.11' - - colorama ; sys_platform == 'win32' - - argcomplete ; extra == 'dev' - - attrs>=19.2 ; extra == 'dev' - - hypothesis>=3.56 ; extra == 'dev' - - mock ; extra == 'dev' - - pygments>=2.7.2 ; extra == 'dev' - - requests ; extra == 'dev' - - setuptools ; extra == 'dev' - - xmlschema ; extra == 'dev' + - jaraco-classes + - jaraco-functools + - jaraco-context + - importlib-metadata>=4.11.4 ; python_version < '3.12' + - importlib-resources ; python_version < '3.9' + - secretstorage>=3.2 ; sys_platform == 'linux' + - jeepney>=0.4.2 ; sys_platform == 'linux' + - pywin32-ctypes>=0.2.0 ; sys_platform == 'win32' + - shtab>=1.1.0 ; extra == 'completion' + - sphinx>=3.5 ; extra == 'docs' + - jaraco-packaging>=9.3 ; extra == 'docs' + - rst-linker>=1.9 ; extra == 'docs' + - furo ; extra == 'docs' + - sphinx-lint ; extra == 'docs' + - jaraco-tidelift>=1.4 ; extra == 'docs' + - pytest!=8.1.*,>=6 ; extra == 'testing' + - pytest-checkdocs>=2.4 ; extra == 'testing' + - pytest-cov ; extra == 'testing' + - pytest-mypy ; extra == 'testing' + - pytest-enabler>=2.2 ; extra == 'testing' + - pytest-ruff>=0.2.1 ; extra == 'testing' requires_python: '>=3.8' - kind: conda - name: python - version: 3.8.1 - build: h357f687_2 - build_number: 2 + name: ld_impl_linux-64 + version: '2.40' + build: hf3520f5_7 + build_number: 7 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda + sha256: 764b6950aceaaad0c67ef925417594dd14cd2e22fff864aeef455ac259263d15 + md5: b80f2f396ca2c28b8c14c437a4ed1e74 + constrains: + - binutils_impl_linux-64 2.40 + license: GPL-3.0-only + license_family: GPL + purls: [] + size: 707602 + timestamp: 1718625640445 +- kind: conda + name: libcublas + version: 11.10.1.25 + build: he442b6f_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcublas-11.10.1.25-he442b6f_0.tar.bz2 + md5: 12db71df5ad476eca4c1e5ba59e5bd82 + arch: x86_64 + platform: linux + purls: [] + size: 314450443 + timestamp: 1649213079084 +- kind: conda + name: libcublas-dev + version: 11.10.1.25 + build: h0c8ac2b_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcublas-dev-11.10.1.25-h0c8ac2b_0.tar.bz2 + md5: 2ad42b6003cb3439a73c257b4bd356e0 + depends: + - libcublas >=11.10.1.25 + arch: x86_64 + platform: linux + purls: [] + size: 324267418 + timestamp: 1649213249394 +- kind: conda + name: libcufft + version: 10.7.2.50 + build: h80a1efe_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcufft-10.7.2.50-h80a1efe_0.tar.bz2 + md5: 92c539005489bb48dac1173c09559fe6 + arch: x86_64 + platform: linux + purls: [] + size: 98107830 + timestamp: 1649215036926 +- kind: conda + name: libcufft-dev + version: 10.7.2.50 + build: h59a5ac8_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcufft-dev-10.7.2.50-h59a5ac8_0.tar.bz2 + md5: 9a74839ee7d4e6b4a767fdb3c71f8a7a + depends: + - libcufft >=10.7.2.50 + arch: x86_64 + platform: linux + purls: [] + size: 205997732 + timestamp: 1649215083810 +- kind: conda + name: libcufile + version: 1.3.0.44 + build: '0' + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcufile-1.3.0.44-0.tar.bz2 + md5: bf3550b0ab35a211752c2018c9fd192a + arch: x86_64 + platform: linux + purls: [] + size: 552448 + timestamp: 1656528513646 +- kind: conda + name: libcufile-dev + version: 1.3.0.44 + build: '0' + subdir: linux-64 + url: 
https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcufile-dev-1.3.0.44-0.tar.bz2 + md5: b765ba0ec97350706e0c88ed061096e7 + depends: + - libcufile >=1.3.0.44 + arch: x86_64 + platform: linux + purls: [] + size: 12791788 + timestamp: 1656528532807 +- kind: conda + name: libcurand + version: 10.2.10.50 + build: heec50f7_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/python-3.8.1-h357f687_2.tar.bz2 - sha256: 8f8ab267e32519c9d88e95eabfa5381df37dae2c5ad700b8493263e170ac03c9 - md5: e860ad02b3a59c645b68f422b3d49e84 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcurand-10.2.10.50-heec50f7_0.tar.bz2 + md5: 00e467aec3d8e12f82bb994e434c685c + arch: x86_64 + platform: linux + purls: [] + size: 52796544 + timestamp: 1649213456498 +- kind: conda + name: libcurand-dev + version: 10.2.10.50 + build: hd49a9cd_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcurand-dev-10.2.10.50-hd49a9cd_0.tar.bz2 + md5: 37590eb41c2408c6af1801714a3ca279 depends: - - ld_impl_linux-64 - - libffi >=3.2.1,<3.3.0a0 - - libgcc-ng >=7.3.0 - - libstdcxx-ng >=7.3.0 - - openssl >=1.1.1a,<1.1.2a - - readline >=8.0,<9.0a0 - - sqlite >=3.30.1,<4.0a0 - - tk >=8.6.10,<8.7.0a0 - - xz >=5.2.4,<6.0.0a0 - - zlib >=1.2.11,<1.3.0a0 - constrains: - - python_abi * *_cp38 - license: PSF + - libcurand >=10.2.10.50 + arch: x86_64 + platform: linux purls: [] - size: 60989193 - timestamp: 1580309998972 + size: 53180004 + timestamp: 1649213486008 - kind: conda - name: python - version: 3.8.19 - build: hd12c33a_0_cpython + name: libcusolver + version: 11.3.5.50 + build: hcab339c_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcusolver-11.3.5.50-hcab339c_0.tar.bz2 + md5: 4448aa4e3d7464625d372b280c41728e + arch: x86_64 + platform: linux + purls: [] + size: 93511094 + timestamp: 1649215013905 +- kind: conda + name: libcusolver-dev + version: 11.3.5.50 + build: hc6eba6f_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/python-3.8.19-hd12c33a_0_cpython.conda - sha256: 71899083b05d7f489887b029387c0588e353b9c461f74ebf864c0620586108ba - md5: 53aabe8cf596487ec6f1ce319c93a741 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcusolver-dev-11.3.5.50-hc6eba6f_0.tar.bz2 + md5: 30afc179a44e93bd9ec16185256b50c8 depends: - - bzip2 >=1.0.8,<2.0a0 - - ld_impl_linux-64 >=2.36.1 - - libffi >=3.4,<4.0a0 - - libgcc-ng >=12 - - libnsl >=2.0.1,<2.1.0a0 - - libsqlite >=3.45.2,<4.0a0 - - libuuid >=2.38.1,<3.0a0 - - libxcrypt >=4.4.36 - - libzlib >=1.2.13,<2.0.0a0 - - ncurses >=6.4.20240210,<7.0a0 - - openssl >=3.2.1,<4.0a0 - - readline >=8.2,<9.0a0 - - tk >=8.6.13,<8.7.0a0 - - xz >=5.2.6,<6.0a0 - constrains: - - python_abi 3.8.* *_cp38 - license: Python-2.0 + - libcusolver >=11.3.5.50 + arch: x86_64 + platform: linux + purls: [] + size: 65246418 + timestamp: 1649215087514 +- kind: conda + name: libcusparse + version: 11.7.3.50 + build: h6aaafad_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcusparse-11.7.3.50-h6aaafad_0.tar.bz2 + md5: d08e053b281fcd900a088fb2f2eaacb9 + arch: x86_64 + platform: linux purls: [] - size: 22357104 - timestamp: 1710939954552 + size: 155592022 + timestamp: 1649213972450 - kind: conda - name: python - version: 3.9.19 - build: h0755675_0_cpython + name: libcusparse-dev + version: 11.7.3.50 + build: hc644b96_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/python-3.9.19-h0755675_0_cpython.conda 
- sha256: b9253ca9ca5427e6da4b1d43353a110e0f2edfab9c951afb4bf01cbae2825b31 - md5: d9ee3647fbd9e8595b8df759b2bbefb8 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libcusparse-dev-11.7.3.50-hc644b96_0.tar.bz2 + md5: 511128895df21b0f832e29865b56db35 depends: - - bzip2 >=1.0.8,<2.0a0 - - ld_impl_linux-64 >=2.36.1 - - libffi >=3.4,<4.0a0 - - libgcc-ng >=12 - - libnsl >=2.0.1,<2.1.0a0 - - libsqlite >=3.45.2,<4.0a0 - - libuuid >=2.38.1,<3.0a0 - - libxcrypt >=4.4.36 - - libzlib >=1.2.13,<2.0.0a0 - - ncurses >=6.4.20240210,<7.0a0 - - openssl >=3.2.1,<4.0a0 - - readline >=8.2,<9.0a0 - - tk >=8.6.13,<8.7.0a0 - - tzdata - - xz >=5.2.6,<6.0a0 - constrains: - - python_abi 3.9.* *_cp39 - license: Python-2.0 + - libcusparse >=11.7.3.50 + arch: x86_64 + platform: linux purls: [] - size: 23800555 - timestamp: 1710940120866 + size: 315781272 + timestamp: 1649214045356 - kind: conda - name: python - version: 3.10.14 - build: hd12c33a_0_cpython + name: libffi + version: 3.2.1 + build: he1b5a44_1007 + build_number: 1007 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/python-3.10.14-hd12c33a_0_cpython.conda - sha256: 76a5d12e73542678b70a94570f7b0f7763f9a938f77f0e75d9ea615ef22aa84c - md5: 2b4ba962994e8bd4be9ff5b64b75aff2 + url: https://conda.anaconda.org/conda-forge/linux-64/libffi-3.2.1-he1b5a44_1007.tar.bz2 + sha256: 992246df63724484e9ee8652ce3ca0237f707961beab8b813096bbc647cf84f4 + md5: 11389072d7d6036fd811c3d9460475cd depends: - - bzip2 >=1.0.8,<2.0a0 - - ld_impl_linux-64 >=2.36.1 - - libffi >=3.4,<4.0a0 - - libgcc-ng >=12 - - libnsl >=2.0.1,<2.1.0a0 - - libsqlite >=3.45.2,<4.0a0 - - libuuid >=2.38.1,<3.0a0 - - libxcrypt >=4.4.36 - - libzlib >=1.2.13,<2.0.0a0 - - ncurses >=6.4.20240210,<7.0a0 - - openssl >=3.2.1,<4.0a0 - - readline >=8.2,<9.0a0 - - tk >=8.6.13,<8.7.0a0 - - tzdata - - xz >=5.2.6,<6.0a0 - constrains: - - python_abi 3.10.* *_cp310 - license: Python-2.0 + - libgcc-ng >=7.3.0 + - libstdcxx-ng >=7.3.0 + license: Custom purls: [] - size: 25517742 - timestamp: 1710939725109 + size: 48003 + timestamp: 1584559351227 - kind: conda - name: python - version: 3.11.9 - build: hb806964_0_cpython + name: libgcc-ng + version: 14.1.0 + build: h77fa898_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/python-3.11.9-hb806964_0_cpython.conda - sha256: 177f33a1fb8d3476b38f73c37b42f01c0b014fa0e039a701fd9f83d83aae6d40 - md5: ac68acfa8b558ed406c75e98d3428d7b + url: https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h77fa898_0.conda + sha256: b8e869ac96591cda2704bf7e77a301025e405227791a0bddf14a3dac65125538 + md5: ca0fad6a41ddaef54a153b78eccb5037 depends: - - bzip2 >=1.0.8,<2.0a0 - - ld_impl_linux-64 >=2.36.1 - - libexpat >=2.6.2,<3.0a0 - - libffi >=3.4,<4.0a0 - - libgcc-ng >=12 - - libnsl >=2.0.1,<2.1.0a0 - - libsqlite >=3.45.3,<4.0a0 - - libuuid >=2.38.1,<3.0a0 - - libxcrypt >=4.4.36 - - libzlib >=1.2.13,<2.0.0a0 - - ncurses >=6.4.20240210,<7.0a0 - - openssl >=3.2.1,<4.0a0 - - readline >=8.2,<9.0a0 - - tk >=8.6.13,<8.7.0a0 - - tzdata - - xz >=5.2.6,<6.0a0 + - _libgcc_mutex 0.1 conda_forge + - _openmp_mutex >=4.5 constrains: - - python_abi 3.11.* *_cp311 - license: Python-2.0 + - libgomp 14.1.0 h77fa898_0 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL purls: [] - size: 30884494 - timestamp: 1713553104915 + size: 842109 + timestamp: 1719538896937 - kind: conda - name: readline - version: '8.2' - build: h8228510_1 - build_number: 1 + name: libgomp + version: 14.1.0 + build: h77fa898_0 subdir: linux-64 - url: 
https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda - sha256: 5435cf39d039387fbdc977b0a762357ea909a7694d9528ab40f005e9208744d7 - md5: 47d31b792659ce70f470b5c82fdfb7a4 + url: https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_0.conda + sha256: 7699df61a1f6c644b3576a40f54791561f2845983120477a16116b951c9cdb05 + md5: ae061a5ed5f05818acdf9adab72c146d depends: - - libgcc-ng >=12 - - ncurses >=6.3,<7.0a0 - license: GPL-3.0-only + - _libgcc_mutex 0.1 conda_forge + license: GPL-3.0-only WITH GCC-exception-3.1 license_family: GPL purls: [] - size: 281456 - timestamp: 1679532220005 -- kind: pypi - name: readme-renderer - version: '43.0' - url: https://files.pythonhosted.org/packages/45/be/3ea20dc38b9db08387cf97997a85a7d51527ea2057d71118feb0aa8afa55/readme_renderer-43.0-py3-none-any.whl - sha256: 19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 - requires_dist: - - nh3>=0.2.14 - - docutils>=0.13.1 - - pygments>=2.5.1 - - cmarkgfm>=0.8.0 ; extra == 'md' - requires_python: '>=3.8' -- kind: pypi - name: requests - version: 2.32.3 - url: https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl - sha256: 70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - requires_dist: - - charset-normalizer<4,>=2 - - idna<4,>=2.5 - - urllib3<3,>=1.21.1 - - certifi>=2017.4.17 - - pysocks!=1.5.7,>=1.5.6 ; extra == 'socks' - - chardet<6,>=3.0.2 ; extra == 'use_chardet_on_py3' - requires_python: '>=3.8' -- kind: pypi - name: requests-toolbelt - version: 1.0.0 - url: https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl - sha256: cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 - requires_dist: - - requests<3.0.0,>=2.0.1 - requires_python: '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*' -- kind: pypi - name: rfc3986 - version: 2.0.0 - url: https://files.pythonhosted.org/packages/ff/9a/9afaade874b2fa6c752c36f1548f718b5b83af81ed9b76628329dab81c1b/rfc3986-2.0.0-py2.py3-none-any.whl - sha256: 50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd - requires_dist: - - idna ; extra == 'idna2008' - requires_python: '>=3.7' -- kind: pypi - name: rich - version: 13.7.1 - url: https://files.pythonhosted.org/packages/87/67/a37f6214d0e9fe57f6ae54b2956d550ca8365857f42a1ce0392bb21d9410/rich-13.7.1-py3-none-any.whl - sha256: 4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 - requires_dist: - - ipywidgets>=7.5.1,<9 ; extra == 'jupyter' - - markdown-it-py>=2.2.0 - - pygments>=2.13.0,<3.0.0 - - typing-extensions>=4.0.0,<5.0 ; python_version < '3.9' - requires_python: '>=3.7.0' -- kind: pypi - name: ruff - version: 0.5.1 - url: https://files.pythonhosted.org/packages/8a/d5/8271d42dd239b7c2d163615b3b01b1acfb187f5114bfca6d5a85e1d6a1eb/ruff-0.5.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: e216fc75a80ea1fbd96af94a6233d90190d5b65cc3d5dfacf2bd48c3e067d3e1 - requires_python: '>=3.7' -- kind: pypi - name: secretstorage - version: 3.3.3 - url: https://files.pythonhosted.org/packages/54/24/b4293291fa1dd830f353d2cb163295742fa87f179fcc8a20a306a81978b7/SecretStorage-3.3.3-py3-none-any.whl - sha256: f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - requires_dist: - - cryptography>=2.0 - - jeepney>=0.6 - requires_python: '>=3.6' -- kind: pypi - name: setuptools - version: 72.1.0 - url: 
https://files.pythonhosted.org/packages/e1/58/e0ef3b9974a04ce9cde2a7a33881ddcb2d68450803745804545cdd8d258f/setuptools-72.1.0-py3-none-any.whl - sha256: 5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1 - requires_dist: - - packaging>=24 ; extra == 'core' - - ordered-set>=3.1.1 ; extra == 'core' - - more-itertools>=8.8 ; extra == 'core' - - jaraco-text>=3.7 ; extra == 'core' - - wheel>=0.43.0 ; extra == 'core' - - platformdirs>=2.6.2 ; extra == 'core' - - importlib-metadata>=6 ; python_version < '3.10' and extra == 'core' - - tomli>=2.0.1 ; python_version < '3.11' and extra == 'core' - - importlib-resources>=5.10.2 ; python_version < '3.9' and extra == 'core' - - sphinx>=3.5 ; extra == 'doc' - - jaraco-packaging>=9.3 ; extra == 'doc' - - rst-linker>=1.9 ; extra == 'doc' - - furo ; extra == 'doc' - - sphinx-lint ; extra == 'doc' - - jaraco-tidelift>=1.4 ; extra == 'doc' - - pygments-github-lexers==0.0.5 ; extra == 'doc' - - sphinx-favicon ; extra == 'doc' - - sphinx-inline-tabs ; extra == 'doc' - - sphinx-reredirects ; extra == 'doc' - - sphinxcontrib-towncrier ; extra == 'doc' - - sphinx-notfound-page<2,>=1 ; extra == 'doc' - - pyproject-hooks!=1.1 ; extra == 'doc' - - pytest!=8.1.*,>=6 ; extra == 'test' - - pytest-checkdocs>=2.4 ; extra == 'test' - - pytest-cov ; extra == 'test' - - pytest-mypy ; extra == 'test' - - pytest-enabler>=2.2 ; extra == 'test' - - virtualenv>=13.0.0 ; extra == 'test' - - wheel ; extra == 'test' - - pip>=19.1 ; extra == 'test' - - packaging>=23.2 ; extra == 'test' - - jaraco-envs>=2.2 ; extra == 'test' - - pytest-xdist>=3 ; extra == 'test' - - jaraco-path>=3.2.0 ; extra == 'test' - - build[virtualenv]>=1.0.3 ; extra == 'test' - - filelock>=3.4.0 ; extra == 'test' - - ini2toml[lite]>=0.14 ; extra == 'test' - - tomli-w>=1.0.0 ; extra == 'test' - - pytest-timeout ; extra == 'test' - - pytest-home>=0.5 ; extra == 'test' - - mypy==1.11.* ; extra == 'test' - - tomli ; extra == 'test' - - importlib-metadata ; extra == 'test' - - pytest-subprocess ; extra == 'test' - - pyproject-hooks!=1.1 ; extra == 'test' - - jaraco-test ; extra == 'test' - - pytest-ruff<0.4 ; platform_system == 'Windows' and extra == 'test' - - jaraco-develop>=7.21 ; (python_version >= '3.9' and sys_platform != 'cygwin') and extra == 'test' - - pytest-ruff>=0.2.1 ; sys_platform != 'cygwin' and extra == 'test' - - pytest-perf ; sys_platform != 'cygwin' and extra == 'test' - - pytest-ruff>=0.3.2 ; sys_platform != 'cygwin' and extra == 'test' - requires_python: '>=3.8' + size: 456925 + timestamp: 1719538796073 +- kind: conda + name: libnpp + version: 11.7.3.21 + build: h3effbd9_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libnpp-11.7.3.21-h3effbd9_0.tar.bz2 + md5: 0450f6bb03d24424334573ba05687130 + arch: x86_64 + platform: linux + purls: [] + size: 124218053 + timestamp: 1647673432276 +- kind: conda + name: libnpp-dev + version: 11.7.3.21 + build: hb6476a9_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libnpp-dev-11.7.3.21-hb6476a9_0.tar.bz2 + md5: 88a90901316877a2873e88800a7c52d7 + depends: + - libnpp >=11.7.3.21 + arch: x86_64 + platform: linux + purls: [] + size: 121327534 + timestamp: 1647673495804 - kind: conda - name: sqlite + name: libnvjpeg + version: 11.7.2.34 + build: hfe236c7_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libnvjpeg-11.7.2.34-hfe236c7_0.tar.bz2 + md5: f2b4df286504479597a82a83bc3458e0 + arch: x86_64 + platform: linux + purls: [] + size: 
2448233 + timestamp: 1649213785605 +- kind: conda + name: libnvjpeg-dev + version: 11.7.2.34 + build: h2e48410_0 + subdir: linux-64 + url: https://conda.anaconda.org/nvidia/label/cuda-11.7.0/linux-64/libnvjpeg-dev-11.7.2.34-h2e48410_0.tar.bz2 + md5: f87c91a0fab81a8abbea228fd8808277 + depends: + - libnvjpeg >=11.7.2.34 + arch: x86_64 + platform: linux + purls: [] + size: 2126625 + timestamp: 1649213787489 +- kind: conda + name: libsqlite version: 3.46.0 - build: h6d4b2fc_0 + build: hde9e2c9_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.46.0-h6d4b2fc_0.conda - sha256: e849d576e52bf3e6fc5786f89b7d76978f2e2438587826c95570324cb572e52b - md5: 77ea8dff5cf8550cc8f5629a6af56323 + url: https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.0-hde9e2c9_0.conda + sha256: daee3f68786231dad457d0dfde3f7f1f9a7f2018adabdbb864226775101341a8 + md5: 18aa975d2094c34aef978060ae7da7d8 depends: - libgcc-ng >=12 - - libsqlite 3.46.0 hde9e2c9_0 - libzlib >=1.2.13,<2.0a0 - - ncurses >=6.5,<7.0a0 - - readline >=8.2,<9.0a0 license: Unlicense purls: [] - size: 860352 - timestamp: 1718050658212 -- kind: pypi - name: sympy - version: 1.12.1 - url: https://files.pythonhosted.org/packages/61/53/e18c8c97d0b2724d85c9830477e3ebea3acf1dcdc6deb344d5d9c93a9946/sympy-1.12.1-py3-none-any.whl - sha256: 9b2cbc7f1a640289430e13d2a56f02f867a1da0190f2f99d8968c2f74da0e515 - requires_dist: - - mpmath<1.4.0,>=1.1.0 - requires_python: '>=3.8' -- kind: pypi - name: sympy - version: 1.13.2 - url: https://files.pythonhosted.org/packages/c1/f9/6845bf8fca0eaf847da21c5d5bc6cd92797364662824a11d3f836423a1a5/sympy-1.13.2-py3-none-any.whl - sha256: c51d75517712f1aed280d4ce58506a4a88d635d6b5dd48b39102a7ae1f3fcfe9 - requires_dist: - - mpmath<1.4,>=1.1.0 - - pytest>=7.1.0 ; extra == 'dev' - - hypothesis>=6.70.0 ; extra == 'dev' - requires_python: '>=3.8' + size: 865346 + timestamp: 1718050628718 - kind: conda - name: tk - version: 8.6.13 - build: noxft_h4845f30_101 - build_number: 101 + name: libstdcxx-ng + version: 14.1.0 + build: hc0a3c3a_0 subdir: linux-64 - url: https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda - sha256: e0569c9caa68bf476bead1bed3d79650bb080b532c64a4af7d8ca286c08dea4e - md5: d453b98d9c83e71da0741bb0ff4d76bc + url: https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-hc0a3c3a_0.conda + sha256: 88c42b388202ffe16adaa337e36cf5022c63cf09b0405cf06fc6aeacccbe6146 + md5: 1cb187a157136398ddbaae90713e2498 + depends: + - libgcc-ng 14.1.0 h77fa898_0 + license: GPL-3.0-only WITH GCC-exception-3.1 + license_family: GPL + purls: [] + size: 3881307 + timestamp: 1719538923443 +- kind: conda + name: libzlib + version: 1.2.13 + build: h4ab18f5_6 + build_number: 6 + subdir: linux-64 + url: https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h4ab18f5_6.conda + sha256: 8ced4afed6322172182af503f21725d072a589a6eb918f8a58135c1e00d35980 + md5: 27329162c0dc732bcf67a4e0cd488125 depends: - libgcc-ng >=12 - - libzlib >=1.2.13,<2.0.0a0 - license: TCL - license_family: BSD + constrains: + - zlib 1.2.13 *_6 + license: Zlib + license_family: Other purls: [] - size: 3318875 - timestamp: 1699202167581 -- kind: pypi - name: tomli - version: 2.0.1 - url: https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl - sha256: 939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc - requires_python: '>=3.7' -- kind: pypi - name: torch - version: 2.0.1 - url: 
https://files.pythonhosted.org/packages/96/28/026dc037f177d53558477931677b120f649dd5a0dcdc4b44dc38b3d75711/torch-2.0.1-cp38-cp38-manylinux1_x86_64.whl - sha256: 5ef3ea3d25441d3957348f7e99c7824d33798258a2bf5f0f0277cbcadad2e20d - requires_dist: - - filelock - - typing-extensions - - sympy - - networkx - - jinja2 - - nvidia-cuda-nvrtc-cu11==11.7.99 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-runtime-cu11==11.7.99 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-cupti-cu11==11.7.101 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cudnn-cu11==8.5.0.96 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cublas-cu11==11.10.3.66 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cufft-cu11==10.9.0.58 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-curand-cu11==10.2.10.91 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusolver-cu11==11.4.0.1 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusparse-cu11==11.7.4.91 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nccl-cu11==2.14.3 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nvtx-cu11==11.7.91 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - triton==2.0.0 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - opt-einsum>=3.3 ; extra == 'opt-einsum' - requires_python: '>=3.8.0' -- kind: pypi - name: torch - version: 2.0.1 - url: https://files.pythonhosted.org/packages/e5/9a/ce0fe125f226ffce8deba6a18bd8d0b9f589aa236780a83a6d70b5525f56/torch-2.0.1-cp39-cp39-manylinux1_x86_64.whl - sha256: e10e1597f2175365285db1b24019eb6f04d53dcd626c735fc502f1e8b6be9875 - requires_dist: - - filelock - - typing-extensions - - sympy - - networkx - - jinja2 - - nvidia-cuda-nvrtc-cu11==11.7.99 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-runtime-cu11==11.7.99 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-cupti-cu11==11.7.101 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cudnn-cu11==8.5.0.96 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cublas-cu11==11.10.3.66 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cufft-cu11==10.9.0.58 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-curand-cu11==10.2.10.91 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusolver-cu11==11.4.0.1 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusparse-cu11==11.7.4.91 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nccl-cu11==2.14.3 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nvtx-cu11==11.7.91 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - triton==2.0.0 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - opt-einsum>=3.3 ; extra == 'opt-einsum' - requires_python: '>=3.8.0' -- kind: pypi - name: torch - version: 2.0.1 - url: https://files.pythonhosted.org/packages/8c/4d/17e07377c9c3d1a0c4eb3fde1c7c16b5a0ce6133ddbabc08ceef6b7f2645/torch-2.0.1-cp310-cp310-manylinux1_x86_64.whl - sha256: 8ced00b3ba471856b993822508f77c98f48a458623596a4c43136158781e306a - requires_dist: - - filelock - - typing-extensions - - sympy - - networkx - - jinja2 - - nvidia-cuda-nvrtc-cu11==11.7.99 ; platform_system == 'Linux' and platform_machine == 
'x86_64' - - nvidia-cuda-runtime-cu11==11.7.99 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-cupti-cu11==11.7.101 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cudnn-cu11==8.5.0.96 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cublas-cu11==11.10.3.66 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cufft-cu11==10.9.0.58 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-curand-cu11==10.2.10.91 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusolver-cu11==11.4.0.1 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusparse-cu11==11.7.4.91 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nccl-cu11==2.14.3 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nvtx-cu11==11.7.91 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - triton==2.0.0 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - opt-einsum>=3.3 ; extra == 'opt-einsum' - requires_python: '>=3.8.0' -- kind: pypi - name: torch - version: 2.0.1 - url: https://files.pythonhosted.org/packages/c8/21/25020cfdd9f564a72f400ee491610e50cb212e8add8031abaa959af6451e/torch-2.0.1-cp311-cp311-manylinux1_x86_64.whl - sha256: e617b1d0abaf6ced02dbb9486803abfef0d581609b09641b34fa315c9c40766d - requires_dist: - - filelock - - typing-extensions - - sympy - - networkx - - jinja2 - - nvidia-cuda-nvrtc-cu11==11.7.99 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-runtime-cu11==11.7.99 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-cupti-cu11==11.7.101 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cudnn-cu11==8.5.0.96 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cublas-cu11==11.10.3.66 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cufft-cu11==10.9.0.58 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-curand-cu11==10.2.10.91 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusolver-cu11==11.4.0.1 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusparse-cu11==11.7.4.91 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nccl-cu11==2.14.3 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nvtx-cu11==11.7.91 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - triton==2.0.0 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - opt-einsum>=3.3 ; extra == 'opt-einsum' - requires_python: '>=3.8.0' -- kind: pypi - name: torch - version: 2.1.2 - url: https://files.pythonhosted.org/packages/31/c0/6e856c0c745dffd7696ec514381befa83f3449cd914f02b0968e0ca5a244/torch-2.1.2-cp38-cp38-manylinux1_x86_64.whl - sha256: f41fe0c7ecbf903a568c73486139a75cfab287a0f6c17ed0698fdea7a1e8641d - requires_dist: - - filelock - - typing-extensions - - sympy - - networkx - - jinja2 - - fsspec - - nvidia-cuda-nvrtc-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-runtime-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-cupti-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cudnn-cu12==8.9.2.26 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cublas-cu12==12.1.3.1 ; platform_system == 'Linux' and platform_machine == 
'x86_64' - - nvidia-cufft-cu12==11.0.2.54 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-curand-cu12==10.3.2.106 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusolver-cu12==11.4.5.107 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusparse-cu12==12.1.0.106 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nccl-cu12==2.18.1 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nvtx-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - triton==2.1.0 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - jinja2 ; extra == 'dynamo' - - opt-einsum>=3.3 ; extra == 'opt-einsum' - requires_python: '>=3.8.0' -- kind: pypi - name: torch - version: 2.1.2 - url: https://files.pythonhosted.org/packages/da/57/0a58fb9a7d110eab4492fe984bc207d51706797d0729dbd8ce7ff982c82e/torch-2.1.2-cp39-cp39-manylinux1_x86_64.whl - sha256: 9ca96253b761e9aaf8e06fb30a66ee301aecbf15bb5a303097de1969077620b6 - requires_dist: - - filelock - - typing-extensions - - sympy - - networkx - - jinja2 - - fsspec - - nvidia-cuda-nvrtc-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-runtime-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-cupti-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cudnn-cu12==8.9.2.26 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cublas-cu12==12.1.3.1 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cufft-cu12==11.0.2.54 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-curand-cu12==10.3.2.106 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusolver-cu12==11.4.5.107 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusparse-cu12==12.1.0.106 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nccl-cu12==2.18.1 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nvtx-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - triton==2.1.0 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - jinja2 ; extra == 'dynamo' - - opt-einsum>=3.3 ; extra == 'opt-einsum' - requires_python: '>=3.8.0' -- kind: pypi - name: torch - version: 2.1.2 - url: https://files.pythonhosted.org/packages/03/f1/13137340776dd5d5bcfd2574c9c6dfcc7618285035cd77240496e5c1a79b/torch-2.1.2-cp310-cp310-manylinux1_x86_64.whl - sha256: 3a871edd6c02dae77ad810335c0833391c1a4ce49af21ea8cf0f6a5d2096eea8 - requires_dist: - - filelock - - typing-extensions - - sympy - - networkx - - jinja2 - - fsspec - - nvidia-cuda-nvrtc-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-runtime-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-cupti-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cudnn-cu12==8.9.2.26 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cublas-cu12==12.1.3.1 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cufft-cu12==11.0.2.54 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-curand-cu12==10.3.2.106 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusolver-cu12==11.4.5.107 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - 
nvidia-cusparse-cu12==12.1.0.106 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nccl-cu12==2.18.1 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nvtx-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - triton==2.1.0 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - jinja2 ; extra == 'dynamo' - - opt-einsum>=3.3 ; extra == 'opt-einsum' - requires_python: '>=3.8.0' + size: 61571 + timestamp: 1716874066944 - kind: pypi - name: torch - version: 2.1.2 - url: https://files.pythonhosted.org/packages/da/6a/7fb9d82db4568834ff6d4df2fe3b143de4ed65a3f8f93e7daed703626cb6/torch-2.1.2-cp311-cp311-manylinux1_x86_64.whl - sha256: a6ebbe517097ef289cc7952783588c72de071d4b15ce0f8b285093f0916b1162 + name: markdown-it-py + version: 3.0.0 + url: https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl + sha256: 355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 requires_dist: - - filelock - - typing-extensions - - sympy - - networkx - - jinja2 - - fsspec - - nvidia-cuda-nvrtc-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-runtime-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-cupti-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cudnn-cu12==8.9.2.26 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cublas-cu12==12.1.3.1 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cufft-cu12==11.0.2.54 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-curand-cu12==10.3.2.106 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusolver-cu12==11.4.5.107 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusparse-cu12==12.1.0.106 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nccl-cu12==2.18.1 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nvtx-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - triton==2.1.0 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - jinja2 ; extra == 'dynamo' - - opt-einsum>=3.3 ; extra == 'opt-einsum' - requires_python: '>=3.8.0' + - mdurl~=0.1 + - psutil ; extra == 'benchmarking' + - pytest ; extra == 'benchmarking' + - pytest-benchmark ; extra == 'benchmarking' + - pre-commit~=3.0 ; extra == 'code-style' + - commonmark~=0.9 ; extra == 'compare' + - markdown~=3.4 ; extra == 'compare' + - mistletoe~=1.0 ; extra == 'compare' + - mistune~=2.0 ; extra == 'compare' + - panflute~=2.3 ; extra == 'compare' + - linkify-it-py>=1,<3 ; extra == 'linkify' + - mdit-py-plugins ; extra == 'plugins' + - gprof2dot ; extra == 'profiling' + - mdit-py-plugins ; extra == 'rtd' + - myst-parser ; extra == 'rtd' + - pyyaml ; extra == 'rtd' + - sphinx ; extra == 'rtd' + - sphinx-copybutton ; extra == 'rtd' + - sphinx-design ; extra == 'rtd' + - sphinx-book-theme ; extra == 'rtd' + - jupyter-sphinx ; extra == 'rtd' + - coverage ; extra == 'testing' + - pytest ; extra == 'testing' + - pytest-cov ; extra == 'testing' + - pytest-regressions ; extra == 'testing' + requires_python: '>=3.8' - kind: pypi - name: torch - version: 2.2.2 - url: https://files.pythonhosted.org/packages/99/bf/7f6c1a37ea7fdf6afbc05ac405faae6eba1c1450d9ed632e23535e6438e2/torch-2.2.2-cp38-cp38-manylinux1_x86_64.whl - sha256: 
cd2bf7697c9e95fb5d97cc1d525486d8cf11a084c6af1345c2c2c22a6b0029d0 - requires_dist: - - filelock - - typing-extensions>=4.8.0 - - sympy - - networkx - - jinja2 - - fsspec - - nvidia-cuda-nvrtc-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-runtime-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-cupti-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cudnn-cu12==8.9.2.26 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cublas-cu12==12.1.3.1 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cufft-cu12==11.0.2.54 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-curand-cu12==10.3.2.106 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusolver-cu12==11.4.5.107 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusparse-cu12==12.1.0.106 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nccl-cu12==2.19.3 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nvtx-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - triton==2.2.0 ; platform_system == 'Linux' and platform_machine == 'x86_64' and python_version < '3.12' - - opt-einsum>=3.3 ; extra == 'opt-einsum' - - optree>=0.9.1 ; extra == 'optree' - requires_python: '>=3.8.0' + name: markupsafe + version: 2.1.5 + url: https://files.pythonhosted.org/packages/c7/bd/50319665ce81bb10e90d1cf76f9e1aa269ea6f7fa30ab4521f14d122a3df/MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + sha256: fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab + requires_python: '>=3.7' - kind: pypi - name: torch - version: 2.2.2 - url: https://files.pythonhosted.org/packages/68/6c/754b1b742258f9a76d8daf53ac55ce672228c988b5a1b59b16203dda6959/torch-2.2.2-cp39-cp39-manylinux1_x86_64.whl - sha256: a6e5770d68158d07456bfcb5318b173886f579fdfbf747543901ce718ea94782 - requires_dist: - - filelock - - typing-extensions>=4.8.0 - - sympy - - networkx - - jinja2 - - fsspec - - nvidia-cuda-nvrtc-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-runtime-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cuda-cupti-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cudnn-cu12==8.9.2.26 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cublas-cu12==12.1.3.1 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cufft-cu12==11.0.2.54 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-curand-cu12==10.3.2.106 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusolver-cu12==11.4.5.107 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-cusparse-cu12==12.1.0.106 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nccl-cu12==2.19.3 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - nvidia-nvtx-cu12==12.1.105 ; platform_system == 'Linux' and platform_machine == 'x86_64' - - triton==2.2.0 ; platform_system == 'Linux' and platform_machine == 'x86_64' and python_version < '3.12' - - opt-einsum>=3.3 ; extra == 'opt-einsum' - - optree>=0.9.1 ; extra == 'optree' - requires_python: '>=3.8.0' + name: mdurl + version: 0.1.2 + url: 
… [remainder of the pixi.lock hunk collapsed for readability: the regenerated lock removes the previously pinned torch 2.2.2 / 2.3.1 / 2.4.0 and triton 2.0.0–3.0.0 wheel entries, the old editable torchrunx entry, tzdata, and wheel, and records the resolved dependency set instead — more-itertools, mpmath, networkx, nh3, nodeenv, numpy, the nvidia-*-cu12 CUDA libraries, packaging, paramiko, pkginfo, pluggy, pycparser, pygments, pynacl, pyproject-hooks, pyright, pytest, readme-renderer, requests, requests-toolbelt, rfc3986, rich, ruff, secretstorage, sympy, tomli, torch 2.3.1, triton 2.3.1, the editable torchrunx 0.1.2 package (now requiring numpy>=1.26.0 on Python 3.12), and the conda packages ncurses, nsight-compute, openssl, python 3.8.1, readline, sqlite, and tk. The hunk continues below with the wrapt and xz entries before the source-file diffs.]
kind: pypi name: wrapt version: 1.16.0 url: https://files.pythonhosted.org/packages/ef/c6/56e718e2c58a4078518c14d97e531ef1e9e8a5c1ddafdc0d264a92be1a1a/wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl sha256: 941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f requires_python: '>=3.6' -- kind: pypi - name: wrapt - version: 1.16.0 - url: https://files.pythonhosted.org/packages/b1/e7/459a8a4f40f2fa65eb73cb3f339e6d152957932516d18d0e996c7ae2d7ae/wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a - requires_python: '>=3.6' -- kind: pypi - name: wrapt - version: 1.16.0 - url: https://files.pythonhosted.org/packages/49/83/b40bc1ad04a868b5b5bcec86349f06c1ee1ea7afe51dc3e46131e4f39308/wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf - requires_python: '>=3.6' -- kind: pypi - name: wrapt - version: 1.16.0 - url: https://files.pythonhosted.org/packages/6e/52/2da48b35193e39ac53cfb141467d9f259851522d0e8c87153f0ba4205fb1/wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl - sha256: 72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1 - requires_python: '>=3.6' - kind: conda name: xz version: 5.2.6 diff --git a/src/torchrunx/__init__.py b/src/torchrunx/__init__.py index 9c4a4aed..2ac6fd80 100644 --- a/src/torchrunx/__init__.py +++ b/src/torchrunx/__init__.py @@ -1,5 +1,14 @@ from .environment import auto_hosts, auto_workers, slurm_hosts, slurm_workers from .launcher import Launcher, launch -from .log_utils import DefaultLogSpec, LogSpec +from .logging_utils import DefaultLogSpec, LogSpec -__all__ = ["Launcher", "launch", "slurm_hosts", "slurm_workers", "auto_hosts", "auto_workers", "LogSpec", "DefaultLogSpec"] \ No newline at end of file +__all__ = [ + "Launcher", + "launch", + "slurm_hosts", + "slurm_workers", + "auto_hosts", + "auto_workers", + "LogSpec", + "DefaultLogSpec", +] diff --git a/src/torchrunx/__main__.py b/src/torchrunx/__main__.py index f458d37d..c3b3099e 100644 --- a/src/torchrunx/__main__.py +++ b/src/torchrunx/__main__.py @@ -7,6 +7,7 @@ parser = ArgumentParser() parser.add_argument("--launcher-hostname", type=str) parser.add_argument("--launcher-port", type=int) + parser.add_argument("--logger-port", type=int) parser.add_argument("--world-size", type=int) parser.add_argument("--rank", type=int) args = parser.parse_args() @@ -18,4 +19,8 @@ rank=args.rank, ) - main(launcher_agent_group) + main( + launcher_agent_group, + logger_hostname=args.launcher_hostname, + logger_port=args.logger_port, + ) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 32abe4eb..9df1c35a 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -16,7 +16,7 @@ from torch.distributed.elastic.multiprocessing import start_processes from typing_extensions import Self -from .log_utils import RenamingSocketHandler, StreamLogger +from .logging_utils import RenamingSocketHandler, StreamLogger from .utils import ( AgentPayload, AgentStatus, @@ -29,6 +29,8 @@ @dataclass class WorkerArgs: function: Callable + logger_hostname: str + logger_port: int master_hostname: str master_port: int backend: Literal["mpi", "gloo", "nccl", "ucc", None] @@ -37,8 +39,6 @@ class WorkerArgs: local_world_size: int 
world_size: int hostname: str - log_host: str - log_port: int timeout: int def to_bytes(self) -> bytes: @@ -49,30 +49,6 @@ def from_bytes(cls, serialized: bytes) -> Self: return cloudpickle.loads(serialized) -class WorkerTee(object): - def __init__(self, name: os.PathLike | str, mode: str): - self.file = open(name, mode) - self.stdout = sys.stdout - sys.stdout = self - - def __enter__(self): - return self - - def __exit__(self, exception_type, exception_value, exception_traceback): - self.__del__() - - def __del__(self): - sys.stdout = self.stdout - self.file.close() - - def write(self, data): - self.file.write(data) - self.stdout.write(data) - - def flush(self): - self.file.flush() - - def entrypoint(serialized_worker_args: bytes): worker_args = WorkerArgs.from_bytes(serialized_worker_args) logger = logging.getLogger() @@ -80,8 +56,11 @@ def entrypoint(serialized_worker_args: bytes): logger.name = ( f"torchrunx.{worker_args.hostname}[{worker_args.local_rank}]" # overwrite root logger name ) - socketHandler = RenamingSocketHandler(worker_args.log_host, worker_args.log_port, logger.name) - logger.addHandler(socketHandler) + logger.addHandler( + RenamingSocketHandler( + host=worker_args.logger_hostname, port=worker_args.logger_port, root_name=logger.name + ) + ) sys.stdout = StreamLogger(logger, sys.__stdout__) sys.stderr = StreamLogger(logger, sys.__stderr__) @@ -118,7 +97,7 @@ def entrypoint(serialized_worker_args: bytes): return worker_args.function() -def main(launcher_agent_group: LauncherAgentGroup): +def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_port: int): agent_rank = launcher_agent_group.rank - 1 payload = AgentPayload( @@ -139,11 +118,11 @@ def main(launcher_agent_group: LauncherAgentGroup): logger = logging.getLogger(f"torchrunx.{launcher_payload.hostnames[agent_rank]}") logger.setLevel(logging.DEBUG) socketHandler = logging.handlers.SocketHandler( - launcher_payload.log_host, - launcher_payload.log_port, + host=logger_hostname, + port=logger_port, ) logger.addHandler(socketHandler) - + if torch.__version__ >= "2.3": # DefaultLogsSpecs only exists in torch >= 2.3 from torch.distributed.elastic.multiprocessing import DefaultLogsSpecs @@ -161,6 +140,8 @@ def main(launcher_agent_group: LauncherAgentGroup): i: ( WorkerArgs( function=launcher_payload.fn, + logger_hostname=logger_hostname, + logger_port=logger_port, master_hostname=main_agent_payload.hostname, master_port=main_agent_payload.port, backend=launcher_payload.backend, @@ -169,8 +150,6 @@ def main(launcher_agent_group: LauncherAgentGroup): local_world_size=num_workers, world_size=worker_world_size, hostname=launcher_payload.hostnames[agent_rank], - log_host=launcher_payload.log_host, - log_port=launcher_payload.log_port, timeout=launcher_payload.timeout, ).to_bytes(), ) diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 458d04b7..14ebd818 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -19,8 +19,8 @@ import fabric import torch.distributed as dist -from .log_utils import DefaultLogSpec, LogRecordSocketReceiver, LogSpec from .environment import auto_hosts, auto_workers +from .logging_utils import DefaultLogSpec, LogRecordSocketReceiver, LogSpec from .utils import ( AgentPayload, AgentStatus, @@ -120,10 +120,6 @@ def run( formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") # logger. 
- # log_dir = Path(self.log_dir) - # log_dir.mkdir(parents=True, exist_ok=True) - # timestamp = datetime.datetime.now().isoformat(timespec="seconds") - if self.log_spec is None: # TODO: this assumes the type of workers_per_host is simply int. We should consider # again whether it's worth supporting inhomogeneous allocations (list[int]) @@ -132,12 +128,12 @@ def run( num_workers=self.workers_per_host, # type: ignore ) - log_port = get_open_port() + logger_port = get_open_port() log_process = Process( - target=monitor_log, args=(self.log_spec, log_port, formatter), daemon=True + target=monitor_log, args=(self.log_spec, logger_port, formatter), daemon=True ) log_process.start() - + if self.auto: if self.hostnames is None: self.hostnames = auto_hosts() @@ -184,6 +180,7 @@ def run( f"{sys.executable} -u -m torchrunx " f"--launcher-hostname {launcher_hostname} " f"--launcher-port {launcher_port} " + f"--logger-port {logger_port} " f"--world-size {world_size} " # rank set in the loop below ) @@ -222,8 +219,6 @@ def run( hostnames=self.hostnames, worker_world_size=worker_world_size, worker_global_ranks=worker_global_ranks, - log_host=launcher_hostname, - log_port=log_port, backend=self.backend, timeout=self.timeout, ) diff --git a/src/torchrunx/log_utils.py b/src/torchrunx/logging_utils.py similarity index 99% rename from src/torchrunx/log_utils.py rename to src/torchrunx/logging_utils.py index 60a618eb..da1e3f0d 100644 --- a/src/torchrunx/log_utils.py +++ b/src/torchrunx/logging_utils.py @@ -87,7 +87,6 @@ def serve_until_stopped(self): class RenamingSocketHandler(logging.handlers.SocketHandler): def __init__(self, host, port, root_name): super().__init__(host, port) - self.root_name = root_name def emit(self, record): @@ -102,7 +101,7 @@ def get_map(self) -> dict[str, list[logging.Handler]]: """ Called by torchrunx.launch on the log_spec argument. - :return: A mapping of logger names to lists of :mod:`logging.Handler` objects. + :return: A mapping of logger names to lists of :mod:`logging.Handler` objects. :rtype: dict[str, list[logging.Handler]] """ raise NotImplementedError @@ -189,6 +188,7 @@ class StreamLogger: """ For logging write calls to streams such as stdout and stdin in the worker processes. 
""" + def __init__(self, logger: logging.Logger, stream: TextIOWrapper | None): self.logger = logger self._string_io = StringIO() diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index 30ae812f..43f040c6 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -26,8 +26,6 @@ class LauncherPayload: hostnames: list[str] worker_world_size: int worker_global_ranks: list[list[int]] - log_host: str - log_port: int backend: Literal["mpi", "gloo", "nccl", "ucc", None] timeout: int diff --git a/tests/test_CI.py b/tests/test_CI.py index dd999557..be7c5a00 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -6,7 +6,7 @@ import torch.distributed as dist import torchrunx # noqa: I001 -from torchrunx.log_utils import DefaultLogSpec +from torchrunx.logging_utils import DefaultLogSpec def test_simple_localhost(): From 444b461b06e4e050b790fd374c7f0f7fed7f3f4b Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Sat, 17 Aug 2024 14:35:53 -0400 Subject: [PATCH 18/63] refactoring --- src/torchrunx/launcher.py | 72 ++++++++++++++++------------------ src/torchrunx/logging_utils.py | 16 ++++---- tests/test_CI.py | 2 +- 3 files changed, 44 insertions(+), 46 deletions(-) diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 14ebd818..0059c9cf 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -111,6 +111,25 @@ def run( :return: A dictionary mapping worker ranks to their output :rtype: dict[int, Any] """ + if not dist.is_available(): + raise RuntimeError("The torch.distributed package is not available.") + + if self.hostnames is None: + assert self.auto + self.hostnames = auto_hosts() + + num_hosts = len(self.hostnames) + + if self.workers_per_host is None: + assert self.auto + self.workers_per_host = auto_workers() + + if isinstance(self.workers_per_host, int): + self.workers_per_host = [self.workers_per_host] * num_hosts + + assert num_hosts == len(self.workers_per_host) + + # setup logging logger = logging.getLogger("torchrunx") logger.setLevel(logging.DEBUG) @@ -118,14 +137,11 @@ def run( logger.parent = None formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") - # logger. if self.log_spec is None: - # TODO: this assumes the type of workers_per_host is simply int. 
We should consider - # again whether it's worth supporting inhomogeneous allocations (list[int]) self.log_spec = DefaultLogSpec.basic( hostnames=self.hostnames, - num_workers=self.workers_per_host, # type: ignore + workers_per_host=self.workers_per_host, ) logger_port = get_open_port() @@ -134,28 +150,10 @@ def run( ) log_process.start() - if self.auto: - if self.hostnames is None: - self.hostnames = auto_hosts() - if self.workers_per_host is None: - self.workers_per_host = auto_workers() - - assert self.hostnames is not None and self.workers_per_host is not None - - if not dist.is_available(): - raise RuntimeError("The torch.distributed package is not available.") - - num_hosts = len(self.hostnames) - - workers_per_host = self.workers_per_host - if isinstance(self.workers_per_host, int): - workers_per_host = [workers_per_host] * num_hosts - - assert workers_per_host is not None - assert len(workers_per_host) == num_hosts # type: ignore - # launch command + current_dir = os.getcwd() + env_exports = [] for k, v in os.environ.items(): if any(fnmatch.fnmatch(k, e) for e in self.env_vars): @@ -173,22 +171,20 @@ def run( launcher_port = get_open_port() world_size = num_hosts + 1 # launcher + agents - command = ( - f"cd {os.getcwd()} && " - f"{env_export_string}" - f"{env_file_string}" - f"{sys.executable} -u -m torchrunx " - f"--launcher-hostname {launcher_hostname} " - f"--launcher-port {launcher_port} " - f"--logger-port {logger_port} " - f"--world-size {world_size} " - # rank set in the loop below - ) - # start agents on each node for i, hostname in enumerate(self.hostnames): execute_command( - command=f"{command} --rank {i+1}", + command=( + f"cd {current_dir} && " + f"{env_export_string}" + f"{env_file_string}" + f"{sys.executable} -u -m torchrunx " + f"--launcher-hostname {launcher_hostname} " + f"--launcher-port {launcher_port} " + f"--logger-port {logger_port} " + f"--world-size {world_size} " + f"--rank {i+1}" + ), hostname=hostname, ssh_config_file=self.ssh_config_file, ) @@ -205,7 +201,7 @@ def run( # build and sync payloads between launcher and agents - _cumulative_workers = [0] + list(itertools.accumulate(workers_per_host)) # type: ignore + _cumulative_workers = [0] + list(itertools.accumulate(self.workers_per_host)) worker_world_size = _cumulative_workers[-1] diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index da1e3f0d..0956aef9 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -119,17 +119,19 @@ def __init__(self, log_spec_dict: dict[str, list[logging.Handler]]): @classmethod def basic( - cls, hostnames: list[str], num_workers: int, log_dir: str = "./logs", stream: bool = True + cls, + hostnames: list[str], + workers_per_host: list[int], + log_dir: str = "./logs", + stream: bool = True, ) -> DefaultLogSpec: """ Generates torchrunx's default LogSpec :param hostnames: The node hostnames :type hostnames: list[str] - :param num_agents: Number of agents in work group - :type num_agents: int :param num_workers: Number of workers per agent - :type num_workers: int + :type num_workers: list[int] :return: A DefaultLogSpec object to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument :rtype: DefaultLogSpec """ # noqa: E501 @@ -143,9 +145,9 @@ def basic( for hostname in hostnames } workers: dict[str, list[logging.Handler]] = { - f"{hostname}[{j}]": [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}[{j}].log")] - for j in range(num_workers) - for hostname in hostnames + f"{hostname}[{i}]": 
[logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}[{i}].log")] + for hostname, num_workers in zip(hostnames, workers_per_host) + for i in range(num_workers) } if stream: diff --git a/tests/test_CI.py b/tests/test_CI.py index be7c5a00..767f72d9 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -57,7 +57,7 @@ def dist_func(): workers_per_host=2, backend="gloo", log_spec=DefaultLogSpec.basic( - hostnames=["localhost"], num_workers=2, log_dir="./test_logs" + hostnames=["localhost"], workers_per_host=[2], log_dir="./test_logs" ), ) From 707c3e4ed71843f79a99c7a3cbfecedeaf7f64f4 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Sat, 17 Aug 2024 17:51:06 -0400 Subject: [PATCH 19/63] fix test --- tests/test_CI.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/test_CI.py b/tests/test_CI.py index 767f72d9..3aa84111 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -67,14 +67,13 @@ def dist_func(): for file in log_files: with open("./test_logs/" + file, "r") as f: - if file.endswith("0.log"): + if file.endswith("[0].log"): assert "worker rank: 0\n" in f.read() - elif file.endswith("1.log"): + elif file.endswith("[1].log"): assert "worker rank: 1\n" in f.read() else: contents = f.read() - assert "worker rank: 0" in contents - assert "worker rank: 1" in contents + assert "starting processes" in contents # clean up shutil.rmtree("./test_logs", ignore_errors=True) From ce1c067d56848804dce82b5a3c297785e92ad0b9 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Sat, 17 Aug 2024 19:09:29 -0400 Subject: [PATCH 20/63] LogSpec -> LogMap --- src/torchrunx/__init__.py | 5 ++--- src/torchrunx/launcher.py | 22 ++++++++++---------- src/torchrunx/logging_utils.py | 38 +++++++--------------------------- 3 files changed, 21 insertions(+), 44 deletions(-) diff --git a/src/torchrunx/__init__.py b/src/torchrunx/__init__.py index 2ac6fd80..ee053dad 100644 --- a/src/torchrunx/__init__.py +++ b/src/torchrunx/__init__.py @@ -1,6 +1,6 @@ from .environment import auto_hosts, auto_workers, slurm_hosts, slurm_workers from .launcher import Launcher, launch -from .logging_utils import DefaultLogSpec, LogSpec +from .logging_utils import LogMap __all__ = [ "Launcher", @@ -9,6 +9,5 @@ "slurm_workers", "auto_hosts", "auto_workers", - "LogSpec", - "DefaultLogSpec", + "LogMap", ] diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 0059c9cf..d15fe09c 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -20,7 +20,7 @@ import torch.distributed as dist from .environment import auto_hosts, auto_workers -from .logging_utils import DefaultLogSpec, LogRecordSocketReceiver, LogSpec +from .logging_utils import LogMap, LogRecordSocketReceiver from .utils import ( AgentPayload, AgentStatus, @@ -59,8 +59,8 @@ def execute_command( conn.run(f"{command} >> /dev/null 2>&1 &", asynchronous=True) -def monitor_log(log_spec: LogSpec, port: int, formatter: logging.Formatter): - for lname, handlers in log_spec.get_map().items(): # type: ignore +def monitor_log(log_map: LogMap, port: int, formatter: logging.Formatter): + for lname, handlers in log_map.items(): # type: ignore _logger = logging.getLogger(f"torchrunx.{lname}") for handler in handlers: handler.setFormatter(formatter) @@ -76,7 +76,7 @@ class Launcher: workers_per_host: int | list[int] | None = 1 ssh_config_file: str | os.PathLike | None = None backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None - log_spec: LogSpec | None = None + log_map: LogMap | None = None env_vars: list[str] = field( 
default_factory=lambda: [ "PATH", @@ -138,15 +138,15 @@ def run( formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") - if self.log_spec is None: - self.log_spec = DefaultLogSpec.basic( + if self.log_map is None: + self.log_map = LogMap.basic( hostnames=self.hostnames, workers_per_host=self.workers_per_host, ) logger_port = get_open_port() log_process = Process( - target=monitor_log, args=(self.log_spec, logger_port, formatter), daemon=True + target=monitor_log, args=(self.log_map, logger_port, formatter), daemon=True ) log_process.start() @@ -268,7 +268,7 @@ def launch( workers_per_host: int | list[int] | None = 1, ssh_config_file: str | os.PathLike | None = None, backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None, - log_spec: LogSpec | None = None, + log_map: LogMap | None = None, env_vars: list[str] = [ "PATH", "LD_LIBRARY", @@ -301,8 +301,8 @@ def launch( :type ssh_config_file: str | os.PathLike | None, optional :param backend: A ``torch.distributed`` `backend string `_, defaults to None :type backend: Literal['mpi', 'gloo', 'nccl', 'ucc', None], optional - :param log_spec: A :mod:`torchrunx.LogSpec` object specifying how to log the run. When left empty, a :mod:`torchrunx.DefaultLogSpec` is constructed, defaults to None - :type log_spec: torchrunx.LogSpec + :param log_map: A :mod:`torchrunx.LogMap` object specifying how to log the run. When left empty, :mod:`torchrunx.LogMap.basic` is used to construct the default :mod:`torchrunx.LogMap`, defaults to None + :type log_map: torchrunx.LogMap :param env_vars: A list of environmental variables to be copied from the launcher environment to workers. Allows for bash pattern matching syntax, defaults to ["PATH", "LD_LIBRARY", "LIBRARY_PATH", "PYTHON*", "CUDA*", "TORCH*", "PYTORCH*", "NCCL*"] :type env_vars: list[str], optional :param env_file: An additional environment file that will be sourced prior to executing ``func``, defaults to None @@ -319,7 +319,7 @@ def launch( workers_per_host=workers_per_host, ssh_config_file=ssh_config_file, backend=backend, - log_spec=log_spec, + log_map=log_map, env_vars=env_vars, env_file=env_file, timeout=timeout, diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 0956aef9..ce45bddb 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -7,7 +7,6 @@ import select import socketserver import struct -from abc import ABC, abstractmethod from collections import defaultdict from io import StringIO, TextIOWrapper from pathlib import Path @@ -95,27 +94,11 @@ def emit(self, record): super().emit(record) -class LogSpec(ABC): - @abstractmethod - def get_map(self) -> dict[str, list[logging.Handler]]: - """ - Called by torchrunx.launch on the log_spec argument. - - :return: A mapping of logger names to lists of :mod:`logging.Handler` objects. - :rtype: dict[str, list[logging.Handler]] - """ - raise NotImplementedError - - -class DefaultLogSpec(LogSpec): - def __init__(self, log_spec_dict: dict[str, list[logging.Handler]]): - """ - Constructs a ``DefaultLogSpec``. - - :param log_spec_dict: A mapping of logger names to lists of :mod:`logging.Handler` objects. 
- :type log_spec_dict: dict[str, list[logging.Handler]] - """ - self.log_spec_dict = log_spec_dict +class LogMap(dict): + def __or__(self, other: LogMap) -> LogMap: + new = LogMap(other) + new.update(self) + return new @classmethod def basic( @@ -124,7 +107,7 @@ def basic( workers_per_host: list[int], log_dir: str = "./logs", stream: bool = True, - ) -> DefaultLogSpec: + ) -> LogMap: """ Generates torchrunx's default LogSpec @@ -156,9 +139,7 @@ def basic( return cls({**agents, **workers}) @classmethod - def from_file_map( - cls, file_map: dict[str, list[str]], log_dir: str = "./logs" - ) -> DefaultLogSpec: + def from_file_map(cls, file_map: dict[str, list[str]], log_dir: str = "./logs") -> LogMap: """ Generates DefaultLogSpec from a mapping of filenames to worker/agent names that should be logged there. @@ -180,10 +161,7 @@ def from_file_map( logging.FileHandler(f"{log_dir}/{timestamp}-{file_suffix}") ) - return DefaultLogSpec(reverse_map) # re-typing - - def get_map(self): - return self.log_spec_dict + return cls(reverse_map) class StreamLogger: From ee8e6e8c632642974d18ca176b1d3a5c423da041 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Sat, 17 Aug 2024 19:11:37 -0400 Subject: [PATCH 21/63] switch to LogMap in CI test --- tests/test_CI.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/tests/test_CI.py b/tests/test_CI.py index 3aa84111..ce0c6be2 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -5,8 +5,8 @@ import torch import torch.distributed as dist -import torchrunx # noqa: I001 -from torchrunx.logging_utils import DefaultLogSpec +import torchrunx +from torchrunx import LogMap def test_simple_localhost(): @@ -56,9 +56,7 @@ def dist_func(): func_kwargs={}, workers_per_host=2, backend="gloo", - log_spec=DefaultLogSpec.basic( - hostnames=["localhost"], workers_per_host=[2], log_dir="./test_logs" - ), + log_map=LogMap.basic(hostnames=["localhost"], workers_per_host=[2], log_dir="./test_logs"), ) log_files = next(os.walk("./test_logs"), (None, None, []))[2] From ce114162ee5565919057da28ef3ac76c084a11ba Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Sun, 18 Aug 2024 18:20:53 -0400 Subject: [PATCH 22/63] updates to LogMap api --- src/torchrunx/launcher.py | 5 +- src/torchrunx/logging_utils.py | 142 +++++++++++++++++++-------------- 2 files changed, 86 insertions(+), 61 deletions(-) diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index d15fe09c..24db50fa 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -60,11 +60,10 @@ def execute_command( def monitor_log(log_map: LogMap, port: int, formatter: logging.Formatter): - for lname, handlers in log_map.items(): # type: ignore - _logger = logging.getLogger(f"torchrunx.{lname}") + for logger, handlers in log_map.iter(): for handler in handlers: handler.setFormatter(formatter) - _logger.addHandler(handler) + logger.addHandler(handler) LogRecordSocketReceiver(host=socket.getfqdn(), port=port).serve_until_stopped() diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index ce45bddb..6384cd1f 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -10,6 +10,7 @@ from collections import defaultdict from io import StringIO, TextIOWrapper from pathlib import Path +from typing import DefaultDict, List, Tuple, Union class LogRecordStreamHandler(socketserver.StreamRequestHandler): @@ -94,11 +95,26 @@ def emit(self, record): super().emit(record) -class LogMap(dict): +class LogMap(DefaultDict[Tuple[str, Union[int, None]], 
List[logging.Handler]]): + def __init__(self): + super().__init__(list) + + def add_handler(self, hostname: str, worker_id: int | None, handler: logging.Handler): + self[(hostname, worker_id)].append(handler) + def __or__(self, other: LogMap) -> LogMap: - new = LogMap(other) - new.update(self) - return new + m = LogMap() + for k in self.keys() | other.keys(): + m[k] = self[k] + other[k] + return m + + def iter(self): + for (hostname, worker_id), v in self.items(): + _name = f"torchrunx.{hostname}" + if worker_id is not None: + _name += f"[{worker_id}]" + _logger = logging.getLogger(_name) + yield _logger, v @classmethod def basic( @@ -108,60 +124,70 @@ def basic( log_dir: str = "./logs", stream: bool = True, ) -> LogMap: - """ - Generates torchrunx's default LogSpec - - :param hostnames: The node hostnames - :type hostnames: list[str] - :param num_workers: Number of workers per agent - :type num_workers: list[int] - :return: A DefaultLogSpec object to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument - :rtype: DefaultLogSpec - """ # noqa: E501 - - timestamp = datetime.datetime.now().isoformat(timespec="seconds") - - Path(log_dir).mkdir(parents=True, exist_ok=True) - - agents: dict[str, list[logging.Handler]] = { - hostname: [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}.log")] - for hostname in hostnames - } - workers: dict[str, list[logging.Handler]] = { - f"{hostname}[{i}]": [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}[{i}].log")] - for hostname, num_workers in zip(hostnames, workers_per_host) - for i in range(num_workers) - } - - if stream: - workers[f"{hostnames[0]}[0]"].append(logging.StreamHandler()) - - return cls({**agents, **workers}) - - @classmethod - def from_file_map(cls, file_map: dict[str, list[str]], log_dir: str = "./logs") -> LogMap: - """ - Generates DefaultLogSpec from a mapping of filenames to worker/agent names that should be logged there. - - :param file_map: A dictionary mapping file suffixes (filenames will be prefixed with a timestamp) to worker and agent names. 
- :type file_map: dict[str, str] - :return: A DefaultLogSpec object to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument - :rtype: DefaultLogSpec - """ # noqa: E501 - - reverse_map: defaultdict[str, list[logging.Handler]] = defaultdict(lambda: []) - - timestamp = datetime.datetime.now().isoformat(timespec="seconds") - - Path(log_dir).mkdir(parents=True, exist_ok=True) - - for file_suffix, loggers in file_map.items(): - for logger in loggers: - reverse_map[logger].append( - logging.FileHandler(f"{log_dir}/{timestamp}-{file_suffix}") - ) - - return cls(reverse_map) + return LogMap() + + # @classmethod + # def basic( + # cls, + # hostnames: list[str], + # workers_per_host: list[int], + # log_dir: str = "./logs", + # stream: bool = True, + # ) -> LogMap: + # """ + # Generates torchrunx's default LogSpec + + # :param hostnames: The node hostnames + # :type hostnames: list[str] + # :param num_workers: Number of workers per agent + # :type num_workers: list[int] + # :return: A DefaultLogSpec object to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument + # :rtype: DefaultLogSpec + # """ # noqa: E501 + + # timestamp = datetime.datetime.now().isoformat(timespec="seconds") + + # Path(log_dir).mkdir(parents=True, exist_ok=True) + + # agents: dict[str, list[logging.Handler]] = { + # hostname: [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}.log")] + # for hostname in hostnames + # } + # workers: dict[str, list[logging.Handler]] = { + # f"{hostname}[{i}]": [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}[{i}].log")] + # for hostname, num_workers in zip(hostnames, workers_per_host) + # for i in range(num_workers) + # } + + # if stream: + # workers[f"{hostnames[0]}[0]"].append(logging.StreamHandler()) + + # return cls({**agents, **workers}) + + # @classmethod + # def from_file_map(cls, file_map: dict[str, list[str]], log_dir: str = "./logs") -> LogMap: + # """ + # Generates DefaultLogSpec from a mapping of filenames to worker/agent names that should be logged there. + + # :param file_map: A dictionary mapping file suffixes (filenames will be prefixed with a timestamp) to worker and agent names. 
+ # :type file_map: dict[str, str] + # :return: A DefaultLogSpec object to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument + # :rtype: DefaultLogSpec + # """ # noqa: E501 + + # reverse_map: defaultdict[str, list[logging.Handler]] = defaultdict(lambda: []) + + # timestamp = datetime.datetime.now().isoformat(timespec="seconds") + + # Path(log_dir).mkdir(parents=True, exist_ok=True) + + # for file_suffix, loggers in file_map.items(): + # for logger in loggers: + # reverse_map[logger].append( + # logging.FileHandler(f"{log_dir}/{timestamp}-{file_suffix}") + # ) + + # return cls(reverse_map) class StreamLogger: From 5e973d97aecedc73971c13c9c760c025e543f664 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Sun, 18 Aug 2024 18:47:08 -0400 Subject: [PATCH 23/63] adding mapping to logmap --- src/torchrunx/launcher.py | 2 +- src/torchrunx/logging_utils.py | 23 +++++++++-------------- 2 files changed, 10 insertions(+), 15 deletions(-) diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 24db50fa..006230c0 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -60,7 +60,7 @@ def execute_command( def monitor_log(log_map: LogMap, port: int, formatter: logging.Formatter): - for logger, handlers in log_map.iter(): + for logger, handlers in log_map: for handler in handlers: handler.setFormatter(formatter) logger.addHandler(handler) diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 6384cd1f..a9f51fda 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -1,15 +1,12 @@ from __future__ import annotations -import datetime import logging import logging.handlers import pickle import select import socketserver import struct -from collections import defaultdict from io import StringIO, TextIOWrapper -from pathlib import Path from typing import DefaultDict, List, Tuple, Union @@ -95,26 +92,24 @@ def emit(self, record): super().emit(record) -class LogMap(DefaultDict[Tuple[str, Union[int, None]], List[logging.Handler]]): +class LogMap: def __init__(self): - super().__init__(list) + self.mapping = DefaultDict[Tuple[str, Union[int, None]], List[logging.Handler]](list) def add_handler(self, hostname: str, worker_id: int | None, handler: logging.Handler): - self[(hostname, worker_id)].append(handler) + self.mapping[(hostname, worker_id)].append(handler) def __or__(self, other: LogMap) -> LogMap: m = LogMap() - for k in self.keys() | other.keys(): - m[k] = self[k] + other[k] + for k in self.mapping.keys() | other.mapping.keys(): + m.mapping[k] = self.mapping[k] + other.mapping[k] return m - def iter(self): - for (hostname, worker_id), v in self.items(): - _name = f"torchrunx.{hostname}" - if worker_id is not None: - _name += f"[{worker_id}]" + def __iter__(self): + for (hostname, worker_id), handlers in self.mapping.items(): + _name = f"torchrunx.{hostname}" + (f"[{worker_id}]" if worker_id is not None else "") _logger = logging.getLogger(_name) - yield _logger, v + yield _logger, handlers @classmethod def basic( From 2e4fd71785ef55c092362b73e6d6276a0f80c82d Mon Sep 17 00:00:00 2001 From: Apoorv Khandelwal Date: Tue, 20 Aug 2024 13:07:08 -0700 Subject: [PATCH 24/63] LogMap implements __and__ --- src/torchrunx/logging_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index a9f51fda..56c497ec 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -99,7 +99,7 @@ def __init__(self): def 
add_handler(self, hostname: str, worker_id: int | None, handler: logging.Handler): self.mapping[(hostname, worker_id)].append(handler) - def __or__(self, other: LogMap) -> LogMap: + def __add__(self, other: LogMap) -> LogMap: m = LogMap() for k in self.mapping.keys() | other.mapping.keys(): m.mapping[k] = self.mapping[k] + other.mapping[k] From 752598f901b53bd3bca9e03562b3352e3757b6c2 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Tue, 20 Aug 2024 18:03:39 -0400 Subject: [PATCH 25/63] environment.Auto class --- src/torchrunx/__init__.py | 5 ++-- src/torchrunx/environment.py | 50 +++++++++++++++++++----------------- src/torchrunx/launcher.py | 23 +++++++---------- 3 files changed, 37 insertions(+), 41 deletions(-) diff --git a/src/torchrunx/__init__.py b/src/torchrunx/__init__.py index ee053dad..0f1b0eb5 100644 --- a/src/torchrunx/__init__.py +++ b/src/torchrunx/__init__.py @@ -1,13 +1,12 @@ -from .environment import auto_hosts, auto_workers, slurm_hosts, slurm_workers +from .environment import Auto, slurm_hosts, slurm_workers from .launcher import Launcher, launch from .logging_utils import LogMap __all__ = [ "Launcher", "launch", + "Auto", "slurm_hosts", "slurm_workers", - "auto_hosts", - "auto_workers", "LogMap", ] diff --git a/src/torchrunx/environment.py b/src/torchrunx/environment.py index 560d62cc..b9a49a47 100644 --- a/src/torchrunx/environment.py +++ b/src/torchrunx/environment.py @@ -44,27 +44,29 @@ def slurm_workers() -> int: return int(os.environ["SLURM_CPUS_ON_NODE"]) -def auto_hosts() -> list[str]: - """ - Automatically determine hostname list - - :return: Hostnames in Slurm allocation, or ['localhost'] - :rtype: list[str] - """ - if in_slurm_job(): - slurm_hosts() - - return ["localhost"] - - -def auto_workers() -> int: - """ - Automatically determine number of workers per host - - :return: Workers per host - :rtype: int - """ - if in_slurm_job(): - return slurm_workers() - - return torch.cuda.device_count() or os.cpu_count() or 1 +class Auto: + @staticmethod + def hosts() -> list[str]: + """ + Automatically determine hostname list + + :return: Hostnames in Slurm allocation, or ['localhost'] + :rtype: list[str] + """ + if in_slurm_job(): + slurm_hosts() + + return ["localhost"] + + @staticmethod + def workers() -> int: + """ + Automatically determine number of workers per host + + :return: Workers per host + :rtype: int + """ + if in_slurm_job(): + return slurm_workers() + + return torch.cuda.device_count() or os.cpu_count() or 1 diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 006230c0..cf83bf90 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -19,7 +19,7 @@ import fabric import torch.distributed as dist -from .environment import auto_hosts, auto_workers +from .environment import Auto from .logging_utils import LogMap, LogRecordSocketReceiver from .utils import ( AgentPayload, @@ -70,9 +70,8 @@ def monitor_log(log_map: LogMap, port: int, formatter: logging.Formatter): @dataclass class Launcher: - auto: bool = False - hostnames: list[str] | None = field(default_factory=lambda: ["localhost"]) - workers_per_host: int | list[int] | None = 1 + hostnames: list[str] | Auto = field(default_factory=lambda: ["localhost"]) + workers_per_host: int | list[int] | Auto = 1 ssh_config_file: str | os.PathLike | None = None backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None log_map: LogMap | None = None @@ -113,15 +112,13 @@ def run( if not dist.is_available(): raise RuntimeError("The torch.distributed package is not available.") - if 
self.hostnames is None: - assert self.auto - self.hostnames = auto_hosts() + if isinstance(self.hostnames, Auto): + self.hostnames = Auto.hosts() num_hosts = len(self.hostnames) - if self.workers_per_host is None: - assert self.auto - self.workers_per_host = auto_workers() + if isinstance(self.workers_per_host, Auto): + self.workers_per_host = Auto.workers() if isinstance(self.workers_per_host, int): self.workers_per_host = [self.workers_per_host] * num_hosts @@ -262,9 +259,8 @@ def launch( func: Callable, func_args: tuple[Any] = tuple(), func_kwargs: dict[str, Any] = {}, - auto: bool = False, - hostnames: list[str] | None = ["localhost"], - workers_per_host: int | list[int] | None = 1, + hostnames: list[str] | Auto = ["localhost"], + workers_per_host: int | list[int] | Auto = 1, ssh_config_file: str | os.PathLike | None = None, backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None, log_map: LogMap | None = None, @@ -313,7 +309,6 @@ def launch( :rtype: dict[int, Any] """ # noqa: E501 return Launcher( - auto=auto, hostnames=hostnames, workers_per_host=workers_per_host, ssh_config_file=ssh_config_file, From ab79473c0b064677554b057ec78c06656f56f9ff Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Thu, 22 Aug 2024 18:01:46 -0400 Subject: [PATCH 26/63] changes to streamed log record handling --- src/torchrunx/__init__.py | 2 - src/torchrunx/agent.py | 59 ++++----- src/torchrunx/launcher.py | 46 +++---- src/torchrunx/logging_utils.py | 211 ++++++--------------------------- tests/test_CI.py | 5 +- 5 files changed, 86 insertions(+), 237 deletions(-) diff --git a/src/torchrunx/__init__.py b/src/torchrunx/__init__.py index 0f1b0eb5..fc2691b2 100644 --- a/src/torchrunx/__init__.py +++ b/src/torchrunx/__init__.py @@ -1,6 +1,5 @@ from .environment import Auto, slurm_hosts, slurm_workers from .launcher import Launcher, launch -from .logging_utils import LogMap __all__ = [ "Launcher", @@ -8,5 +7,4 @@ "Auto", "slurm_hosts", "slurm_workers", - "LogMap", ] diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 9df1c35a..7df9b4f8 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -1,13 +1,12 @@ from __future__ import annotations import datetime -import logging -import logging.handlers import os import socket import sys import tempfile from dataclasses import dataclass +import logging from typing import Callable, Literal import cloudpickle @@ -16,7 +15,7 @@ from torch.distributed.elastic.multiprocessing import start_processes from typing_extensions import Self -from .logging_utils import RenamingSocketHandler, StreamLogger +from .logging_utils import StreamLogger, log_records_to_socket from .utils import ( AgentPayload, AgentStatus, @@ -51,19 +50,19 @@ def from_bytes(cls, serialized: bytes) -> Self: def entrypoint(serialized_worker_args: bytes): worker_args = WorkerArgs.from_bytes(serialized_worker_args) + logger = logging.getLogger() - logger.setLevel(logging.DEBUG) - logger.name = ( - f"torchrunx.{worker_args.hostname}[{worker_args.local_rank}]" # overwrite root logger name - ) - logger.addHandler( - RenamingSocketHandler( - host=worker_args.logger_hostname, port=worker_args.logger_port, root_name=logger.name - ) + + log_records_to_socket( + logger=logger, + hostname=worker_args.hostname, + worker_rank=worker_args.local_rank, + logger_hostname=worker_args.logger_hostname, + logger_port=worker_args.logger_port, ) - sys.stdout = StreamLogger(logger, sys.__stdout__) sys.stderr = StreamLogger(logger, sys.__stderr__) + sys.stdout = StreamLogger(logger, sys.__stdout__) store = 
dist.TCPStore( # pyright: ignore[reportPrivateImportUsage] host_name=worker_args.master_hostname, @@ -76,7 +75,7 @@ def entrypoint(serialized_worker_args: bytes): if backend is None: backend = "nccl" if torch.cuda.is_available() else "gloo" - logging.debug(f"using backend: {backend}") + logger.debug(f"using backend: {backend}") dist.init_process_group( backend=backend, @@ -93,7 +92,7 @@ def entrypoint(serialized_worker_args: bytes): os.environ["MASTER_ADDR"] = worker_args.master_hostname os.environ["MASTER_PORT"] = str(worker_args.master_port) - logging.debug(f"executing function: {worker_args.function}") + logger.debug(f"executing function: {worker_args.function}") return worker_args.function() @@ -105,7 +104,7 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ port=get_open_port(), process_id=os.getpid(), ) - # DefaultLogsSpecs(log_dir=None, tee=Std.ALL, local_ranks_filter={0}), + all_payloads = launcher_agent_group.sync_payloads(payload=payload) launcher_payload: LauncherPayload = all_payloads[0] # pyright: ignore[reportAssignmentType] main_agent_payload: AgentPayload = all_payloads[1] # pyright: ignore[reportAssignmentType] @@ -115,28 +114,29 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ worker_global_ranks = launcher_payload.worker_global_ranks[agent_rank] num_workers = len(worker_global_ranks) - logger = logging.getLogger(f"torchrunx.{launcher_payload.hostnames[agent_rank]}") - logger.setLevel(logging.DEBUG) - socketHandler = logging.handlers.SocketHandler( - host=logger_hostname, - port=logger_port, + logger = logging.getLogger() + + log_records_to_socket( + logger=logger, + hostname=hostname, + worker_rank=None, + logger_hostname=logger_hostname, + logger_port=logger_port, ) - logger.addHandler(socketHandler) if torch.__version__ >= "2.3": - # DefaultLogsSpecs only exists in torch >= 2.3 from torch.distributed.elastic.multiprocessing import DefaultLogsSpecs - log_arg = DefaultLogsSpecs(log_dir=tempfile.mkdtemp()) + log_kwargs = {"logs_specs": DefaultLogsSpecs(log_dir=tempfile.mkdtemp())} else: - log_arg = tempfile.mkdtemp() + log_kwargs = {"log_dir": tempfile.mkdtemp()} # spawn workers ctx = start_processes( - f"{hostname}_", - entrypoint, - { + name=f"{hostname}_", + entrypoint=entrypoint, + args={ i: ( WorkerArgs( function=launcher_payload.fn, @@ -155,10 +155,11 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ ) for i in range(num_workers) }, - {i: {} for i in range(num_workers)}, - log_arg, # type: ignore + envs={i: {} for i in range(num_workers)}, + **log_kwargs, # pyright: ignore [reportArgumentType] ) logger.debug("starting processes") + try: status = AgentStatus() while True: diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index cf83bf90..32807ec9 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -3,9 +3,6 @@ import fnmatch import ipaddress import itertools -import logging -import logging.config -import logging.handlers import os import socket import subprocess @@ -13,6 +10,7 @@ from collections import ChainMap from dataclasses import dataclass, field from functools import partial +from logging import Handler from multiprocessing import Process from typing import Any, Callable, Literal @@ -20,7 +18,7 @@ import torch.distributed as dist from .environment import Auto -from .logging_utils import LogMap, LogRecordSocketReceiver +from .logging_utils import LogRecordSocketReceiver from .utils import ( AgentPayload, AgentStatus, @@ 
-59,22 +57,13 @@ def execute_command( conn.run(f"{command} >> /dev/null 2>&1 &", asynchronous=True) -def monitor_log(log_map: LogMap, port: int, formatter: logging.Formatter): - for logger, handlers in log_map: - for handler in handlers: - handler.setFormatter(formatter) - logger.addHandler(handler) - - LogRecordSocketReceiver(host=socket.getfqdn(), port=port).serve_until_stopped() - - @dataclass class Launcher: hostnames: list[str] | Auto = field(default_factory=lambda: ["localhost"]) workers_per_host: int | list[int] | Auto = 1 ssh_config_file: str | os.PathLike | None = None backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None - log_map: LogMap | None = None + log_handlers: list[Handler] = [] env_vars: list[str] = field( default_factory=lambda: [ "PATH", @@ -125,24 +114,18 @@ def run( assert num_hosts == len(self.workers_per_host) - # setup logging - - logger = logging.getLogger("torchrunx") - logger.setLevel(logging.DEBUG) - logger.propagate = False - logger.parent = None + # - formatter = logging.Formatter("%(asctime)s:%(levelname)s:%(name)s:%(message)s") + launcher_hostname = socket.getfqdn() - if self.log_map is None: - self.log_map = LogMap.basic( - hostnames=self.hostnames, - workers_per_host=self.workers_per_host, - ) + # setup logging logger_port = get_open_port() log_process = Process( - target=monitor_log, args=(self.log_map, logger_port, formatter), daemon=True + target=LogRecordSocketReceiver( + host=launcher_hostname, port=logger_port, handlers=self.log_handlers + ).serve_forever, + daemon=True, ) log_process.start() @@ -163,7 +146,6 @@ def run( if self.env_file is not None: env_file_string = f"source {self.env_file} && " - launcher_hostname = socket.getfqdn() launcher_port = get_open_port() world_size = num_hosts + 1 # launcher + agents @@ -263,7 +245,7 @@ def launch( workers_per_host: int | list[int] | Auto = 1, ssh_config_file: str | os.PathLike | None = None, backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None, - log_map: LogMap | None = None, + log_handlers: list[Handler] = [], env_vars: list[str] = [ "PATH", "LD_LIBRARY", @@ -296,8 +278,8 @@ def launch( :type ssh_config_file: str | os.PathLike | None, optional :param backend: A ``torch.distributed`` `backend string `_, defaults to None :type backend: Literal['mpi', 'gloo', 'nccl', 'ucc', None], optional - :param log_map: A :mod:`torchrunx.LogMap` object specifying how to log the run. When left empty, :mod:`torchrunx.LogMap.basic` is used to construct the default :mod:`torchrunx.LogMap`, defaults to None - :type log_map: torchrunx.LogMap + :param log_handlers: A list of handlers to manage agent and worker logs, defaults to [] + :type log_handlers: list[Handler], optional :param env_vars: A list of environmental variables to be copied from the launcher environment to workers. 
Allows for bash pattern matching syntax, defaults to ["PATH", "LD_LIBRARY", "LIBRARY_PATH", "PYTHON*", "CUDA*", "TORCH*", "PYTORCH*", "NCCL*"] :type env_vars: list[str], optional :param env_file: An additional environment file that will be sourced prior to executing ``func``, defaults to None @@ -313,7 +295,7 @@ def launch( workers_per_host=workers_per_host, ssh_config_file=ssh_config_file, backend=backend, - log_map=log_map, + log_handlers=log_handlers, env_vars=env_vars, env_file=env_file, timeout=timeout, diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 56c497ec..a98c74db 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -1,188 +1,55 @@ from __future__ import annotations import logging -import logging.handlers import pickle -import select import socketserver import struct from io import StringIO, TextIOWrapper -from typing import DefaultDict, List, Tuple, Union +from logging import Handler, Logger +from logging.handlers import SocketHandler -class LogRecordStreamHandler(socketserver.StreamRequestHandler): - """Handler for a streaming logging request. +def log_records_to_socket( + logger: Logger, hostname: str, worker_rank: int | None, logger_hostname: str, logger_port: int +): + logger.setLevel(logging.NOTSET) - This basically logs the record using whatever logging policy is - configured locally. - """ + old_factory = logging.getLogRecordFactory() - def handle(self): - """ - Handle multiple requests - each expected to be a 4-byte length, - followed by the LogRecord in pickle format. Logs the record - according to whatever policy is configured locally. - """ - while True: - chunk = self.connection.recv(4) - if len(chunk) < 4: - break - slen = struct.unpack(">L", chunk)[0] - chunk = self.connection.recv(slen) - while len(chunk) < slen: - chunk = chunk + self.connection.recv(slen - len(chunk)) - obj = self.unPickle(chunk) - record = logging.makeLogRecord(obj) - self.handleLogRecord(record) - - def unPickle(self, data): - return pickle.loads(data) - - def handleLogRecord(self, record): - # if a name is specified, we use the named logger rather than the one - # implied by the record. - if self.server.logname is not None: # type: ignore - name = self.server.logname # type: ignore - else: - name = record.name - logger = logging.getLogger(name) - # N.B. EVERY record gets logged. This is because Logger.handle - # is normally called AFTER logger-level filtering. If you want - # to do filtering, do it at the client end to save wasting - # cycles and network bandwidth! - if logger.getEffectiveLevel() <= record.levelno: - logger.handle(record) + def record_factory(*args, **kwargs): + record = old_factory(*args, **kwargs) + record.hostname = hostname + record.worker_rank = worker_rank + return record + logging.setLogRecordFactory(record_factory) + + logger.addHandler(SocketHandler(host=logger_hostname, port=logger_port)) -class LogRecordSocketReceiver(socketserver.ThreadingTCPServer): - """ - Simple TCP socket-based logging receiver suitable for testing. 
- """ - allow_reuse_address = 1 # type: ignore - - def __init__( - self, - host="localhost", - port=logging.handlers.DEFAULT_TCP_LOGGING_PORT, - handler=LogRecordStreamHandler, - ): - socketserver.ThreadingTCPServer.__init__(self, (host, port), handler) - self.abort = 0 - self.timeout = 1 - self.logname = None - - def serve_until_stopped(self): - abort = 0 - while not abort: - rd, wr, ex = select.select([self.socket.fileno()], [], [], self.timeout) - if rd: - self.handle_request() - abort = self.abort - - -class RenamingSocketHandler(logging.handlers.SocketHandler): - def __init__(self, host, port, root_name): - super().__init__(host, port) - self.root_name = root_name - - def emit(self, record): - if not record.name.startswith(self.root_name): - record.name = f"{self.root_name}.{record.name}" - super().emit(record) - - -class LogMap: - def __init__(self): - self.mapping = DefaultDict[Tuple[str, Union[int, None]], List[logging.Handler]](list) - - def add_handler(self, hostname: str, worker_id: int | None, handler: logging.Handler): - self.mapping[(hostname, worker_id)].append(handler) - - def __add__(self, other: LogMap) -> LogMap: - m = LogMap() - for k in self.mapping.keys() | other.mapping.keys(): - m.mapping[k] = self.mapping[k] + other.mapping[k] - return m - - def __iter__(self): - for (hostname, worker_id), handlers in self.mapping.items(): - _name = f"torchrunx.{hostname}" + (f"[{worker_id}]" if worker_id is not None else "") - _logger = logging.getLogger(_name) - yield _logger, handlers - - @classmethod - def basic( - cls, - hostnames: list[str], - workers_per_host: list[int], - log_dir: str = "./logs", - stream: bool = True, - ) -> LogMap: - return LogMap() - - # @classmethod - # def basic( - # cls, - # hostnames: list[str], - # workers_per_host: list[int], - # log_dir: str = "./logs", - # stream: bool = True, - # ) -> LogMap: - # """ - # Generates torchrunx's default LogSpec - - # :param hostnames: The node hostnames - # :type hostnames: list[str] - # :param num_workers: Number of workers per agent - # :type num_workers: list[int] - # :return: A DefaultLogSpec object to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument - # :rtype: DefaultLogSpec - # """ # noqa: E501 - - # timestamp = datetime.datetime.now().isoformat(timespec="seconds") - - # Path(log_dir).mkdir(parents=True, exist_ok=True) - - # agents: dict[str, list[logging.Handler]] = { - # hostname: [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}.log")] - # for hostname in hostnames - # } - # workers: dict[str, list[logging.Handler]] = { - # f"{hostname}[{i}]": [logging.FileHandler(f"{log_dir}/{timestamp}-{hostname}[{i}].log")] - # for hostname, num_workers in zip(hostnames, workers_per_host) - # for i in range(num_workers) - # } - - # if stream: - # workers[f"{hostnames[0]}[0]"].append(logging.StreamHandler()) - - # return cls({**agents, **workers}) - - # @classmethod - # def from_file_map(cls, file_map: dict[str, list[str]], log_dir: str = "./logs") -> LogMap: - # """ - # Generates DefaultLogSpec from a mapping of filenames to worker/agent names that should be logged there. - - # :param file_map: A dictionary mapping file suffixes (filenames will be prefixed with a timestamp) to worker and agent names. 
- # :type file_map: dict[str, str] - # :return: A DefaultLogSpec object to be passed to :mod:`torchrunx.launch` as the ``log_spec`` argument - # :rtype: DefaultLogSpec - # """ # noqa: E501 - - # reverse_map: defaultdict[str, list[logging.Handler]] = defaultdict(lambda: []) - - # timestamp = datetime.datetime.now().isoformat(timespec="seconds") - - # Path(log_dir).mkdir(parents=True, exist_ok=True) - - # for file_suffix, loggers in file_map.items(): - # for logger in loggers: - # reverse_map[logger].append( - # logging.FileHandler(f"{log_dir}/{timestamp}-{file_suffix}") - # ) - - # return cls(reverse_map) +class LogRecordSocketReceiver(socketserver.ThreadingTCPServer): + def __init__(self, host: str, port: int, handlers: list[Handler]): + class _LogRecordStreamHandler(socketserver.StreamRequestHandler): + """ + https://docs.python.org/3.8/howto/logging-cookbook.html#sending-and-receiving-logging-events-across-a-network + """ + + def handle(self): + while True: + chunk = self.connection.recv(4) + if len(chunk) < 4: + break + slen = struct.unpack(">L", chunk)[0] + chunk = self.connection.recv(slen) + while len(chunk) < slen: + chunk = chunk + self.connection.recv(slen - len(chunk)) + obj = pickle.loads(chunk) + record = logging.makeLogRecord(obj) + + for handler in handlers: + handler.handle(record) + + super().__init__((host, port), _LogRecordStreamHandler) class StreamLogger: @@ -190,7 +57,7 @@ class StreamLogger: For logging write calls to streams such as stdout and stdin in the worker processes. """ - def __init__(self, logger: logging.Logger, stream: TextIOWrapper | None): + def __init__(self, logger: Logger, stream: TextIOWrapper | None): self.logger = logger self._string_io = StringIO() if stream is None: diff --git a/tests/test_CI.py b/tests/test_CI.py index ce0c6be2..93541e74 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -6,7 +6,8 @@ import torch.distributed as dist import torchrunx -from torchrunx import LogMap + +# from torchrunx import LogMap def test_simple_localhost(): @@ -56,7 +57,7 @@ def dist_func(): func_kwargs={}, workers_per_host=2, backend="gloo", - log_map=LogMap.basic(hostnames=["localhost"], workers_per_host=[2], log_dir="./test_logs"), + # log_map=LogMap.basic(hostnames=["localhost"], workers_per_host=[2], log_dir="./test_logs") ) log_files = next(os.walk("./test_logs"), (None, None, []))[2] From c7532e19736944c9d1b83ae516ae1a379bec5ded Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Thu, 22 Aug 2024 20:42:27 -0400 Subject: [PATCH 27/63] dummy auto builder for logging handlers --- src/torchrunx/agent.py | 18 +++++++++--------- src/torchrunx/environment.py | 4 ++++ src/torchrunx/launcher.py | 16 ++++++++++++---- 3 files changed, 25 insertions(+), 13 deletions(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 7df9b4f8..208b8b6b 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -1,12 +1,12 @@ from __future__ import annotations import datetime +import logging import os import socket import sys import tempfile from dataclasses import dataclass -import logging from typing import Callable, Literal import cloudpickle @@ -30,8 +30,8 @@ class WorkerArgs: function: Callable logger_hostname: str logger_port: int - master_hostname: str - master_port: int + main_agent_hostname: str + main_agent_port: int backend: Literal["mpi", "gloo", "nccl", "ucc", None] rank: int local_rank: int @@ -65,8 +65,8 @@ def entrypoint(serialized_worker_args: bytes): sys.stdout = StreamLogger(logger, sys.__stdout__) store = dist.TCPStore( # pyright: 
ignore[reportPrivateImportUsage] - host_name=worker_args.master_hostname, - port=worker_args.master_port, + host_name=worker_args.main_agent_hostname, + port=worker_args.main_agent_port, world_size=worker_args.world_size, is_master=(worker_args.rank == 0), ) @@ -89,8 +89,8 @@ def entrypoint(serialized_worker_args: bytes): os.environ["LOCAL_RANK"] = str(worker_args.local_rank) os.environ["LOCAL_WORLD_SIZE"] = str(worker_args.local_world_size) os.environ["WORLD_SIZE"] = str(worker_args.world_size) - os.environ["MASTER_ADDR"] = worker_args.master_hostname - os.environ["MASTER_PORT"] = str(worker_args.master_port) + os.environ["MASTER_ADDR"] = worker_args.main_agent_hostname + os.environ["MASTER_PORT"] = str(worker_args.main_agent_port) logger.debug(f"executing function: {worker_args.function}") return worker_args.function() @@ -142,8 +142,8 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ function=launcher_payload.fn, logger_hostname=logger_hostname, logger_port=logger_port, - master_hostname=main_agent_payload.hostname, - master_port=main_agent_payload.port, + main_agent_hostname=main_agent_payload.hostname, + main_agent_port=main_agent_payload.port, backend=launcher_payload.backend, rank=worker_global_ranks[i], local_rank=i, diff --git a/src/torchrunx/environment.py b/src/torchrunx/environment.py index b9a49a47..cae756f8 100644 --- a/src/torchrunx/environment.py +++ b/src/torchrunx/environment.py @@ -70,3 +70,7 @@ def workers() -> int: return slurm_workers() return torch.cuda.device_count() or os.cpu_count() or 1 + + @staticmethod + def handlers(hostnames: list[str], workers_per_host: list[int]): + return [] diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 32807ec9..dd7f222c 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -63,7 +63,7 @@ class Launcher: workers_per_host: int | list[int] | Auto = 1 ssh_config_file: str | os.PathLike | None = None backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None - log_handlers: list[Handler] = [] + log_handlers: list[Handler] | Auto | None = Auto() env_vars: list[str] = field( default_factory=lambda: [ "PATH", @@ -120,11 +120,19 @@ def run( # setup logging + if self.log_handlers is None: + self.log_handlers = [] + elif isinstance(self.log_handlers, Auto): + self.log_handlers = Auto.handlers( + hostnames=self.hostnames, workers_per_host=self.workers_per_host + ) + logger_port = get_open_port() + log_receiver = LogRecordSocketReceiver( + host=launcher_hostname, port=logger_port, handlers=self.log_handlers + ) log_process = Process( - target=LogRecordSocketReceiver( - host=launcher_hostname, port=logger_port, handlers=self.log_handlers - ).serve_forever, + target=log_receiver.serve_forever, daemon=True, ) log_process.start() From aefdd4d5fafaf9231098a209c8a2ba7dce3a07af Mon Sep 17 00:00:00 2001 From: Apoorv Khandelwal Date: Thu, 22 Aug 2024 20:58:13 -0700 Subject: [PATCH 28/63] Update logging_utils.py --- src/torchrunx/logging_utils.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index a98c74db..911a9a81 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -2,7 +2,7 @@ import logging import pickle -import socketserver +from socketserver import TCPServer, StreamRequestHandler import struct from io import StringIO, TextIOWrapper from logging import Handler, Logger @@ -27,13 +27,9 @@ def record_factory(*args, **kwargs): 
logger.addHandler(SocketHandler(host=logger_hostname, port=logger_port)) -class LogRecordSocketReceiver(socketserver.ThreadingTCPServer): +class LogRecordSocketReceiver(TCPServer): def __init__(self, host: str, port: int, handlers: list[Handler]): - class _LogRecordStreamHandler(socketserver.StreamRequestHandler): - """ - https://docs.python.org/3.8/howto/logging-cookbook.html#sending-and-receiving-logging-events-across-a-network - """ - + class _LogRecordStreamHandler(StreamRequestHandler): def handle(self): while True: chunk = self.connection.recv(4) @@ -45,7 +41,7 @@ def handle(self): chunk = chunk + self.connection.recv(slen - len(chunk)) obj = pickle.loads(chunk) record = logging.makeLogRecord(obj) - + # for handler in handlers: handler.handle(record) From bea3276417b611689a9768c676e4229bfdcbe59d Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Fri, 23 Aug 2024 17:39:00 -0400 Subject: [PATCH 29/63] ignore vscode configs --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 240c67cf..4dbbd920 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,8 @@ test_logs/ _build/ out/ output/ +.vscode/ + # Byte-compiled / optimized / DLL files __pycache__/ From 2e4b3becef4a28455f299da8084eb48bcf50226b Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Fri, 23 Aug 2024 17:40:12 -0400 Subject: [PATCH 30/63] fix CI test. make default handlers --- src/torchrunx/environment.py | 4 --- src/torchrunx/launcher.py | 4 +-- src/torchrunx/logging_utils.py | 45 +++++++++++++++++++++++++++++++--- tests/pytest.ini | 2 -- tests/test_CI.py | 23 ++++++----------- 5 files changed, 52 insertions(+), 26 deletions(-) delete mode 100644 tests/pytest.ini diff --git a/src/torchrunx/environment.py b/src/torchrunx/environment.py index cae756f8..b9a49a47 100644 --- a/src/torchrunx/environment.py +++ b/src/torchrunx/environment.py @@ -70,7 +70,3 @@ def workers() -> int: return slurm_workers() return torch.cuda.device_count() or os.cpu_count() or 1 - - @staticmethod - def handlers(hostnames: list[str], workers_per_host: list[int]): - return [] diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 8b3858b2..ba9e29e0 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -18,7 +18,7 @@ import torch.distributed as dist from .environment import Auto -from .logging_utils import LogRecordSocketReceiver +from .logging_utils import LogRecordSocketReceiver, default_handlers from .utils import ( AgentPayload, AgentStatus, @@ -123,7 +123,7 @@ def run( if self.log_handlers is None: self.log_handlers = [] elif isinstance(self.log_handlers, Auto): - self.log_handlers = Auto.handlers( + self.log_handlers = default_handlers( hostnames=self.hostnames, workers_per_host=self.workers_per_host ) diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 1805c8ce..1fea58ec 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -1,18 +1,19 @@ from __future__ import annotations +import datetime import logging import pickle import struct from io import StringIO, TextIOWrapper from logging import Handler, Logger from logging.handlers import SocketHandler -from socketserver import StreamRequestHandler, TCPServer +from socketserver import StreamRequestHandler, ThreadingTCPServer def log_records_to_socket( logger: Logger, hostname: str, worker_rank: int | None, logger_hostname: str, logger_port: int ): - logger.setLevel(logging.NOTSET) + logger.setLevel(logging.DEBUG) old_factory = logging.getLogRecordFactory() @@ -27,7 
+28,44 @@ def record_factory(*args, **kwargs): logger.addHandler(SocketHandler(host=logger_hostname, port=logger_port)) -class LogRecordSocketReceiver(TCPServer): +def default_handlers( + hostnames: list[str], workers_per_host: list[int], log_dir: str = "./logs" +) -> list[Handler]: + handlers = [] + + timestamp = datetime.datetime.now().isoformat(timespec="seconds") + + def make_handler(hostname: str, rank: int | None = None, stream: bool = False) -> Handler: + if stream: + handler = logging.StreamHandler() + else: + handler = logging.FileHandler( + f"{log_dir}/{timestamp}-{hostname}{'' if rank is None else f'[{rank}]'}.log" + ) + + def handler_filter(record: logging.LogRecord) -> bool: + return record.hostname == hostname and record.worker_rank == rank + + handler.addFilter(handler_filter) + handler.setLevel(logging.DEBUG) + formatter = logging.Formatter( + "%(asctime)s:%(levelname)s:%(hostname)s:worker-%(worker_rank)s:%(message)s" + ) + handler.setFormatter(formatter) + + return handler + + for i, hostname in enumerate(hostnames): + handlers.append(make_handler(hostname=hostname)) + for j in range(workers_per_host[i]): + handlers.append(make_handler(hostname=hostname, rank=j)) + if i == 0 and j == 0: + handlers.append(make_handler(hostname=hostname, rank=j, stream=True)) + + return handlers + + +class LogRecordSocketReceiver(ThreadingTCPServer): def __init__(self, host: str, port: int, handlers: list[Handler]): class _LogRecordStreamHandler(StreamRequestHandler): def handle(self): @@ -46,6 +84,7 @@ def handle(self): handler.handle(record) super().__init__((host, port), _LogRecordStreamHandler) + self.daemon_threads = True class StreamLogger: diff --git a/tests/pytest.ini b/tests/pytest.ini deleted file mode 100644 index 85f8d824..00000000 --- a/tests/pytest.ini +++ /dev/null @@ -1,2 +0,0 @@ -[pytest] -#python_paths = /users/pcurtin1/torchrunx/src diff --git a/tests/test_CI.py b/tests/test_CI.py index 79f41c0c..4862b09d 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -1,4 +1,3 @@ -import logging import os import tempfile @@ -7,6 +6,7 @@ import torch.distributed as dist import torchrunx as trx +from torchrunx.logging_utils import default_handlers def test_simple_localhost(): @@ -42,7 +42,7 @@ def dist_func(): def test_logging(): def dist_func(): rank = int(os.environ["RANK"]) - logging.info(f"worker rank: {rank}") + print(f"worker rank: {rank}") tmp = tempfile.mkdtemp() trx.launch( @@ -50,14 +50,7 @@ def dist_func(): func_kwargs={}, workers_per_host=2, backend="gloo", - ) # log_dir=tmp) - - trx.launch( - func=dist_func, - func_kwargs={}, - workers_per_host=2, - backend="gloo", - # log_map=LogMap.basic(hostnames=["localhost"], workers_per_host=[2], log_dir="./test_logs") + log_handlers=default_handlers(hostnames=["localhost"], workers_per_host=[2], log_dir=tmp), ) log_files = next(os.walk(tmp), (None, None, []))[2] @@ -66,12 +59,12 @@ def dist_func(): for file in log_files: with open(f"{tmp}/{file}", "r") as f: - if file.endswith("0.log"): - assert f.read() == "worker rank: 0\n" - elif file.endswith("1.log"): - assert f.read() == "worker rank: 1\n" + contents = f.read() + if file.endswith("[0].log"): + assert "worker rank: 0\n" in contents + elif file.endswith("[1].log"): + assert "worker rank: 1\n" in contents else: - contents = f.read() assert "starting processes" in contents From 5ed20b6a251dfd3c188309cbe0af593ccaa74bde Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Fri, 23 Aug 2024 17:47:25 -0400 Subject: [PATCH 31/63] fix formatting and typing --- 
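Note (illustrative only, not part of this diff): a rough sketch of how the logging pieces from the previous patch fit together. `default_handlers`, the `log_handlers` argument to `launch`, and the `hostname`/`worker_rank` record attributes attached by `log_records_to_socket` are taken from PATCH 30; the idea of appending an extra filtered handler to the defaults is an assumption about intended use, and the `train` function is a made-up placeholder.

import logging
import os
import tempfile

import torchrunx as trx
from torchrunx.logging_utils import default_handlers


def train():
    # runs inside each worker; print/logging output is forwarded to the launcher
    print(f"worker rank: {os.environ['RANK']}")


if __name__ == "__main__":
    log_dir = tempfile.mkdtemp()

    # default handlers: one log file per agent and per worker, plus a stream
    # handler for worker 0 on the first host
    handlers = default_handlers(
        hostnames=["localhost"], workers_per_host=[2], log_dir=log_dir
    )

    # assumed usage: an extra console handler that only shows records from worker 1,
    # filtering on the worker_rank attribute that log_records_to_socket adds to records
    console = logging.StreamHandler()
    console.addFilter(lambda record: getattr(record, "worker_rank", None) == 1)
    handlers.append(console)

    trx.launch(func=train, workers_per_host=2, backend="gloo", log_handlers=handlers)
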
src/torchrunx/logging_utils.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 1fea58ec..db9e2cbd 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -11,7 +11,11 @@ def log_records_to_socket( - logger: Logger, hostname: str, worker_rank: int | None, logger_hostname: str, logger_port: int + logger: Logger, + hostname: str, + worker_rank: int | None, + logger_hostname: str, + logger_port: int, ): logger.setLevel(logging.DEBUG) @@ -44,7 +48,7 @@ def make_handler(hostname: str, rank: int | None = None, stream: bool = False) - ) def handler_filter(record: logging.LogRecord) -> bool: - return record.hostname == hostname and record.worker_rank == rank + return record.hostname == hostname and record.worker_rank == rank # pyright: ignore handler.addFilter(handler_filter) handler.setLevel(logging.DEBUG) From f081a00543bebe469ddae8a942a0930a45d2fe1a Mon Sep 17 00:00:00 2001 From: Apoorv Khandelwal Date: Mon, 26 Aug 2024 16:08:56 -0700 Subject: [PATCH 32/63] Slurm job fixes --- src/torchrunx/environment.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/torchrunx/environment.py b/src/torchrunx/environment.py index 560d62cc..bef1e4b2 100644 --- a/src/torchrunx/environment.py +++ b/src/torchrunx/environment.py @@ -39,6 +39,8 @@ def slurm_workers() -> int: if "SLURM_JOB_GPUS" in os.environ: # TODO: is it possible to allocate uneven GPUs across nodes? return len(os.environ["SLURM_JOB_GPUS"].split(",")) + elif "SLURM_GPUS_PER_NODE" in os.environ: + return int(os.environ['SLURM_GPUS_PER_NODE']) else: # TODO: should we assume that we plan to do one worker per CPU? return int(os.environ["SLURM_CPUS_ON_NODE"]) @@ -52,7 +54,7 @@ def auto_hosts() -> list[str]: :rtype: list[str] """ if in_slurm_job(): - slurm_hosts() + return slurm_hosts() return ["localhost"] From e99c91a273a4af7f70bb634810d095fcdec6c0d0 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Thu, 29 Aug 2024 20:57:49 -0400 Subject: [PATCH 33/63] removed "Auto" class, just using Literal["auto"] --- src/torchrunx/__init__.py | 4 --- src/torchrunx/environment.py | 50 +++++++++++++++++------------------- src/torchrunx/launcher.py | 34 +++++++++++++----------- tests/test_func.py | 4 +-- tests/test_submitit.py | 4 +-- tests/test_train.py | 4 +-- 6 files changed, 48 insertions(+), 52 deletions(-) diff --git a/src/torchrunx/__init__.py b/src/torchrunx/__init__.py index fc2691b2..46b3b1b9 100644 --- a/src/torchrunx/__init__.py +++ b/src/torchrunx/__init__.py @@ -1,10 +1,6 @@ -from .environment import Auto, slurm_hosts, slurm_workers from .launcher import Launcher, launch __all__ = [ "Launcher", "launch", - "Auto", - "slurm_hosts", - "slurm_workers", ] diff --git a/src/torchrunx/environment.py b/src/torchrunx/environment.py index b9a49a47..560d62cc 100644 --- a/src/torchrunx/environment.py +++ b/src/torchrunx/environment.py @@ -44,29 +44,27 @@ def slurm_workers() -> int: return int(os.environ["SLURM_CPUS_ON_NODE"]) -class Auto: - @staticmethod - def hosts() -> list[str]: - """ - Automatically determine hostname list - - :return: Hostnames in Slurm allocation, or ['localhost'] - :rtype: list[str] - """ - if in_slurm_job(): - slurm_hosts() - - return ["localhost"] - - @staticmethod - def workers() -> int: - """ - Automatically determine number of workers per host - - :return: Workers per host - :rtype: int - """ - if in_slurm_job(): - return slurm_workers() - - return torch.cuda.device_count() or os.cpu_count() or 
1 +def auto_hosts() -> list[str]: + """ + Automatically determine hostname list + + :return: Hostnames in Slurm allocation, or ['localhost'] + :rtype: list[str] + """ + if in_slurm_job(): + slurm_hosts() + + return ["localhost"] + + +def auto_workers() -> int: + """ + Automatically determine number of workers per host + + :return: Workers per host + :rtype: int + """ + if in_slurm_job(): + return slurm_workers() + + return torch.cuda.device_count() or os.cpu_count() or 1 diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index ba9e29e0..cb5fb7ba 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -17,7 +17,7 @@ import fabric import torch.distributed as dist -from .environment import Auto +from .environment import auto_hosts, auto_workers, slurm_hosts, slurm_workers from .logging_utils import LogRecordSocketReceiver, default_handlers from .utils import ( AgentPayload, @@ -59,11 +59,11 @@ def execute_command( @dataclass class Launcher: - hostnames: list[str] | Auto = field(default_factory=lambda: ["localhost"]) - workers_per_host: int | list[int] | Auto = 1 + hostnames: list[str] | Literal["auto", "slurm"] = field(default_factory=lambda: ["localhost"]) + workers_per_host: int | list[int] | Literal["auto", "slurm"] = 1 ssh_config_file: str | os.PathLike | None = None backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None - log_handlers: list[Handler] | Auto | None = Auto() + log_handlers: list[Handler] | Literal["auto"] | None = "auto" env_vars: list[str] = field( default_factory=lambda: [ "PATH", @@ -101,13 +101,17 @@ def run( if not dist.is_available(): raise RuntimeError("The torch.distributed package is not available.") - if isinstance(self.hostnames, Auto): - self.hostnames = Auto.hosts() + if self.hostnames == "auto": + self.hostnames = auto_hosts() + elif self.hostnames == "slurm": + self.hostnames = slurm_hosts() num_hosts = len(self.hostnames) - if isinstance(self.workers_per_host, Auto): - self.workers_per_host = Auto.workers() + if self.workers_per_host == "auto": + self.workers_per_host = auto_workers() + elif self.workers_per_host == "slurm": + self.workers_per_host = slurm_workers() if isinstance(self.workers_per_host, int): self.workers_per_host = [self.workers_per_host] * num_hosts @@ -122,7 +126,7 @@ def run( if self.log_handlers is None: self.log_handlers = [] - elif isinstance(self.log_handlers, Auto): + elif self.log_handlers == "auto": self.log_handlers = default_handlers( hostnames=self.hostnames, workers_per_host=self.workers_per_host ) @@ -250,11 +254,11 @@ def launch( func: Callable, func_args: tuple[Any] = tuple(), func_kwargs: dict[str, Any] = {}, - hostnames: list[str] | Auto = ["localhost"], - workers_per_host: int | list[int] | Auto = 1, + hostnames: list[str] | Literal["auto", "slurm"] = ["localhost"], + workers_per_host: int | list[int] | Literal["auto", "slurm"] = 1, ssh_config_file: str | os.PathLike | None = None, backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None, - log_handlers: list[Handler] = [], + log_handlers: list[Handler] | Literal["auto"] = "auto", env_vars: list[str] = [ "PATH", "LD_LIBRARY", @@ -280,15 +284,15 @@ def launch( :param auto: Automatically determine allocation sizes, supports Slurm allocation. 
``hostnames`` and ``workers_per_host`` are automatically assigned if they're set to ``None``, defaults to None :type auto: bool, optional :param hostnames: A list of node hostnames to start workers on, defaults to ["localhost"] - :type hostnames: list[str] | None, optional + :type hostnames: list[str] | Literal["auto", "slurm"] | None, optional :param workers_per_host: The number of workers per node. Providing an ``int`` implies all nodes should have ``workers_per_host`` workers, meanwhile providing a list causes node ``i`` to have ``worker_per_host[i]`` workers, defaults to 1 - :type workers_per_host: int | list[int] | None, optional + :type workers_per_host: int | list[int] | Literal["auto", "slurm"] | None, optional :param ssh_config_file: An SSH configuration file to use when connecting to nodes, defaults to None :type ssh_config_file: str | os.PathLike | None, optional :param backend: A ``torch.distributed`` `backend string `_, defaults to None :type backend: Literal['mpi', 'gloo', 'nccl', 'ucc', None], optional :param log_handlers: A list of handlers to manage agent and worker logs, defaults to [] - :type log_handlers: list[Handler], optional + :type log_handlers: list[Handler] | Literal["auto"], optional :param env_vars: A list of environmental variables to be copied from the launcher environment to workers. Allows for bash pattern matching syntax, defaults to ["PATH", "LD_LIBRARY", "LIBRARY_PATH", "PYTHON*", "CUDA*", "TORCH*", "PYTORCH*", "NCCL*"] :type env_vars: list[str], optional :param env_file: An additional environment file that will be sourced prior to executing ``func``, defaults to None diff --git a/tests/test_func.py b/tests/test_func.py index 890987b6..9db6454d 100644 --- a/tests/test_func.py +++ b/tests/test_func.py @@ -9,8 +9,8 @@ def test_launch(): result = trx.launch( func=simple_matmul, - hostnames=trx.slurm_hosts(), - workers_per_host=trx.slurm_workers(), + hostnames="slurm", + workers_per_host="slurm", ) t = True diff --git a/tests/test_submitit.py b/tests/test_submitit.py index 53aaf5cc..290f7aad 100644 --- a/tests/test_submitit.py +++ b/tests/test_submitit.py @@ -47,9 +47,7 @@ def main(): def launch(): - trx.launch( - func=main, func_kwargs={}, hostnames=trx.slurm_hosts(), workers_per_host=trx.slurm_workers() - ) + trx.launch(func=main, func_kwargs={}, hostnames="slurm", workers_per_host="slurm") def test_submitit(): diff --git a/tests/test_train.py b/tests/test_train.py index 8a0b2a55..d28f5ef5 100644 --- a/tests/test_train.py +++ b/tests/test_train.py @@ -35,8 +35,8 @@ def forward(self, x): def test_distributed_train(): trx.launch( worker, - hostnames=trx.slurm_hosts(), - workers_per_host=trx.slurm_workers(), + hostnames="slurm", + workers_per_host="slurm", backend="nccl", ) From b8b54452036f72af7595fd658052cff72214ad12 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Sat, 31 Aug 2024 04:11:14 -0400 Subject: [PATCH 34/63] adjustments to logging --- .gitignore | 8 ++------ src/torchrunx/agent.py | 2 ++ src/torchrunx/logging_utils.py | 28 +++++++++++++++++----------- tests/test_CI.py | 9 +++++++-- 4 files changed, 28 insertions(+), 19 deletions(-) diff --git a/.gitignore b/.gitignore index 4dbbd920..af2731dd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,12 +1,8 @@ +torchrunx_logs/ .pixi/ -logs/ -test_logs/ -_build/ -out/ -output/ +.ruff_cache/ .vscode/ - # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 208b8b6b..111c0e21 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py 
@@ -61,6 +61,8 @@ def entrypoint(serialized_worker_args: bytes): logger_port=worker_args.logger_port, ) + logging.captureWarnings(True) # TODO ? + sys.stderr = StreamLogger(logger, sys.__stderr__) sys.stdout = StreamLogger(logger, sys.__stdout__) diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index db9e2cbd..70068ebf 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -2,6 +2,7 @@ import datetime import logging +import os import pickle import struct from io import StringIO, TextIOWrapper @@ -32,39 +33,44 @@ def record_factory(*args, **kwargs): logger.addHandler(SocketHandler(host=logger_hostname, port=logger_port)) -def default_handlers( - hostnames: list[str], workers_per_host: list[int], log_dir: str = "./logs" -) -> list[Handler]: +def default_handlers(hostnames: list[str], workers_per_host: list[int]) -> list[Handler]: handlers = [] + log_dir = os.environ.get("TORCHRUNX_DIR", "./torchrunx_logs") + os.makedirs(log_dir, exist_ok=True) timestamp = datetime.datetime.now().isoformat(timespec="seconds") def make_handler(hostname: str, rank: int | None = None, stream: bool = False) -> Handler: if stream: handler = logging.StreamHandler() + formatter = logging.Formatter( + "%(asctime)s:%(levelname)s:%(hostname)s" + + ("[%(worker_rank)s]" if rank is not None else "") + + ": %(message)s" + ) else: handler = logging.FileHandler( f"{log_dir}/{timestamp}-{hostname}{'' if rank is None else f'[{rank}]'}.log" ) + formatter = logging.Formatter( + "%(asctime)s:%(levelname)s: %(message)s" + ) def handler_filter(record: logging.LogRecord) -> bool: return record.hostname == hostname and record.worker_rank == rank # pyright: ignore handler.addFilter(handler_filter) handler.setLevel(logging.DEBUG) - formatter = logging.Formatter( - "%(asctime)s:%(levelname)s:%(hostname)s:worker-%(worker_rank)s:%(message)s" - ) handler.setFormatter(formatter) return handler - for i, hostname in enumerate(hostnames): - handlers.append(make_handler(hostname=hostname)) - for j in range(workers_per_host[i]): + for r in [None, 0]: + handlers.append(make_handler(hostname=hostnames[0], rank=r, stream=True)) + + for hostname, num_workers in zip(hostnames, workers_per_host): + for j in [None] + list(range(num_workers)): handlers.append(make_handler(hostname=hostname, rank=j)) - if i == 0 and j == 0: - handlers.append(make_handler(hostname=hostname, rank=j, stream=True)) return handlers diff --git a/tests/test_CI.py b/tests/test_CI.py index 4862b09d..babb72fa 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -45,12 +45,14 @@ def dist_func(): print(f"worker rank: {rank}") tmp = tempfile.mkdtemp() + os.environ["TORCHRUNX_DIR"] = tmp + trx.launch( func=dist_func, func_kwargs={}, workers_per_host=2, backend="gloo", - log_handlers=default_handlers(hostnames=["localhost"], workers_per_host=[2], log_dir=tmp), + log_handlers=default_handlers(hostnames=["localhost"], workers_per_host=[2]), ) log_files = next(os.walk(tmp), (None, None, []))[2] @@ -78,7 +80,10 @@ def error_func(): func_kwargs={}, workers_per_host=1, backend="gloo", - # log_dir=tempfile.mkdtemp(), ) assert "abcdefg" in str(excinfo.value) + + +if __name__ == "__main__": + test_simple_localhost() From 66cea381f011933190dbbfc29f8bb575516aa653 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Mon, 2 Sep 2024 12:55:18 -0400 Subject: [PATCH 35/63] add agent stream capture, flushing --- src/torchrunx/agent.py | 14 +++++++++++++- src/torchrunx/logging_utils.py | 8 +++----- 2 files changed, 16 insertions(+), 6 deletions(-) 
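For reference, the logging pipeline assembled in the patches above works like this: every agent and worker attaches a SocketHandler that ships its LogRecords to a receiver on the launcher, and the launcher's "auto" handlers write those records to per-agent and per-worker files under the directory named by TORCHRUNX_DIR. A minimal sketch of how this is exercised, closely following tests/test_CI.py from these patches (the worker body and the temporary directory are illustrative, not part of the patch series):

import os
import tempfile

import torchrunx as trx


def dist_func():
    # anything a worker prints is captured and forwarded to the launcher's log handlers
    print("hello from a worker")


if __name__ == "__main__":
    log_dir = tempfile.mkdtemp()
    os.environ["TORCHRUNX_DIR"] = log_dir  # consulted when building the default ("auto") handlers

    trx.launch(func=dist_func, func_kwargs={}, workers_per_host=2, backend="gloo")

    # expect one "<timestamp>-<hostname>.log" per agent and "<timestamp>-<hostname>[<rank>].log" per worker
    print(sorted(os.listdir(log_dir)))
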
diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 111c0e21..7ea3f998 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -95,7 +95,14 @@ def entrypoint(serialized_worker_args: bytes): os.environ["MASTER_PORT"] = str(worker_args.main_agent_port) logger.debug(f"executing function: {worker_args.function}") - return worker_args.function() + + r = worker_args.function() + + # flush streams + sys.stdout.flush() + sys.stderr.flush() + + return r def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_port: int): @@ -118,6 +125,9 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ logger = logging.getLogger() + sys.stderr = StreamLogger(logger, sys.__stderr__) + sys.stdout = StreamLogger(logger, sys.__stdout__) + log_records_to_socket( logger=logger, hostname=hostname, @@ -181,3 +191,5 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ raise finally: ctx.close() + sys.stdout.flush() + sys.stderr.flush() diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 70068ebf..60e5dceb 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -18,7 +18,7 @@ def log_records_to_socket( logger_hostname: str, logger_port: int, ): - logger.setLevel(logging.DEBUG) + logger.setLevel(logging.NOTSET) old_factory = logging.getLogRecordFactory() @@ -52,15 +52,13 @@ def make_handler(hostname: str, rank: int | None = None, stream: bool = False) - handler = logging.FileHandler( f"{log_dir}/{timestamp}-{hostname}{'' if rank is None else f'[{rank}]'}.log" ) - formatter = logging.Formatter( - "%(asctime)s:%(levelname)s: %(message)s" - ) + formatter = logging.Formatter("%(asctime)s:%(levelname)s: %(message)s") def handler_filter(record: logging.LogRecord) -> bool: return record.hostname == hostname and record.worker_rank == rank # pyright: ignore handler.addFilter(handler_filter) - handler.setLevel(logging.DEBUG) + handler.setLevel(logging.NOTSET) handler.setFormatter(formatter) return handler From 5397fd6b1630f9f945fedd050bb8879c093b63d8 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Mon, 2 Sep 2024 19:14:17 -0400 Subject: [PATCH 36/63] StreamLogger to LoggingStream(StringIO) --- src/torchrunx/agent.py | 17 +++++++++-------- src/torchrunx/logging_utils.py | 26 ++++++++------------------ 2 files changed, 17 insertions(+), 26 deletions(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 7ea3f998..a4868c91 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -14,8 +14,9 @@ import torch.distributed as dist from torch.distributed.elastic.multiprocessing import start_processes from typing_extensions import Self +from contextlib import redirect_stderr, redirect_stdout -from .logging_utils import StreamLogger, log_records_to_socket +from .logging_utils import LoggingStream, log_records_to_socket from .utils import ( AgentPayload, AgentStatus, @@ -61,10 +62,9 @@ def entrypoint(serialized_worker_args: bytes): logger_port=worker_args.logger_port, ) - logging.captureWarnings(True) # TODO ? 
- - sys.stderr = StreamLogger(logger, sys.__stderr__) - sys.stdout = StreamLogger(logger, sys.__stdout__) + logging.captureWarnings(True) + redirect_stderr(LoggingStream(logger)).__enter__() + redirect_stdout(LoggingStream(logger)).__enter__() store = dist.TCPStore( # pyright: ignore[reportPrivateImportUsage] host_name=worker_args.main_agent_hostname, @@ -125,9 +125,6 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ logger = logging.getLogger() - sys.stderr = StreamLogger(logger, sys.__stderr__) - sys.stdout = StreamLogger(logger, sys.__stdout__) - log_records_to_socket( logger=logger, hostname=hostname, @@ -136,6 +133,10 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ logger_port=logger_port, ) + logging.captureWarnings(True) + redirect_stderr(LoggingStream(logger)).__enter__() + redirect_stdout(LoggingStream(logger)).__enter__() + if torch.__version__ >= "2.3": from torch.distributed.elastic.multiprocessing import DefaultLogsSpecs diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 60e5dceb..61631694 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -5,7 +5,7 @@ import os import pickle import struct -from io import StringIO, TextIOWrapper +from io import StringIO from logging import Handler, Logger from logging.handlers import SocketHandler from socketserver import StreamRequestHandler, ThreadingTCPServer @@ -95,25 +95,15 @@ def handle(self): self.daemon_threads = True -class StreamLogger: - """ - For logging write calls to streams such as stdout and stdin in the worker processes. - """ - - def __init__(self, logger: Logger, stream: TextIOWrapper | None): +class LoggingStream(StringIO): + def __init__(self, logger: Logger): + super().__init__() self.logger = logger - self._string_io = StringIO() - if stream is None: - raise ValueError("stream cannot be None") - self.stream: TextIOWrapper = stream # type: ignore - - def write(self, data: str): - self._string_io.write(data) - self.stream.write(data) def flush(self): - value = self._string_io.getvalue() + super().flush() + value = self.getvalue() if value != "": self.logger.info(f"\n{value}") - self._string_io = StringIO() # "create a new one, it's faster" - someone online - self.stream.flush() + self.truncate(0) + self.seek(0) From 4baa151a195a55a9b40258dcbe0d2e57ee0e5ab8 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Mon, 2 Sep 2024 19:14:38 -0400 Subject: [PATCH 37/63] use default ruff lint rules --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 34371c6f..fe918714 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,8 +40,6 @@ Documentation = "https://torchrunx.readthedocs.io" include = ["pyproject.toml", "src/**/*.py", "tests/**/*.py"] line-length = 100 src = ["src", "tests"] -[tool.ruff.lint] -select = ["E", "F", "I"] [tool.pyright] include = ["src", "tests"] From 1c5e86e841a88d8e34723b98428a9c1af8a77b7d Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Mon, 2 Sep 2024 19:15:51 -0400 Subject: [PATCH 38/63] extend-select import linting rules --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index fe918714..9e6aa0fd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,8 @@ Documentation = "https://torchrunx.readthedocs.io" include = ["pyproject.toml", "src/**/*.py", "tests/**/*.py"] line-length = 100 src = ["src", "tests"] +[tool.ruff.lint] +extend-select = ["I"] [tool.pyright] include = 
["src", "tests"] From 92fe35c89e56ce686bb60a57b9c42d3b2de98754 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Mon, 2 Sep 2024 19:16:20 -0400 Subject: [PATCH 39/63] linting --- src/torchrunx/agent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index a4868c91..4ee7d0f1 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -6,6 +6,7 @@ import socket import sys import tempfile +from contextlib import redirect_stderr, redirect_stdout from dataclasses import dataclass from typing import Callable, Literal @@ -14,7 +15,6 @@ import torch.distributed as dist from torch.distributed.elastic.multiprocessing import start_processes from typing_extensions import Self -from contextlib import redirect_stderr, redirect_stdout from .logging_utils import LoggingStream, log_records_to_socket from .utils import ( From 5cd1385f3e0878a23fcddefca16fae5554d0942c Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Mon, 2 Sep 2024 19:21:55 -0400 Subject: [PATCH 40/63] added log level for std streams --- src/torchrunx/agent.py | 8 ++++---- src/torchrunx/logging_utils.py | 5 +++-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 4ee7d0f1..599039f4 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -63,8 +63,8 @@ def entrypoint(serialized_worker_args: bytes): ) logging.captureWarnings(True) - redirect_stderr(LoggingStream(logger)).__enter__() - redirect_stdout(LoggingStream(logger)).__enter__() + redirect_stderr(LoggingStream(logger, level=logging.ERROR)).__enter__() + redirect_stdout(LoggingStream(logger, level=logging.INFO)).__enter__() store = dist.TCPStore( # pyright: ignore[reportPrivateImportUsage] host_name=worker_args.main_agent_hostname, @@ -134,8 +134,8 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ ) logging.captureWarnings(True) - redirect_stderr(LoggingStream(logger)).__enter__() - redirect_stdout(LoggingStream(logger)).__enter__() + redirect_stderr(LoggingStream(logger, level=logging.ERROR)).__enter__() + redirect_stdout(LoggingStream(logger, level=logging.INFO)).__enter__() if torch.__version__ >= "2.3": from torch.distributed.elastic.multiprocessing import DefaultLogsSpecs diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 61631694..7add0aca 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -96,14 +96,15 @@ def handle(self): class LoggingStream(StringIO): - def __init__(self, logger: Logger): + def __init__(self, logger: Logger, level: int = logging.NOTSET): super().__init__() self.logger = logger + self.level = level def flush(self): super().flush() value = self.getvalue() if value != "": - self.logger.info(f"\n{value}") + self.logger.log(self.level, f"\n{value}") self.truncate(0) self.seek(0) From a640c2a9fc18bdaee65d680bc5a044ec989a0e97 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Mon, 2 Sep 2024 19:30:53 -0400 Subject: [PATCH 41/63] ThreadingTCPServer to TCPServer --- src/torchrunx/logging_utils.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 7add0aca..ae457f93 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -8,7 +8,7 @@ from io import StringIO from logging import Handler, Logger from logging.handlers import SocketHandler -from socketserver import StreamRequestHandler, ThreadingTCPServer +from 
socketserver import StreamRequestHandler, TCPServer def log_records_to_socket( @@ -73,7 +73,7 @@ def handler_filter(record: logging.LogRecord) -> bool: return handlers -class LogRecordSocketReceiver(ThreadingTCPServer): +class LogRecordSocketReceiver(TCPServer): def __init__(self, host: str, port: int, handlers: list[Handler]): class _LogRecordStreamHandler(StreamRequestHandler): def handle(self): @@ -91,8 +91,11 @@ def handle(self): for handler in handlers: handler.handle(record) - super().__init__((host, port), _LogRecordStreamHandler) - self.daemon_threads = True + super().__init__( + server_address=(host, port), + RequestHandlerClass=_LogRecordStreamHandler, + bind_and_activate=True, + ) class LoggingStream(StringIO): From 805a2858b7d78fa6f673ae37b630ad35ebee14cb Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Mon, 2 Sep 2024 20:06:16 -0400 Subject: [PATCH 42/63] refactoring handler build functions --- src/torchrunx/agent.py | 11 +-- src/torchrunx/logging_utils.py | 121 ++++++++++++++++++++------------- 2 files changed, 76 insertions(+), 56 deletions(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 599039f4..3f716e2a 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -6,7 +6,6 @@ import socket import sys import tempfile -from contextlib import redirect_stderr, redirect_stdout from dataclasses import dataclass from typing import Callable, Literal @@ -16,7 +15,7 @@ from torch.distributed.elastic.multiprocessing import start_processes from typing_extensions import Self -from .logging_utils import LoggingStream, log_records_to_socket +from .logging_utils import log_records_to_socket, redirect_stdio_to_logger from .utils import ( AgentPayload, AgentStatus, @@ -62,9 +61,7 @@ def entrypoint(serialized_worker_args: bytes): logger_port=worker_args.logger_port, ) - logging.captureWarnings(True) - redirect_stderr(LoggingStream(logger, level=logging.ERROR)).__enter__() - redirect_stdout(LoggingStream(logger, level=logging.INFO)).__enter__() + redirect_stdio_to_logger(logger) store = dist.TCPStore( # pyright: ignore[reportPrivateImportUsage] host_name=worker_args.main_agent_hostname, @@ -133,9 +130,7 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ logger_port=logger_port, ) - logging.captureWarnings(True) - redirect_stderr(LoggingStream(logger, level=logging.ERROR)).__enter__() - redirect_stdout(LoggingStream(logger, level=logging.INFO)).__enter__() + redirect_stdio_to_logger(logger) if torch.__version__ >= "2.3": from torch.distributed.elastic.multiprocessing import DefaultLogsSpecs diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index ae457f93..4fd00e4d 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -5,10 +5,66 @@ import os import pickle import struct +from contextlib import redirect_stderr, redirect_stdout from io import StringIO from logging import Handler, Logger from logging.handlers import SocketHandler from socketserver import StreamRequestHandler, TCPServer +from typing import Callable + + +def get_filter(hostname: str, rank: int | None = None) -> Callable[[logging.LogRecord], bool]: + def _handler_filter(record: logging.LogRecord) -> bool: + return record.hostname == hostname and record.worker_rank == rank + + return _handler_filter + + +def file_handlers(hostnames: list[str], workers_per_host: list[int]) -> list[Handler]: + handlers = [] + + log_dir = os.environ.get("TORCHRUNX_DIR", "./torchrunx_logs") + os.makedirs(log_dir, exist_ok=True) + timestamp 
= datetime.datetime.now().isoformat(timespec="seconds") + + for hostname, num_workers in zip(hostnames, workers_per_host): + for rank in [None] + list(range(num_workers)): + handler = logging.FileHandler( + f"{log_dir}/{timestamp}-{hostname}{'' if rank is None else f'[{rank}]'}.log" + ) + formatter = logging.Formatter("%(asctime)s:%(levelname)s: %(message)s") + + handler.addFilter(get_filter(hostname, rank)) + handler.setLevel(logging.NOTSET) + handler.setFormatter(formatter) + + handlers.append(handler) + + return handlers + + +def stream_handler(hostname: str, rank: int | None) -> Handler: + handler = logging.StreamHandler() + formatter = logging.Formatter( + "%(asctime)s:%(levelname)s:%(hostname)s" + + ("[%(worker_rank)s]" if rank is not None else "") + + ": %(message)s" + ) + handler.addFilter(get_filter(hostname, rank)) + handler.setLevel(logging.NOTSET) + handler.setFormatter(formatter) + return handler + + +def default_handlers(hostnames: list[str], workers_per_host: list[int]) -> list[Handler]: + stream_handlers = [ + stream_handler(hostname=hostnames[0], rank=None), + stream_handler(hostname=hostnames[0], rank=0), + ] + return stream_handlers + file_handlers(hostnames, workers_per_host) + + +## Agent/worker utilities def log_records_to_socket( @@ -33,44 +89,28 @@ def record_factory(*args, **kwargs): logger.addHandler(SocketHandler(host=logger_hostname, port=logger_port)) -def default_handlers(hostnames: list[str], workers_per_host: list[int]) -> list[Handler]: - handlers = [] +def redirect_stdio_to_logger(logger: Logger): + logging.captureWarnings(True) + redirect_stderr(LoggingStream(logger, level=logging.ERROR)).__enter__() + redirect_stdout(LoggingStream(logger, level=logging.INFO)).__enter__() - log_dir = os.environ.get("TORCHRUNX_DIR", "./torchrunx_logs") - os.makedirs(log_dir, exist_ok=True) - timestamp = datetime.datetime.now().isoformat(timespec="seconds") - def make_handler(hostname: str, rank: int | None = None, stream: bool = False) -> Handler: - if stream: - handler = logging.StreamHandler() - formatter = logging.Formatter( - "%(asctime)s:%(levelname)s:%(hostname)s" - + ("[%(worker_rank)s]" if rank is not None else "") - + ": %(message)s" - ) - else: - handler = logging.FileHandler( - f"{log_dir}/{timestamp}-{hostname}{'' if rank is None else f'[{rank}]'}.log" - ) - formatter = logging.Formatter("%(asctime)s:%(levelname)s: %(message)s") - - def handler_filter(record: logging.LogRecord) -> bool: - return record.hostname == hostname and record.worker_rank == rank # pyright: ignore - - handler.addFilter(handler_filter) - handler.setLevel(logging.NOTSET) - handler.setFormatter(formatter) - - return handler +class LoggingStream(StringIO): + def __init__(self, logger: Logger, level: int = logging.NOTSET): + super().__init__() + self.logger = logger + self.level = level - for r in [None, 0]: - handlers.append(make_handler(hostname=hostnames[0], rank=r, stream=True)) + def flush(self): + super().flush() + value = self.getvalue() + if value != "": + self.logger.log(self.level, f"\n{value}") + self.truncate(0) + self.seek(0) - for hostname, num_workers in zip(hostnames, workers_per_host): - for j in [None] + list(range(num_workers)): - handlers.append(make_handler(hostname=hostname, rank=j)) - return handlers +## Launcher utilities class LogRecordSocketReceiver(TCPServer): @@ -96,18 +136,3 @@ def handle(self): RequestHandlerClass=_LogRecordStreamHandler, bind_and_activate=True, ) - - -class LoggingStream(StringIO): - def __init__(self, logger: Logger, level: int = 
logging.NOTSET): - super().__init__() - self.logger = logger - self.level = level - - def flush(self): - super().flush() - value = self.getvalue() - if value != "": - self.logger.log(self.level, f"\n{value}") - self.truncate(0) - self.seek(0) From 61451c31db4d31ac4212ac3d2e8f246b802e7520 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Mon, 2 Sep 2024 23:34:27 -0400 Subject: [PATCH 43/63] switch to Filter class --- pixi.lock | 2 +- src/torchrunx/logging_utils.py | 15 ++++++++------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/pixi.lock b/pixi.lock index 074abee8..2e1c2bd1 100644 --- a/pixi.lock +++ b/pixi.lock @@ -2557,7 +2557,7 @@ packages: name: torchrunx version: 0.1.2 path: . - sha256: 7045df900ce870f00f3fb2d88381f6b4ab65e95e50d839eaf98d8d12069b960d + sha256: 204075d053445a4bcebec8e87f2dde7d5f2729d4c086a52f3f5def389a0b1262 requires_dist: - cloudpickle>=3.0.0 - fabric>=3.0.0 diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 4fd00e4d..ec385166 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -10,14 +10,15 @@ from logging import Handler, Logger from logging.handlers import SocketHandler from socketserver import StreamRequestHandler, TCPServer -from typing import Callable -def get_filter(hostname: str, rank: int | None = None) -> Callable[[logging.LogRecord], bool]: - def _handler_filter(record: logging.LogRecord) -> bool: - return record.hostname == hostname and record.worker_rank == rank +class WorkerLogFilter(logging.Filter): + def __init__(self, hostname: str, worker_rank: int | None): + self.hostname = hostname + self.worker_rank = worker_rank - return _handler_filter + def filter(self, record: logging.LogRecord) -> bool: + return record.hostname == self.hostname and record.worker_rank == self.worker_rank def file_handlers(hostnames: list[str], workers_per_host: list[int]) -> list[Handler]: @@ -34,7 +35,7 @@ def file_handlers(hostnames: list[str], workers_per_host: list[int]) -> list[Han ) formatter = logging.Formatter("%(asctime)s:%(levelname)s: %(message)s") - handler.addFilter(get_filter(hostname, rank)) + handler.addFilter(WorkerLogFilter(hostname, rank)) handler.setLevel(logging.NOTSET) handler.setFormatter(formatter) @@ -50,7 +51,7 @@ def stream_handler(hostname: str, rank: int | None) -> Handler: + ("[%(worker_rank)s]" if rank is not None else "") + ": %(message)s" ) - handler.addFilter(get_filter(hostname, rank)) + handler.addFilter(WorkerLogFilter(hostname, rank)) handler.setLevel(logging.NOTSET) handler.setFormatter(formatter) return handler From a13f23a72fffdb2dd35386650faff70eeff873b9 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Tue, 3 Sep 2024 00:25:28 -0400 Subject: [PATCH 44/63] more refactoring for logging utils --- src/torchrunx/logging_utils.py | 75 +++++++++++++++++++++------------- 1 file changed, 47 insertions(+), 28 deletions(-) diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index ec385166..3ddc8a91 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -11,17 +11,38 @@ from logging.handlers import SocketHandler from socketserver import StreamRequestHandler, TCPServer +## Handler utilities -class WorkerLogFilter(logging.Filter): - def __init__(self, hostname: str, worker_rank: int | None): - self.hostname = hostname - self.worker_rank = worker_rank - def filter(self, record: logging.LogRecord) -> bool: - return record.hostname == self.hostname and record.worker_rank == self.worker_rank +def add_filter_to_handler( + 
handler: Handler, + hostname: str, + worker_rank: int | None, + log_level: int = logging.NOTSET, +) -> None: + def _filter(record: logging.LogRecord) -> bool: + return ( + record.hostname == hostname + and record.worker_rank == worker_rank + and record.levelno >= log_level + ) + + handler.addFilter(_filter) + + +def file_handler( + hostname: str, worker_rank: int | None, file_path: str, log_level: int = logging.NOTSET +) -> Handler: + handler = logging.FileHandler(file_path) + add_filter_to_handler(handler, hostname, worker_rank, log_level=log_level) + formatter = logging.Formatter("%(asctime)s:%(levelname)s: %(message)s") + handler.setFormatter(formatter) + return handler -def file_handlers(hostnames: list[str], workers_per_host: list[int]) -> list[Handler]: +def file_handlers( + hostnames: list[str], workers_per_host: list[int], log_level: int = logging.NOTSET +) -> list[Handler]: handlers = [] log_dir = os.environ.get("TORCHRUNX_DIR", "./torchrunx_logs") @@ -30,39 +51,37 @@ def file_handlers(hostnames: list[str], workers_per_host: list[int]) -> list[Han for hostname, num_workers in zip(hostnames, workers_per_host): for rank in [None] + list(range(num_workers)): - handler = logging.FileHandler( - f"{log_dir}/{timestamp}-{hostname}{'' if rank is None else f'[{rank}]'}.log" + file_path = ( + f"{log_dir}/{timestamp}-{hostname}" + + (f"[{rank}]" if rank is not None else "") + + ".log" ) - formatter = logging.Formatter("%(asctime)s:%(levelname)s: %(message)s") - - handler.addFilter(WorkerLogFilter(hostname, rank)) - handler.setLevel(logging.NOTSET) - handler.setFormatter(formatter) - - handlers.append(handler) + handlers.append(file_handler(hostname, rank, file_path, log_level=log_level)) return handlers -def stream_handler(hostname: str, rank: int | None) -> Handler: +def stream_handler(hostname: str, rank: int | None, log_level: int = logging.NOTSET) -> Handler: handler = logging.StreamHandler() - formatter = logging.Formatter( - "%(asctime)s:%(levelname)s:%(hostname)s" - + ("[%(worker_rank)s]" if rank is not None else "") - + ": %(message)s" + add_filter_to_handler(handler, hostname, rank, log_level=log_level) + handler.setFormatter( + logging.Formatter( + "%(asctime)s:%(levelname)s:%(hostname)s[%(worker_rank)s]: %(message)s" + if rank is not None + else "%(asctime)s:%(levelname)s:%(hostname)s: %(message)s" + ) ) - handler.addFilter(WorkerLogFilter(hostname, rank)) - handler.setLevel(logging.NOTSET) - handler.setFormatter(formatter) return handler -def default_handlers(hostnames: list[str], workers_per_host: list[int]) -> list[Handler]: +def default_handlers( + hostnames: list[str], workers_per_host: list[int], log_level: int = logging.INFO +) -> list[Handler]: stream_handlers = [ - stream_handler(hostname=hostnames[0], rank=None), - stream_handler(hostname=hostnames[0], rank=0), + stream_handler(hostname=hostnames[0], rank=None, log_level=log_level), + stream_handler(hostname=hostnames[0], rank=0, log_level=log_level), ] - return stream_handlers + file_handlers(hostnames, workers_per_host) + return stream_handlers + file_handlers(hostnames, workers_per_host, log_level=log_level) ## Agent/worker utilities From abc7267f7e4eb75c1ab0c01fbec1c5181da41725 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Tue, 3 Sep 2024 03:08:53 -0400 Subject: [PATCH 45/63] env variables in launcher for controlling logging --- src/torchrunx/launcher.py | 10 +++++-- src/torchrunx/logging_utils.py | 49 ++++++++++++++++++---------------- 2 files changed, 34 insertions(+), 25 deletions(-) diff --git 
a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index cb5fb7ba..5b30eb53 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -3,6 +3,7 @@ import fnmatch import ipaddress import itertools +import logging import os import socket import subprocess @@ -47,7 +48,6 @@ def execute_command( hostname: str, ssh_config_file: str | os.PathLike | None = None, ) -> None: - # TODO: permit different stderr / stdout if is_localhost(hostname): subprocess.Popen(command, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) else: @@ -128,7 +128,10 @@ def run( self.log_handlers = [] elif self.log_handlers == "auto": self.log_handlers = default_handlers( - hostnames=self.hostnames, workers_per_host=self.workers_per_host + hostnames=self.hostnames, + workers_per_host=self.workers_per_host, + log_dir=os.environ.get("TORCHRUNX_DIR", "./torchrunx_logs"), + log_level=getattr(logging, os.environ.get("TORCHRUNX_LOG_LEVEL", "INFO")), ) logger_port = get_open_port() @@ -243,6 +246,9 @@ def run( ) raise finally: + # log_receiver.timeout = 1 + # log_receiver.shutdown() + # log_receiver.server_close() log_process.kill() dist.destroy_process_group() diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 3ddc8a91..0bdc397d 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -41,11 +41,13 @@ def file_handler( def file_handlers( - hostnames: list[str], workers_per_host: list[int], log_level: int = logging.NOTSET + hostnames: list[str], + workers_per_host: list[int], + log_dir: str = "./torchrunx_logs", + log_level: int = logging.NOTSET, ) -> list[Handler]: handlers = [] - log_dir = os.environ.get("TORCHRUNX_DIR", "./torchrunx_logs") os.makedirs(log_dir, exist_ok=True) timestamp = datetime.datetime.now().isoformat(timespec="seconds") @@ -75,13 +77,15 @@ def stream_handler(hostname: str, rank: int | None, log_level: int = logging.NOT def default_handlers( - hostnames: list[str], workers_per_host: list[int], log_level: int = logging.INFO + hostnames: list[str], + workers_per_host: list[int], + log_dir: str = "./torchrunx_logs", + log_level: int = logging.INFO, ) -> list[Handler]: - stream_handlers = [ + return [ stream_handler(hostname=hostnames[0], rank=None, log_level=log_level), stream_handler(hostname=hostnames[0], rank=0, log_level=log_level), - ] - return stream_handlers + file_handlers(hostnames, workers_per_host, log_level=log_level) + ] + file_handlers(hostnames, workers_per_host, log_dir=log_dir, log_level=log_level) ## Agent/worker utilities @@ -110,24 +114,23 @@ def record_factory(*args, **kwargs): def redirect_stdio_to_logger(logger: Logger): + class _LoggingStream(StringIO): + def __init__(self, logger: Logger, level: int = logging.NOTSET): + super().__init__() + self.logger = logger + self.level = level + + def flush(self): + super().flush() + value = self.getvalue() + if value != "": + self.logger.log(self.level, f"\n{value}") + self.truncate(0) + self.seek(0) + logging.captureWarnings(True) - redirect_stderr(LoggingStream(logger, level=logging.ERROR)).__enter__() - redirect_stdout(LoggingStream(logger, level=logging.INFO)).__enter__() - - -class LoggingStream(StringIO): - def __init__(self, logger: Logger, level: int = logging.NOTSET): - super().__init__() - self.logger = logger - self.level = level - - def flush(self): - super().flush() - value = self.getvalue() - if value != "": - self.logger.log(self.level, f"\n{value}") - self.truncate(0) - self.seek(0) + redirect_stderr(_LoggingStream(logger, 
level=logging.ERROR)).__enter__() + redirect_stdout(_LoggingStream(logger, level=logging.INFO)).__enter__() ## Launcher utilities From 90e032e7355e10e2dba22f78de317e2217dbec06 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Tue, 3 Sep 2024 03:36:48 -0400 Subject: [PATCH 46/63] using pathlib --- src/torchrunx/launcher.py | 4 +++- src/torchrunx/logging_utils.py | 10 +++++++--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 5b30eb53..6e7d8fd7 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -131,7 +131,9 @@ def run( hostnames=self.hostnames, workers_per_host=self.workers_per_host, log_dir=os.environ.get("TORCHRUNX_DIR", "./torchrunx_logs"), - log_level=getattr(logging, os.environ.get("TORCHRUNX_LOG_LEVEL", "INFO")), + log_level=logging._nameToLevel.get( + os.environ.get("TORCHRUNX_LOG_LEVEL", "INFO"), logging.NOTSET + ), ) logger_port = get_open_port() diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 0bdc397d..12003c42 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -9,6 +9,7 @@ from io import StringIO from logging import Handler, Logger from logging.handlers import SocketHandler +from pathlib import Path from socketserver import StreamRequestHandler, TCPServer ## Handler utilities @@ -31,7 +32,10 @@ def _filter(record: logging.LogRecord) -> bool: def file_handler( - hostname: str, worker_rank: int | None, file_path: str, log_level: int = logging.NOTSET + hostname: str, + worker_rank: int | None, + file_path: str | os.PathLike, + log_level: int = logging.NOTSET, ) -> Handler: handler = logging.FileHandler(file_path) add_filter_to_handler(handler, hostname, worker_rank, log_level=log_level) @@ -43,7 +47,7 @@ def file_handler( def file_handlers( hostnames: list[str], workers_per_host: list[int], - log_dir: str = "./torchrunx_logs", + log_dir: str | os.PathLike = Path("torchrunx_logs"), log_level: int = logging.NOTSET, ) -> list[Handler]: handlers = [] @@ -79,7 +83,7 @@ def stream_handler(hostname: str, rank: int | None, log_level: int = logging.NOT def default_handlers( hostnames: list[str], workers_per_host: list[int], - log_dir: str = "./torchrunx_logs", + log_dir: str | os.PathLike = Path("torchrunx_logs"), log_level: int = logging.INFO, ) -> list[Handler]: return [ From f744391a3be213c13d1aa1e08d63af894a204b45 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Tue, 3 Sep 2024 13:38:22 -0400 Subject: [PATCH 47/63] overriding shutdown() with timeout --- src/torchrunx/launcher.py | 5 ++--- src/torchrunx/logging_utils.py | 10 ++++++++-- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 6e7d8fd7..1f9db00c 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -248,9 +248,8 @@ def run( ) raise finally: - # log_receiver.timeout = 1 - # log_receiver.shutdown() - # log_receiver.server_close() + log_receiver.shutdown() + log_receiver.server_close() log_process.kill() dist.destroy_process_group() diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 12003c42..fd990140 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -10,7 +10,7 @@ from logging import Handler, Logger from logging.handlers import SocketHandler from pathlib import Path -from socketserver import StreamRequestHandler, TCPServer +from socketserver import StreamRequestHandler, ThreadingTCPServer ## Handler utilities @@ -140,7 
+140,7 @@ def flush(self): ## Launcher utilities -class LogRecordSocketReceiver(TCPServer): +class LogRecordSocketReceiver(ThreadingTCPServer): def __init__(self, host: str, port: int, handlers: list[Handler]): class _LogRecordStreamHandler(StreamRequestHandler): def handle(self): @@ -163,3 +163,9 @@ def handle(self): RequestHandlerClass=_LogRecordStreamHandler, bind_and_activate=True, ) + self.daemon_threads = True + + def shutdown(self): + """ override BaseServer.shutdown() with added timeout """ + self._BaseServer__shutdown_request = True + self._BaseServer__is_shut_down.wait(timeout=3) From ad7456ba9ae94b9b2955e25c3b2824f383ccd0b4 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Tue, 3 Sep 2024 13:53:13 -0400 Subject: [PATCH 48/63] linting and typing --- src/torchrunx/environment.py | 2 +- src/torchrunx/logging_utils.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/torchrunx/environment.py b/src/torchrunx/environment.py index bef1e4b2..edf1431d 100644 --- a/src/torchrunx/environment.py +++ b/src/torchrunx/environment.py @@ -40,7 +40,7 @@ def slurm_workers() -> int: # TODO: is it possible to allocate uneven GPUs across nodes? return len(os.environ["SLURM_JOB_GPUS"].split(",")) elif "SLURM_GPUS_PER_NODE" in os.environ: - return int(os.environ['SLURM_GPUS_PER_NODE']) + return int(os.environ["SLURM_GPUS_PER_NODE"]) else: # TODO: should we assume that we plan to do one worker per CPU? return int(os.environ["SLURM_CPUS_ON_NODE"]) diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index fd990140..469c845f 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -23,8 +23,8 @@ def add_filter_to_handler( ) -> None: def _filter(record: logging.LogRecord) -> bool: return ( - record.hostname == hostname - and record.worker_rank == worker_rank + record.hostname == hostname # pyright: ignore[reportAttributeAccessIssue] + and record.worker_rank == worker_rank # pyright: ignore[reportAttributeAccessIssue] and record.levelno >= log_level ) @@ -166,6 +166,6 @@ def handle(self): self.daemon_threads = True def shutdown(self): - """ override BaseServer.shutdown() with added timeout """ + """override BaseServer.shutdown() with added timeout""" self._BaseServer__shutdown_request = True - self._BaseServer__is_shut_down.wait(timeout=3) + self._BaseServer__is_shut_down.wait(timeout=3) # pyright: ignore[reportAttributeAccessIssue] From 1d42ccdf1c50275fbe13c0fa9e2121d74b46ad71 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Wed, 4 Sep 2024 22:27:45 -0400 Subject: [PATCH 49/63] fix CI test --- src/torchrunx/agent.py | 2 +- src/torchrunx/logging_utils.py | 2 +- tests/test_CI.py | 9 +++++++-- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 3f716e2a..5a7be1c8 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -166,7 +166,7 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ envs={i: {} for i in range(num_workers)}, **log_kwargs, # pyright: ignore [reportArgumentType] ) - logger.debug("starting processes") + logger.info("starting processes") try: status = AgentStatus() diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 469c845f..63e2f45d 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -102,7 +102,7 @@ def log_records_to_socket( logger_hostname: str, logger_port: int, ): - logger.setLevel(logging.NOTSET) + logger.setLevel(logging.DEBUG) old_factory = 
logging.getLogRecordFactory() diff --git a/tests/test_CI.py b/tests/test_CI.py index babb72fa..f507798e 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -6,7 +6,6 @@ import torch.distributed as dist import torchrunx as trx -from torchrunx.logging_utils import default_handlers def test_simple_localhost(): @@ -29,6 +28,9 @@ def dist_func(): return o.detach() + tmp = tempfile.mkdtemp() + os.environ["TORCHRUNX_DIR"] = tmp + r = trx.launch( func=dist_func, func_kwargs={}, @@ -52,7 +54,6 @@ def dist_func(): func_kwargs={}, workers_per_host=2, backend="gloo", - log_handlers=default_handlers(hostnames=["localhost"], workers_per_host=[2]), ) log_files = next(os.walk(tmp), (None, None, []))[2] @@ -62,6 +63,7 @@ def dist_func(): for file in log_files: with open(f"{tmp}/{file}", "r") as f: contents = f.read() + print(contents) if file.endswith("[0].log"): assert "worker rank: 0\n" in contents elif file.endswith("[1].log"): @@ -74,6 +76,9 @@ def test_error(): def error_func(): raise ValueError("abcdefg") + tmp = tempfile.mkdtemp() + os.environ["TORCHRUNX_DIR"] = tmp + with pytest.raises(RuntimeError) as excinfo: trx.launch( func=error_func, From 7df0948f083391bfb7923beacbd466bfc5c250c7 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Thu, 5 Sep 2024 12:04:03 -0400 Subject: [PATCH 50/63] DEBUG -> NOTSET --- src/torchrunx/logging_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 63e2f45d..469c845f 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -102,7 +102,7 @@ def log_records_to_socket( logger_hostname: str, logger_port: int, ): - logger.setLevel(logging.DEBUG) + logger.setLevel(logging.NOTSET) old_factory = logging.getLogRecordFactory() From d79d1e6ca18fdb443cb65216a11be4cba5889dfb Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Thu, 5 Sep 2024 15:17:08 -0400 Subject: [PATCH 51/63] raise worker errors in launcher --- src/torchrunx/agent.py | 11 +++++++++-- src/torchrunx/launcher.py | 18 +++++------------- src/torchrunx/utils.py | 24 ++++++++++++++++++------ tests/test_CI.py | 2 +- 4 files changed, 33 insertions(+), 22 deletions(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 5a7be1c8..c891e34c 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -21,6 +21,7 @@ AgentStatus, LauncherAgentGroup, LauncherPayload, + WorkerResult, get_open_port, ) @@ -48,7 +49,7 @@ def from_bytes(cls, serialized: bytes) -> Self: return cloudpickle.loads(serialized) -def entrypoint(serialized_worker_args: bytes): +def entrypoint(serialized_worker_args: bytes) -> WorkerResult: worker_args = WorkerArgs.from_bytes(serialized_worker_args) logger = logging.getLogger() @@ -93,7 +94,13 @@ def entrypoint(serialized_worker_args: bytes): logger.debug(f"executing function: {worker_args.function}") - r = worker_args.function() + r = WorkerResult(None, None) + + try: + r.result = worker_args.function() + except Exception as e: + r.exception = e + logger.error(e) # flush streams sys.stdout.flush() diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 1f9db00c..7dc1f83c 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -225,19 +225,11 @@ def run( if all(s.is_done() for s in agent_statuses): break - if any(s.is_failed() for s in agent_statuses): - # TODO: cleaner way to print these? 
- e = "" - for i, s in enumerate(agent_statuses): - if s is not None and s.is_failed(): - for k, v in s.failures.items(): - e += f"Node {i}, local worker {k} exited with error: " - if isinstance(v.message, str): - e += f"{v.message}\n" - else: - e += f"{v.message['message']}\n" - e += f"{v.message['extraInfo']['py_callstack']}\n\n" - raise RuntimeError(e) + for s in agent_statuses: + if s.is_failed: + for _, failure in s.failures.items(): + if failure is not None: + raise failure except: # cleanup: SIGTERM all agents for agent_pid, agent_hostname in zip(agent_pids, self.hostnames): diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index 43f040c6..e90c5b05 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -20,6 +20,12 @@ def get_open_port() -> int: return port +@dataclass +class WorkerResult: + result: Any + exception: Exception + + @dataclass class LauncherPayload: fn: Callable @@ -42,9 +48,9 @@ class AgentStatus: running: bool = True failed: bool = False return_values: dict[int, Any] = field(default_factory=dict) - failures: dict[int, ProcessFailure] = field(default_factory=dict) - stdouts: dict[int, str] = field(default_factory=dict) - stderrs: dict[int, str] = field(default_factory=dict) + failures: dict[int, Exception] = field(default_factory=dict) + # stdouts: dict[int, str] = field(default_factory=dict) + # stderrs: dict[int, str] = field(default_factory=dict) @classmethod def from_result(cls, result: RunProcsResult | None, worker_global_ranks: list[int]) -> Self: @@ -53,9 +59,15 @@ def from_result(cls, result: RunProcsResult | None, worker_global_ranks: list[in return cls( running=False, - failed=result.is_failed(), - return_values={worker_global_ranks[k]: v for k, v in result.return_values.items()}, - failures={worker_global_ranks[k]: v for k, v in result.failures.items()}, + failed=any( + wr.exception is not None for _, wr in result.return_values.items() + ), # result.is_failed(), + return_values={ + worker_global_ranks[k]: wr.result for k, wr in result.return_values.items() + }, + failures={ + worker_global_ranks[k]: wr.exception for k, wr in result.return_values.items() + }, ) def is_running(self) -> bool: diff --git a/tests/test_CI.py b/tests/test_CI.py index f507798e..403c223a 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -79,7 +79,7 @@ def error_func(): tmp = tempfile.mkdtemp() os.environ["TORCHRUNX_DIR"] = tmp - with pytest.raises(RuntimeError) as excinfo: + with pytest.raises(ValueError) as excinfo: trx.launch( func=error_func, func_kwargs={}, From db38ce66980a51c391f2610524e5b161a2babf45 Mon Sep 17 00:00:00 2001 From: Peter Curtin Date: Thu, 5 Sep 2024 15:40:52 -0400 Subject: [PATCH 52/63] types and formatting --- src/torchrunx/utils.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index e90c5b05..4cdbc4b0 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -9,7 +9,6 @@ import cloudpickle import torch.distributed as dist from torch.distributed.elastic.multiprocessing.api import RunProcsResult -from torch.distributed.elastic.multiprocessing.errors import ProcessFailure from typing_extensions import Self @@ -22,8 +21,8 @@ def get_open_port() -> int: @dataclass class WorkerResult: - result: Any - exception: Exception + result: Any | None + exception: Exception | None @dataclass From a42d6d5d98e17fd9e4581b7284b143e995e827b6 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Fri, 6 Sep 2024 02:16:33 -0400 Subject: [PATCH 53/63] refactoring to 
WorkerException --- src/torchrunx/agent.py | 38 +++++++++-------------- src/torchrunx/launcher.py | 20 ++++++------ src/torchrunx/utils.py | 64 +++++++++++++++++---------------------- 3 files changed, 53 insertions(+), 69 deletions(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index c891e34c..f4dfab33 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -7,7 +7,7 @@ import sys import tempfile from dataclasses import dataclass -from typing import Callable, Literal +from typing import Any, Callable, Literal import cloudpickle import torch @@ -20,8 +20,7 @@ AgentPayload, AgentStatus, LauncherAgentGroup, - LauncherPayload, - WorkerResult, + WorkerException, get_open_port, ) @@ -49,7 +48,7 @@ def from_bytes(cls, serialized: bytes) -> Self: return cloudpickle.loads(serialized) -def entrypoint(serialized_worker_args: bytes) -> WorkerResult: +def entrypoint(serialized_worker_args: bytes) -> Any | WorkerException: worker_args = WorkerArgs.from_bytes(serialized_worker_args) logger = logging.getLogger() @@ -94,19 +93,14 @@ def entrypoint(serialized_worker_args: bytes) -> WorkerResult: logger.debug(f"executing function: {worker_args.function}") - r = WorkerResult(None, None) - try: - r.result = worker_args.function() + return worker_args.function() except Exception as e: - r.exception = e logger.error(e) - - # flush streams - sys.stdout.flush() - sys.stderr.flush() - - return r + return WorkerException(exception=e) + finally: + sys.stdout.flush() + sys.stderr.flush() def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_port: int): @@ -118,9 +112,8 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ process_id=os.getpid(), ) - all_payloads = launcher_agent_group.sync_payloads(payload=payload) - launcher_payload: LauncherPayload = all_payloads[0] # pyright: ignore[reportAssignmentType] - main_agent_payload: AgentPayload = all_payloads[1] # pyright: ignore[reportAssignmentType] + launcher_payload, agent_payloads = launcher_agent_group.sync_payloads(payload=payload) + main_agent_payload = agent_payloads[0] hostname = launcher_payload.hostnames[agent_rank] worker_world_size = launcher_payload.worker_world_size @@ -176,20 +169,19 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ logger.info("starting processes") try: - status = AgentStatus() + status = None while True: - if status.is_running(): + if status is None or status.state == "running": status = AgentStatus.from_result( result=ctx.wait(5), worker_global_ranks=worker_global_ranks ) agent_statuses = launcher_agent_group.sync_agent_statuses(status=status) - if all(s.is_done() for s in agent_statuses): + if all(s.state == "done" for s in agent_statuses): + break + elif any(s.state == "failed" for s in agent_statuses): break - - if any(s.is_failed() for s in agent_statuses): - raise RuntimeError() except: raise finally: diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 7dc1f83c..f91aca3d 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -21,10 +21,9 @@ from .environment import auto_hosts, auto_workers, slurm_hosts, slurm_workers from .logging_utils import LogRecordSocketReceiver, default_handlers from .utils import ( - AgentPayload, - AgentStatus, LauncherAgentGroup, LauncherPayload, + WorkerException, get_open_port, ) @@ -214,22 +213,23 @@ def run( timeout=self.timeout, ) - agent_payloads: list[AgentPayload] = launcher_agent_group.sync_payloads(payload=payload)[1:] # pyright: 
ignore[reportAssignmentType] + launcher_payload, agent_payloads = launcher_agent_group.sync_payloads(payload=payload) agent_pids = [p.process_id for p in agent_payloads] # loop to monitor agent statuses (until failed or done) try: while True: - agent_statuses = launcher_agent_group.sync_agent_statuses(status=AgentStatus()) + agent_statuses = launcher_agent_group.sync_agent_statuses(status=None) - if all(s.is_done() for s in agent_statuses): + for s in agent_statuses: + if s.state == "failed": + for value in s.return_values.values(): + if isinstance(value, WorkerException): + raise value.exception + + if all(s.state == "done" for s in agent_statuses): break - for s in agent_statuses: - if s.is_failed: - for _, failure in s.failures.items(): - if failure is not None: - raise failure except: # cleanup: SIGTERM all agents for agent_pid, agent_hostname in zip(agent_pids, self.hostnames): diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index 4cdbc4b0..ceb44925 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -20,9 +20,8 @@ def get_open_port() -> int: @dataclass -class WorkerResult: - result: Any | None - exception: Exception | None +class WorkerException: + exception: Exception @dataclass @@ -44,39 +43,27 @@ class AgentPayload: @dataclass class AgentStatus: - running: bool = True - failed: bool = False - return_values: dict[int, Any] = field(default_factory=dict) - failures: dict[int, Exception] = field(default_factory=dict) - # stdouts: dict[int, str] = field(default_factory=dict) - # stderrs: dict[int, str] = field(default_factory=dict) + state: Literal["running", "failed", "done"] + return_values: dict[int, Any | WorkerException] = field(default_factory=dict) @classmethod - def from_result(cls, result: RunProcsResult | None, worker_global_ranks: list[int]) -> Self: + def from_result( + cls, result: RunProcsResult | None, worker_global_ranks: list[int] + ) -> Self: if result is None: - return cls() + return cls(state="running") - return cls( - running=False, - failed=any( - wr.exception is not None for _, wr in result.return_values.items() - ), # result.is_failed(), - return_values={ - worker_global_ranks[k]: wr.result for k, wr in result.return_values.items() - }, - failures={ - worker_global_ranks[k]: wr.exception for k, wr in result.return_values.items() - }, - ) + return_values = result.return_values - def is_running(self) -> bool: - return self.running + if any(isinstance(v, WorkerException) for v in return_values.values()): + state = "failed" + else: + state = "done" - def is_failed(self) -> bool: - return self.failed - - def is_done(self) -> bool: - return not self.running and not self.failed + return cls( + state=state, + return_values={worker_global_ranks[k]: v for k, v in return_values.items()}, + ) @dataclass @@ -110,14 +97,19 @@ def _all_gather(self, object: Any) -> list: """gather object from every rank to list on every rank""" object_bytes = self._serialize(object) object_list = [bytes()] * self.world_size - dist.all_gather_object(object_list=object_list, obj=object_bytes, group=self.group) + dist.all_gather_object( + object_list=object_list, obj=object_bytes, group=self.group + ) object_list = [self._deserialize(o) for o in object_list] return object_list def sync_payloads( self, payload: LauncherPayload | AgentPayload - ) -> list[LauncherPayload | AgentPayload]: - return self._all_gather(object=payload) - - def sync_agent_statuses(self, status: AgentStatus) -> list[AgentStatus]: - return self._all_gather(object=status)[1:] + ) -> 
tuple[LauncherPayload, list[AgentPayload]]: + payloads = self._all_gather(object=payload) + launcher_payload = payloads[0] + agent_payloads = payloads[1:] + return launcher_payload, agent_payloads + + def sync_agent_statuses(self, status: AgentStatus | None) -> list[AgentStatus]: + return self._all_gather(object=status)[1:] # [0] is launcher (status=None) From 037e55e1e55e68bb04e1da6546f57c3932b05a8e Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Fri, 6 Sep 2024 03:09:29 -0400 Subject: [PATCH 54/63] additional ruff rules --- pyproject.toml | 2 +- src/torchrunx/launcher.py | 49 +++++++++++++++++++++------------------ src/torchrunx/utils.py | 10 +++----- tests/test_CI.py | 2 +- 4 files changed, 31 insertions(+), 32 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index be8c0d6d..6f029d8b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ include = ["pyproject.toml", "src/**/*.py", "tests/**/*.py"] line-length = 100 src = ["src", "tests"] [tool.ruff.lint] -extend-select = ["I"] +select = ["E", "F", "B", "UP", "I"] [tool.pyright] include = ["src", "tests"] diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index f91aca3d..89cde697 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -9,11 +9,11 @@ import subprocess import sys from collections import ChainMap -from dataclasses import dataclass, field +from dataclasses import dataclass from functools import partial from logging import Handler from multiprocessing import Process -from typing import Any, Callable, Literal +from typing import Any, Callable, Literal, Sequence import fabric import torch.distributed as dist @@ -58,22 +58,20 @@ def execute_command( @dataclass class Launcher: - hostnames: list[str] | Literal["auto", "slurm"] = field(default_factory=lambda: ["localhost"]) - workers_per_host: int | list[int] | Literal["auto", "slurm"] = 1 + hostnames: list[str] | Literal["auto", "slurm"] = "auto" + workers_per_host: int | list[int] | Literal["auto", "slurm"] = "auto" ssh_config_file: str | os.PathLike | None = None backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None log_handlers: list[Handler] | Literal["auto"] | None = "auto" - env_vars: list[str] = field( - default_factory=lambda: [ - "PATH", - "LD_LIBRARY", - "LIBRARY_PATH", - "PYTHON*", - "CUDA*", - "TORCH*", - "PYTORCH*", - "NCCL*", - ] + env_vars: Sequence[str] = ( + "PATH", + "LD_LIBRARY", + "LIBRARY_PATH", + "PYTHON*", + "CUDA*", + "TORCH*", + "PYTORCH*", + "NCCL*", ) env_file: str | os.PathLike | None = None timeout: int = 600 @@ -81,8 +79,8 @@ class Launcher: def run( self, func: Callable, - func_args: tuple[Any] = tuple(), - func_kwargs: dict[str, Any] = {}, + func_args: tuple[Any] | None = None, + func_kwargs: dict[str, Any] | None = None, ) -> dict[int, Any]: """ Launch a distributed PyTorch function on the specified nodes. 
See :mod:`torchrunx.launch` @@ -204,6 +202,11 @@ def run( host_ranks = range(_cumulative_workers[n], _cumulative_workers[n + 1]) worker_global_ranks.append(list(host_ranks)) + if func_args is None: + func_args = tuple() + if func_kwargs is None: + func_kwargs = dict() + payload = LauncherPayload( fn=partial(func, *func_args, **func_kwargs), hostnames=self.hostnames, @@ -251,14 +254,14 @@ def run( def launch( func: Callable, - func_args: tuple[Any] = tuple(), - func_kwargs: dict[str, Any] = {}, - hostnames: list[str] | Literal["auto", "slurm"] = ["localhost"], - workers_per_host: int | list[int] | Literal["auto", "slurm"] = 1, + func_args: tuple[Any] | None = None, + func_kwargs: dict[str, Any] | None = None, + hostnames: list[str] | Literal["auto", "slurm"] = "auto", + workers_per_host: int | list[int] | Literal["auto", "slurm"] = "auto", ssh_config_file: str | os.PathLike | None = None, backend: Literal["mpi", "gloo", "nccl", "ucc", None] = None, log_handlers: list[Handler] | Literal["auto"] = "auto", - env_vars: list[str] = [ + env_vars: Sequence[str] = ( "PATH", "LD_LIBRARY", "LIBRARY_PATH", @@ -267,7 +270,7 @@ def launch( "TORCH*", "PYTORCH*", "NCCL*", - ], + ), env_file: str | os.PathLike | None = None, timeout: int = 600, ) -> dict[int, Any]: diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index ceb44925..3a14d342 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -47,9 +47,7 @@ class AgentStatus: return_values: dict[int, Any | WorkerException] = field(default_factory=dict) @classmethod - def from_result( - cls, result: RunProcsResult | None, worker_global_ranks: list[int] - ) -> Self: + def from_result(cls, result: RunProcsResult | None, worker_global_ranks: list[int]) -> Self: if result is None: return cls(state="running") @@ -96,10 +94,8 @@ def _deserialize(self, serialized: bytes) -> Any: def _all_gather(self, object: Any) -> list: """gather object from every rank to list on every rank""" object_bytes = self._serialize(object) - object_list = [bytes()] * self.world_size - dist.all_gather_object( - object_list=object_list, obj=object_bytes, group=self.group - ) + object_list = [b""] * self.world_size + dist.all_gather_object(object_list=object_list, obj=object_bytes, group=self.group) object_list = [self._deserialize(o) for o in object_list] return object_list diff --git a/tests/test_CI.py b/tests/test_CI.py index 403c223a..b86cad64 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -61,7 +61,7 @@ def dist_func(): assert len(log_files) == 3 for file in log_files: - with open(f"{tmp}/{file}", "r") as f: + with open(f"{tmp}/{file}") as f: contents = f.read() print(contents) if file.endswith("[0].log"): From da9bdb466b0498e5611ad86bdc4a5a1008f97007 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Sat, 7 Sep 2024 05:10:51 -0400 Subject: [PATCH 55/63] more refactoring --- pixi.lock | 2 +- src/torchrunx/launcher.py | 221 ++++++++++++++++++++------------- src/torchrunx/logging_utils.py | 3 + src/torchrunx/utils.py | 4 +- tests/test_CI.py | 5 +- tests/test_func.py | 6 +- 6 files changed, 145 insertions(+), 96 deletions(-) diff --git a/pixi.lock b/pixi.lock index 8e8c95df..377385da 100644 --- a/pixi.lock +++ b/pixi.lock @@ -2603,7 +2603,7 @@ packages: name: torchrunx version: 0.1.3 path: . 
- sha256: 7352054b1212a4ce0d60c055288dd4f51cea2093a84d0a1a48ea97bdaa703fad + sha256: 0a30b1182ca7c101ff1d147eba62de2ba883f822fdedd13fa49207c5484f6cd8 requires_dist: - cloudpickle>=3.0.0 - fabric>=3.0.0 diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 89cde697..58011ae0 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -8,11 +8,11 @@ import socket import subprocess import sys -from collections import ChainMap from dataclasses import dataclass from functools import partial from logging import Handler from multiprocessing import Process +from pathlib import Path from typing import Any, Callable, Literal, Sequence import fabric @@ -28,6 +28,57 @@ ) +def resolve_hostnames(hostnames: list[str] | Literal["auto", "slurm"]) -> list[str]: + if hostnames == "auto": + return auto_hosts() + elif hostnames == "slurm": + return slurm_hosts() + return hostnames + + +def resolve_workers_per_host( + workers_per_host: int | list[int] | Literal["auto", "slurm"], num_hosts: int +) -> list[int]: + if workers_per_host == "auto": + workers_per_host = auto_workers() + elif workers_per_host == "slurm": + workers_per_host = slurm_workers() + + if isinstance(workers_per_host, int): + workers_per_host = [workers_per_host] * num_hosts + else: + assert len(workers_per_host) == num_hosts + + return workers_per_host + + +def build_logging_server( + log_handlers: list[Handler] | Literal["auto"] | None, + launcher_hostname: str, + hostnames: list[str], + workers_per_host: list[int], + log_dir: str | os.PathLike, + log_level: int, +) -> LogRecordSocketReceiver: + if log_handlers is None: + log_handlers = [] + elif log_handlers == "auto": + log_handlers = default_handlers( + hostnames=hostnames, + workers_per_host=workers_per_host, + log_dir=log_dir, + log_level=log_level, + ) + + log_receiver = LogRecordSocketReceiver( + host=launcher_hostname, + port=get_open_port(), + handlers=log_handlers, + ) + + return log_receiver + + def is_localhost(hostname_or_ip: str) -> bool: # check if host is "loopback" address (i.e. designated to send to self) try: @@ -56,6 +107,43 @@ def execute_command( conn.run(f"{command} >> /dev/null 2>&1 &", asynchronous=True) +def build_command( + launcher_hostname: str, + launcher_port: int, + logger_port: int, + world_size: int, + rank: int, + env_vars: Sequence[str], + env_file: str | os.PathLike | None, +) -> str: + current_dir = os.getcwd() + + env_exports = [] + for k, v in os.environ.items(): + if any(fnmatch.fnmatch(k, e) for e in env_vars): + env_exports.append(f"{k}={v}") + + env_export_string = "" + if len(env_exports) > 0: + env_export_string = f"export {' '.join(env_exports)} && " + + env_file_string = "" + if env_file is not None: + env_file_string = f"source {env_file} && " + + return ( + f"cd {current_dir} && " + f"{env_export_string}" + f"{env_file_string}" + f"{sys.executable} -u -m torchrunx " + f"--launcher-hostname {launcher_hostname} " + f"--launcher-port {launcher_port} " + f"--logger-port {logger_port} " + f"--world-size {world_size} " + f"--rank {rank}" + ) + + @dataclass class Launcher: hostnames: list[str] | Literal["auto", "slurm"] = "auto" @@ -81,7 +169,7 @@ def run( func: Callable, func_args: tuple[Any] | None = None, func_kwargs: dict[str, Any] | None = None, - ) -> dict[int, Any]: + ) -> dict[str, dict[int, Any]]: """ Launch a distributed PyTorch function on the specified nodes. 
See :mod:`torchrunx.launch` @@ -98,91 +186,50 @@ def run( if not dist.is_available(): raise RuntimeError("The torch.distributed package is not available.") - if self.hostnames == "auto": - self.hostnames = auto_hosts() - elif self.hostnames == "slurm": - self.hostnames = slurm_hosts() - - num_hosts = len(self.hostnames) - - if self.workers_per_host == "auto": - self.workers_per_host = auto_workers() - elif self.workers_per_host == "slurm": - self.workers_per_host = slurm_workers() - - if isinstance(self.workers_per_host, int): - self.workers_per_host = [self.workers_per_host] * num_hosts - - assert num_hosts == len(self.workers_per_host) - - # + hostnames = resolve_hostnames(self.hostnames) + workers_per_host = resolve_workers_per_host(self.workers_per_host, len(hostnames)) launcher_hostname = socket.getfqdn() + launcher_port = get_open_port() + world_size = len(hostnames) + 1 - # setup logging - - if self.log_handlers is None: - self.log_handlers = [] - elif self.log_handlers == "auto": - self.log_handlers = default_handlers( - hostnames=self.hostnames, - workers_per_host=self.workers_per_host, - log_dir=os.environ.get("TORCHRUNX_DIR", "./torchrunx_logs"), - log_level=logging._nameToLevel.get( - os.environ.get("TORCHRUNX_LOG_LEVEL", "INFO"), logging.NOTSET - ), - ) + # start logging server - logger_port = get_open_port() - log_receiver = LogRecordSocketReceiver( - host=launcher_hostname, port=logger_port, handlers=self.log_handlers + log_receiver = build_logging_server( + log_handlers=self.log_handlers, + launcher_hostname=launcher_hostname, + hostnames=hostnames, + workers_per_host=workers_per_host, + log_dir=Path(os.environ.get("TORCHRUNX_LOG_DIR", "torchrunx_logs")), + log_level=logging._nameToLevel[os.environ.get("TORCHRUNX_LOG_LEVEL", "INFO")], ) + log_process = Process( target=log_receiver.serve_forever, daemon=True, ) - log_process.start() - - # launch command - - current_dir = os.getcwd() - - env_exports = [] - for k, v in os.environ.items(): - if any(fnmatch.fnmatch(k, e) for e in self.env_vars): - env_exports.append(f"{k}={v}") - env_export_string = "" - if len(env_exports) > 0: - env_export_string = f"export {' '.join(env_exports)} && " - - env_file_string = "" - if self.env_file is not None: - env_file_string = f"source {self.env_file} && " - - launcher_port = get_open_port() - world_size = num_hosts + 1 # launcher + agents + log_process.start() # start agents on each node - for i, hostname in enumerate(self.hostnames): + + for i, hostname in enumerate(hostnames): execute_command( - command=( - f"cd {current_dir} && " - f"{env_export_string}" - f"{env_file_string}" - f"{sys.executable} -u -m torchrunx " - f"--launcher-hostname {launcher_hostname} " - f"--launcher-port {launcher_port} " - f"--logger-port {logger_port} " - f"--world-size {world_size} " - f"--rank {i+1}" + command=build_command( + launcher_hostname=launcher_hostname, + launcher_port=launcher_port, + logger_port=log_receiver.port, + world_size=world_size, + rank=i + 1, + env_vars=self.env_vars, + env_file=self.env_file, ), hostname=hostname, ssh_config_file=self.ssh_config_file, ) # initialize launcher–agent process group - # ranks = (launcher, agent_0, ..., agent_{num_hosts-1}) + # ranks = (launcher, agent_{hostnames[0]}, ..., agent[-1]) launcher_agent_group = LauncherAgentGroup( launcher_hostname=launcher_hostname, @@ -193,36 +240,30 @@ def run( # build and sync payloads between launcher and agents - _cumulative_workers = [0] + list(itertools.accumulate(self.workers_per_host)) - - worker_world_size = 
_cumulative_workers[-1] + _cumulative_workers = [0] + list(itertools.accumulate(workers_per_host)) - worker_global_ranks = [] # list of worker ranks per host - for n in range(num_hosts): - host_ranks = range(_cumulative_workers[n], _cumulative_workers[n + 1]) - worker_global_ranks.append(list(host_ranks)) - - if func_args is None: - func_args = tuple() - if func_kwargs is None: - func_kwargs = dict() + worker_global_ranks = [ + list(range(_cumulative_workers[n], _cumulative_workers[n + 1])) + for n in range(len(hostnames)) + ] payload = LauncherPayload( - fn=partial(func, *func_args, **func_kwargs), - hostnames=self.hostnames, - worker_world_size=worker_world_size, + fn=partial(func, *(func_args or ()), **(func_kwargs or {})), + hostnames=hostnames, worker_global_ranks=worker_global_ranks, + worker_world_size=sum(workers_per_host), backend=self.backend, timeout=self.timeout, ) launcher_payload, agent_payloads = launcher_agent_group.sync_payloads(payload=payload) - agent_pids = [p.process_id for p in agent_payloads] # loop to monitor agent statuses (until failed or done) + try: while True: agent_statuses = launcher_agent_group.sync_agent_statuses(status=None) + # raises exception if communication timeout due to death of any agent for s in agent_statuses: if s.state == "failed": @@ -235,9 +276,9 @@ def run( except: # cleanup: SIGTERM all agents - for agent_pid, agent_hostname in zip(agent_pids, self.hostnames): + for agent_payload, agent_hostname in zip(agent_payloads, hostnames): execute_command( - command=f"kill {agent_pid}", + command=f"kill {agent_payload.process_id}", hostname=agent_hostname, ssh_config_file=self.ssh_config_file, ) @@ -248,8 +289,10 @@ def run( log_process.kill() dist.destroy_process_group() - return_values: dict[int, Any] = dict(ChainMap(*[s.return_values for s in agent_statuses])) - return return_values + return { + hostname: agent_status.return_values + for hostname, agent_status in zip(hostnames, agent_statuses) + } def launch( @@ -273,7 +316,7 @@ def launch( ), env_file: str | os.PathLike | None = None, timeout: int = 600, -) -> dict[int, Any]: +) -> dict[str, dict[int, Any]]: """ Launch a distributed PyTorch function on the specified nodes. 
diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 469c845f..e8a5b142 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -142,6 +142,9 @@ def flush(self): class LogRecordSocketReceiver(ThreadingTCPServer): def __init__(self, host: str, port: int, handlers: list[Handler]): + self.host = host + self.port = port + class _LogRecordStreamHandler(StreamRequestHandler): def handle(self): while True: diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index 3a14d342..82274e6f 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -28,8 +28,8 @@ class WorkerException: class LauncherPayload: fn: Callable hostnames: list[str] - worker_world_size: int worker_global_ranks: list[list[int]] + worker_world_size: int backend: Literal["mpi", "gloo", "nccl", "ucc", None] timeout: int @@ -60,7 +60,7 @@ def from_result(cls, result: RunProcsResult | None, worker_global_ranks: list[in return cls( state=state, - return_values={worker_global_ranks[k]: v for k, v in return_values.items()}, + return_values=return_values, ) diff --git a/tests/test_CI.py b/tests/test_CI.py index b86cad64..472d0c5d 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -38,7 +38,8 @@ def dist_func(): backend="gloo", # log_dir="./test_logs" ) - assert torch.all(r[0] == r[1]) + results = next(iter(r.values())) + assert torch.all(results[0] == results[1]) def test_logging(): @@ -47,7 +48,7 @@ def dist_func(): print(f"worker rank: {rank}") tmp = tempfile.mkdtemp() - os.environ["TORCHRUNX_DIR"] = tmp + os.environ["TORCHRUNX_LOG_DIR"] = tmp trx.launch( func=dist_func, diff --git a/tests/test_func.py b/tests/test_func.py index 9db6454d..444e7836 100644 --- a/tests/test_func.py +++ b/tests/test_func.py @@ -13,9 +13,11 @@ def test_launch(): workers_per_host="slurm", ) + result_values = [v for host_results in result.values() for v in host_results.values()] + t = True - for i in range(len(result)): - t = t and torch.all(result[i] == result[0]) + for i in range(len(result_values)): + t = t and torch.all(result_values[i] == result_values[0]) assert t, "Not all tensors equal" From f41025fc01610d86983b60cf3576d11289f0d5f3 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Wed, 11 Sep 2024 16:03:35 -0400 Subject: [PATCH 56/63] ruff ANN rules --- pyproject.toml | 3 ++- src/torchrunx/agent.py | 2 +- src/torchrunx/logging_utils.py | 16 ++++++++-------- tests/test_CI.py | 13 +++++++------ tests/test_func.py | 4 ++-- tests/test_submitit.py | 14 ++++++++------ tests/test_train.py | 20 ++++++++++---------- 7 files changed, 38 insertions(+), 34 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6f029d8b..693b5f1c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,8 @@ include = ["pyproject.toml", "src/**/*.py", "tests/**/*.py"] line-length = 100 src = ["src", "tests"] [tool.ruff.lint] -select = ["E", "F", "B", "UP", "I"] +select = ["E", "F", "W", "ANN", "B", "UP", "I"] +ignore = ["ANN101", "ANN102", "ANN401"] [tool.pyright] include = ["src", "tests"] diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index f4dfab33..96aa383f 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -103,7 +103,7 @@ def entrypoint(serialized_worker_args: bytes) -> Any | WorkerException: sys.stderr.flush() -def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_port: int): +def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_port: int) -> None: agent_rank = launcher_agent_group.rank - 1 payload = 
AgentPayload( diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index e8a5b142..20a0051b 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -101,12 +101,12 @@ def log_records_to_socket( worker_rank: int | None, logger_hostname: str, logger_port: int, -): +) -> None: logger.setLevel(logging.NOTSET) old_factory = logging.getLogRecordFactory() - def record_factory(*args, **kwargs): + def record_factory(*args, **kwargs) -> logging.LogRecord: # noqa: ANN002, ANN003 record = old_factory(*args, **kwargs) record.hostname = hostname record.worker_rank = worker_rank @@ -117,14 +117,14 @@ def record_factory(*args, **kwargs): logger.addHandler(SocketHandler(host=logger_hostname, port=logger_port)) -def redirect_stdio_to_logger(logger: Logger): +def redirect_stdio_to_logger(logger: Logger) -> None: class _LoggingStream(StringIO): - def __init__(self, logger: Logger, level: int = logging.NOTSET): + def __init__(self, logger: Logger, level: int = logging.NOTSET) -> None: super().__init__() self.logger = logger self.level = level - def flush(self): + def flush(self) -> None: super().flush() value = self.getvalue() if value != "": @@ -141,12 +141,12 @@ def flush(self): class LogRecordSocketReceiver(ThreadingTCPServer): - def __init__(self, host: str, port: int, handlers: list[Handler]): + def __init__(self, host: str, port: int, handlers: list[Handler]) -> None: self.host = host self.port = port class _LogRecordStreamHandler(StreamRequestHandler): - def handle(self): + def handle(self) -> None: while True: chunk = self.connection.recv(4) if len(chunk) < 4: @@ -168,7 +168,7 @@ def handle(self): ) self.daemon_threads = True - def shutdown(self): + def shutdown(self) -> None: """override BaseServer.shutdown() with added timeout""" self._BaseServer__shutdown_request = True self._BaseServer__is_shut_down.wait(timeout=3) # pyright: ignore[reportAttributeAccessIssue] diff --git a/tests/test_CI.py b/tests/test_CI.py index 472d0c5d..bcd70bfd 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -1,5 +1,6 @@ import os import tempfile +from typing import NoReturn import pytest import torch @@ -8,8 +9,8 @@ import torchrunx as trx -def test_simple_localhost(): - def dist_func(): +def test_simple_localhost() -> None: + def dist_func() -> torch.Tensor: rank = int(os.environ["RANK"]) if rank == 0: @@ -42,8 +43,8 @@ def dist_func(): assert torch.all(results[0] == results[1]) -def test_logging(): - def dist_func(): +def test_logging() -> None: + def dist_func() -> None: rank = int(os.environ["RANK"]) print(f"worker rank: {rank}") @@ -73,8 +74,8 @@ def dist_func(): assert "starting processes" in contents -def test_error(): - def error_func(): +def test_error() -> None: + def error_func() -> NoReturn: raise ValueError("abcdefg") tmp = tempfile.mkdtemp() diff --git a/tests/test_func.py b/tests/test_func.py index 444e7836..8fb264bf 100644 --- a/tests/test_func.py +++ b/tests/test_func.py @@ -6,7 +6,7 @@ import torchrunx as trx -def test_launch(): +def test_launch() -> None: result = trx.launch( func=simple_matmul, hostnames="slurm", @@ -22,7 +22,7 @@ def test_launch(): assert t, "Not all tensors equal" -def simple_matmul(): +def simple_matmul() -> torch.Tensor: rank = int(os.environ["RANK"]) local_rank = int(os.environ["LOCAL_RANK"]) device = torch.device(local_rank) if torch.cuda.is_available() else torch.device("cpu") diff --git a/tests/test_submitit.py b/tests/test_submitit.py index 290f7aad..225268d6 100644 --- a/tests/test_submitit.py +++ 
b/tests/test_submitit.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import copy import submitit @@ -9,22 +11,22 @@ class DummyDataset(Dataset): - def __init__(self, max_text_length=16, num_samples=20000) -> None: + def __init__(self, max_text_length: int = 16, num_samples: int = 20000) -> None: super().__init__() self.input_ids = torch.randint(0, 30522, (num_samples, max_text_length)) self.labels = copy.deepcopy(self.input_ids) - def __len__(self): + def __len__(self) -> int: return len(self.input_ids) - def __getitem__(self, index): + def __getitem__(self, index: int) -> dict[str, torch.Tensor]: return { "input_ids": self.input_ids[index], "labels": self.labels[index], } -def main(): +def main() -> None: model = BertForMaskedLM.from_pretrained("bert-base-uncased") train_dataset = DummyDataset() @@ -46,11 +48,11 @@ def main(): trainer.train() -def launch(): +def launch() -> None: trx.launch(func=main, func_kwargs={}, hostnames="slurm", workers_per_host="slurm") -def test_submitit(): +def test_submitit() -> None: executor = submitit.SlurmExecutor(folder="logs") executor.update_parameters( diff --git a/tests/test_train.py b/tests/test_train.py index d28f5ef5..9f637287 100644 --- a/tests/test_train.py +++ b/tests/test_train.py @@ -3,23 +3,23 @@ import torchrunx as trx -def worker(): +def worker() -> None: import torch - class TwoLinLayerNet(torch.nn.Module): - def __init__(self): + class TwoLayerNN(torch.nn.Module): + def __init__(self) -> None: super().__init__() self.a = torch.nn.Linear(10, 10, bias=False) self.b = torch.nn.Linear(10, 1, bias=False) - def forward(self, x): + def forward(self, x: torch.Tensor) -> torch.Tensor: a = self.a(x) - b = self.b(x) - return (a, b) + b = self.b(a) + return b local_rank = int(os.environ["LOCAL_RANK"]) print("init model") - model = TwoLinLayerNet().to(local_rank) + model = TwoLayerNN().to(local_rank) print("init ddp") ddp_model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[local_rank]) @@ -28,11 +28,11 @@ def forward(self, x): for _ in range(20): output = ddp_model(inp) - loss = output[0] + output[1] - loss.sum().backward() + loss = output.sum() + loss.backward() -def test_distributed_train(): +def test_distributed_train() -> None: trx.launch( worker, hostnames="slurm", From 52220fbb73e7a56ee6f1fe254de2c273d12edb1c Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Wed, 11 Sep 2024 16:40:17 -0400 Subject: [PATCH 57/63] print traceback in agent; import as dist_mp --- src/torchrunx/agent.py | 31 ++++++++++--------------------- tests/test_CI.py | 5 +++-- 2 files changed, 13 insertions(+), 23 deletions(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 96aa383f..4777c8f1 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -6,13 +6,14 @@ import socket import sys import tempfile +import traceback from dataclasses import dataclass from typing import Any, Callable, Literal import cloudpickle import torch import torch.distributed as dist -from torch.distributed.elastic.multiprocessing import start_processes +import torch.distributed.elastic.multiprocessing as dist_mp from typing_extensions import Self from .logging_utils import log_records_to_socket, redirect_stdio_to_logger @@ -70,11 +71,7 @@ def entrypoint(serialized_worker_args: bytes) -> Any | WorkerException: is_master=(worker_args.rank == 0), ) - backend = worker_args.backend - if backend is None: - backend = "nccl" if torch.cuda.is_available() else "gloo" - - logger.debug(f"using backend: {backend}") + backend = worker_args.backend or ("nccl" if 
torch.cuda.is_available() else "gloo") dist.init_process_group( backend=backend, @@ -91,12 +88,10 @@ def entrypoint(serialized_worker_args: bytes) -> Any | WorkerException: os.environ["MASTER_ADDR"] = worker_args.main_agent_hostname os.environ["MASTER_PORT"] = str(worker_args.main_agent_port) - logger.debug(f"executing function: {worker_args.function}") - try: return worker_args.function() except Exception as e: - logger.error(e) + traceback.print_exc() return WorkerException(exception=e) finally: sys.stdout.flush() @@ -132,16 +127,9 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ redirect_stdio_to_logger(logger) - if torch.__version__ >= "2.3": - from torch.distributed.elastic.multiprocessing import DefaultLogsSpecs - - log_kwargs = {"logs_specs": DefaultLogsSpecs(log_dir=tempfile.mkdtemp())} - else: - log_kwargs = {"log_dir": tempfile.mkdtemp()} - # spawn workers - ctx = start_processes( + ctx = dist_mp.start_processes( name=f"{hostname}_", entrypoint=entrypoint, args={ @@ -164,9 +152,12 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ for i in range(num_workers) }, envs={i: {} for i in range(num_workers)}, - **log_kwargs, # pyright: ignore [reportArgumentType] + **( + {"logs_specs": dist_mp.DefaultLogsSpecs(log_dir=tempfile.mkdtemp())} + if torch.__version__ >= "2.3" + else {"log_dir": tempfile.mkdtemp()} + ), # pyright: ignore [reportArgumentType] ) - logger.info("starting processes") try: status = None @@ -182,8 +173,6 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ break elif any(s.state == "failed" for s in agent_statuses): break - except: - raise finally: ctx.close() sys.stdout.flush() diff --git a/tests/test_CI.py b/tests/test_CI.py index bcd70bfd..bc3e2683 100644 --- a/tests/test_CI.py +++ b/tests/test_CI.py @@ -70,8 +70,9 @@ def dist_func() -> None: assert "worker rank: 0\n" in contents elif file.endswith("[1].log"): assert "worker rank: 1\n" in contents - else: - assert "starting processes" in contents + # TODO ? + # else: + # assert "starting processes" in contents def test_error() -> None: From bd52e7a6cef213bd860d56768dc1432e894b3940 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Thu, 12 Sep 2024 17:44:22 -0400 Subject: [PATCH 58/63] added more ruff lint rules --- .github/workflows/main.yml | 2 +- pixi.lock | 4 +- pyproject.toml | 22 +++++- src/torchrunx/agent.py | 10 ++- src/torchrunx/environment.py | 17 +++-- src/torchrunx/launcher.py | 118 +++++++++++++++++-------------- src/torchrunx/logging_utils.py | 24 ++++--- src/torchrunx/utils.py | 29 ++++---- tests/{test_CI.py => test_ci.py} | 22 +++--- tests/test_submitit.py | 2 +- tests/test_train.py | 8 +-- 11 files changed, 143 insertions(+), 115 deletions(-) rename tests/{test_CI.py => test_ci.py} (81%) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c1726931..45a3b483 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -86,4 +86,4 @@ jobs: cache: false environments: default activate-environment: default - - run: pytest tests/test_CI.py + - run: pytest tests/test_ci.py diff --git a/pixi.lock b/pixi.lock index 377385da..e4fae5ca 100644 --- a/pixi.lock +++ b/pixi.lock @@ -2601,9 +2601,9 @@ packages: requires_python: '>=3.8.0' - kind: pypi name: torchrunx - version: 0.1.3 + version: 0.2.0 path: . 
- sha256: 0a30b1182ca7c101ff1d147eba62de2ba883f822fdedd13fa49207c5484f6cd8 + sha256: 1753f43bee54bc0da38cdd524dc501c0c2be9fbaaa7036bced9c9d03a7a8e810 requires_dist: - cloudpickle>=3.0.0 - fabric>=3.0.0 diff --git a/pyproject.toml b/pyproject.toml index 693b5f1c..33acfaeb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "torchrunx" -version = "0.1.3" +version = "0.2.0" authors = [ {name = "Apoorv Khandelwal", email = "mail@apoorvkh.com"}, {name = "Peter Curtin", email = "peter_curtin@brown.edu"}, @@ -41,8 +41,24 @@ include = ["pyproject.toml", "src/**/*.py", "tests/**/*.py"] line-length = 100 src = ["src", "tests"] [tool.ruff.lint] -select = ["E", "F", "W", "ANN", "B", "UP", "I"] -ignore = ["ANN101", "ANN102", "ANN401"] +select = ["ALL"] +ignore = [ + "D", # documentation + "ANN101", "ANN102", "ANN401", # self / cls / Any annotations + "BLE001", # blind exceptions + "TD", # todo syntax + "FIX002", # existing todos + "PLR0913", # too many arguments + "DTZ005", # datetime timezone + "S301", # bandit: pickle + "S603", "S607", # bandit: subprocess + "COM812", "ISC001", # conflict with formatter +] +[tool.ruff.lint.per-file-ignores] +"tests/**/*.py" = [ + "S101", # allow asserts + "T201" # allow prints +] [tool.pyright] include = ["src", "tests"] diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 4777c8f1..37b3cb4b 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -163,15 +163,13 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ status = None while True: if status is None or status.state == "running": - status = AgentStatus.from_result( - result=ctx.wait(5), worker_global_ranks=worker_global_ranks - ) + status = AgentStatus.from_result(ctx.wait(5)) agent_statuses = launcher_agent_group.sync_agent_statuses(status=status) - if all(s.state == "done" for s in agent_statuses): - break - elif any(s.state == "failed" for s in agent_statuses): + all_done = all(s.state == "done" for s in agent_statuses) + any_failed = any(s.state == "failed" for s in agent_statuses) + if all_done or any_failed: break finally: ctx.close() diff --git a/src/torchrunx/environment.py b/src/torchrunx/environment.py index edf1431d..179cfb8d 100644 --- a/src/torchrunx/environment.py +++ b/src/torchrunx/environment.py @@ -17,7 +17,9 @@ def slurm_hosts() -> list[str]: :rtype: list[str] """ # TODO: sanity check SLURM variables, commands - assert in_slurm_job() + if not in_slurm_job(): + msg = "Not in a SLURM job" + raise RuntimeError(msg) return ( subprocess.check_output(["scontrol", "show", "hostnames", os.environ["SLURM_JOB_NODELIST"]]) .decode() @@ -35,15 +37,18 @@ def slurm_workers() -> int: :rtype: int """ # TODO: sanity check SLURM variables, commands - assert in_slurm_job() + if not in_slurm_job(): + msg = "Not in a SLURM job" + raise RuntimeError(msg) + if "SLURM_JOB_GPUS" in os.environ: # TODO: is it possible to allocate uneven GPUs across nodes? return len(os.environ["SLURM_JOB_GPUS"].split(",")) - elif "SLURM_GPUS_PER_NODE" in os.environ: + if "SLURM_GPUS_PER_NODE" in os.environ: return int(os.environ["SLURM_GPUS_PER_NODE"]) - else: - # TODO: should we assume that we plan to do one worker per CPU? - return int(os.environ["SLURM_CPUS_ON_NODE"]) + + # TODO: should we assume that we plan to do one worker per CPU? 
+ return int(os.environ["SLURM_CPUS_ON_NODE"]) def auto_hosts() -> list[str]: diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 58011ae0..4c826a6c 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -5,6 +5,7 @@ import itertools import logging import os +import shlex import socket import subprocess import sys @@ -31,13 +32,14 @@ def resolve_hostnames(hostnames: list[str] | Literal["auto", "slurm"]) -> list[str]: if hostnames == "auto": return auto_hosts() - elif hostnames == "slurm": + if hostnames == "slurm": return slurm_hosts() return hostnames def resolve_workers_per_host( - workers_per_host: int | list[int] | Literal["auto", "slurm"], num_hosts: int + workers_per_host: int | list[int] | Literal["auto", "slurm"], + num_hosts: int, ) -> list[int]: if workers_per_host == "auto": workers_per_host = auto_workers() @@ -46,8 +48,9 @@ def resolve_workers_per_host( if isinstance(workers_per_host, int): workers_per_host = [workers_per_host] * num_hosts - else: - assert len(workers_per_host) == num_hosts + elif len(workers_per_host) != num_hosts: + msg = "len(workers_per_host) != len(hostnames)" + raise ValueError(msg) return workers_per_host @@ -70,13 +73,53 @@ def build_logging_server( log_level=log_level, ) - log_receiver = LogRecordSocketReceiver( + return LogRecordSocketReceiver( host=launcher_hostname, port=get_open_port(), handlers=log_handlers, ) - return log_receiver + +def build_command( + launcher_hostname: str, + launcher_port: int, + logger_port: int, + world_size: int, + rank: int, + env_vars: Sequence[str], + env_file: str | os.PathLike | None, +) -> str: + # shlex.quote prevents shell injection here (resolves S602 in execute_command) + + commands = [] + + current_dir = shlex.quote(str(Path.cwd())) + commands.append("cd " + current_dir) + + env_exports = [] + for k, v in os.environ.items(): + if any(fnmatch.fnmatch(k, e) for e in env_vars): + env_exports.append(shlex.quote(f"{k}={v}")) + + if len(env_exports) > 0: + commands.append("export " + " ".join(env_exports)) + + if env_file is not None: + commands.append("source " + shlex.quote(str(env_file))) + + python = shlex.quote(sys.executable) + launcher_hostname = shlex.quote(launcher_hostname) + + commands.append( + f"{python} -u -m torchrunx " + f"--launcher-hostname {launcher_hostname} " + f"--launcher-port {launcher_port} " + f"--logger-port {logger_port} " + f"--world-size {world_size} " + f"--rank {rank}", + ) + + return " && ".join(commands) def is_localhost(hostname_or_ip: str) -> bool: @@ -99,51 +142,17 @@ def execute_command( ssh_config_file: str | os.PathLike | None = None, ) -> None: if is_localhost(hostname): - subprocess.Popen(command, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + # S602: subprocess.Popen is called with shell=True (https://docs.python.org/3.8/library/subprocess.html#security-considerations) + # Made sure to shlex.quote arguments in build_command to prevent shell injection + subprocess.Popen(command, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) # noqa: S602 else: with fabric.Connection( - host=hostname, config=fabric.Config(runtime_ssh_path=ssh_config_file) + host=hostname, + config=fabric.Config(runtime_ssh_path=ssh_config_file), ) as conn: conn.run(f"{command} >> /dev/null 2>&1 &", asynchronous=True) -def build_command( - launcher_hostname: str, - launcher_port: int, - logger_port: int, - world_size: int, - rank: int, - env_vars: Sequence[str], - env_file: str | os.PathLike | None, -) -> str: - current_dir = 
os.getcwd() - - env_exports = [] - for k, v in os.environ.items(): - if any(fnmatch.fnmatch(k, e) for e in env_vars): - env_exports.append(f"{k}={v}") - - env_export_string = "" - if len(env_exports) > 0: - env_export_string = f"export {' '.join(env_exports)} && " - - env_file_string = "" - if env_file is not None: - env_file_string = f"source {env_file} && " - - return ( - f"cd {current_dir} && " - f"{env_export_string}" - f"{env_file_string}" - f"{sys.executable} -u -m torchrunx " - f"--launcher-hostname {launcher_hostname} " - f"--launcher-port {launcher_port} " - f"--logger-port {logger_port} " - f"--world-size {world_size} " - f"--rank {rank}" - ) - - @dataclass class Launcher: hostnames: list[str] | Literal["auto", "slurm"] = "auto" @@ -184,7 +193,8 @@ def run( :rtype: dict[int, Any] """ if not dist.is_available(): - raise RuntimeError("The torch.distributed package is not available.") + msg = "The torch.distributed package is not available." + raise RuntimeError(msg) hostnames = resolve_hostnames(self.hostnames) workers_per_host = resolve_workers_per_host(self.workers_per_host, len(hostnames)) @@ -201,7 +211,7 @@ def run( hostnames=hostnames, workers_per_host=workers_per_host, log_dir=Path(os.environ.get("TORCHRUNX_LOG_DIR", "torchrunx_logs")), - log_level=logging._nameToLevel[os.environ.get("TORCHRUNX_LOG_LEVEL", "INFO")], + log_level=logging._nameToLevel[os.environ.get("TORCHRUNX_LOG_LEVEL", "INFO")], # noqa: SLF001 ) log_process = Process( @@ -228,7 +238,7 @@ def run( ssh_config_file=self.ssh_config_file, ) - # initialize launcher–agent process group + # initialize launcher-agent process group # ranks = (launcher, agent_{hostnames[0]}, ..., agent[-1]) launcher_agent_group = LauncherAgentGroup( @@ -240,7 +250,7 @@ def run( # build and sync payloads between launcher and agents - _cumulative_workers = [0] + list(itertools.accumulate(workers_per_host)) + _cumulative_workers = [0, *itertools.accumulate(workers_per_host)] worker_global_ranks = [ list(range(_cumulative_workers[n], _cumulative_workers[n + 1])) @@ -262,14 +272,14 @@ def run( try: while True: - agent_statuses = launcher_agent_group.sync_agent_statuses(status=None) # raises exception if communication timeout due to death of any agent + agent_statuses = launcher_agent_group.sync_agent_statuses(status=None) + # raises exception if any agent failed for s in agent_statuses: - if s.state == "failed": - for value in s.return_values.values(): - if isinstance(value, WorkerException): - raise value.exception + for value in s.return_values.values(): + if isinstance(value, WorkerException): + raise value.exception if all(s.state == "done" for s in agent_statuses): break diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 20a0051b..36ec67b5 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -2,7 +2,7 @@ import datetime import logging -import os +import os # noqa: TCH003 import pickle import struct from contextlib import redirect_stderr, redirect_stdout @@ -52,11 +52,11 @@ def file_handlers( ) -> list[Handler]: handlers = [] - os.makedirs(log_dir, exist_ok=True) + Path(log_dir).mkdir(parents=True, exist_ok=True) timestamp = datetime.datetime.now().isoformat(timespec="seconds") for hostname, num_workers in zip(hostnames, workers_per_host): - for rank in [None] + list(range(num_workers)): + for rank in [None, *range(num_workers)]: file_path = ( f"{log_dir}/{timestamp}-{hostname}" + (f"[{rank}]" if rank is not None else "") @@ -74,8 +74,8 @@ def stream_handler(hostname: str, 
rank: int | None, log_level: int = logging.NOT logging.Formatter( "%(asctime)s:%(levelname)s:%(hostname)s[%(worker_rank)s]: %(message)s" if rank is not None - else "%(asctime)s:%(levelname)s:%(hostname)s: %(message)s" - ) + else "%(asctime)s:%(levelname)s:%(hostname)s: %(message)s", + ), ) return handler @@ -89,7 +89,8 @@ def default_handlers( return [ stream_handler(hostname=hostnames[0], rank=None, log_level=log_level), stream_handler(hostname=hostnames[0], rank=0, log_level=log_level), - ] + file_handlers(hostnames, workers_per_host, log_dir=log_dir, log_level=log_level) + *file_handlers(hostnames, workers_per_host, log_dir=log_dir, log_level=log_level), + ] ## Agent/worker utilities @@ -128,11 +129,11 @@ def flush(self) -> None: super().flush() value = self.getvalue() if value != "": - self.logger.log(self.level, f"\n{value}") + self.logger.log(self.level, value) self.truncate(0) self.seek(0) - logging.captureWarnings(True) + logging.captureWarnings(capture=True) redirect_stderr(_LoggingStream(logger, level=logging.ERROR)).__enter__() redirect_stdout(_LoggingStream(logger, level=logging.INFO)).__enter__() @@ -148,8 +149,9 @@ def __init__(self, host: str, port: int, handlers: list[Handler]) -> None: class _LogRecordStreamHandler(StreamRequestHandler): def handle(self) -> None: while True: - chunk = self.connection.recv(4) - if len(chunk) < 4: + chunk_size = 4 + chunk = self.connection.recv(chunk_size) + if len(chunk) < chunk_size: break slen = struct.unpack(">L", chunk)[0] chunk = self.connection.recv(slen) @@ -157,7 +159,7 @@ def handle(self) -> None: chunk = chunk + self.connection.recv(slen - len(chunk)) obj = pickle.loads(chunk) record = logging.makeLogRecord(obj) - # + for handler in handlers: handler.handle(record) diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index 82274e6f..c2559fed 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -4,19 +4,20 @@ import socket from contextlib import closing from dataclasses import dataclass, field -from typing import Any, Callable, Literal +from typing import TYPE_CHECKING, Any, Callable, Literal import cloudpickle import torch.distributed as dist -from torch.distributed.elastic.multiprocessing.api import RunProcsResult from typing_extensions import Self +if TYPE_CHECKING: + from torch.distributed.elastic.multiprocessing.api import RunProcsResult + def get_open_port() -> int: with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: s.bind(("", 0)) - port = s.getsockname()[1] - return port + return s.getsockname()[1] @dataclass @@ -47,7 +48,7 @@ class AgentStatus: return_values: dict[int, Any | WorkerException] = field(default_factory=dict) @classmethod - def from_result(cls, result: RunProcsResult | None, worker_global_ranks: list[int]) -> Self: + def from_result(cls, result: RunProcsResult | None) -> Self: if result is None: return cls(state="running") @@ -85,27 +86,27 @@ def __post_init__(self) -> None: timeout=datetime.timedelta(seconds=30), ) - def _serialize(self, object: Any) -> bytes: - return cloudpickle.dumps(object) + def _serialize(self, obj: Any) -> bytes: + return cloudpickle.dumps(obj) def _deserialize(self, serialized: bytes) -> Any: return cloudpickle.loads(serialized) - def _all_gather(self, object: Any) -> list: + def _all_gather(self, obj: Any) -> list: """gather object from every rank to list on every rank""" - object_bytes = self._serialize(object) + object_bytes = self._serialize(obj) object_list = [b""] * self.world_size dist.all_gather_object(object_list=object_list, 
obj=object_bytes, group=self.group) - object_list = [self._deserialize(o) for o in object_list] - return object_list + return [self._deserialize(o) for o in object_list] def sync_payloads( - self, payload: LauncherPayload | AgentPayload + self, + payload: LauncherPayload | AgentPayload, ) -> tuple[LauncherPayload, list[AgentPayload]]: - payloads = self._all_gather(object=payload) + payloads = self._all_gather(payload) launcher_payload = payloads[0] agent_payloads = payloads[1:] return launcher_payload, agent_payloads def sync_agent_statuses(self, status: AgentStatus | None) -> list[AgentStatus]: - return self._all_gather(object=status)[1:] # [0] is launcher (status=None) + return self._all_gather(status)[1:] # [0] is launcher (status=None) diff --git a/tests/test_CI.py b/tests/test_ci.py similarity index 81% rename from tests/test_CI.py rename to tests/test_ci.py index bc3e2683..f72f3ef4 100644 --- a/tests/test_CI.py +++ b/tests/test_ci.py @@ -1,5 +1,6 @@ import os import tempfile +from pathlib import Path from typing import NoReturn import pytest @@ -13,10 +14,7 @@ def test_simple_localhost() -> None: def dist_func() -> torch.Tensor: rank = int(os.environ["RANK"]) - if rank == 0: - w = torch.rand((100, 100)) # in_dim, out_dim - else: - w = torch.zeros((100, 100)) + w = torch.rand((100, 100)) if rank == 0 else torch.zeros((100, 100)) dist.broadcast(w, 0) @@ -51,38 +49,38 @@ def dist_func() -> None: tmp = tempfile.mkdtemp() os.environ["TORCHRUNX_LOG_DIR"] = tmp + num_workers = 2 + trx.launch( func=dist_func, func_kwargs={}, - workers_per_host=2, + workers_per_host=num_workers, backend="gloo", ) log_files = next(os.walk(tmp), (None, None, []))[2] - assert len(log_files) == 3 + assert len(log_files) == num_workers + 1 for file in log_files: - with open(f"{tmp}/{file}") as f: + with Path(f"{tmp}/{file}").open() as f: contents = f.read() print(contents) if file.endswith("[0].log"): assert "worker rank: 0\n" in contents elif file.endswith("[1].log"): assert "worker rank: 1\n" in contents - # TODO ? 
- # else: - # assert "starting processes" in contents def test_error() -> None: def error_func() -> NoReturn: - raise ValueError("abcdefg") + msg = "abcdefg" + raise ValueError(msg) tmp = tempfile.mkdtemp() os.environ["TORCHRUNX_DIR"] = tmp - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError) as excinfo: # noqa: PT011 trx.launch( func=error_func, func_kwargs={}, diff --git a/tests/test_submitit.py b/tests/test_submitit.py index 225268d6..433e3382 100644 --- a/tests/test_submitit.py +++ b/tests/test_submitit.py @@ -40,7 +40,7 @@ def main() -> None: ) trainer = Trainer( - model=model, # type: ignore + model=model, args=training_arguments, train_dataset=train_dataset, ) diff --git a/tests/test_train.py b/tests/test_train.py index 9f637287..b654a8b7 100644 --- a/tests/test_train.py +++ b/tests/test_train.py @@ -6,20 +6,18 @@ def worker() -> None: import torch - class TwoLayerNN(torch.nn.Module): + class MLP(torch.nn.Module): def __init__(self) -> None: super().__init__() self.a = torch.nn.Linear(10, 10, bias=False) self.b = torch.nn.Linear(10, 1, bias=False) def forward(self, x: torch.Tensor) -> torch.Tensor: - a = self.a(x) - b = self.b(a) - return b + return self.b(self.a(x)) local_rank = int(os.environ["LOCAL_RANK"]) print("init model") - model = TwoLayerNN().to(local_rank) + model = MLP().to(local_rank) print("init ddp") ddp_model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[local_rank]) From e4ae2200865cb1fd33fb7f4c45a876051d6db28b Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Thu, 12 Sep 2024 18:04:39 -0400 Subject: [PATCH 59/63] refactoring worker args serialization --- src/torchrunx/agent.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 37b3cb4b..030316a3 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -14,7 +14,6 @@ import torch import torch.distributed as dist import torch.distributed.elastic.multiprocessing as dist_mp -from typing_extensions import Self from .logging_utils import log_records_to_socket, redirect_stdio_to_logger from .utils import ( @@ -41,16 +40,20 @@ class WorkerArgs: hostname: str timeout: int - def to_bytes(self) -> bytes: - return cloudpickle.dumps(self) + def serialize(self) -> SerializedWorkerArgs: + return SerializedWorkerArgs(worker_args=self) - @classmethod - def from_bytes(cls, serialized: bytes) -> Self: - return cloudpickle.loads(serialized) +class SerializedWorkerArgs: + def __init__(self, worker_args: WorkerArgs) -> None: + self.bytes = cloudpickle.dumps(worker_args) -def entrypoint(serialized_worker_args: bytes) -> Any | WorkerException: - worker_args = WorkerArgs.from_bytes(serialized_worker_args) + def deserialize(self) -> WorkerArgs: + return cloudpickle.loads(self.bytes) + + +def entrypoint(serialized_worker_args: SerializedWorkerArgs) -> Any | WorkerException: + worker_args: WorkerArgs = serialized_worker_args.deserialize() logger = logging.getLogger() @@ -147,7 +150,7 @@ def main(launcher_agent_group: LauncherAgentGroup, logger_hostname: str, logger_ world_size=worker_world_size, hostname=launcher_payload.hostnames[agent_rank], timeout=launcher_payload.timeout, - ).to_bytes(), + ).serialize(), ) for i in range(num_workers) }, From 9c97d09fb362a3f15ac8c7e563d5ea6ca29993de Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Thu, 12 Sep 2024 18:38:31 -0400 Subject: [PATCH 60/63] WorkerLogRecord class --- src/torchrunx/__init__.py | 4 + src/torchrunx/agent.py | 2 +- src/torchrunx/logging_utils.py | 
219 ++++++++++++++++++--------------- src/torchrunx/utils.py | 2 +- 4 files changed, 124 insertions(+), 103 deletions(-) diff --git a/src/torchrunx/__init__.py b/src/torchrunx/__init__.py index 46b3b1b9..74214cb8 100644 --- a/src/torchrunx/__init__.py +++ b/src/torchrunx/__init__.py @@ -1,6 +1,10 @@ from .launcher import Launcher, launch +from .logging_utils import add_filter_to_handler, file_handler, stream_handler __all__ = [ "Launcher", "launch", + "add_filter_to_handler", + "file_handler", + "stream_handler", ] diff --git a/src/torchrunx/agent.py b/src/torchrunx/agent.py index 030316a3..04d1ec92 100644 --- a/src/torchrunx/agent.py +++ b/src/torchrunx/agent.py @@ -67,7 +67,7 @@ def entrypoint(serialized_worker_args: SerializedWorkerArgs) -> Any | WorkerExce redirect_stdio_to_logger(logger) - store = dist.TCPStore( # pyright: ignore[reportPrivateImportUsage] + store = dist.TCPStore( # pyright: ignore [reportPrivateImportUsage] host_name=worker_args.main_agent_hostname, port=worker_args.main_agent_port, world_size=worker_args.world_size, diff --git a/src/torchrunx/logging_utils.py b/src/torchrunx/logging_utils.py index 36ec67b5..d12b27f7 100644 --- a/src/torchrunx/logging_utils.py +++ b/src/torchrunx/logging_utils.py @@ -2,15 +2,115 @@ import datetime import logging -import os # noqa: TCH003 import pickle import struct from contextlib import redirect_stderr, redirect_stdout +from dataclasses import dataclass from io import StringIO from logging import Handler, Logger from logging.handlers import SocketHandler from pathlib import Path from socketserver import StreamRequestHandler, ThreadingTCPServer +from typing import TYPE_CHECKING + +from typing_extensions import Self + +if TYPE_CHECKING: + import os + +## Launcher utilities + + +class LogRecordSocketReceiver(ThreadingTCPServer): + def __init__(self, host: str, port: int, handlers: list[Handler]) -> None: + self.host = host + self.port = port + + class _LogRecordStreamHandler(StreamRequestHandler): + def handle(self) -> None: + while True: + chunk_size = 4 + chunk = self.connection.recv(chunk_size) + if len(chunk) < chunk_size: + break + slen = struct.unpack(">L", chunk)[0] + chunk = self.connection.recv(slen) + while len(chunk) < slen: + chunk = chunk + self.connection.recv(slen - len(chunk)) + obj = pickle.loads(chunk) + record = logging.makeLogRecord(obj) + + for handler in handlers: + handler.handle(record) + + super().__init__( + server_address=(host, port), + RequestHandlerClass=_LogRecordStreamHandler, + bind_and_activate=True, + ) + self.daemon_threads = True + + def shutdown(self) -> None: + """override BaseServer.shutdown() with added timeout""" + self._BaseServer__shutdown_request = True + self._BaseServer__is_shut_down.wait(timeout=3) # pyright: ignore[reportAttributeAccessIssue] + + +## Agent/worker utilities + + +@dataclass +class WorkerLogRecord(logging.LogRecord): + hostname: str + worker_rank: int | None + + @classmethod + def from_record(cls, record: logging.LogRecord, hostname: str, worker_rank: int | None) -> Self: + record.hostname = hostname + record.worker_rank = worker_rank + record.__class__ = cls + return record # pyright: ignore [reportReturnType] + + +def log_records_to_socket( + logger: Logger, + hostname: str, + worker_rank: int | None, + logger_hostname: str, + logger_port: int, +) -> None: + logger.setLevel(logging.NOTSET) + + old_factory = logging.getLogRecordFactory() + + def record_factory(*args, **kwargs) -> WorkerLogRecord: # noqa: ANN002, ANN003 + record = old_factory(*args, **kwargs) + return 
WorkerLogRecord.from_record(record, hostname, worker_rank) + + logging.setLogRecordFactory(record_factory) + + logger.addHandler(SocketHandler(host=logger_hostname, port=logger_port)) + + +def redirect_stdio_to_logger(logger: Logger) -> None: + class _LoggingStream(StringIO): + def __init__(self, logger: Logger, level: int = logging.NOTSET) -> None: + super().__init__() + self.logger = logger + self.level = level + + def flush(self) -> None: + super().flush() + value = self.getvalue() + if value != "": + self.logger.log(self.level, value) + self.truncate(0) + self.seek(0) + + logging.captureWarnings(capture=True) + redirect_stderr(_LoggingStream(logger, level=logging.ERROR)).__enter__() + redirect_stdout(_LoggingStream(logger, level=logging.INFO)).__enter__() + ## Handler utilities @@ -21,14 +121,27 @@ def add_filter_to_handler( worker_rank: int | None, log_level: int = logging.NOTSET, ) -> None: - def _filter(record: logging.LogRecord) -> bool: + def _filter(record: WorkerLogRecord) -> bool: return ( - record.hostname == hostname # pyright: ignore[reportAttributeAccessIssue] - and record.worker_rank == worker_rank # pyright: ignore[reportAttributeAccessIssue] + record.hostname == hostname + and record.worker_rank == worker_rank and record.levelno >= log_level ) - handler.addFilter(_filter) + handler.addFilter(_filter) # pyright: ignore [reportArgumentType] + + +def stream_handler(hostname: str, rank: int | None, log_level: int = logging.NOTSET) -> Handler: + handler = logging.StreamHandler() + add_filter_to_handler(handler, hostname, rank, log_level=log_level) + handler.setFormatter( + logging.Formatter( + "%(asctime)s:%(levelname)s:%(hostname)s[%(worker_rank)s]: %(message)s" + if rank is not None + else "%(asctime)s:%(levelname)s:%(hostname)s: %(message)s", + ), + ) + return handler def file_handler( @@ -67,19 +180,6 @@ def file_handlers( return handlers -def stream_handler(hostname: str, rank: int | None, log_level: int = logging.NOTSET) -> Handler: - handler = logging.StreamHandler() - add_filter_to_handler(handler, hostname, rank, log_level=log_level) - handler.setFormatter( - logging.Formatter( - "%(asctime)s:%(levelname)s:%(hostname)s[%(worker_rank)s]: %(message)s" - if rank is not None - else "%(asctime)s:%(levelname)s:%(hostname)s: %(message)s", - ), - ) - return handler - - def default_handlers( hostnames: list[str], workers_per_host: list[int], @@ -91,86 +191,3 @@ def default_handlers( stream_handler(hostname=hostnames[0], rank=0, log_level=log_level), *file_handlers(hostnames, workers_per_host, log_dir=log_dir, log_level=log_level), ] - - -## Agent/worker utilities - - -def log_records_to_socket( - logger: Logger, - hostname: str, - worker_rank: int | None, - logger_hostname: str, - logger_port: int, -) -> None: - logger.setLevel(logging.NOTSET) - - old_factory = logging.getLogRecordFactory() - - def record_factory(*args, **kwargs) -> logging.LogRecord: # noqa: ANN002, ANN003 - record = old_factory(*args, **kwargs) - record.hostname = hostname - record.worker_rank = worker_rank - return record - - logging.setLogRecordFactory(record_factory) - - logger.addHandler(SocketHandler(host=logger_hostname, port=logger_port)) - - -def redirect_stdio_to_logger(logger: Logger) -> None: - class _LoggingStream(StringIO): - def __init__(self, logger: Logger, level: int = logging.NOTSET) -> None: - super().__init__() - self.logger = logger - self.level = level - - def flush(self) -> None: - super().flush() - value = self.getvalue() - if value != "": - self.logger.log(self.level, value) - 
self.truncate(0) - self.seek(0) - - logging.captureWarnings(capture=True) - redirect_stderr(_LoggingStream(logger, level=logging.ERROR)).__enter__() - redirect_stdout(_LoggingStream(logger, level=logging.INFO)).__enter__() - - -## Launcher utilities - - -class LogRecordSocketReceiver(ThreadingTCPServer): - def __init__(self, host: str, port: int, handlers: list[Handler]) -> None: - self.host = host - self.port = port - - class _LogRecordStreamHandler(StreamRequestHandler): - def handle(self) -> None: - while True: - chunk_size = 4 - chunk = self.connection.recv(chunk_size) - if len(chunk) < chunk_size: - break - slen = struct.unpack(">L", chunk)[0] - chunk = self.connection.recv(slen) - while len(chunk) < slen: - chunk = chunk + self.connection.recv(slen - len(chunk)) - obj = pickle.loads(chunk) - record = logging.makeLogRecord(obj) - - for handler in handlers: - handler.handle(record) - - super().__init__( - server_address=(host, port), - RequestHandlerClass=_LogRecordStreamHandler, - bind_and_activate=True, - ) - self.daemon_threads = True - - def shutdown(self) -> None: - """override BaseServer.shutdown() with added timeout""" - self._BaseServer__shutdown_request = True - self._BaseServer__is_shut_down.wait(timeout=3) # pyright: ignore[reportAttributeAccessIssue] diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index c2559fed..1bd25c52 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -77,7 +77,7 @@ def __post_init__(self) -> None: backend="gloo", world_size=self.world_size, rank=self.rank, - store=dist.TCPStore( # pyright: ignore[reportPrivateImportUsage] + store=dist.TCPStore( # pyright: ignore [reportPrivateImportUsage] host_name=self.launcher_hostname, port=self.launcher_port, world_size=self.world_size, From b8c68d6237000be447917bd62231300bad852bc6 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Thu, 12 Sep 2024 22:27:42 -0400 Subject: [PATCH 61/63] set version to 0.1.4 --- pixi.lock | 4 ++-- pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pixi.lock b/pixi.lock index e4fae5ca..e67beb9f 100644 --- a/pixi.lock +++ b/pixi.lock @@ -2601,9 +2601,9 @@ packages: requires_python: '>=3.8.0' - kind: pypi name: torchrunx - version: 0.2.0 + version: 0.1.4 path: . 
- sha256: 1753f43bee54bc0da38cdd524dc501c0c2be9fbaaa7036bced9c9d03a7a8e810 + sha256: de986bf47e1c379e4de6b10ca352715d708bb5f9b4cfc8736e9ee592db5fe1ae requires_dist: - cloudpickle>=3.0.0 - fabric>=3.0.0 diff --git a/pyproject.toml b/pyproject.toml index 33acfaeb..b5f3866e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "torchrunx" -version = "0.2.0" +version = "0.1.4" authors = [ {name = "Apoorv Khandelwal", email = "mail@apoorvkh.com"}, {name = "Peter Curtin", email = "peter_curtin@brown.edu"}, From 23aaae67242787418edd90f69b7126c0be15390e Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Thu, 12 Sep 2024 22:57:51 -0400 Subject: [PATCH 62/63] larger try-catch wrapper in Launcher --- src/torchrunx/launcher.py | 131 ++++++++++++++++++++------------------ src/torchrunx/utils.py | 5 ++ 2 files changed, 74 insertions(+), 62 deletions(-) diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index 4c826a6c..bc183665 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -203,79 +203,83 @@ def run( launcher_port = get_open_port() world_size = len(hostnames) + 1 - # start logging server + log_receiver = None + log_process = None + launcher_agent_group = None - log_receiver = build_logging_server( - log_handlers=self.log_handlers, - launcher_hostname=launcher_hostname, - hostnames=hostnames, - workers_per_host=workers_per_host, - log_dir=Path(os.environ.get("TORCHRUNX_LOG_DIR", "torchrunx_logs")), - log_level=logging._nameToLevel[os.environ.get("TORCHRUNX_LOG_LEVEL", "INFO")], # noqa: SLF001 - ) - - log_process = Process( - target=log_receiver.serve_forever, - daemon=True, - ) + try: + # start logging server + + log_receiver = build_logging_server( + log_handlers=self.log_handlers, + launcher_hostname=launcher_hostname, + hostnames=hostnames, + workers_per_host=workers_per_host, + log_dir=Path(os.environ.get("TORCHRUNX_LOG_DIR", "torchrunx_logs")), + log_level=logging._nameToLevel[os.environ.get("TORCHRUNX_LOG_LEVEL", "INFO")], # noqa: SLF001 + ) - log_process.start() - - # start agents on each node - - for i, hostname in enumerate(hostnames): - execute_command( - command=build_command( - launcher_hostname=launcher_hostname, - launcher_port=launcher_port, - logger_port=log_receiver.port, - world_size=world_size, - rank=i + 1, - env_vars=self.env_vars, - env_file=self.env_file, - ), - hostname=hostname, - ssh_config_file=self.ssh_config_file, + log_process = Process( + target=log_receiver.serve_forever, + daemon=True, ) - # initialize launcher-agent process group - # ranks = (launcher, agent_{hostnames[0]}, ..., agent[-1]) + log_process.start() - launcher_agent_group = LauncherAgentGroup( - launcher_hostname=launcher_hostname, - launcher_port=launcher_port, - world_size=world_size, - rank=0, - ) + # start agents on each node - # build and sync payloads between launcher and agents + for i, hostname in enumerate(hostnames): + execute_command( + command=build_command( + launcher_hostname=launcher_hostname, + launcher_port=launcher_port, + logger_port=log_receiver.port, + world_size=world_size, + rank=i + 1, + env_vars=self.env_vars, + env_file=self.env_file, + ), + hostname=hostname, + ssh_config_file=self.ssh_config_file, + ) - _cumulative_workers = [0, *itertools.accumulate(workers_per_host)] + # initialize launcher-agent process group + # ranks = (launcher, agent_{hostnames[0]}, ..., agent[-1]) - worker_global_ranks = [ - list(range(_cumulative_workers[n], _cumulative_workers[n + 1])) - for n in 
range(len(hostnames)) - ] + launcher_agent_group = LauncherAgentGroup( + launcher_hostname=launcher_hostname, + launcher_port=launcher_port, + world_size=world_size, + rank=0, + ) - payload = LauncherPayload( - fn=partial(func, *(func_args or ()), **(func_kwargs or {})), - hostnames=hostnames, - worker_global_ranks=worker_global_ranks, - worker_world_size=sum(workers_per_host), - backend=self.backend, - timeout=self.timeout, - ) + # build and sync payloads between launcher and agents - launcher_payload, agent_payloads = launcher_agent_group.sync_payloads(payload=payload) + _cumulative_workers = [0, *itertools.accumulate(workers_per_host)] - # loop to monitor agent statuses (until failed or done) + worker_global_ranks = [ + list(range(_cumulative_workers[n], _cumulative_workers[n + 1])) + for n in range(len(hostnames)) + ] + + payload = LauncherPayload( + fn=partial(func, *(func_args or ()), **(func_kwargs or {})), + hostnames=hostnames, + worker_global_ranks=worker_global_ranks, + worker_world_size=sum(workers_per_host), + backend=self.backend, + timeout=self.timeout, + ) + + launcher_payload, agent_payloads = launcher_agent_group.sync_payloads(payload=payload) + + # loop to monitor agent statuses (until failed or done) - try: while True: - # raises exception if communication timeout due to death of any agent + # raises RuntimeError if communication timeout due to death of any agent agent_statuses = launcher_agent_group.sync_agent_statuses(status=None) - # raises exception if any agent failed + # raises specific exception if any agent fails for s in agent_statuses: for value in s.return_values.values(): if isinstance(value, WorkerException): @@ -294,10 +298,13 @@ def run( ) raise finally: - log_receiver.shutdown() - log_receiver.server_close() - log_process.kill() - dist.destroy_process_group() + if log_receiver is not None: + log_receiver.shutdown() + log_receiver.server_close() + if log_process is not None: + log_process.kill() + if launcher_agent_group is not None: + launcher_agent_group.shutdown() return { hostname: agent_status.return_values diff --git a/src/torchrunx/utils.py b/src/torchrunx/utils.py index 1bd25c52..0fafec9d 100644 --- a/src/torchrunx/utils.py +++ b/src/torchrunx/utils.py @@ -73,6 +73,7 @@ class LauncherAgentGroup: rank: int def __post_init__(self) -> None: + # timeout will raise torch.distributed.DistStoreError self.group = dist.init_process_group( backend="gloo", world_size=self.world_size, @@ -96,6 +97,7 @@ def _all_gather(self, obj: Any) -> list: """gather object from every rank to list on every rank""" object_bytes = self._serialize(obj) object_list = [b""] * self.world_size + # raises RuntimeError if timeout dist.all_gather_object(object_list=object_list, obj=object_bytes, group=self.group) return [self._deserialize(o) for o in object_list] @@ -110,3 +112,6 @@ def sync_payloads( def sync_agent_statuses(self, status: AgentStatus | None) -> list[AgentStatus]: return self._all_gather(status)[1:] # [0] is launcher (status=None) + + def shutdown(self) -> None: + dist.destroy_process_group(group=self.group) From f46486d3d0c8f0619fdc3a7a678abdc0adcc9d57 Mon Sep 17 00:00:00 2001 From: apoorvkh Date: Thu, 12 Sep 2024 23:18:54 -0400 Subject: [PATCH 63/63] always terminate agent processes --- src/torchrunx/launcher.py | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/src/torchrunx/launcher.py b/src/torchrunx/launcher.py index bc183665..fa73ae04 100644 --- a/src/torchrunx/launcher.py +++ b/src/torchrunx/launcher.py @@ -173,7 
+173,7 @@ class Launcher: env_file: str | os.PathLike | None = None timeout: int = 600 - def run( + def run( # noqa: C901, PLR0912 self, func: Callable, func_args: tuple[Any] | None = None, @@ -206,6 +206,7 @@ def run( log_receiver = None log_process = None launcher_agent_group = None + agent_payloads = None try: # start logging server @@ -287,25 +288,25 @@ def run( if all(s.state == "done" for s in agent_statuses): break - - except: - # cleanup: SIGTERM all agents - for agent_payload, agent_hostname in zip(agent_payloads, hostnames): - execute_command( - command=f"kill {agent_payload.process_id}", - hostname=agent_hostname, - ssh_config_file=self.ssh_config_file, - ) - raise finally: if log_receiver is not None: log_receiver.shutdown() - log_receiver.server_close() - if log_process is not None: - log_process.kill() + if log_process is not None: + log_receiver.server_close() + log_process.kill() + if launcher_agent_group is not None: launcher_agent_group.shutdown() + # cleanup: SIGTERM all agents + if agent_payloads is not None: + for agent_payload, agent_hostname in zip(agent_payloads, hostnames): + execute_command( + command=f"kill {agent_payload.process_id}", + hostname=agent_hostname, + ssh_config_file=self.ssh_config_file, + ) + return { hostname: agent_status.return_values for hostname, agent_status in zip(hostnames, agent_statuses)