From 20ad27933f3879993e93667a732c1dc3cf4a66b8 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Tue, 10 Mar 2026 05:12:50 +0000 Subject: [PATCH 01/28] megatron: integrate lora grad sync with finalize_model_grads --- src/art/megatron/finalize_grads.py | 112 +++++ src/art/megatron/lora.py | 615 +++++++++++++++++++-------- src/art/megatron/train.py | 642 ++++++++++++++++++----------- 3 files changed, 957 insertions(+), 412 deletions(-) create mode 100644 src/art/megatron/finalize_grads.py diff --git a/src/art/megatron/finalize_grads.py b/src/art/megatron/finalize_grads.py new file mode 100644 index 00000000..8c496667 --- /dev/null +++ b/src/art/megatron/finalize_grads.py @@ -0,0 +1,112 @@ +from collections import defaultdict +from collections.abc import Iterable +from typing import Any, Literal, cast + +from megatron.core import parallel_state as ps +from megatron.core.distributed.finalize_model_grads import finalize_model_grads +import torch +from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors + +GradSyncDomain = Literal["tp_default", "expert_tp"] +GradSyncOp = Literal["none", "avg"] + +TP_DEFAULT_GRAD_SYNC_DOMAIN: GradSyncDomain = "tp_default" +EXPERT_TP_GRAD_SYNC_DOMAIN: GradSyncDomain = "expert_tp" +GRAD_SYNC_OP_NONE: GradSyncOp = "none" +GRAD_SYNC_OP_AVG: GradSyncOp = "avg" +VALID_DOMAINS = (TP_DEFAULT_GRAD_SYNC_DOMAIN, EXPERT_TP_GRAD_SYNC_DOMAIN) +VALID_SYNC_OPS = (GRAD_SYNC_OP_NONE, GRAD_SYNC_OP_AVG) + + +def _iter_named_trainable_parameters( + model: list[torch.nn.Module], +) -> Iterable[tuple[str, torch.nn.Parameter]]: + seen: set[int] = set() + for chunk_index, model_chunk in enumerate(model): + for name, param in model_chunk.named_parameters(): + if not param.requires_grad: + continue + param_id = id(param) + if param_id in seen: + continue + seen.add(param_id) + yield f"chunk{chunk_index}.{name}", param + + +def _resolve_domain_group( + domain: GradSyncDomain, +) -> torch.distributed.ProcessGroup | None: + if domain == 
TP_DEFAULT_GRAD_SYNC_DOMAIN: + return None + if domain != EXPERT_TP_GRAD_SYNC_DOMAIN: + raise RuntimeError(f"Unknown grad sync domain: {domain}") + + group = ps.get_expert_tensor_parallel_group(check_initialized=False) + if group is None: + return None + if group.size() <= 1: + return None + return group + + +def _resolve_reduce_op(op: GradSyncOp) -> Any: + if op == GRAD_SYNC_OP_AVG: + return torch.distributed.ReduceOp.AVG + raise RuntimeError(f"Unknown grad sync op: {op}") + + +def finalize_model_grads_extended(model: list[torch.nn.Module]) -> None: + """Run Megatron finalize, then apply non-default grad-sync reductions. + + Megatron finalize handles DP/CP (and expert-DP via `param.allreduce=False`) internally. + This extension only handles extra reductions outside Megatron's default TP path, + currently expert-TP reductions for params annotated with grad_sync_* metadata. + """ + finalize_model_grads(model) + + buckets: dict[ + tuple[GradSyncDomain, GradSyncOp, torch.dtype, torch.device], + list[tuple[str, torch.Tensor]], + ] = defaultdict(list) + + for name, param in _iter_named_trainable_parameters(model): + domain: GradSyncDomain = getattr( + param, "grad_sync_domain", TP_DEFAULT_GRAD_SYNC_DOMAIN + ) + if domain == TP_DEFAULT_GRAD_SYNC_DOMAIN: + continue + if domain not in VALID_DOMAINS: + raise RuntimeError(f"{name}: unsupported grad_sync_domain={domain}") + + op: GradSyncOp = getattr(param, "grad_sync_op", GRAD_SYNC_OP_NONE) + if op not in VALID_SYNC_OPS: + raise RuntimeError(f"{name}: unsupported grad_sync_op={op}") + if op == GRAD_SYNC_OP_NONE: + continue + + if not hasattr(param, "main_grad"): + raise RuntimeError( + f"{name}: expected main_grad for domain={domain} reduce_op={op}, but attribute is missing" + ) + grad = param.main_grad + if grad is None: + raise RuntimeError( + f"{name}: expected non-None main_grad for domain={domain} reduce_op={op}" + ) + local_grad = cast( + torch.Tensor, grad._local_tensor if hasattr(grad, "_local_tensor") else grad + ) 
+ buckets[(domain, op, local_grad.dtype, local_grad.device)].append( + (name, local_grad) + ) + + for (domain, op, _dtype, _device), entries in buckets.items(): + group = _resolve_domain_group(domain) + if group is None: + continue + + grads = [grad for _name, grad in entries] + coalesced = _flatten_dense_tensors(grads) + torch.distributed.all_reduce(coalesced, op=_resolve_reduce_op(op), group=group) + for grad, synced in zip(grads, _unflatten_dense_tensors(coalesced, grads)): + grad.copy_(synced) diff --git a/src/art/megatron/lora.py b/src/art/megatron/lora.py index 3ba97a77..12a38dec 100644 --- a/src/art/megatron/lora.py +++ b/src/art/megatron/lora.py @@ -1,5 +1,6 @@ +from collections.abc import Sequence import math -from typing import Sequence +from typing import Any, Literal from megatron.bridge.models.gpt_provider import GPTModelProvider from megatron.core import parallel_state as ps @@ -9,12 +10,111 @@ TERowParallelGroupedLinear, TERowParallelLinear, ) +from megatron.core.tensor_parallel.mappings import ( + reduce_from_tensor_model_parallel_region, + reduce_scatter_to_sequence_parallel_region, +) from megatron.core.transformer.attention import SelfAttention from megatron.core.transformer.moe import grouped_gemm_util from megatron.core.transformer.moe.experts import TEGroupedMLP from megatron.core.transformer.transformer_layer import TransformerLayer +from pydantic import BaseModel, ConfigDict import torch +ShardDomain = Literal["tp", "expert_tp"] +GradSyncDomain = Literal["tp_default", "expert_tp"] +GradSyncOp = Literal["none", "avg"] + +TP_DEFAULT_GRAD_SYNC_DOMAIN: GradSyncDomain = "tp_default" +EXPERT_TP_GRAD_SYNC_DOMAIN: GradSyncDomain = "expert_tp" +GRAD_SYNC_OP_NONE: GradSyncOp = "none" +GRAD_SYNC_OP_AVG: GradSyncOp = "avg" + + +class LoRAParallelSpec(BaseModel): + # This spec only describes TP / expert-TP behavior. + # DP/CP vs expert-DP behavior is selected separately via `allreduce`. 
+ model_config = ConfigDict(frozen=True) + + shard_domain: ShardDomain = "tp" + sharded: bool = False + shard_axis: int | None = None + grad_sync_domain: GradSyncDomain = TP_DEFAULT_GRAD_SYNC_DOMAIN + grad_sync_op: GradSyncOp = GRAD_SYNC_OP_NONE + + +def _distributed_initialized() -> bool: + return torch.distributed.is_available() and torch.distributed.is_initialized() + + +def _get_shard_world_size(domain: ShardDomain) -> int: + if not _distributed_initialized(): + return 1 + if domain == "tp": + return ps.get_tensor_model_parallel_world_size() + group = ps.get_expert_tensor_parallel_group(check_initialized=False) + if group is None: + return 1 + return group.size() + + +def _get_shard_rank(domain: ShardDomain) -> int: + if not _distributed_initialized(): + return 0 + if domain == "tp": + return ps.get_tensor_model_parallel_rank() + group = ps.get_expert_tensor_parallel_group(check_initialized=False) + if group is None: + return 0 + return group.rank() + + +def _get_shard_group(domain: ShardDomain) -> torch.distributed.ProcessGroup | None: + if not _distributed_initialized(): + return None + if domain == "tp": + return ps.get_tensor_model_parallel_group() + return ps.get_expert_tensor_parallel_group(check_initialized=False) + + +def _normalize_axis(axis: int, ndim: int) -> int: + if axis < 0: + axis += ndim + if axis < 0 or axis >= ndim: + raise ValueError(f"Invalid shard axis {axis} for tensor ndim={ndim}") + return axis + + +def _set_lora_parallel_metadata( + param: torch.nn.Parameter, + *, + parallel_spec: LoRAParallelSpec, + allreduce: bool, +) -> None: + replicated = not parallel_spec.sharded + setattr(param, "lora_shard_domain", parallel_spec.shard_domain) + setattr(param, "lora_tp_sharded", parallel_spec.sharded) + setattr(param, "lora_tp_replicated", replicated) + setattr(param, "lora_tp_shard_axis", parallel_spec.shard_axis) + setattr(param, "grad_sync_domain", parallel_spec.grad_sync_domain) + setattr(param, "grad_sync_op", parallel_spec.grad_sync_op) + 
# Megatron DDP routing flag: + # - allreduce=True: sync with regular DP/CP replicas. + # - allreduce=False: sync with expert-DP replicas. + # TP / expert-TP replica handling is controlled by grad_sync_* metadata. + setattr(param, "allreduce", allreduce) + + # Megatron's native TP finalize path consumes this attr. + setattr( + param, + "average_gradients_across_tp_domain", + ( + replicated + and parallel_spec.grad_sync_domain == TP_DEFAULT_GRAD_SYNC_DOMAIN + and parallel_spec.grad_sync_op == GRAD_SYNC_OP_AVG + ), + ) + class LoRA(torch.nn.Module): def __init__( @@ -27,6 +127,9 @@ def __init__( dtype: torch.dtype, device: torch.device, num_local_experts: int = 1, + a_parallel_spec: LoRAParallelSpec = LoRAParallelSpec(), + b_parallel_spec: LoRAParallelSpec = LoRAParallelSpec(), + allreduce: bool = True, ) -> None: super().__init__() assert num_local_experts == 1 or "{expert}" in adapter_model_prefix, ( @@ -44,6 +147,16 @@ def __init__( num_local_experts, rank, out_features, dtype=dtype, device=device ).squeeze(0) ) + _set_lora_parallel_metadata( + self.A_T, + parallel_spec=a_parallel_spec, + allreduce=allreduce, + ) + _set_lora_parallel_metadata( + self.B_T, + parallel_spec=b_parallel_spec, + allreduce=allreduce, + ) self._expert_offset = ps.get_expert_model_parallel_rank() * num_local_experts self.reset_lora_parameters() @@ -51,6 +164,21 @@ def __init__( def num_local_experts(self) -> int: return self.A_T.shape[0] if self.A_T.ndim == 3 else 1 + def _broadcast_if_replicated(self, param: torch.nn.Parameter) -> None: + if not getattr(param, "lora_tp_replicated", False): + return + domain = getattr(param, "lora_shard_domain") + world_size = _get_shard_world_size(domain) + if world_size <= 1: + return + group = _get_shard_group(domain) + if group is None: + raise RuntimeError( + f"{self.adapter_model_prefix}: missing process group for replicated parameter domain={domain}" + ) + src = torch.distributed.get_global_rank(group, 0) + torch.distributed.broadcast(param.data, 
src=src, group=group) + def reset_lora_parameters(self) -> None: """Initialize LoRA weights (A=Kaiming, B=zeros) like PEFT defaults.""" if self.A_T.ndim == 3: @@ -59,22 +187,38 @@ def reset_lora_parameters(self) -> None: else: torch.nn.init.kaiming_uniform_(self.A_T.T, a=math.sqrt(5)) torch.nn.init.zeros_(self.B_T) + self._broadcast_if_replicated(self.A_T) + self._broadcast_if_replicated(self.B_T) + + def _expected_weight_keys(self, suffix: str) -> list[str]: + if self.num_local_experts > 1: + return [ + f"{self.adapter_model_prefix.format(expert=expert + self._expert_offset)}.{suffix}.weight" + for expert in range(self.num_local_experts) + ] + return [f"{self.adapter_model_prefix}.{suffix}.weight"] def load_lora(self, adapter_model: dict[str, torch.Tensor]) -> None: - try: - self.load_weights( - adapter_model, - suffix="lora_A", - into=self.A_T, + missing_keys = [ + key + for suffix in ("lora_A", "lora_B") + for key in self._expected_weight_keys(suffix) + if key not in adapter_model + ] + if missing_keys: + raise KeyError( + f"Missing LoRA adapter keys for {self.adapter_model_prefix}: {sorted(missing_keys)}" ) - self.load_weights( - adapter_model, - suffix="lora_B", - into=self.B_T, - ) - except KeyError: - print("Unable to find LoRA weights for", self.adapter_model_prefix) - self.reset_lora_parameters() + self.load_weights( + adapter_model, + suffix="lora_A", + into=self.A_T, + ) + self.load_weights( + adapter_model, + suffix="lora_B", + into=self.B_T, + ) def load_weights( self, @@ -83,65 +227,111 @@ def load_weights( suffix: str, into: torch.nn.Parameter, ) -> None: - self.load_weight( - ( - torch.stack( - [ - adapter_model[ - f"{self.adapter_model_prefix.format(expert=expert + self._expert_offset)}.{suffix}.weight" - ].T - for expert in range(self.num_local_experts) - ] - ) - if self.num_local_experts > 1 - else adapter_model[f"{self.adapter_model_prefix}.{suffix}.weight"].T - ), - into=into, - ) + keys = self._expected_weight_keys(suffix) + if 
self.num_local_experts > 1: + weight = torch.stack([adapter_model[key].T for key in keys]) + else: + weight = adapter_model[keys[0]].T + self.load_weight(weight, into=into) def load_weight(self, weight: torch.Tensor, *, into: torch.nn.Parameter) -> None: - setattr(into, "sharded", False) - tp_world_size = ps.get_tensor_model_parallel_world_size() - tp_rank = ps.get_tensor_model_parallel_rank() - for axis in (-2, -1): - if weight.shape[axis] == into.shape[axis]: - continue - # assume our param is tensor sharded along this axis - assert weight.shape[axis] // tp_world_size == into.shape[axis], ( - f"Weight shape {weight.shape} does not match into shape {into.shape} along axis {axis}" + domain = getattr(into, "lora_shard_domain") + sharded = bool(getattr(into, "lora_tp_sharded")) + if sharded: + axis = getattr(into, "lora_tp_shard_axis") + if axis is None: + raise RuntimeError( + f"{self.adapter_model_prefix}: missing shard axis for sharded parameter" + ) + axis = _normalize_axis(axis, weight.ndim) + world_size = _get_shard_world_size(domain) + rank = _get_shard_rank(domain) + if weight.shape[axis] % world_size != 0: + raise ValueError( + f"{self.adapter_model_prefix}: weight shape {tuple(weight.shape)} is not divisible by world size " + f"{world_size} on axis {axis}" + ) + local_size = weight.shape[axis] // world_size + if into.shape[axis] != local_size: + raise ValueError( + f"{self.adapter_model_prefix}: expected local shard size {into.shape[axis]}, got {local_size}" + ) + weight = weight.narrow(axis, rank * local_size, local_size) + elif tuple(weight.shape) != tuple(into.shape): + raise ValueError( + f"{self.adapter_model_prefix}: unsharded load shape mismatch, got {tuple(weight.shape)} " + f"expected {tuple(into.shape)}" ) - s = into.shape[axis] - weight = weight.narrow(axis, tp_rank * s, s) - setattr(into, "sharded", True) into.data.copy_(weight) into.requires_grad = True - def sharded_lora_state_dict(self) -> dict[str, torch.Tensor]: - if self.num_local_experts 
> 1: + def _should_export_parameter(self, param: torch.nn.Parameter) -> bool: + if self.num_local_experts > 1: # self is a MoE layer if ps.get_expert_data_parallel_rank() != 0: - return {} - return { - f"{self.adapter_model_prefix.format(expert=expert + self._expert_offset)}.{key}": param.data[ - expert - ].T - for expert in range(self.num_local_experts) - for key, param in ( - ("lora_A.weight", self.A_T), - ("lora_B.weight", self.B_T), - ) - } - if ps.get_data_parallel_rank() != 0 or torch.all(self.A_T == 0): - return {} + return False + else: # self is a non-MoE layer + if ps.get_data_parallel_rank() != 0: + return False + # Non-MoE layers are replicated across expert-model-parallel ranks. + if ( + ps.get_expert_model_parallel_world_size() > 1 + and ps.get_expert_model_parallel_rank() != 0 + ): + return False + + if getattr(param, "lora_tp_sharded", False): + # this param is fully sharded, all shard ranks participate + return True + + domain = getattr(param, "lora_shard_domain") + # param is replicated, tp rank 0 or etp rank 0 participates + return _get_shard_rank(domain) == 0 + + def _manifest_for_param(self, param: torch.nn.Parameter) -> dict[str, Any]: + domain = getattr(param, "lora_shard_domain") + sharded = bool(getattr(param, "lora_tp_sharded", False)) + shard_axis = getattr(param, "lora_tp_shard_axis", None) return { - f"{self.adapter_model_prefix}.{key}": param.data.T - for key, param in ( - ("lora_A.weight", self.A_T), - ("lora_B.weight", self.B_T), - ) - if getattr(param, "sharded", False) - or ps.get_tensor_model_parallel_rank() == 0 + "domain": domain, + "sharded": sharded, + "shard_axis": shard_axis, + "shard_world_size": _get_shard_world_size(domain) if sharded else 1, + "shard_rank": _get_shard_rank(domain) if sharded else 0, + } + + def _lora_params(self) -> list[tuple[str, torch.nn.Parameter]]: + return [ + ("lora_A.weight", self.A_T), + ("lora_B.weight", self.B_T), + ] + + def _export_items( + self, + ) -> list[tuple[str, torch.nn.Parameter, 
int | None]]: + export_items: list[tuple[str, torch.nn.Parameter, int | None]] = [] + for key, param in self._lora_params(): + if not self._should_export_parameter(param): + continue + if self.num_local_experts > 1: + for expert in range(self.num_local_experts): + full_key = f"{self.adapter_model_prefix.format(expert=expert + self._expert_offset)}.{key}" + export_items.append((full_key, param, expert)) + else: + export_items.append((f"{self.adapter_model_prefix}.{key}", param, None)) + return export_items + + def sharded_lora_manifest(self) -> dict[str, dict[str, Any]]: + return { + key: self._manifest_for_param(param) + for key, param, _expert in self._export_items() } + def sharded_lora_state_dict(self) -> dict[str, torch.Tensor]: + state: dict[str, torch.Tensor] = {} + for key, param, expert in self._export_items(): + state[key] = param.data[expert].T if expert is not None else param.data.T + return state + def forward( self, x: torch.Tensor, tokens_per_expert: list[int] | torch.Tensor | None = None ) -> torch.Tensor: @@ -152,14 +342,13 @@ def forward( bsz = tokens_per_expert if isinstance(bsz, list): bsz = torch.tensor(bsz, dtype=torch.int64, device="cpu") - # If no tokens routed locally, return zeros + # If no tokens routed locally, return zeros. 
if isinstance(bsz, torch.Tensor) and int(torch.count_nonzero(bsz)) == 0: return x.new_zeros((x.shape[0], self.B_T.shape[-1])) tmp = grouped_gemm_util.ops.gmm(x, self.A_T, bsz, trans_b=False) # type: ignore[attr-defined] out = grouped_gemm_util.ops.gmm(tmp, self.B_T, bsz, trans_b=False) # type: ignore[attr-defined] return out * self.scale - else: - return ((x @ self.A_T) @ self.B_T) * self.scale + return ((x @ self.A_T) @ self.B_T) * self.scale class SelfAttentionLinearProjLoRA(torch.nn.Module): @@ -175,6 +364,20 @@ def __init__( self.provider = provider self.linear_proj = linear_proj assert isinstance(linear_proj.weight, torch.Tensor) + a_parallel_spec = LoRAParallelSpec( + shard_domain="tp", + sharded=True, + shard_axis=-2, + grad_sync_domain=TP_DEFAULT_GRAD_SYNC_DOMAIN, + grad_sync_op=GRAD_SYNC_OP_NONE, # only need DP-type reductions + ) + b_parallel_spec = a_parallel_spec.model_copy( + update={ + "sharded": False, + "shard_axis": None, + "grad_sync_op": GRAD_SYNC_OP_AVG, # megatron reduces across TP ranks + } + ) self.lora = LoRA( adapter_model_prefix=adapter_model_prefix, in_features=linear_proj.in_features, @@ -183,22 +386,23 @@ def __init__( alpha=alpha, dtype=linear_proj.weight.dtype, device=linear_proj.weight.device, + a_parallel_spec=a_parallel_spec, + b_parallel_spec=b_parallel_spec, + # Non-expert LoRA params use Megatron's dense DP/CP gradient buckets. 
+ allreduce=True, ) def forward(self, x: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor | None]: base_output, bias_output = self.linear_proj(x) assert isinstance(base_output, torch.Tensor) assert isinstance(bias_output, (torch.Tensor, type(None))) + lora_output = self.lora(x) - if ( - self.provider.sequence_parallel - and self.provider.tensor_model_parallel_size > 1 - ): - tp_rank = ps.get_tensor_model_parallel_rank() - tokens_per_rank = base_output.shape[0] - start = tp_rank * tokens_per_rank - end = start + tokens_per_rank - lora_output = lora_output[start:end] + if self.provider.tensor_model_parallel_size > 1: + if self.provider.sequence_parallel: + lora_output = reduce_scatter_to_sequence_parallel_region(lora_output) + else: + lora_output = reduce_from_tensor_model_parallel_region(lora_output) return base_output + lora_output, bias_output @@ -231,32 +435,64 @@ def __init__( q_out_features_per_rank = q_out_features // tp_world_size kv_out_features_per_rank = kv_out_features // tp_world_size assert isinstance(linear_qkv.weight, torch.Tensor) - self.q_proj_lora = LoRA( + self.q_proj_lora = self._build_qkv_lora( adapter_model_prefix=f"{adapter_model_prefix}.q_proj", - in_features=linear_qkv.in_features, - out_features=q_out_features_per_rank, + linear_qkv=linear_qkv, rank=rank, alpha=alpha, - dtype=linear_qkv.weight.dtype, - device=linear_qkv.weight.device, + out_features=q_out_features_per_rank, ) - self.k_proj_lora = LoRA( + self.k_proj_lora = self._build_qkv_lora( adapter_model_prefix=f"{adapter_model_prefix}.k_proj", - in_features=linear_qkv.in_features, - out_features=kv_out_features_per_rank, + linear_qkv=linear_qkv, rank=rank, alpha=alpha, - dtype=linear_qkv.weight.dtype, - device=linear_qkv.weight.device, + out_features=kv_out_features_per_rank, ) - self.v_proj_lora = LoRA( + self.v_proj_lora = self._build_qkv_lora( adapter_model_prefix=f"{adapter_model_prefix}.v_proj", - in_features=linear_qkv.in_features, + linear_qkv=linear_qkv, + rank=rank, + 
alpha=alpha, out_features=kv_out_features_per_rank, + ) + + @staticmethod + def _build_qkv_lora( + *, + adapter_model_prefix: str, + linear_qkv: TELayerNormColumnParallelLinear, + rank: int, + alpha: float, + out_features: int, + ) -> LoRA: + assert isinstance(linear_qkv.weight, torch.Tensor) + a_parallel_spec = LoRAParallelSpec( + shard_domain="tp", + sharded=False, + shard_axis=None, + grad_sync_domain=TP_DEFAULT_GRAD_SYNC_DOMAIN, + grad_sync_op=GRAD_SYNC_OP_AVG, # megatron reduces across TP ranks + ) + b_parallel_spec = a_parallel_spec.model_copy( + update={ + "sharded": True, + "shard_axis": -1, + "grad_sync_op": GRAD_SYNC_OP_NONE, # only need DP-type reductions + } + ) + return LoRA( + adapter_model_prefix=adapter_model_prefix, + in_features=linear_qkv.in_features, + out_features=out_features, rank=rank, alpha=alpha, dtype=linear_qkv.weight.dtype, device=linear_qkv.weight.device, + a_parallel_spec=a_parallel_spec, + b_parallel_spec=b_parallel_spec, + # Non-expert LoRA params use Megatron's dense DP/CP gradient buckets. 
+ allreduce=True, ) def forward(self, x: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor | None]: @@ -302,19 +538,48 @@ def __init__( super().__init__() assert linear_fc1 is not None self.linear_fc1 = linear_fc1 - assert isinstance(linear_fc1.weight0, torch.Tensor) - self.gate_lora = LoRA( + self.gate_lora = self._build_fc1_lora( adapter_model_prefix=f"{adapter_model_prefix}.{{expert}}.gate_proj", - in_features=linear_fc1.in_features, - out_features=linear_fc1.out_features // 2, + linear_fc1=linear_fc1, rank=rank, alpha=alpha, - dtype=linear_fc1.weight0.dtype, - device=linear_fc1.weight0.device, num_local_experts=num_local_experts, ) - self.up_lora = LoRA( + self.up_lora = self._build_fc1_lora( adapter_model_prefix=f"{adapter_model_prefix}.{{expert}}.up_proj", + linear_fc1=linear_fc1, + rank=rank, + alpha=alpha, + num_local_experts=num_local_experts, + ) + + @staticmethod + def _build_fc1_lora( + *, + adapter_model_prefix: str, + linear_fc1: TEColumnParallelGroupedLinear, + rank: int, + alpha: float, + num_local_experts: int, + ) -> LoRA: + assert isinstance(linear_fc1.weight0, torch.Tensor) + a_parallel_spec = LoRAParallelSpec( + shard_domain="expert_tp", + sharded=False, + shard_axis=None, + grad_sync_domain=EXPERT_TP_GRAD_SYNC_DOMAIN, + grad_sync_op=GRAD_SYNC_OP_AVG, # we handle this with extended finalize_grads + ) + b_parallel_spec = a_parallel_spec.model_copy( + update={ + "sharded": True, + "shard_axis": -1, + "grad_sync_domain": EXPERT_TP_GRAD_SYNC_DOMAIN, + "grad_sync_op": GRAD_SYNC_OP_NONE, # only need DP-type reductions + } + ) + return LoRA( + adapter_model_prefix=adapter_model_prefix, in_features=linear_fc1.in_features, out_features=linear_fc1.out_features // 2, rank=rank, @@ -322,6 +587,10 @@ def __init__( dtype=linear_fc1.weight0.dtype, device=linear_fc1.weight0.device, num_local_experts=num_local_experts, + a_parallel_spec=a_parallel_spec, + b_parallel_spec=b_parallel_spec, + # Expert LoRA params use Megatron's expert-DP gradient buckets. 
+ allreduce=False, ) def forward( @@ -347,6 +616,21 @@ def __init__( assert linear_fc2 is not None assert isinstance(linear_fc2.weight0, torch.Tensor) self.linear_fc2 = linear_fc2 + a_parallel_spec = LoRAParallelSpec( + shard_domain="expert_tp", + sharded=True, + shard_axis=-2, + grad_sync_domain=EXPERT_TP_GRAD_SYNC_DOMAIN, + grad_sync_op=GRAD_SYNC_OP_NONE, # only need DP-type reductions + ) + b_parallel_spec = a_parallel_spec.model_copy( + update={ + "sharded": False, + "shard_axis": None, + "grad_sync_domain": EXPERT_TP_GRAD_SYNC_DOMAIN, + "grad_sync_op": GRAD_SYNC_OP_AVG, # we handle this with extended finalize_grads + } + ) self.lora = LoRA( adapter_model_prefix=f"{adapter_model_prefix}.{{expert}}.down_proj", in_features=linear_fc2.in_features, @@ -356,6 +640,10 @@ def __init__( dtype=linear_fc2.weight0.dtype, device=linear_fc2.weight0.device, num_local_experts=num_local_experts, + a_parallel_spec=a_parallel_spec, + b_parallel_spec=b_parallel_spec, + # Expert LoRA params use Megatron's expert-DP gradient buckets. 
+ allreduce=False, ) def forward( @@ -369,77 +657,68 @@ def forward( def apply_lora_adapters( model: Sequence[torch.nn.Module], provider: GPTModelProvider, -) -> None: - with torch.no_grad(): - for chunk in model: - for module in chunk.modules(): - if isinstance(module, TransformerLayer): - adapter_model_prefix = ( - f"base_model.model.model.layers.{module.layer_number - 1}" - ) - assert isinstance(module.self_attention, SelfAttention) - self_attention_linear_proj = module.self_attention.linear_proj - if not isinstance(self_attention_linear_proj, TERowParallelLinear): - self_attention_linear_proj = ( - self_attention_linear_proj.linear_proj - ) - assert isinstance( - self_attention_linear_proj, TERowParallelLinear - ) - module.self_attention.linear_proj = SelfAttentionLinearProjLoRA( - adapter_model_prefix=f"{adapter_model_prefix}.self_attn.o_proj", - linear_proj=self_attention_linear_proj, - rank=1, - alpha=32, - provider=provider, - ) - self_attention_linear_qkv = module.self_attention.linear_qkv - if not isinstance( - self_attention_linear_qkv, TELayerNormColumnParallelLinear - ): - self_attention_linear_qkv = self_attention_linear_qkv.linear_qkv - assert isinstance( - self_attention_linear_qkv, TELayerNormColumnParallelLinear - ) - module.self_attention.linear_qkv = SelfAttentionLinearQKVLoRA( - adapter_model_prefix=f"{adapter_model_prefix}.self_attn", - linear_qkv=self_attention_linear_qkv, - rank=1, - alpha=32, - provider=provider, - ) - assert isinstance(module.mlp.experts, TEGroupedMLP) - mlp_experts_linear_fc1 = module.mlp.experts.linear_fc1 - if not isinstance( - mlp_experts_linear_fc1, - TEColumnParallelGroupedLinear, # type: ignore - ): - mlp_experts_linear_fc1 = mlp_experts_linear_fc1.linear_fc1 - assert isinstance( - mlp_experts_linear_fc1, - TEColumnParallelGroupedLinear, # type: ignore - ) - module.mlp.experts.linear_fc1 = MLPExpertsLinearFC1LoRA( - adapter_model_prefix=f"{adapter_model_prefix}.mlp.experts", - linear_fc1=mlp_experts_linear_fc1, - 
rank=1, - alpha=32, - num_local_experts=module.mlp.experts.num_local_experts, - ) - mlp_experts_linear_fc2 = module.mlp.experts.linear_fc2 - if not isinstance( - mlp_experts_linear_fc2, - TERowParallelGroupedLinear, # type: ignore - ): - mlp_experts_linear_fc2 = mlp_experts_linear_fc2.linear_fc2 - assert isinstance( - mlp_experts_linear_fc2, - TERowParallelGroupedLinear, # type: ignore - ) - module.mlp.experts.linear_fc2 = MLPExpertsLinearFC2LoRA( - adapter_model_prefix=f"{adapter_model_prefix}.mlp.experts", - linear_fc2=mlp_experts_linear_fc2, - rank=1, - alpha=32, - num_local_experts=module.mlp.experts.num_local_experts, - ) +) -> list[torch.nn.Module]: + def _unwrap_attr(value: Any, attr_name: str, expected_type: type[Any]) -> Any: + if isinstance(value, expected_type): + return value + unwrapped = getattr(value, attr_name) + assert isinstance(unwrapped, expected_type) + return unwrapped + + for chunk in model: + for module in chunk.modules(): + if isinstance(module, TransformerLayer): + adapter_model_prefix = ( + f"base_model.model.model.layers.{module.layer_number - 1}" + ) + assert isinstance(module.self_attention, SelfAttention) + self_attention_linear_proj = _unwrap_attr( + module.self_attention.linear_proj, + "linear_proj", + TERowParallelLinear, + ) + module.self_attention.linear_proj = SelfAttentionLinearProjLoRA( + adapter_model_prefix=f"{adapter_model_prefix}.self_attn.o_proj", + linear_proj=self_attention_linear_proj, + rank=1, + alpha=32, + provider=provider, + ) + self_attention_linear_qkv = _unwrap_attr( + module.self_attention.linear_qkv, + "linear_qkv", + TELayerNormColumnParallelLinear, + ) + module.self_attention.linear_qkv = SelfAttentionLinearQKVLoRA( + adapter_model_prefix=f"{adapter_model_prefix}.self_attn", + linear_qkv=self_attention_linear_qkv, + rank=1, + alpha=32, + provider=provider, + ) + assert isinstance(module.mlp.experts, TEGroupedMLP) + mlp_experts_linear_fc1 = _unwrap_attr( + module.mlp.experts.linear_fc1, + "linear_fc1", + 
TEColumnParallelGroupedLinear, # type: ignore[arg-type] + ) + module.mlp.experts.linear_fc1 = MLPExpertsLinearFC1LoRA( + adapter_model_prefix=f"{adapter_model_prefix}.mlp.experts", + linear_fc1=mlp_experts_linear_fc1, + rank=1, + alpha=32, + num_local_experts=module.mlp.experts.num_local_experts, + ) + mlp_experts_linear_fc2 = _unwrap_attr( + module.mlp.experts.linear_fc2, + "linear_fc2", + TERowParallelGroupedLinear, # type: ignore[arg-type] + ) + module.mlp.experts.linear_fc2 = MLPExpertsLinearFC2LoRA( + adapter_model_prefix=f"{adapter_model_prefix}.mlp.experts", + linear_fc2=mlp_experts_linear_fc2, + rank=1, + alpha=32, + num_local_experts=module.mlp.experts.num_local_experts, + ) + return list(model) diff --git a/src/art/megatron/train.py b/src/art/megatron/train.py index 480a03be..abc2ef7b 100644 --- a/src/art/megatron/train.py +++ b/src/art/megatron/train.py @@ -20,20 +20,21 @@ def _set_cache_dir(env_var: str, default_path: str) -> None: import math import shutil import time -from typing import Any, cast +from typing import Any, Callable, cast from megatron.core import parallel_state as ps from megatron.core.distributed import DistributedDataParallelConfig from megatron.core.models.gpt.gpt_model import GPTModel from megatron.core.optimizer import OptimizerConfig, get_megatron_optimizer from megatron.core.transformer.module import MegatronModule -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict from safetensors.torch import load_file, save_file import torch from torch._inductor.runtime.cache_dir_utils import cache_dir as inductor_cache_dir from art import dev, types from art.loss import loss_fn, shift_tensor +from art.megatron.finalize_grads import finalize_model_grads_extended from art.megatron.flex_attention import create_shared_prefix_attention_state from art.megatron.lora import apply_lora_adapters from art.megatron.offload import OffloadState, offload_to_cpu, reload_to_gpu @@ -44,9 +45,41 @@ def _set_cache_dir(env_var: str, 
DEFAULT_MODEL_IDENTIFIER = "Qwen/Qwen3-30B-A3B-Instruct-2507"


class TrainingJob(BaseModel):
    """One queued training job, deserialized from a JSON file on disk."""

    lora_path: str
    optimizer_state_path: str
    disk_packed_tensors: DiskPackedTensors
    config: types.TrainConfig
    experimental_config: dev.TrainConfig


class TrainingRuntime(BaseModel):
    """Long-lived per-process training state: provider, model chunks, optimizer."""

    model_config = ConfigDict(arbitrary_types_allowed=True)

    provider: Any
    model: list[MegatronModule]
    optimizer: Any
    rank: int
    world_size: int


class TrainStepResult(BaseModel):
    """Outputs of a single forward/backward/optimizer step, for logging."""

    model_config = ConfigDict(arbitrary_types_allowed=True)

    reduced_loss: torch.Tensor
    probs_corr: float
    new_logprobs: torch.Tensor
    update_successful: bool
    grad_norm: float
    num_zeros_in_grad: int | None


def print0(rank: int, *values: Any) -> None:
    """Print only on global rank 0; a no-op on every other rank."""
    if rank == 0:
        print(*values)


def _install_gpt_preprocess_hook(model_chunks: list[MegatronModule]) -> None:
    """Wrap each GPTModel's ``_preprocess`` to gather rotary tables by position id.

    The hook marks the embedding output as requiring grad and replaces the
    [S, B, 1, D] rotary table with one gathered per-token from ``position_ids``.
    """
    for chunk in model_chunks:
        gpt: Any = chunk
        # Unwrap DDP / precision wrappers until the underlying GPTModel appears.
        while not isinstance(gpt, GPTModel) and hasattr(gpt, "module"):
            gpt = gpt.module
        if not isinstance(gpt, GPTModel):
            continue

        original_preprocess = gpt._preprocess

        # Bind the wrapped callable as a default arg so each chunk's hook
        # closes over its own _preprocess (avoids late-binding bugs).
        def preprocess_hook(*args, _preprocess=original_preprocess, **kwargs):
            preproc_output = list(_preprocess(*args, **kwargs))
            preproc_output[0].requires_grad = True  # type: ignore[index]
            table = preproc_output[1]  # [S, B, 1, D]  # type: ignore[index]
            embedding_dim = table.size(-1)
            table_flat = table.view(table.size(0), embedding_dim)
            position_ids = kwargs["position_ids"]  # [B, S]
            batch_size, sequence_length = position_ids.shape
            gathered = table_flat.index_select(0, position_ids.reshape(-1))
            gathered = (
                gathered.view(batch_size, sequence_length, embedding_dim)
                .permute(1, 0, 2)
                .contiguous()
            )
            preproc_output[1] = gathered.unsqueeze(2)  # [S, B, 1, D]
            return tuple(preproc_output)

        gpt._preprocess = preprocess_hook  # type: ignore[attr-defined]


def _default_optimizer_config() -> OptimizerConfig:
    """Optimizer hyperparameters used when the caller supplies none."""
    return OptimizerConfig(
        bf16=True,
        lr=5e-6,
        adam_beta1=0.9,
        adam_beta2=0.99,
        clip_grad=0.1,
        weight_decay=0.1,
    )
def build_training_runtime(
    *,
    model_identifier: str | None = None,
    provider_configure: Callable[[Any], None] | None = None,
    optimizer_config: OptimizerConfig | None = None,
    print_env: bool = True,
    print_optimizer_stats: bool = True,
) -> TrainingRuntime:
    """Construct the provider, LoRA-wrapped distributed model, and optimizer.

    Pre-wrap hooks freeze the base model and attach LoRA adapters before
    Megatron wraps the chunks in DDP. Requires torch.distributed to already
    be initialized; raises RuntimeError otherwise.
    """
    provider = get_provider(
        model_identifier or os.environ.get("MODEL_IDENTIFIER", DEFAULT_MODEL_IDENTIFIER)
    )
    if provider_configure is not None:
        provider_configure(provider)
    provider.register_pre_wrap_hook(freeze_model)
    provider.register_pre_wrap_hook(
        lambda chunks: apply_lora_adapters(chunks, provider)
    )

    model = cast(
        list[MegatronModule],
        provider.provide_distributed_model(
            ddp_config=DistributedDataParallelConfig(),
            data_parallel_random_init=False,
        ),
    )

    if not torch.distributed.is_initialized():
        raise RuntimeError(
            "torch.distributed must be initialized before building runtime"
        )
    rank = torch.distributed.get_rank()
    world_size = torch.distributed.get_world_size()

    if rank == 0 and print_env:
        print("TORCHINDUCTOR_CACHE_DIR:", os.environ["TORCHINDUCTOR_CACHE_DIR"])
        print("Resolved inductor cache_dir():", inductor_cache_dir())
        print("TRITON_CACHE_DIR:", os.environ["TRITON_CACHE_DIR"])

    _install_gpt_preprocess_hook(model)

    optimizer = get_megatron_optimizer(
        config=optimizer_config or _default_optimizer_config(),
        model_chunks=model,
    )

    if rank == 0 and print_optimizer_stats:
        # Report how many (LoRA) parameters the optimizer actually trains.
        num_params = sum(
            p.numel()
            for group in optimizer.param_groups
            if not group["is_decoupled_lr"]
            for p in group["params"]
        )
        print(f"Number of parameters in optimizer: {num_params:,}")
        total_params = sum(p.numel() for module in model for p in module.parameters())
        percent = (num_params / total_params) * 100 if total_params > 0 else 0
        print(f"Optimizer parameters as percent of total: {percent:0.2f}%")

    return TrainingRuntime(
        provider=provider,
        model=model,
        optimizer=optimizer,
        rank=rank,
        world_size=world_size,
    )


def iter_modules(model_chunks: list[MegatronModule]) -> Any:
    """Yield every submodule of every model chunk."""
    for chunk in model_chunks:
        yield from chunk.modules()


def load_adapter_into_model(
    model_chunks: list[MegatronModule],
    adapter_model: dict[str, torch.Tensor],
) -> None:
    """Load LoRA weights into every module exposing ``load_lora``."""
    with torch.no_grad():
        for module in iter_modules(model_chunks):
            loader = getattr(module, "load_lora", None)
            if loader is not None:
                loader(adapter_model)  # type: ignore[misc]


def collect_sharded_lora_state(
    model_chunks: list[MegatronModule],
    adapter_model: dict[str, torch.Tensor],
) -> tuple[dict[str, torch.Tensor], dict[str, dict[str, Any]]]:
    """Gather this rank's LoRA shard tensors and their manifests.

    Each tensor is cast to the dtype of the matching key in ``adapter_model``
    when present, so a merged adapter keeps its original precision.
    """
    state: dict[str, torch.Tensor] = {}
    manifest: dict[str, dict[str, Any]] = {}
    for module in iter_modules(model_chunks):
        if hasattr(module, "sharded_lora_state_dict"):
            shard: dict[str, torch.Tensor] = (
                module.sharded_lora_state_dict()  # type: ignore[attr-defined]
            )
            for key, tensor in shard.items():
                dtype = (
                    adapter_model[key].dtype if key in adapter_model else tensor.dtype
                )
                state[key] = tensor.to(dtype)
        if hasattr(module, "sharded_lora_manifest"):
            manifest.update(module.sharded_lora_manifest())  # type: ignore[attr-defined]
    return state, manifest


def select_indexed_inputs(packed_tensors: PackedTensors, index: int) -> PackedTensors:
    """Slice one sequence (kept as a batch of size 1) out of the pack."""
    tensor_slices = {
        key: value[index : index + 1]
        for key, value in packed_tensors.items()
        if isinstance(value, torch.Tensor)
    }
    return PackedTensors(  # type: ignore[call-arg]
        **tensor_slices,
        pixel_values=[None],
        image_grid_thw=[None],
    )


def _move_inputs_to_device(inputs: PackedTensors, device: torch.device) -> None:
    """Move every tensor entry of ``inputs`` onto ``device``, in place."""
    for key, value in inputs.items():
        if isinstance(value, torch.Tensor):
            inputs[key] = value.to(device)  # type: ignore[index]


def _finalize_grads(model_chunks: list[MegatronModule]) -> None:
    """Run the extended finalize pass (Megatron DP/CP plus expert-TP sync)."""
    finalize_model_grads_extended(cast(list[torch.nn.Module], model_chunks))


def _optimizer_step(
    optimizer: Any,
    learning_rate: float,
) -> tuple[bool, float, int | None]:
    """Set the LR on every param group, step, then zero grads.

    Returns Megatron's (update_successful, grad_norm, num_zeros_in_grad).
    """
    for param_group in optimizer.param_groups:
        param_group["lr"] = learning_rate
    update_successful, grad_norm, num_zeros_in_grad = cast(
        tuple[bool, float, int | None], optimizer.step()
    )
    optimizer.zero_grad()
    return update_successful, grad_norm, num_zeros_in_grad


def _reduce_loss(loss: torch.Tensor) -> torch.Tensor:
    """Average the detached loss across all ranks for logging."""
    reduced = loss.detach().clone()
    torch.distributed.all_reduce(reduced, op=torch.distributed.ReduceOp.AVG)
    return reduced


def run_training_step(
    *,
    model_chunks: list[MegatronModule],
    optimizer: Any,
    learning_rate: float,
    inputs: PackedTensors,
    config: types.TrainConfig,
    experimental_config: dev.TrainConfig,
    ref_logprobs: torch.Tensor | None = None,
) -> TrainStepResult:
    """One forward/backward/optimizer pass over a single packed sequence."""
    device = next(model_chunks[0].parameters()).device
    _move_inputs_to_device(inputs, device)

    # Must be created after group_ids/parent_ids reach the target device.
    attention_state = create_shared_prefix_attention_state(
        group_ids=inputs["group_ids"],
        parent_ids=inputs["parent_ids"],
    )
    # Megatron full-layer recompute saves positional tensor args, so keep a tiny
    # placeholder tensor here and pass flex BlockMask state via attention_bias.
    attention_mask = torch.zeros((1, 1, 1, 1), dtype=torch.bool, device=device)

    for chunk in model_chunks:
        cast(Any, chunk).zero_grad_buffer()

    new_logprobs: torch.Tensor = -model_chunks[0](
        input_ids=inputs["tokens"],
        position_ids=inputs["input_pos"],
        attention_mask=attention_mask,
        labels=shift_tensor(inputs["tokens"], 0),
        extra_block_kwargs={"attention_bias": attention_state},
    )

    loss_info = loss_fn(
        cast(Any, inputs),
        new_logprobs,
        ref_logprobs,
        None,
        experimental_config,
    )
    loss = loss_info.mean_policy_loss + config.beta * loss_info.mean_kl
    loss.backward()
    _finalize_grads(model_chunks)
    update_successful, grad_norm, num_zeros_in_grad = _optimizer_step(
        optimizer,
        learning_rate,
    )
    reduced_loss = _reduce_loss(loss)

    return TrainStepResult(
        reduced_loss=reduced_loss,
        probs_corr=float(loss_info.probs_corr.item()),
        new_logprobs=new_logprobs,
        update_successful=update_successful,
        grad_norm=grad_norm,
        num_zeros_in_grad=num_zeros_in_grad,
    )


def _run_service_loop(runtime: TrainingRuntime) -> None:
    """Poll the jobs directory forever, running each queued training job.

    Model/optimizer live on CPU between jobs and are reloaded to GPU per job.
    """
    offload_state = OffloadState()
    offload_to_cpu(runtime.model, runtime.optimizer, runtime.rank, offload_state)

    while True:
        torch.distributed.barrier()
        jobs_dir = "/tmp/megatron_training_jobs"
        os.makedirs(jobs_dir, exist_ok=True)
        job_names = sorted(
            job_name for job_name in os.listdir(jobs_dir) if job_name.endswith(".json")
        )
        if not job_names:
            time.sleep(1)
            continue

        # Wait while the vLLM side is waking up and holds the GPU.
        wake_lock_path = "/tmp/megatron_vllm_waking"
        while os.path.exists(wake_lock_path):
            time.sleep(0.2)

        reload_to_gpu(runtime.model, runtime.optimizer, runtime.rank, offload_state)

        job_path = os.path.join(jobs_dir, job_names[0])
        with open(job_path, "rb") as handle:
            job = TrainingJob.model_validate_json(handle.read())
        config = job.config
        experimental_config = job.experimental_config

        print0(runtime.rank, "Loaded job from", job_path)
        print0(runtime.rank, "Job:", job)

        adapter_model_path = f"{job.lora_path}/adapter_model.safetensors"
        if not os.path.exists(adapter_model_path):
            raise FileNotFoundError(f"No adapter model found at {adapter_model_path}")
        print0(runtime.rank, "Loading adapter model from", adapter_model_path)
        adapter_model = load_file(adapter_model_path)
        load_adapter_into_model(runtime.model, adapter_model)

        optimizer_shard_path = os.path.join(
            job.optimizer_state_path,
            f"{runtime.rank + 1:02d}-of-{runtime.world_size:02d}.pt",
        )
        if os.path.exists(optimizer_shard_path):
            print("Loading optimizer state from", optimizer_shard_path)
            runtime.optimizer.load_state_dict(torch.load(optimizer_shard_path))
        else:
            # No checkpoint for this run; reset state to avoid cross-run leakage.
            print(
                "No optimizer state found at",
                optimizer_shard_path,
                "- resetting optimizer for new run",
            )
            runtime.optimizer.optimizer.state.clear()
            runtime.optimizer.reload_model_params()

        print0(
            runtime.rank, "Loading packed tensors from", job.disk_packed_tensors["dir"]
        )
        packed_tensors = packed_tensors_from_dir(**job.disk_packed_tensors)
        num_sequences = job.disk_packed_tensors["num_sequences"]

        dp_rank = ps.get_data_parallel_rank()
        dp_world_size = ps.get_data_parallel_world_size()
        num_indices = math.ceil(num_sequences / dp_world_size)
        indices = list(range(dp_rank, num_sequences, dp_world_size))
        if not indices:
            indices = [dp_rank % num_sequences]
        # Pad by repeating so every DP rank runs the same number of steps.
        repeat = math.ceil(num_indices / len(indices))
        indices = (indices * repeat)[:num_indices]

        for index in indices:
            inputs = select_indexed_inputs(packed_tensors, index)
            step_result = run_training_step(
                model_chunks=runtime.model,
                optimizer=runtime.optimizer,
                learning_rate=config.learning_rate,
                inputs=inputs,
                config=config,
                experimental_config=experimental_config,
                ref_logprobs=None,
            )
            print0(
                runtime.rank,
                "Correlation between old and new probabilities:",
                step_result.probs_corr,
            )

            if runtime.rank == 0:
                with open(
                    "/tmp/megatron_training_log.jsonl", "a+", encoding="utf-8"
                ) as log_file:
                    log_msg = json.dumps(
                        {
                            "loss": step_result.reduced_loss.item(),
                            "grad_norm": step_result.grad_norm,
                            "probs_corr": step_result.probs_corr,
                        }
                    )
                    print("Logging", log_msg)
                    log_file.write(log_msg + "\n")

        sharded_state_dict, sharded_state_manifest = collect_sharded_lora_state(
            runtime.model,
            adapter_model,
        )
        shard_path = os.path.join(
            job.lora_path,
            f"adapter_model-{runtime.rank + 1:02d}-of-{runtime.world_size:02d}.safetensors",
        )
        manifest_path = os.path.join(
            job.lora_path,
            f"adapter_manifest-{runtime.rank + 1:02d}-of-{runtime.world_size:02d}.json",
        )
        print("Saving adapter shard to", shard_path)
        save_file(sharded_state_dict, shard_path)
        print("Saving adapter shard manifest to", manifest_path)
        with open(manifest_path, "w", encoding="utf-8") as manifest_file:
            json.dump(sharded_state_manifest, manifest_file, sort_keys=True)

        print("Saving optimizer shard to", optimizer_shard_path)
        os.makedirs(job.optimizer_state_path, exist_ok=True)
        torch.save(runtime.optimizer.state_dict(), optimizer_shard_path)

        offload_to_cpu(runtime.model, runtime.optimizer, runtime.rank, offload_state)

        # Release mmap-backed packed tensor references before rank-0 cleanup.
        del packed_tensors
        del adapter_model
        if "inputs" in locals():
            del inputs
        gc.collect()
        torch.cuda.empty_cache()

        # Ensure all ranks have finished saving before signaling completion.
        torch.distributed.barrier()
        if runtime.rank == 0:
            os.remove(job_path)
            with open(
                "/tmp/megatron_training_log.jsonl", "a+", encoding="utf-8"
            ) as log_file:
                log_file.write("all done\n")
            shutil.rmtree(job.disk_packed_tensors["dir"])
def main() -> None:
    """Entry point: build the distributed runtime, then serve jobs forever."""
    runtime = build_training_runtime(
        model_identifier=os.environ.get("MODEL_IDENTIFIER", DEFAULT_MODEL_IDENTIFIER)
    )
    _run_service_loop(runtime)


if __name__ == "__main__":
    main()
in file.keys(): - tensor = file.get_tensor(key) - sharded_tensors.setdefault(key, []).append(tensor) + if set(shard_files_by_suffix) != set(manifest_files_by_suffix): + raise RuntimeError( + "Shard/manifest coverage mismatch: " + f"shards={sorted(shard_files_by_suffix)}, " + f"manifests={sorted(manifest_files_by_suffix)}" + ) - adapter_model: dict[str, torch.Tensor] = {} - if adapter_model_path.exists(): - adapter_model = load_file(adapter_model_path) + entries_by_key: dict[str, list[tuple[dict[str, Any], torch.Tensor]]] = {} + for suffix in sorted(shard_files_by_suffix): + shard_path = shard_files_by_suffix[suffix] + manifest_path = manifest_files_by_suffix[suffix] + with open(manifest_path, "r", encoding="utf-8") as manifest_file: + shard_manifest: dict[str, dict[str, Any]] = json.load(manifest_file) - for key, tensors in sharded_tensors.items(): - tensor = torch.cat(tensors, dim=1 if "lora_A" in key else 0) + with safe_open(shard_path, framework="pt") as file: + shard_tensors = {key: file.get_tensor(key) for key in file.keys()} + + if set(shard_tensors) != set(shard_manifest): + raise RuntimeError( + f"Tensor/manifest key mismatch for shard suffix={suffix}: " + f"tensor_keys={sorted(shard_tensors)}, " + f"manifest_keys={sorted(shard_manifest)}" + ) + + for key, tensor in shard_tensors.items(): + entries_by_key.setdefault(key, []).append((shard_manifest[key], tensor)) + + adapter_model: dict[str, torch.Tensor] = {} + for key, key_entries in entries_by_key.items(): + first_manifest = key_entries[0][0] + sharded = bool(first_manifest["sharded"]) + shard_world_size = int(first_manifest["shard_world_size"]) + + for manifest_entry, _tensor in key_entries: + if bool(manifest_entry["sharded"]) != sharded: + raise RuntimeError(f"Inconsistent sharded flag for key={key}") + if int(manifest_entry["shard_world_size"]) != shard_world_size: + raise RuntimeError(f"Inconsistent shard world size for key={key}") + + if not sharded: + if len(key_entries) != 1: + raise RuntimeError( 
+ f"Replicated key={key} expected 1 shard, got {len(key_entries)}" + ) + tensor = key_entries[0][1] + else: + shard_rank_to_tensor: dict[int, torch.Tensor] = {} + for manifest_entry, shard_tensor in key_entries: + shard_rank = int(manifest_entry["shard_rank"]) + if shard_rank in shard_rank_to_tensor: + raise RuntimeError( + f"Duplicate shard_rank={shard_rank} for key={key}" + ) + shard_rank_to_tensor[shard_rank] = shard_tensor + + expected_shard_ranks = set(range(shard_world_size)) + if set(shard_rank_to_tensor.keys()) != expected_shard_ranks: + raise RuntimeError( + f"Shard rank coverage mismatch for key={key}: " + f"expected {sorted(expected_shard_ranks)}, got {sorted(shard_rank_to_tensor.keys())}" + ) + + ordered_shards = [ + shard_rank_to_tensor[i] for i in range(shard_world_size) + ] + concat_dim = 1 if "lora_A" in key else 0 + tensor = torch.cat(ordered_shards, dim=concat_dim) adapter_model[key] = tensor + adapter_model_path = base_dir / "adapter_model.safetensors" save_file(adapter_model, adapter_model_path) for filename in shard_filenames: filename.unlink() + for filename in manifest_filenames: + filename.unlink() @cached_property def llm(self) -> asyncio.Task[AsyncLLM]: From 4d5c3454ff2e7d3e40436628fb4e0cc51dc7af98 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Tue, 10 Mar 2026 05:13:25 +0000 Subject: [PATCH 03/28] tests: add megatron lora oracle correctness harness --- tests/integration/megatron_oracle_harness.py | 1189 +++++++++++++++++ .../test_megatron_lora_oracle_correctness.py | 100 ++ 2 files changed, 1289 insertions(+) create mode 100644 tests/integration/megatron_oracle_harness.py create mode 100644 tests/integration/test_megatron_lora_oracle_correctness.py diff --git a/tests/integration/megatron_oracle_harness.py b/tests/integration/megatron_oracle_harness.py new file mode 100644 index 00000000..0d61b41f --- /dev/null +++ b/tests/integration/megatron_oracle_harness.py @@ -0,0 +1,1189 @@ +from __future__ import annotations + +import argparse +from 
REPO_ROOT = Path(__file__).resolve().parents[2]
ARTIFACT_ROOT = Path(REPO_ROOT / ".local/megatron_lora_oracles")

# Environment-variable names controlling the oracle harness.
REGENERATE_ENV = "ART_REGENERATE_MEGATRON_ORACLE"
BASE_MODEL_ENV = "ART_MEGATRON_ORACLE_BASE_MODEL"
DP_SUPPORT_ENV = "ART_MEGATRON_ORACLE_ENABLE_DP_PHASE_B"
SENSITIVITY_MUTATION_ENV = "ART_MEGATRON_ORACLE_MUTATION"

SensitivityMutation = Literal["drop_finalize"]

# Tensor files every packed-tensor directory must contain.
REQUIRED_PACKED_TENSOR_FILES = (
    "tokens.pt",
    "group_ids.pt",
    "parent_ids.pt",
    "input_pos.pt",
    "assistant_mask.pt",
    "logprobs.pt",
    "advantages.pt",
    "weights.pt",
)


class Topology(BaseModel):
    """One parallelism layout (tensor / expert / expert-tensor / data / seq)."""

    model_config = ConfigDict(frozen=True)

    tp: int
    ep: int
    etp: int = 1
    dp: int = 1
    sp: int = 0
    phase: Literal["A", "B"] = "A"

    def slug(self) -> str:
        """Filesystem-safe identifier for this layout."""
        return f"tp{self.tp}_ep{self.ep}_etp{self.etp}_dp{self.dp}_sp{self.sp}"

    def world_size(self) -> int:
        """Total ranks needed to run this layout."""
        return self.tp * self.ep * self.etp * self.dp


class PackedTensorConfig(BaseModel):
    """Shape parameters for the synthetic packed-tensor batch."""

    num_sequences: int = 8
    sequence_length: int = 256
    prefill_tokens: int = 64
    decode_tokens: int = 64
    vocab_high: int = 8192


class LoraConfig(BaseModel):
    """LoRA hyperparameters for the oracle case."""

    rank: int = 1
    alpha: int = 32
    target_modules: list[str] = Field(
        default_factory=lambda: [
            "q_proj",
            "k_proj",
            "v_proj",
            "o_proj",
            "gate_proj",
            "up_proj",
            "down_proj",
        ]
    )


class ToleranceProfile(BaseModel):
    """Absolute/relative tolerances per compared metric."""

    outputs_abs: float = 1e-2
    outputs_rel: float = 1e-2
    losses_abs: float = 1e-4
    losses_rel: float = 1e-4
    grads_abs: float = 1e-2
    grads_rel: float = 1e-2
    deltas_abs: float = 1e-2
    deltas_rel: float = 1e-2


class OracleCaseConfig(BaseModel):
    """Full specification of one oracle correctness case."""

    base_model: str
    seed: int = 20260305
    num_steps: int = 3
    learning_rate: float = 5e-6
    beta: float = 0.0
    packed_tensors: PackedTensorConfig = Field(default_factory=PackedTensorConfig)
    lora: LoraConfig = Field(default_factory=LoraConfig)
    tolerances: ToleranceProfile = Field(default_factory=ToleranceProfile)


class DiskPackedTensorsSpec(BaseModel):
    """Descriptor for a packed-tensor directory on disk."""

    dir: str
    num_sequences: int
    sequence_length: int
    pixel_values: tuple[int, list[int]] | None = None
    image_grid_thw: tuple[int, list[int]] | None = None


class CaseArtifacts(BaseModel):
    """Paths to the shared, topology-independent artifacts of a case."""

    case_id: str
    case_dir: str
    packed_tensors: DiskPackedTensorsSpec
    shared_init_adapter_path: str


class WorkerRunRequest(BaseModel):
    """Arguments handed to a single worker subprocess."""

    case_id: str
    case_config: OracleCaseConfig
    topology: Topology
    topology_dir: str
    packed_tensors: DiskPackedTensorsSpec
    shared_init_adapter_path: str
    allow_create_shared_init: bool = False
    mutation: SensitivityMutation | None = None


class StepTrace(BaseModel):
    """Per-step record: scalar metrics plus paths to dumped tensors."""

    step_index: int
    loss: float
    probs_corr: float
    output_file: str
    grads_file: str
    deltas_file: str
    lora_file: str


class RunManifest(BaseModel):
    """Summary of one topology run, written next to its step traces."""

    case_id: str
    base_model: str
    topology: str
    world_size: int
    seed: int
    num_steps: int
    packed_tensors: DiskPackedTensorsSpec
    tolerances: ToleranceProfile
    steps: list[StepTrace]


class ComparisonFailure(BaseModel):
    """One tolerance violation found when comparing a run to the oracle."""

    case_id: str
    topology: str
    oracle_topology: str
    metric: Literal["outputs", "losses", "grads", "lora_deltas"]
    step_index: int
    key: str
    max_abs_error: float
    max_rel_error: float
    abs_tolerance: float
    rel_tolerance: float
    message: str


PHASE_A_TOPOLOGIES = [
    Topology(tp=1, ep=1, etp=1, dp=1, sp=0, phase="A"),
    Topology(tp=2, ep=1, etp=1, dp=1, sp=1, phase="A"),
    Topology(tp=1, ep=2, etp=1, dp=1, sp=0, phase="A"),
    Topology(tp=2, ep=2, etp=1, dp=1, sp=1, phase="A"),
]
PHASE_B_TOPOLOGIES = [
    Topology(tp=1, ep=1, etp=1, dp=2, sp=0, phase="B"),
    Topology(tp=2, ep=1, etp=1, dp=2, sp=1, phase="B"),
]
ORACLE_TOPOLOGY = PHASE_A_TOPOLOGIES[0]
SENSITIVITY_TOPOLOGY = PHASE_A_TOPOLOGIES[1]
PHASE_A_TOPOLOGIES[1] + + +def _truthy(value: str | None) -> bool: + if value is None: + return False + return value.strip().lower() in {"1", "true", "yes", "on"} + + +def sensitivity_mutation() -> SensitivityMutation | None: + raw = os.environ.get(SENSITIVITY_MUTATION_ENV) + if raw is None or raw.strip() == "": + return None + normalized = raw.strip().lower() + if normalized in {"1", "true", "yes", "on"}: + return "drop_finalize" + if normalized == "drop_finalize": + return "drop_finalize" + raise ValueError( + f"Unsupported {SENSITIVITY_MUTATION_ENV} value '{raw}'. " + "Supported values: drop_finalize, 1/true/yes/on." + ) + + +def sensitivity_enabled() -> bool: + return sensitivity_mutation() is not None + + +def phase_b_dp_enabled() -> bool: + return _truthy(os.environ.get(DP_SUPPORT_ENV)) + + +def regenerate_requested() -> bool: + return _truthy(os.environ.get(REGENERATE_ENV)) + + +def default_case_config() -> OracleCaseConfig: + def _env_float(name: str, default: str) -> float: + return float(os.environ.get(name, default)) + + tolerances = ToleranceProfile( + outputs_abs=_env_float("ART_MEGATRON_ORACLE_OUTPUTS_ABS_TOL", "1e-2"), + outputs_rel=_env_float("ART_MEGATRON_ORACLE_OUTPUTS_REL_TOL", "1e-2"), + losses_abs=_env_float("ART_MEGATRON_ORACLE_LOSSES_ABS_TOL", "1e-4"), + losses_rel=_env_float("ART_MEGATRON_ORACLE_LOSSES_REL_TOL", "1e-4"), + grads_abs=_env_float("ART_MEGATRON_ORACLE_GRADS_ABS_TOL", "1e-2"), + grads_rel=_env_float("ART_MEGATRON_ORACLE_GRADS_REL_TOL", "1e-2"), + deltas_abs=_env_float("ART_MEGATRON_ORACLE_DELTAS_ABS_TOL", "1e-2"), + deltas_rel=_env_float("ART_MEGATRON_ORACLE_DELTAS_REL_TOL", "1e-2"), + ) + return OracleCaseConfig( + base_model=os.environ.get( + BASE_MODEL_ENV, + "Qwen/Qwen3-30B-A3B-Instruct-2507", + ), + seed=int(os.environ.get("ART_MEGATRON_ORACLE_SEED", "20260305")), + num_steps=int(os.environ.get("ART_MEGATRON_ORACLE_NUM_STEPS", "3")), + learning_rate=float(os.environ.get("ART_MEGATRON_ORACLE_LR", "5e-6")), + 
beta=float(os.environ.get("ART_MEGATRON_ORACLE_BETA", "0.0")), + tolerances=tolerances, + ) + + +def available_gpu_count() -> int: + import torch + + if not torch.cuda.is_available(): + return 0 + return int(torch.cuda.device_count()) + + +def stable_case_id(case_config: OracleCaseConfig) -> str: + payload = case_config.model_dump(mode="json") + encoded = json.dumps(payload, sort_keys=True, separators=(",", ":")) + digest = hashlib.sha256(encoded.encode("utf-8")).hexdigest()[:16] + model_tag = ( + case_config.base_model.replace("/", "_") + .replace("-", "_") + .replace(".", "_") + .lower() + ) + return f"{model_tag}_{digest}" + + +def _write_json(path: Path, payload: Any) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + with path.open("w", encoding="utf-8") as handle: + json.dump(payload, handle, indent=2, sort_keys=True) + + +def _read_json(path: Path) -> dict[str, Any]: + with path.open("r", encoding="utf-8") as handle: + return json.load(handle) + + +def _build_packed_tensors( + config: PackedTensorConfig, + seed: int, +) -> dict[str, Any]: + import torch + + if config.num_sequences <= 1: + raise ValueError("num_sequences must be greater than 1") + shape = (config.num_sequences, config.sequence_length) + generator = torch.Generator().manual_seed(seed) + tokens = torch.randint( + low=10, + high=config.vocab_high, + size=shape, + dtype=torch.long, + generator=generator, + ) + group_ids = torch.zeros(shape, dtype=torch.long) + parent_ids = torch.full(shape, -1, dtype=torch.long) + input_pos = ( + torch.arange(config.sequence_length, dtype=torch.long) + .unsqueeze(0) + .expand(config.num_sequences, -1) + .clone() + ) + prefix_length = max(1, min(config.sequence_length - 1, config.prefill_tokens)) + decode_span = max(1, config.decode_tokens) + cursor = prefix_length + branch = 1 + while cursor < config.sequence_length: + end = min(config.sequence_length, cursor + decode_span) + group_ids[:, cursor:end] = branch + parent_ids[:, cursor:end] = 0 + cursor = 
end + branch += 1 + assistant_mask = torch.zeros(shape, dtype=torch.bool) + assistant_mask[:, prefix_length:] = True + logprobs = ( + torch.randn( + shape, + generator=generator, + dtype=torch.float32, + ) + * 0.25 + - 1.75 + ) + advantages = ( + torch.randn( + shape, + generator=generator, + dtype=torch.float32, + ) + * 0.1 + + 1.0 + ) + weights = torch.ones(shape, dtype=torch.float32) + return { + "tokens": tokens, + "group_ids": group_ids, + "parent_ids": parent_ids, + "input_pos": input_pos, + "assistant_mask": assistant_mask, + "logprobs": logprobs, + "advantages": advantages, + "weights": weights, + "pixel_values": [None] * config.num_sequences, + "image_grid_thw": [None] * config.num_sequences, + } + + +def _create_packed_tensors( + case_config: OracleCaseConfig, + packed_dir: Path, +) -> DiskPackedTensorsSpec: + from art.preprocessing.pack import PackedTensors, packed_tensors_to_dir + + packed_tensors = cast( + PackedTensors, + _build_packed_tensors(case_config.packed_tensors, case_config.seed), + ) + descriptor = packed_tensors_to_dir(packed_tensors, str(packed_dir)) + return DiskPackedTensorsSpec.model_validate(descriptor) + + +def _validate_packed_tensor_files(spec: DiskPackedTensorsSpec) -> None: + tensor_dir = Path(spec.dir) + for filename in REQUIRED_PACKED_TENSOR_FILES: + file_path = tensor_dir / filename + if not file_path.exists(): + raise FileNotFoundError(f"Missing packed tensor file: {file_path}") + + +def ensure_case_artifacts(case_config: OracleCaseConfig) -> CaseArtifacts: + case_id = stable_case_id(case_config) + case_dir = ARTIFACT_ROOT / case_id + case_dir.mkdir(parents=True, exist_ok=True) + _write_json(case_dir / "case_config.json", case_config.model_dump(mode="json")) + + descriptor_path = case_dir / "packed_tensors.json" + if descriptor_path.exists(): + packed_spec = DiskPackedTensorsSpec.model_validate(_read_json(descriptor_path)) + _validate_packed_tensor_files(packed_spec) + else: + packed_spec = _create_packed_tensors(case_config, 
case_dir / "packed_tensors") + _write_json(descriptor_path, packed_spec.model_dump(mode="json")) + + shared_init_path = case_dir / "shared_init" / "adapter_model.safetensors" + shared_init_path.parent.mkdir(parents=True, exist_ok=True) + return CaseArtifacts( + case_id=case_id, + case_dir=str(case_dir), + packed_tensors=packed_spec, + shared_init_adapter_path=str(shared_init_path), + ) + + +def _replace_topology_dir(path: Path) -> None: + if path.exists(): + shutil.rmtree(path) + path.mkdir(parents=True, exist_ok=True) + (path / "traces").mkdir(parents=True, exist_ok=True) + + +def _run_worker_subprocess(request: WorkerRunRequest, topology_dir: Path) -> None: + request_path = topology_dir / "run_request.json" + _write_json(request_path, request.model_dump(mode="json")) + + command = [ + sys.executable, + "-m", + "torch.distributed.run", + "--standalone", + "--nproc_per_node", + str(request.topology.world_size()), + str(Path(__file__).resolve()), + "--worker-run", + "--run-request", + str(request_path), + ] + run = subprocess.run( + command, + cwd=str(REPO_ROOT), + env={**os.environ, "PYTHONUNBUFFERED": "1"}, + capture_output=True, + text=True, + check=False, + ) + combined_output = f"{run.stdout}\n{run.stderr}".strip() + (topology_dir / "worker.log").write_text(combined_output + "\n", encoding="utf-8") + if run.returncode != 0: + tail = "\n".join(combined_output.splitlines()[-80:]) + raise RuntimeError( + f"Topology run failed for {request.topology.slug()} with exit code " + f"{run.returncode}.\n{tail}" + ) + + +def ensure_topology_artifacts( + case_config: OracleCaseConfig, + topology: Topology, + *, + regenerate: bool = False, + mutation: SensitivityMutation | None = None, +) -> Path: + case_artifacts = ensure_case_artifacts(case_config) + case_dir = Path(case_artifacts.case_dir) + topology_dir = case_dir / topology.slug() + manifest_path = topology_dir / "manifest.json" + if manifest_path.exists() and not regenerate: + return topology_dir + + 
_replace_topology_dir(topology_dir) + shared_init_path = Path(case_artifacts.shared_init_adapter_path) + allow_create_shared_init = topology.slug() == ORACLE_TOPOLOGY.slug() + if not allow_create_shared_init and not shared_init_path.exists(): + ensure_topology_artifacts( + case_config=case_config, + topology=ORACLE_TOPOLOGY, + regenerate=False, + mutation=None, + ) + if not allow_create_shared_init and not shared_init_path.exists(): + raise FileNotFoundError( + f"Oracle shared adapter missing after oracle generation: {shared_init_path}" + ) + if mutation is not None and topology.slug() == ORACLE_TOPOLOGY.slug(): + raise RuntimeError("Sensitivity mutation cannot be applied to oracle topology") + + request = WorkerRunRequest( + case_id=case_artifacts.case_id, + case_config=case_config, + topology=topology, + topology_dir=str(topology_dir), + packed_tensors=case_artifacts.packed_tensors, + shared_init_adapter_path=str(shared_init_path), + allow_create_shared_init=allow_create_shared_init, + mutation=mutation, + ) + _run_worker_subprocess(request, topology_dir) + if not manifest_path.exists(): + raise RuntimeError(f"Missing manifest after run: {manifest_path}") + return topology_dir + + +def ensure_oracle_reference_artifacts( + *, + case_config: OracleCaseConfig, + regenerate: bool = False, +) -> Path: + return ensure_topology_artifacts( + case_config=case_config, + topology=ORACLE_TOPOLOGY, + regenerate=regenerate, + mutation=None, + ) + + +def _load_manifest(topology_dir: Path) -> RunManifest: + manifest_path = topology_dir / "manifest.json" + if not manifest_path.exists(): + raise FileNotFoundError(f"Missing topology manifest: {manifest_path}") + return RunManifest.model_validate(_read_json(manifest_path)) + + +def _load_output_tensor(topology_dir: Path, step: StepTrace): + import torch + + path = topology_dir / step.output_file + if not path.exists(): + raise FileNotFoundError(f"Missing output trace: {path}") + return torch.load(path, map_location="cpu") + + +def 
_load_safetensor_map(path: Path) -> dict[str, Any]: + from safetensors.torch import load_file + + if not path.exists(): + raise FileNotFoundError(f"Missing safetensor trace: {path}") + return load_file(str(path)) + + +def _tensor_error(reference, candidate) -> tuple[float, float]: + ref = reference.detach().float() + cand = candidate.detach().float() + if ref.shape != cand.shape: + return float("inf"), float("inf") + if ref.numel() == 0: + return 0.0, 0.0 + diff = (cand - ref).abs() + max_abs = float(diff.max().item()) + max_rel = float((diff / ref.abs().clamp_min(1e-12)).max().item()) + return max_abs, max_rel + + +def _build_failure( + *, + case_id: str, + topology: str, + metric: Literal["outputs", "losses", "grads", "lora_deltas"], + step_index: int, + key: str, + max_abs_error: float, + max_rel_error: float, + abs_tolerance: float, + rel_tolerance: float, + message: str, +) -> ComparisonFailure: + return ComparisonFailure( + case_id=case_id, + topology=topology, + oracle_topology=ORACLE_TOPOLOGY.slug(), + metric=metric, + step_index=step_index, + key=key, + max_abs_error=max_abs_error, + max_rel_error=max_rel_error, + abs_tolerance=abs_tolerance, + rel_tolerance=rel_tolerance, + message=message, + ) + + +def _compare_tensor_pair( + *, + case_id: str, + topology: str, + metric: Literal["outputs", "losses", "grads", "lora_deltas"], + step_index: int, + key: str, + reference, + candidate, + abs_tolerance: float, + rel_tolerance: float, +) -> ComparisonFailure | None: + max_abs, max_rel = _tensor_error(reference, candidate) + if max_abs <= abs_tolerance or max_rel <= rel_tolerance: + return None + return _build_failure( + case_id=case_id, + topology=topology, + metric=metric, + step_index=step_index, + key=key, + max_abs_error=max_abs, + max_rel_error=max_rel, + abs_tolerance=abs_tolerance, + rel_tolerance=rel_tolerance, + message=f"{metric} mismatch at step {step_index}, key '{key}'", + ) + + +def _compare_tensor_maps( + *, + case_id: str, + topology: str, + 
metric: Literal["grads", "lora_deltas"], + step_index: int, + reference: dict[str, Any], + candidate: dict[str, Any], + abs_tolerance: float, + rel_tolerance: float, +) -> ComparisonFailure | None: + ref_keys = set(reference.keys()) + cand_keys = set(candidate.keys()) + if ref_keys != cand_keys: + missing = sorted(ref_keys - cand_keys) + extra = sorted(cand_keys - ref_keys) + return _build_failure( + case_id=case_id, + topology=topology, + metric=metric, + step_index=step_index, + key="__keys__", + max_abs_error=float("inf"), + max_rel_error=float("inf"), + abs_tolerance=abs_tolerance, + rel_tolerance=rel_tolerance, + message=( + f"{metric} key mismatch at step {step_index}; " + f"missing={missing[:3]}, extra={extra[:3]}" + ), + ) + for key in sorted(ref_keys): + failure = _compare_tensor_pair( + case_id=case_id, + topology=topology, + metric=metric, + step_index=step_index, + key=key, + reference=reference[key], + candidate=candidate[key], + abs_tolerance=abs_tolerance, + rel_tolerance=rel_tolerance, + ) + if failure is not None: + return failure + return None + + +def _write_failure_report(topology_dir: Path, failure: ComparisonFailure) -> None: + _write_json(topology_dir / "failure_report.json", failure.model_dump(mode="json")) + + +def compare_topology_to_oracle( + *, + case_config: OracleCaseConfig, + topology: Topology, +) -> ComparisonFailure | None: + if topology.slug() == ORACLE_TOPOLOGY.slug(): + return None + + case_id = stable_case_id(case_config) + case_dir = ARTIFACT_ROOT / case_id + oracle_dir = case_dir / ORACLE_TOPOLOGY.slug() + topology_dir = case_dir / topology.slug() + + oracle_manifest = _load_manifest(oracle_dir) + topology_manifest = _load_manifest(topology_dir) + if len(oracle_manifest.steps) != len(topology_manifest.steps): + return _build_failure( + case_id=case_id, + topology=topology.slug(), + metric="losses", + step_index=0, + key="__step_count__", + max_abs_error=float("inf"), + max_rel_error=float("inf"), + 
abs_tolerance=case_config.tolerances.losses_abs, + rel_tolerance=case_config.tolerances.losses_rel, + message=( + "Step count mismatch: " + f"oracle={len(oracle_manifest.steps)} vs " + f"topology={len(topology_manifest.steps)}" + ), + ) + + import torch + + for oracle_step, topology_step in zip( + oracle_manifest.steps, topology_manifest.steps + ): + step_index = oracle_step.step_index + oracle_outputs = _load_output_tensor(oracle_dir, oracle_step) + topology_outputs = _load_output_tensor(topology_dir, topology_step) + failure = _compare_tensor_pair( + case_id=case_id, + topology=topology.slug(), + metric="outputs", + step_index=step_index, + key="logprobs", + reference=oracle_outputs, + candidate=topology_outputs, + abs_tolerance=case_config.tolerances.outputs_abs, + rel_tolerance=case_config.tolerances.outputs_rel, + ) + if failure is not None: + return failure + + oracle_loss = torch.tensor([oracle_step.loss], dtype=torch.float32) + topology_loss = torch.tensor([topology_step.loss], dtype=torch.float32) + failure = _compare_tensor_pair( + case_id=case_id, + topology=topology.slug(), + metric="losses", + step_index=step_index, + key="loss", + reference=oracle_loss, + candidate=topology_loss, + abs_tolerance=case_config.tolerances.losses_abs, + rel_tolerance=case_config.tolerances.losses_rel, + ) + if failure is not None: + return failure + + for metric, oracle_file, topo_file, abs_tol, rel_tol in ( + ( + "grads", + oracle_step.grads_file, + topology_step.grads_file, + case_config.tolerances.grads_abs, + case_config.tolerances.grads_rel, + ), + ( + "lora_deltas", + oracle_step.deltas_file, + topology_step.deltas_file, + case_config.tolerances.deltas_abs, + case_config.tolerances.deltas_rel, + ), + ): + failure = _compare_tensor_maps( + case_id=case_id, + topology=topology.slug(), + metric=metric, + step_index=step_index, + reference=_load_safetensor_map(oracle_dir / oracle_file), + candidate=_load_safetensor_map(topology_dir / topo_file), + abs_tolerance=abs_tol, 
+ rel_tolerance=rel_tol, + ) + if failure is not None: + return failure + return None + + +def run_and_compare_topology( + *, + case_config: OracleCaseConfig, + topology: Topology, + regenerate: bool = False, +) -> None: + ensure_oracle_reference_artifacts( + case_config=case_config, + regenerate=regenerate and topology.slug() == ORACLE_TOPOLOGY.slug(), + ) + ensure_topology_artifacts( + case_config=case_config, + topology=topology, + regenerate=regenerate, + mutation=None, + ) + failure = compare_topology_to_oracle(case_config=case_config, topology=topology) + if failure is None: + return + topology_dir = ARTIFACT_ROOT / failure.case_id / topology.slug() + _write_failure_report(topology_dir, failure) + raise AssertionError( + "Megatron oracle mismatch: " + f"topology={failure.topology}, metric={failure.metric}, " + f"step={failure.step_index}, key={failure.key}, " + f"max_abs={failure.max_abs_error:.6g}, " + f"max_rel={failure.max_rel_error:.6g}, " + f"tol_abs={failure.abs_tolerance:.6g}, " + f"tol_rel={failure.rel_tolerance:.6g}" + ) + + +def run_sensitivity_check( + *, + case_config: OracleCaseConfig, + regenerate: bool = False, +) -> None: + mutation = sensitivity_mutation() + if mutation is None: + raise RuntimeError( + f"Sensitivity check requires {SENSITIVITY_MUTATION_ENV} to be set" + ) + + ensure_oracle_reference_artifacts( + case_config=case_config, + regenerate=regenerate, + ) + ensure_topology_artifacts( + case_config=case_config, + topology=SENSITIVITY_TOPOLOGY, + regenerate=True, + mutation=mutation, + ) + failure = compare_topology_to_oracle( + case_config=case_config, + topology=SENSITIVITY_TOPOLOGY, + ) + if failure is None: + raise AssertionError( + "Sensitivity mutation did not produce an oracle mismatch. 
" + f"mutation={mutation}, topology={SENSITIVITY_TOPOLOGY.slug()}" + ) + + +def _set_deterministic_seed(seed: int) -> None: + import torch + + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False + + +def _merge_sharded_dicts(shards_by_rank: list[dict[str, Any]]) -> dict[str, Any]: + import torch + + merged: dict[str, list[Any]] = {} + for rank_shards in shards_by_rank: + for key, tensor in rank_shards.items(): + merged.setdefault(key, []).append(tensor.detach().cpu()) + full_state: dict[str, Any] = {} + for key, shards in merged.items(): + if len(shards) == 1: + full_state[key] = shards[0].contiguous() + continue + concat_dim = 1 if ".lora_A." in key else 0 + full_state[key] = torch.cat(shards, dim=concat_dim).contiguous() + return full_state + + +def _gather_full_state(local_state: dict[str, Any]) -> dict[str, Any] | None: + import torch + + rank = torch.distributed.get_rank() + world_size = torch.distributed.get_world_size() + gathered = [None for _ in range(world_size)] if rank == 0 else None + torch.distributed.gather_object(local_state, gathered, dst=0) + if rank != 0: + return None + assert gathered is not None + entries = [entry for entry in gathered if entry is not None] + return _merge_sharded_dicts(entries) + + +def _collect_lora_state(model_chunks: list[Any]) -> dict[str, Any] | None: + local_state: dict[str, Any] = {} + for chunk in model_chunks: + for module in chunk.modules(): + if not hasattr(module, "sharded_lora_state_dict"): + continue + module_state = module.sharded_lora_state_dict() + for key, value in module_state.items(): + if key in local_state: + raise RuntimeError( + f"Duplicate LoRA key while collecting state: {key}" + ) + local_state[key] = value.detach().cpu() + return _gather_full_state(local_state) + + +def _collect_lora_grads(model_chunks: list[Any]) -> dict[str, Any] | None: + from megatron.core 
import parallel_state as ps + + from art.megatron.lora import LoRA + + local_grads: dict[str, Any] = {} + for chunk in model_chunks: + for module in chunk.modules(): + if not isinstance(module, LoRA): + continue + grad_a = ( + module.A_T.grad + if module.A_T.grad is not None + else module.A_T.new_zeros(module.A_T.shape) + ) + grad_b = ( + module.B_T.grad + if module.B_T.grad is not None + else module.B_T.new_zeros(module.B_T.shape) + ) + if module.num_local_experts > 1: + if ps.get_expert_data_parallel_rank() != 0: + continue + for expert in range(module.num_local_experts): + prefix = module.adapter_model_prefix.format( + expert=expert + module._expert_offset + ) + local_grads[f"{prefix}.lora_A.weight"] = ( + grad_a[expert].detach().cpu().T + ) + local_grads[f"{prefix}.lora_B.weight"] = ( + grad_b[expert].detach().cpu().T + ) + else: + if ps.get_data_parallel_rank() != 0: + continue + local_grads[f"{module.adapter_model_prefix}.lora_A.weight"] = ( + grad_a.detach().cpu().T + ) + local_grads[f"{module.adapter_model_prefix}.lora_B.weight"] = ( + grad_b.detach().cpu().T + ) + return _gather_full_state(local_grads) + + +def _validate_adapter_exact( + expected_state: dict[str, Any], + adapter_model: dict[str, Any], +) -> None: + expected_keys = set(expected_state.keys()) + adapter_keys = set(adapter_model.keys()) + missing = sorted(expected_keys - adapter_keys) + extra = sorted(adapter_keys - expected_keys) + if missing or extra: + raise KeyError( + f"Adapter keys mismatch: missing={missing[:5]} extra={extra[:5]}" + ) + + +def _validate_loaded_state_matches_adapter( + loaded_state: dict[str, Any], + adapter_model: dict[str, Any], +) -> None: + import torch + + for key in sorted(adapter_model.keys()): + if key not in loaded_state: + raise KeyError(f"Loaded LoRA state missing key: {key}") + if not torch.equal(loaded_state[key].cpu(), adapter_model[key].cpu()): + max_abs, max_rel = _tensor_error(adapter_model[key], loaded_state[key]) + raise RuntimeError( + f"Loaded LoRA 
state mismatch for key '{key}' " + f"(max_abs={max_abs:.6g}, max_rel={max_rel:.6g})" + ) + + +def _configure_provider(provider: Any, topology: Topology) -> None: + provider.tensor_model_parallel_size = topology.tp + provider.expert_model_parallel_size = topology.ep + provider.expert_tensor_parallel_size = topology.etp + provider.pipeline_model_parallel_size = 1 + provider.context_parallel_size = 1 + provider.sequence_parallel = bool(topology.sp) + if hasattr(provider, "attention_dropout"): + provider.attention_dropout = 0.0 + if hasattr(provider, "hidden_dropout"): + provider.hidden_dropout = 0.0 + + +def _delta_state( + initial_state: dict[str, Any], + current_state: dict[str, Any], +) -> dict[str, Any]: + initial_keys = set(initial_state.keys()) + current_keys = set(current_state.keys()) + if initial_keys != current_keys: + missing = sorted(initial_keys - current_keys) + extra = sorted(current_keys - initial_keys) + raise KeyError( + f"LoRA state keys changed during training: missing={missing[:3]} extra={extra[:3]}" + ) + return { + key: current_state[key].detach().cpu() - initial_state[key].detach().cpu() + for key in sorted(initial_keys) + } + + +@contextmanager +def _mutation_hook( + megatron_train_module: Any, + mutation: SensitivityMutation | None, + pre_optimizer_step_hook: Callable[[], None] | None = None, +): + original_finalize = megatron_train_module._finalize_grads + original_optimizer_step = megatron_train_module._optimizer_step + + if mutation == "drop_finalize": + megatron_train_module._finalize_grads = lambda _model: None + elif mutation is not None: + raise ValueError(f"Unsupported mutation: {mutation}") + + if pre_optimizer_step_hook is not None: + + def _patched_optimizer_step(optimizer: Any, learning_rate: float): + pre_optimizer_step_hook() + return original_optimizer_step(optimizer, learning_rate) + + megatron_train_module._optimizer_step = _patched_optimizer_step + + if mutation is None: + if pre_optimizer_step_hook is None: + yield + return 
+ try: + yield + finally: + megatron_train_module._finalize_grads = original_finalize + megatron_train_module._optimizer_step = original_optimizer_step + + +def _worker_run(request: WorkerRunRequest) -> None: + from megatron.core.optimizer import OptimizerConfig + from safetensors.torch import load_file, save_file + import torch + + from art import dev, types + from art.megatron import train as megatron_train + from art.preprocessing.pack import packed_tensors_from_dir + + local_rank = int(os.environ["LOCAL_RANK"]) + torch.cuda.set_device(local_rank) + torch.distributed.init_process_group(backend="nccl") + _set_deterministic_seed(request.case_config.seed) + + world_size = torch.distributed.get_world_size() + if world_size != request.topology.world_size(): + raise RuntimeError( + f"World size mismatch: expected {request.topology.world_size()}, got {world_size}" + ) + + runtime = megatron_train.build_training_runtime( + model_identifier=request.case_config.base_model, + provider_configure=lambda provider: _configure_provider( + provider, request.topology + ), + optimizer_config=OptimizerConfig( + bf16=True, + lr=request.case_config.learning_rate, + adam_beta1=0.9, + adam_beta2=0.99, + clip_grad=0.1, + weight_decay=0.1, + ), + print_env=False, + print_optimizer_stats=False, + ) + model_chunks = runtime.model + optimizer = runtime.optimizer + + topology_dir = Path(request.topology_dir) + traces_dir = topology_dir / "traces" + traces_dir.mkdir(parents=True, exist_ok=True) + + shared_init_path = Path(request.shared_init_adapter_path) + if not shared_init_path.exists(): + if not request.allow_create_shared_init: + raise FileNotFoundError( + f"Missing oracle shared adapter at {shared_init_path}" + ) + initial_state = _collect_lora_state(model_chunks) + if torch.distributed.get_rank() == 0: + assert initial_state is not None + shared_init_path.parent.mkdir(parents=True, exist_ok=True) + save_file(initial_state, str(shared_init_path)) + torch.distributed.barrier() + if not 
shared_init_path.exists(): + raise FileNotFoundError(f"Shared init adapter not created: {shared_init_path}") + + adapter_model = load_file(str(shared_init_path)) + expected_state = _collect_lora_state(model_chunks) + if torch.distributed.get_rank() == 0: + assert expected_state is not None + _validate_adapter_exact(expected_state, adapter_model) + torch.distributed.barrier() + + megatron_train.load_adapter_into_model(model_chunks, adapter_model) + loaded_state = _collect_lora_state(model_chunks) + if torch.distributed.get_rank() == 0: + assert loaded_state is not None + _validate_loaded_state_matches_adapter(loaded_state, adapter_model) + torch.distributed.barrier() + + packed_tensors = packed_tensors_from_dir( + **request.packed_tensors.model_dump(exclude_none=True) + ) + initial_lora_state = _collect_lora_state(model_chunks) + if torch.distributed.get_rank() == 0 and initial_lora_state is None: + raise RuntimeError("Failed to collect initial LoRA state on rank 0") + + train_config = types.TrainConfig( + learning_rate=request.case_config.learning_rate, + beta=request.case_config.beta, + kl_penalty_coef=0.0, + ) + experimental_config: dev.TrainConfig = {} + step_traces: list[StepTrace] = [] + captured_grads: dict[str, Any] | None = None + + def _capture_lora_grads() -> None: + nonlocal captured_grads + captured_grads = _collect_lora_grads(model_chunks) + + with _mutation_hook( + megatron_train, + request.mutation, + pre_optimizer_step_hook=_capture_lora_grads, + ): + for step_index in range(request.case_config.num_steps): + sample_index = step_index % request.packed_tensors.num_sequences + inputs = megatron_train.select_indexed_inputs(packed_tensors, sample_index) + captured_grads = None + + step_result = megatron_train.run_training_step( + model_chunks=model_chunks, + optimizer=optimizer, + learning_rate=train_config.learning_rate, + inputs=inputs, + config=train_config, + experimental_config=experimental_config, + ref_logprobs=None, + ) + if 
torch.distributed.get_rank() == 0 and captured_grads is None: + raise RuntimeError("Failed to collect LoRA grads on rank 0") + + current_lora_state = _collect_lora_state(model_chunks) + if torch.distributed.get_rank() == 0 and current_lora_state is None: + raise RuntimeError("Failed to collect current LoRA state on rank 0") + + if torch.distributed.get_rank() == 0: + assert captured_grads is not None + assert initial_lora_state is not None + assert current_lora_state is not None + output_rel = Path("traces") / f"output_step_{step_index:03d}.pt" + grads_rel = Path("traces") / f"grads_step_{step_index:03d}.safetensors" + deltas_rel = ( + Path("traces") / f"deltas_step_{step_index:03d}.safetensors" + ) + lora_rel = Path(f"lora_step_{step_index:03d}.safetensors") + + torch.save( + step_result.new_logprobs.detach().cpu().float(), + topology_dir / output_rel, + ) + save_file(captured_grads, str(topology_dir / grads_rel)) + deltas = _delta_state(initial_lora_state, current_lora_state) + save_file(deltas, str(topology_dir / deltas_rel)) + save_file(current_lora_state, str(topology_dir / lora_rel)) + + step_traces.append( + StepTrace( + step_index=step_index, + loss=float(step_result.reduced_loss.item()), + probs_corr=step_result.probs_corr, + output_file=str(output_rel), + grads_file=str(grads_rel), + deltas_file=str(deltas_rel), + lora_file=str(lora_rel), + ) + ) + torch.distributed.barrier() + + if torch.distributed.get_rank() == 0: + manifest = RunManifest( + case_id=request.case_id, + base_model=request.case_config.base_model, + topology=request.topology.slug(), + world_size=request.topology.world_size(), + seed=request.case_config.seed, + num_steps=request.case_config.num_steps, + packed_tensors=request.packed_tensors, + tolerances=request.case_config.tolerances, + steps=step_traces, + ) + _write_json(topology_dir / "manifest.json", manifest.model_dump(mode="json")) + torch.distributed.barrier() + torch.distributed.destroy_process_group() + + +def 
_run_worker_cli(run_request_path: Path) -> None: + request = WorkerRunRequest.model_validate(_read_json(run_request_path)) + _worker_run(request) + + +def _parse_args(argv: list[str]) -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Megatron oracle harness worker") + parser.add_argument("--worker-run", action="store_true") + parser.add_argument("--run-request", type=Path) + return parser.parse_args(argv) + + +def _main(argv: list[str]) -> int: + args = _parse_args(argv) + if not args.worker_run: + raise SystemExit("This module is intended for test imports or --worker-run") + if args.run_request is None: + raise SystemExit("--run-request is required with --worker-run") + _run_worker_cli(args.run_request) + return 0 + + +if __name__ == "__main__": + raise SystemExit(_main(sys.argv[1:])) diff --git a/tests/integration/test_megatron_lora_oracle_correctness.py b/tests/integration/test_megatron_lora_oracle_correctness.py new file mode 100644 index 00000000..f6949b81 --- /dev/null +++ b/tests/integration/test_megatron_lora_oracle_correctness.py @@ -0,0 +1,100 @@ +import pytest + +from .megatron_oracle_harness import ( + ORACLE_TOPOLOGY, + PHASE_A_TOPOLOGIES, + PHASE_B_TOPOLOGIES, + SENSITIVITY_MUTATION_ENV, + SENSITIVITY_TOPOLOGY, + available_gpu_count, + default_case_config, + ensure_oracle_reference_artifacts, + phase_b_dp_enabled, + regenerate_requested, + run_and_compare_topology, + run_sensitivity_check, + sensitivity_enabled, +) + + +def _require_gpus_for(topology_world_size: int) -> None: + gpu_count = available_gpu_count() + if gpu_count < topology_world_size: + pytest.skip( + f"Need {topology_world_size} GPUs for topology run, only found {gpu_count}" + ) + + +def _skip_if_sensitivity_mode() -> None: + if sensitivity_enabled(): + pytest.skip( + f"{SENSITIVITY_MUTATION_ENV} is enabled; running sensitivity check only." 
+ ) + + +def _run_topology_case( # type: ignore[no-untyped-def] + topology, + case_config, + *, + regenerate: bool, +) -> None: + _require_gpus_for(topology.world_size()) + run_and_compare_topology( + case_config=case_config, + topology=topology, + regenerate=regenerate, + ) + + +def test_000_megatron_lora_oracle_sensitivity_check() -> None: + if not sensitivity_enabled(): + pytest.skip( + f"Set {SENSITIVITY_MUTATION_ENV}=drop_finalize to enable sensitivity check." + ) + _require_gpus_for(SENSITIVITY_TOPOLOGY.world_size()) + run_sensitivity_check( + case_config=default_case_config(), + regenerate=regenerate_requested(), + ) + + +def test_megatron_lora_oracle_phase_a_matrix() -> None: + _skip_if_sensitivity_mode() + case_config = default_case_config() + regenerate = regenerate_requested() + _require_gpus_for(ORACLE_TOPOLOGY.world_size()) + ensure_oracle_reference_artifacts( + case_config=case_config, + regenerate=regenerate, + ) + for topology in PHASE_A_TOPOLOGIES: + _run_topology_case( + topology, + case_config, + regenerate=regenerate and topology.slug() != ORACLE_TOPOLOGY.slug(), + ) + + +@pytest.mark.parametrize( + "topology_index", + range(len(PHASE_B_TOPOLOGIES)), + ids=[topology.slug() for topology in PHASE_B_TOPOLOGIES], +) +def test_megatron_lora_oracle_phase_b_dp_matrix(topology_index: int) -> None: + _skip_if_sensitivity_mode() + if not phase_b_dp_enabled(): + pytest.xfail( + "DP matrix currently blocked until Megatron backend DP support is enabled" + ) + case_config = default_case_config() + regenerate = regenerate_requested() + _require_gpus_for(ORACLE_TOPOLOGY.world_size()) + ensure_oracle_reference_artifacts( + case_config=case_config, + regenerate=regenerate, + ) + _run_topology_case( + PHASE_B_TOPOLOGIES[topology_index], + case_config, + regenerate=regenerate, + ) From fde2ff3c45ac98e3cb743299ba29742c2e246a81 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Tue, 10 Mar 2026 05:29:15 +0000 Subject: [PATCH 04/28] Minor typing changes --- 
src/art/megatron/train.py | 4 +-- tests/integration/megatron_oracle_harness.py | 27 +++++++++++++++----- 2 files changed, 22 insertions(+), 9 deletions(-) diff --git a/src/art/megatron/train.py b/src/art/megatron/train.py index abc2ef7b..ef58f5d0 100644 --- a/src/art/megatron/train.py +++ b/src/art/megatron/train.py @@ -296,7 +296,7 @@ def run_training_step( attention_mask = torch.zeros((1, 1, 1, 1), dtype=torch.bool, device=device) for chunk in model_chunks: - cast(Any, chunk).zero_grad_buffer() + chunk.zero_grad_buffer() # ty: ignore[call-non-callable] new_logprobs: torch.Tensor = -model_chunks[0]( input_ids=inputs["tokens"], @@ -307,7 +307,7 @@ def run_training_step( ) loss_info = loss_fn( - cast(Any, inputs), + inputs, # ty: ignore[invalid-argument-type] new_logprobs, ref_logprobs, None, diff --git a/tests/integration/megatron_oracle_harness.py b/tests/integration/megatron_oracle_harness.py index 0d61b41f..273ac368 100644 --- a/tests/integration/megatron_oracle_harness.py +++ b/tests/integration/megatron_oracle_harness.py @@ -10,7 +10,7 @@ import shutil import subprocess import sys -from typing import Any, Callable, Literal, cast +from typing import Any, Callable, Literal, TypeVar, cast import numpy as np from pydantic import BaseModel, ConfigDict, Field @@ -162,6 +162,15 @@ class ComparisonFailure(BaseModel): message: str +T = TypeVar("T") + + +def _require_not_none(value: T | None, name: str) -> T: + if value is None: + raise RuntimeError(f"{name} is None") + return value + + PHASE_A_TOPOLOGIES = [ Topology(tp=1, ep=1, etp=1, dp=1, sp=0, phase="A"), Topology(tp=2, ep=1, etp=1, dp=1, sp=1, phase="A"), @@ -1114,9 +1123,13 @@ def _capture_lora_grads() -> None: raise RuntimeError("Failed to collect current LoRA state on rank 0") if torch.distributed.get_rank() == 0: - assert captured_grads is not None - assert initial_lora_state is not None - assert current_lora_state is not None + grads = _require_not_none(captured_grads, "captured_grads") + initial_state = 
_require_not_none( + initial_lora_state, "initial_lora_state" + ) + current_state = _require_not_none( + current_lora_state, "current_lora_state" + ) output_rel = Path("traces") / f"output_step_{step_index:03d}.pt" grads_rel = Path("traces") / f"grads_step_{step_index:03d}.safetensors" deltas_rel = ( @@ -1128,10 +1141,10 @@ def _capture_lora_grads() -> None: step_result.new_logprobs.detach().cpu().float(), topology_dir / output_rel, ) - save_file(captured_grads, str(topology_dir / grads_rel)) - deltas = _delta_state(initial_lora_state, current_lora_state) + save_file(grads, str(topology_dir / grads_rel)) + deltas = _delta_state(initial_state, current_state) save_file(deltas, str(topology_dir / deltas_rel)) - save_file(current_lora_state, str(topology_dir / lora_rel)) + save_file(current_state, str(topology_dir / lora_rel)) step_traces.append( StepTrace( From d2c11614d6407c1d71bbed09295d31f3e39ac95e Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Thu, 12 Mar 2026 09:40:34 +0000 Subject: [PATCH 05/28] megatron: extend LoRA grad-sync semantics across tp/expert-tp --- src/art/megatron/finalize_grads.py | 38 ++++++------ src/art/megatron/lora.py | 97 +++++++++++++++++++++--------- 2 files changed, 89 insertions(+), 46 deletions(-) diff --git a/src/art/megatron/finalize_grads.py b/src/art/megatron/finalize_grads.py index 8c496667..83e8cc4f 100644 --- a/src/art/megatron/finalize_grads.py +++ b/src/art/megatron/finalize_grads.py @@ -8,14 +8,15 @@ from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors GradSyncDomain = Literal["tp_default", "expert_tp"] -GradSyncOp = Literal["none", "avg"] +GradSyncOp = Literal["none", "sum", "avg"] TP_DEFAULT_GRAD_SYNC_DOMAIN: GradSyncDomain = "tp_default" EXPERT_TP_GRAD_SYNC_DOMAIN: GradSyncDomain = "expert_tp" GRAD_SYNC_OP_NONE: GradSyncOp = "none" +GRAD_SYNC_OP_SUM: GradSyncOp = "sum" GRAD_SYNC_OP_AVG: GradSyncOp = "avg" VALID_DOMAINS = (TP_DEFAULT_GRAD_SYNC_DOMAIN, EXPERT_TP_GRAD_SYNC_DOMAIN) -VALID_SYNC_OPS = 
(GRAD_SYNC_OP_NONE, GRAD_SYNC_OP_AVG) +VALID_SYNC_OPS = (GRAD_SYNC_OP_NONE, GRAD_SYNC_OP_SUM, GRAD_SYNC_OP_AVG) def _iter_named_trainable_parameters( @@ -37,31 +38,36 @@ def _resolve_domain_group( domain: GradSyncDomain, ) -> torch.distributed.ProcessGroup | None: if domain == TP_DEFAULT_GRAD_SYNC_DOMAIN: - return None + group = ps.get_tensor_model_parallel_group(check_initialized=False) + if group is None or group.size() <= 1: + return None + return group if domain != EXPERT_TP_GRAD_SYNC_DOMAIN: raise RuntimeError(f"Unknown grad sync domain: {domain}") group = ps.get_expert_tensor_parallel_group(check_initialized=False) - if group is None: - return None - if group.size() <= 1: + if group is None or group.size() <= 1: return None return group def _resolve_reduce_op(op: GradSyncOp) -> Any: + if op == GRAD_SYNC_OP_SUM: + return torch.distributed.ReduceOp.SUM if op == GRAD_SYNC_OP_AVG: return torch.distributed.ReduceOp.AVG raise RuntimeError(f"Unknown grad sync op: {op}") def finalize_model_grads_extended(model: list[torch.nn.Module]) -> None: - """Run Megatron finalize, then apply non-default grad-sync reductions. + """Run Megatron finalize, then apply extra LoRA grad-sync reductions. - Megatron finalize handles DP/CP (and expert-DP via `param.allreduce=False`) internally. - This extension only handles extra reductions outside Megatron's default TP path, - currently expert-TP reductions for params annotated with grad_sync_* metadata. + Megatron finalize handles DP/CP (via `param.allreduce=True`) and expert-DP (via `param.allreduce=False`) internally. + This extension handles extra TP/expert-TP reductions for params annotated + with grad_sync_* metadata.
""" + # All-reduce all model grads across DP replicas, layernorm grads for sequence parallelism, + # embedding grads across first and last pipeline stages (if not tied) finalize_model_grads(model) buckets: dict[ @@ -73,10 +79,8 @@ def finalize_model_grads_extended(model: list[torch.nn.Module]) -> None: domain: GradSyncDomain = getattr( param, "grad_sync_domain", TP_DEFAULT_GRAD_SYNC_DOMAIN ) - if domain == TP_DEFAULT_GRAD_SYNC_DOMAIN: + if _resolve_domain_group(domain) is None: continue - if domain not in VALID_DOMAINS: - raise RuntimeError(f"{name}: unsupported grad_sync_domain={domain}") op: GradSyncOp = getattr(param, "grad_sync_op", GRAD_SYNC_OP_NONE) if op not in VALID_SYNC_OPS: @@ -93,7 +97,7 @@ def finalize_model_grads_extended(model: list[torch.nn.Module]) -> None: raise RuntimeError( f"{name}: expected non-None main_grad for domain={domain} reduce_op={op}" ) - local_grad = cast( + local_grad = cast( # local part of dtensor torch.Tensor, grad._local_tensor if hasattr(grad, "_local_tensor") else grad ) buckets[(domain, op, local_grad.dtype, local_grad.device)].append( @@ -101,9 +105,9 @@ def finalize_model_grads_extended(model: list[torch.nn.Module]) -> None: ) for (domain, op, _dtype, _device), entries in buckets.items(): - group = _resolve_domain_group(domain) - if group is None: - continue + group = _resolve_domain_group( + domain + ) # already checked if the domain is one we are handling grads = [grad for _name, grad in entries] coalesced = _flatten_dense_tensors(grads) diff --git a/src/art/megatron/lora.py b/src/art/megatron/lora.py index 12a38dec..b594bf18 100644 --- a/src/art/megatron/lora.py +++ b/src/art/megatron/lora.py @@ -23,11 +23,12 @@ ShardDomain = Literal["tp", "expert_tp"] GradSyncDomain = Literal["tp_default", "expert_tp"] -GradSyncOp = Literal["none", "avg"] +GradSyncOp = Literal["none", "sum", "avg"] TP_DEFAULT_GRAD_SYNC_DOMAIN: GradSyncDomain = "tp_default" EXPERT_TP_GRAD_SYNC_DOMAIN: GradSyncDomain = "expert_tp" GRAD_SYNC_OP_NONE: 
GradSyncOp = "none" +GRAD_SYNC_OP_SUM: GradSyncOp = "sum" GRAD_SYNC_OP_AVG: GradSyncOp = "avg" @@ -38,7 +39,7 @@ class LoRAParallelSpec(BaseModel): shard_domain: ShardDomain = "tp" sharded: bool = False - shard_axis: int | None = None + shard_dim: int | None = None grad_sync_domain: GradSyncDomain = TP_DEFAULT_GRAD_SYNC_DOMAIN grad_sync_op: GradSyncOp = GRAD_SYNC_OP_NONE @@ -95,7 +96,7 @@ def _set_lora_parallel_metadata( setattr(param, "lora_shard_domain", parallel_spec.shard_domain) setattr(param, "lora_tp_sharded", parallel_spec.sharded) setattr(param, "lora_tp_replicated", replicated) - setattr(param, "lora_tp_shard_axis", parallel_spec.shard_axis) + setattr(param, "lora_tp_shard_dim", parallel_spec.shard_dim) setattr(param, "grad_sync_domain", parallel_spec.grad_sync_domain) setattr(param, "grad_sync_op", parallel_spec.grad_sync_op) # Megatron DDP routing flag: @@ -115,6 +116,21 @@ def _set_lora_parallel_metadata( ), ) + # Megatron optimizer and checkpoint logic rely on tensor model-parallel metadata + # to distinguish true shards from TP-duplicate params. + if parallel_spec.sharded: + setattr(param, "tensor_model_parallel", True) + setattr( + param, "partition_dim", _normalize_axis(parallel_spec.shard_dim, param.ndim) + ) + # stride > 1 means the dim is split into blocks and each tp rank holds a shard of the block + # this might happen for fused e.g. 
gate_(up|proj), but loras are individual per module + setattr(param, "partition_stride", 1) + else: + setattr(param, "tensor_model_parallel", False) + setattr(param, "partition_dim", -1) + setattr(param, "partition_stride", 1) + class LoRA(torch.nn.Module): def __init__( @@ -238,7 +254,7 @@ def load_weight(self, weight: torch.Tensor, *, into: torch.nn.Parameter) -> None domain = getattr(into, "lora_shard_domain") sharded = bool(getattr(into, "lora_tp_sharded")) if sharded: - axis = getattr(into, "lora_tp_shard_axis") + axis = getattr(into, "lora_tp_shard_dim") if axis is None: raise RuntimeError( f"{self.adapter_model_prefix}: missing shard axis for sharded parameter" @@ -290,11 +306,11 @@ def _should_export_parameter(self, param: torch.nn.Parameter) -> bool: def _manifest_for_param(self, param: torch.nn.Parameter) -> dict[str, Any]: domain = getattr(param, "lora_shard_domain") sharded = bool(getattr(param, "lora_tp_sharded", False)) - shard_axis = getattr(param, "lora_tp_shard_axis", None) + shard_dim = getattr(param, "lora_tp_shard_dim", None) return { "domain": domain, "sharded": sharded, - "shard_axis": shard_axis, + "shard_dim": shard_dim, "shard_world_size": _get_shard_world_size(domain) if sharded else 1, "shard_rank": _get_shard_rank(domain) if sharded else 0, } @@ -367,15 +383,15 @@ def __init__( a_parallel_spec = LoRAParallelSpec( shard_domain="tp", sharded=True, - shard_axis=-2, + shard_dim=-2, grad_sync_domain=TP_DEFAULT_GRAD_SYNC_DOMAIN, grad_sync_op=GRAD_SYNC_OP_NONE, # only need DP-type reductions ) b_parallel_spec = a_parallel_spec.model_copy( update={ "sharded": False, - "shard_axis": None, - "grad_sync_op": GRAD_SYNC_OP_AVG, # megatron reduces across TP ranks + "shard_dim": None, + "grad_sync_op": GRAD_SYNC_OP_SUM, # sum replicated TP contributions } ) self.lora = LoRA( @@ -423,6 +439,10 @@ def __init__( assert self.provider.kv_channels is not None assert self.provider.num_query_groups is not None assert self.provider.num_attention_heads is not 
None + if self.provider.num_attention_heads % self.provider.num_query_groups != 0: + raise ValueError( + "num_attention_heads must be divisible by num_query_groups for QKV LoRA" + ) q_out_features = self.provider.kv_channels * self.provider.num_attention_heads kv_out_features = self.provider.kv_channels * self.provider.num_query_groups tp_world_size = ps.get_tensor_model_parallel_world_size() @@ -434,6 +454,13 @@ def __init__( ) q_out_features_per_rank = q_out_features // tp_world_size kv_out_features_per_rank = kv_out_features // tp_world_size + self.num_query_groups_per_partition = ( + self.provider.num_query_groups // tp_world_size + ) + self.num_attention_heads_per_group = ( + self.provider.num_attention_heads // self.provider.num_query_groups + ) + self.hidden_size_per_attention_head = self.provider.kv_channels assert isinstance(linear_qkv.weight, torch.Tensor) self.q_proj_lora = self._build_qkv_lora( adapter_model_prefix=f"{adapter_model_prefix}.q_proj", @@ -470,14 +497,14 @@ def _build_qkv_lora( a_parallel_spec = LoRAParallelSpec( shard_domain="tp", sharded=False, - shard_axis=None, + shard_dim=None, grad_sync_domain=TP_DEFAULT_GRAD_SYNC_DOMAIN, - grad_sync_op=GRAD_SYNC_OP_AVG, # megatron reduces across TP ranks + grad_sync_op=GRAD_SYNC_OP_SUM, # sum replicated TP contributions ) b_parallel_spec = a_parallel_spec.model_copy( update={ "sharded": True, - "shard_axis": -1, + "shard_dim": -1, "grad_sync_op": GRAD_SYNC_OP_NONE, # only need DP-type reductions } ) @@ -508,20 +535,32 @@ def forward(self, x: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor | None]: query = self.q_proj_lora(layernorm_output) key = self.k_proj_lora(layernorm_output) value = self.v_proj_lora(layernorm_output) - - assert isinstance(self.linear_qkv.config.kv_channels, int) - query_4d = query.reshape( - query.shape[0], query.shape[1], -1, self.linear_qkv.config.kv_channels + # Match Megatron mixed_qkv layout: + # [S, B, nqg, (nah/nqg + 2), hn] where each query-group packs + # [all query 
heads for that group, key, value]. + query_5d = query.reshape( + query.shape[0], + query.shape[1], + self.num_query_groups_per_partition, + self.num_attention_heads_per_group, + self.hidden_size_per_attention_head, ) - key_4d = key.reshape( - key.shape[0], key.shape[1], -1, self.linear_qkv.config.kv_channels + key_5d = key.reshape( + key.shape[0], + key.shape[1], + self.num_query_groups_per_partition, + 1, + self.hidden_size_per_attention_head, ) - value_4d = value.reshape( - value.shape[0], value.shape[1], -1, self.linear_qkv.config.kv_channels + value_5d = value.reshape( + value.shape[0], + value.shape[1], + self.num_query_groups_per_partition, + 1, + self.hidden_size_per_attention_head, ) - - qkv_4d = torch.cat([query_4d, key_4d, value_4d], dim=2) - adapter_output = qkv_4d.reshape(qkv_4d.shape[0], qkv_4d.shape[1], -1) + qkv_5d = torch.cat([query_5d, key_5d, value_5d], dim=3) + adapter_output = qkv_5d.reshape(qkv_5d.shape[0], qkv_5d.shape[1], -1) return linear_output + adapter_output, bias @@ -566,14 +605,14 @@ def _build_fc1_lora( a_parallel_spec = LoRAParallelSpec( shard_domain="expert_tp", sharded=False, - shard_axis=None, + shard_dim=None, grad_sync_domain=EXPERT_TP_GRAD_SYNC_DOMAIN, - grad_sync_op=GRAD_SYNC_OP_AVG, # we handle this with extended finalize_grads + grad_sync_op=GRAD_SYNC_OP_SUM, # we handle this with extended finalize_grads ) b_parallel_spec = a_parallel_spec.model_copy( update={ "sharded": True, - "shard_axis": -1, + "shard_dim": -1, "grad_sync_domain": EXPERT_TP_GRAD_SYNC_DOMAIN, "grad_sync_op": GRAD_SYNC_OP_NONE, # only need DP-type reductions } @@ -619,16 +658,16 @@ def __init__( a_parallel_spec = LoRAParallelSpec( shard_domain="expert_tp", sharded=True, - shard_axis=-2, + shard_dim=-2, grad_sync_domain=EXPERT_TP_GRAD_SYNC_DOMAIN, grad_sync_op=GRAD_SYNC_OP_NONE, # only need DP-type reductions ) b_parallel_spec = a_parallel_spec.model_copy( update={ "sharded": False, - "shard_axis": None, + "shard_dim": None, "grad_sync_domain": 
EXPERT_TP_GRAD_SYNC_DOMAIN, - "grad_sync_op": GRAD_SYNC_OP_AVG, # we handle this with extended finalize_grads + "grad_sync_op": GRAD_SYNC_OP_SUM, # we handle this with extended finalize_grads } ) self.lora = LoRA( From e4180184bb649b58daef258503130356040d5656 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Thu, 12 Mar 2026 09:40:37 +0000 Subject: [PATCH 06/28] megatron: add MoE routing replay core and unit tests --- src/art/megatron/routing_replay.py | 832 ++++++++++++++++++++++++++ tests/unit/test_moe_routing_replay.py | 189 ++++++ 2 files changed, 1021 insertions(+) create mode 100644 src/art/megatron/routing_replay.py create mode 100644 tests/unit/test_moe_routing_replay.py diff --git a/src/art/megatron/routing_replay.py b/src/art/megatron/routing_replay.py new file mode 100644 index 00000000..91865b80 --- /dev/null +++ b/src/art/megatron/routing_replay.py @@ -0,0 +1,832 @@ +from __future__ import annotations + +import json +from pathlib import Path +import re +import types +from typing import Any, Protocol + +from pydantic import BaseModel, ConfigDict, model_validator +from safetensors.torch import load_file, save_file +import torch + +ROUTER_NAME_TOKEN = ".mlp.router" +ROUTER_KEY_FORMAT_VERSION = "moe_routing_replay_v1" +GLOBAL_TOKEN_UIDS_KEY = "global_token_uids" + +_ROUTER_LAYER_PATTERN = re.compile(r"decoder\.layers\.(?P\d+)\.mlp\.router$") +_TRACE_CHUNK_PREFIX_PATTERN = re.compile(r"^chunk(?P\d+)\.(?P.+)$") + + +def _to_tensor_cpu_contiguous( + tensor: torch.Tensor, *, dtype: torch.dtype +) -> torch.Tensor: + if not isinstance(tensor, torch.Tensor): + raise TypeError(f"Expected torch.Tensor, got {type(tensor)}") + return tensor.detach().to(device="cpu", dtype=dtype).contiguous() + + +def _normalize_step_index(step_index: int) -> str: + if step_index < 0: + raise ValueError(f"step_index must be non-negative, got {step_index}") + return f"{step_index:06d}" + + +def _build_tensor_key(router_key: str, call_index: int, field_name: str) -> str: + return 
f"{router_key}/call_{call_index}/{field_name}" + + +def _flatten_router_tensor(tensor: torch.Tensor) -> torch.Tensor: + if tensor.ndim < 2: + raise RuntimeError( + f"Router tensor must have rank >=2, got shape={tuple(tensor.shape)}" + ) + num_experts = int(tensor.shape[-1]) + return tensor.reshape(-1, num_experts).contiguous() + + +def _extract_router_output_tensors(output: Any) -> tuple[torch.Tensor, torch.Tensor]: + if isinstance(output, (list, tuple)) and len(output) >= 2: + probs, routing_map = output[0], output[1] + elif isinstance(output, dict): + probs = output.get("probs") + routing_map = output.get("routing_map") + else: + raise RuntimeError(f"Unsupported router output type: {type(output)}") + + if not isinstance(probs, torch.Tensor): + raise RuntimeError(f"Expected probs tensor, got {type(probs)}") + if not isinstance(routing_map, torch.Tensor): + raise RuntimeError(f"Expected routing_map tensor, got {type(routing_map)}") + + probs_2d = _flatten_router_tensor(probs.to(torch.float32)) + routing_map_2d = _flatten_router_tensor(routing_map.bool()) + if probs_2d.shape != routing_map_2d.shape: + raise RuntimeError( + "Router output shape mismatch: " + f"probs={tuple(probs_2d.shape)} routing_map={tuple(routing_map_2d.shape)}" + ) + return probs_2d, routing_map_2d + + +def build_router_key_from_module_name(*, chunk_index: int, module_name: str) -> str: + match = _ROUTER_LAYER_PATTERN.search(module_name) + if match is None: + raise RuntimeError( + f"Unable to derive router key from module name '{module_name}'. " + f"Expected suffix matching '{_ROUTER_LAYER_PATTERN.pattern}'." 
+ ) + layer_index = int(match.group("layer")) + return f"chunk_{chunk_index:02d}.layer_{layer_index:04d}.mlp.router" + + +def build_router_key_from_trace_name(trace_module_name: str) -> str: + chunk_match = _TRACE_CHUNK_PREFIX_PATTERN.match(trace_module_name) + if chunk_match is None: + raise RuntimeError( + "Forward trace router module name must start with 'chunk.'; " + f"got '{trace_module_name}'" + ) + chunk_index = int(chunk_match.group("chunk")) + module_name = chunk_match.group("name") + return build_router_key_from_module_name( + chunk_index=chunk_index, + module_name=module_name, + ) + + +class ParallelTopology(BaseModel): + tp: int + ep: int + etp: int = 1 + dp: int = 1 + sp: bool = False + cp: int = 1 + pp: int = 1 + vpp: int = 1 + + +class RouterCallRoute(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + expert_indices: torch.Tensor + expert_probs: torch.Tensor + expert_mask: torch.Tensor + routing_map: torch.Tensor | None = None + num_experts: int + + @model_validator(mode="after") + def _validate(self) -> "RouterCallRoute": + self.expert_indices = _to_tensor_cpu_contiguous( + self.expert_indices, dtype=torch.int32 + ) + self.expert_probs = _to_tensor_cpu_contiguous( + self.expert_probs, dtype=torch.float32 + ) + self.expert_mask = _to_tensor_cpu_contiguous(self.expert_mask, dtype=torch.bool) + if self.routing_map is not None: + self.routing_map = _to_tensor_cpu_contiguous( + self.routing_map, dtype=torch.bool + ) + + if self.expert_indices.ndim != 2: + raise RuntimeError( + "expert_indices must have shape [num_tokens, max_topk], got " + f"{tuple(self.expert_indices.shape)}" + ) + if self.expert_probs.shape != self.expert_indices.shape: + raise RuntimeError( + "expert_probs shape must match expert_indices shape, got " + f"{tuple(self.expert_probs.shape)} vs {tuple(self.expert_indices.shape)}" + ) + if self.expert_mask.shape != self.expert_indices.shape: + raise RuntimeError( + "expert_mask shape must match expert_indices shape, 
got " + f"{tuple(self.expert_mask.shape)} vs {tuple(self.expert_indices.shape)}" + ) + if self.num_experts <= 0: + raise RuntimeError(f"num_experts must be >0, got {self.num_experts}") + if self.routing_map is not None: + expected = (self.expert_indices.shape[0], self.num_experts) + if tuple(self.routing_map.shape) != expected: + raise RuntimeError( + "routing_map shape mismatch: " + f"expected={expected}, got={tuple(self.routing_map.shape)}" + ) + return self + + @property + def num_global_tokens(self) -> int: + return int(self.expert_indices.shape[0]) + + @property + def max_topk(self) -> int: + return int(self.expert_indices.shape[1]) + + +class StepRouterRoutes(BaseModel): + calls: dict[int, RouterCallRoute] + + @model_validator(mode="after") + def _validate_calls(self) -> "StepRouterRoutes": + if not self.calls: + raise RuntimeError("StepRouterRoutes.calls cannot be empty") + for call_index in self.calls: + if call_index < 0: + raise RuntimeError(f"call_index must be >=0, got {call_index}") + return self + + +class StepRoutes(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + routers: dict[str, StepRouterRoutes] + global_token_uids: torch.Tensor + + @model_validator(mode="after") + def _validate(self) -> "StepRoutes": + if not self.routers: + raise RuntimeError("StepRoutes.routers cannot be empty") + self.global_token_uids = _to_tensor_cpu_contiguous( + self.global_token_uids, dtype=torch.int64 + ) + if self.global_token_uids.ndim != 1: + raise RuntimeError( + "global_token_uids must have shape [num_global_tokens], got " + f"{tuple(self.global_token_uids.shape)}" + ) + if int(torch.unique(self.global_token_uids).numel()) != int( + self.global_token_uids.numel() + ): + raise RuntimeError("global_token_uids must be unique per step") + expected_tokens = int(self.global_token_uids.numel()) + for router_key, step_router in self.routers.items(): + for call_index, route in step_router.calls.items(): + if route.num_global_tokens != 
expected_tokens: + raise RuntimeError( + "Route token count mismatch for " + f"router='{router_key}' call={call_index}: " + f"route_tokens={route.num_global_tokens}, " + f"expected_tokens={expected_tokens}" + ) + return self + + +class MoeRoutingReplayBundle(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + format_version: str = ROUTER_KEY_FORMAT_VERSION + topology: ParallelTopology + num_steps: int + max_topk: int + router_keys: list[str] + steps: dict[int, StepRoutes] + + @model_validator(mode="after") + def _validate(self) -> "MoeRoutingReplayBundle": + if self.format_version != ROUTER_KEY_FORMAT_VERSION: + raise RuntimeError( + f"Unsupported format_version={self.format_version}; " + f"expected={ROUTER_KEY_FORMAT_VERSION}" + ) + if self.num_steps <= 0: + raise RuntimeError(f"num_steps must be >0, got {self.num_steps}") + if self.max_topk < 0: + raise RuntimeError(f"max_topk must be >=0, got {self.max_topk}") + if set(self.steps.keys()) != set(range(self.num_steps)): + raise RuntimeError( + "steps must be indexed from 0..num_steps-1 without gaps: " + f"num_steps={self.num_steps}, step_keys={sorted(self.steps.keys())}" + ) + if not self.router_keys: + raise RuntimeError("router_keys cannot be empty") + router_key_set = set(self.router_keys) + for step_index, step_routes in self.steps.items(): + step_router_keys = set(step_routes.routers.keys()) + if step_router_keys != router_key_set: + raise RuntimeError( + f"Step {step_index} router set mismatch. 
" + f"expected={sorted(router_key_set)}, got={sorted(step_router_keys)}" + ) + return self + + @classmethod + def from_dir(cls, bundle_dir: str | Path) -> "MoeRoutingReplayBundle": + base_dir = Path(bundle_dir) + manifest_path = base_dir / "manifest.json" + if not manifest_path.exists(): + raise FileNotFoundError(f"Missing routing replay manifest: {manifest_path}") + with manifest_path.open("r", encoding="utf-8") as handle: + manifest = json.load(handle) + + if manifest.get("format_version") != ROUTER_KEY_FORMAT_VERSION: + raise RuntimeError( + "Unsupported routing replay manifest version: " + f"{manifest.get('format_version')}" + ) + + topology = ParallelTopology.model_validate(manifest["topology"]) + num_steps = int(manifest["num_steps"]) + max_topk = int(manifest["max_topk"]) + router_keys = [str(key) for key in manifest["router_keys"]] + manifest_steps = manifest["steps"] + + steps: dict[int, StepRoutes] = {} + for step_index in range(num_steps): + step_manifest = manifest_steps[str(step_index)] + step_file = base_dir / step_manifest["file"] + if not step_file.exists(): + raise FileNotFoundError( + f"Missing routing replay step file for step={step_index}: {step_file}" + ) + step_tensors = load_file(str(step_file)) + if GLOBAL_TOKEN_UIDS_KEY not in step_tensors: + raise RuntimeError( + f"Step file missing '{GLOBAL_TOKEN_UIDS_KEY}': {step_file}" + ) + global_token_uids = step_tensors[GLOBAL_TOKEN_UIDS_KEY] + + routers: dict[str, StepRouterRoutes] = {} + for router_key in router_keys: + router_step_manifest = step_manifest["routers"].get(router_key) + if router_step_manifest is None: + raise RuntimeError( + f"Step manifest missing router_key='{router_key}' for step={step_index}" + ) + calls: dict[int, RouterCallRoute] = {} + for call_index_raw, call_manifest in router_step_manifest.items(): + call_index = int(call_index_raw) + expert_indices_key = _build_tensor_key( + router_key, call_index, "expert_indices" + ) + expert_probs_key = _build_tensor_key( + 
router_key, call_index, "expert_probs" + ) + expert_mask_key = _build_tensor_key( + router_key, call_index, "expert_mask" + ) + routing_map_key = _build_tensor_key( + router_key, call_index, "routing_map" + ) + if expert_indices_key not in step_tensors: + raise RuntimeError( + f"Missing tensor key '{expert_indices_key}' in {step_file}" + ) + if expert_probs_key not in step_tensors: + raise RuntimeError( + f"Missing tensor key '{expert_probs_key}' in {step_file}" + ) + if expert_mask_key not in step_tensors: + raise RuntimeError( + f"Missing tensor key '{expert_mask_key}' in {step_file}" + ) + routing_map = ( + step_tensors[routing_map_key] + if routing_map_key in step_tensors + else None + ) + calls[call_index] = RouterCallRoute( + expert_indices=step_tensors[expert_indices_key], + expert_probs=step_tensors[expert_probs_key], + expert_mask=step_tensors[expert_mask_key], + routing_map=routing_map, + num_experts=int(call_manifest["num_experts"]), + ) + routers[router_key] = StepRouterRoutes(calls=calls) + steps[step_index] = StepRoutes( + routers=routers, + global_token_uids=global_token_uids, + ) + + return cls( + format_version=ROUTER_KEY_FORMAT_VERSION, + topology=topology, + num_steps=num_steps, + max_topk=max_topk, + router_keys=router_keys, + steps=steps, + ) + + def to_dir(self, bundle_dir: str | Path) -> None: + base_dir = Path(bundle_dir) + base_dir.mkdir(parents=True, exist_ok=True) + + manifest_steps: dict[str, dict[str, Any]] = {} + for step_index in range(self.num_steps): + step_routes = self.steps[step_index] + step_file_name = f"step_{_normalize_step_index(step_index)}.safetensors" + step_file_path = base_dir / step_file_name + step_tensors: dict[str, torch.Tensor] = { + GLOBAL_TOKEN_UIDS_KEY: _to_tensor_cpu_contiguous( + step_routes.global_token_uids, dtype=torch.int64 + ) + } + step_manifest_routers: dict[str, dict[str, dict[str, int]]] = {} + for router_key in self.router_keys: + router_routes = step_routes.routers[router_key] + call_manifest: 
dict[str, dict[str, int]] = {} + for call_index, route in sorted(router_routes.calls.items()): + step_tensors[ + _build_tensor_key(router_key, call_index, "expert_indices") + ] = _to_tensor_cpu_contiguous( + route.expert_indices, dtype=torch.int32 + ) + step_tensors[ + _build_tensor_key(router_key, call_index, "expert_probs") + ] = _to_tensor_cpu_contiguous( + route.expert_probs, dtype=torch.float32 + ) + step_tensors[ + _build_tensor_key(router_key, call_index, "expert_mask") + ] = _to_tensor_cpu_contiguous(route.expert_mask, dtype=torch.bool) + if route.routing_map is not None: + step_tensors[ + _build_tensor_key(router_key, call_index, "routing_map") + ] = _to_tensor_cpu_contiguous( + route.routing_map, dtype=torch.bool + ) + call_manifest[str(call_index)] = {"num_experts": route.num_experts} + step_manifest_routers[router_key] = call_manifest + save_file(step_tensors, str(step_file_path)) + manifest_steps[str(step_index)] = { + "file": step_file_name, + "routers": step_manifest_routers, + } + + manifest = { + "format_version": ROUTER_KEY_FORMAT_VERSION, + "topology": self.topology.model_dump(mode="json"), + "num_steps": self.num_steps, + "max_topk": self.max_topk, + "router_keys": self.router_keys, + "steps": manifest_steps, + } + with (base_dir / "manifest.json").open("w", encoding="utf-8") as handle: + json.dump(manifest, handle, indent=2, sort_keys=True) + + +class LocalTokenIndexer(Protocol): + def build_local_token_uids( + self, + *, + global_token_uids: torch.Tensor, + num_local_tokens: int, + sequence_parallel: bool, + context_parallel_size: int, + ) -> torch.Tensor: + """Build local token uid order for current rank.""" + + +class TopologyAwareLocalTokenIndexer: + def __init__(self, parallel_state_module: Any | None = None) -> None: + self._parallel_state = parallel_state_module + + def _ps(self) -> Any: + if self._parallel_state is not None: + return self._parallel_state + from megatron.core import parallel_state as ps + + self._parallel_state = ps + 
return ps + + def build_local_token_uids( + self, + *, + global_token_uids: torch.Tensor, + num_local_tokens: int, + sequence_parallel: bool, + context_parallel_size: int, + ) -> torch.Tensor: + ps = self._ps() + + local_uids = global_token_uids.to(dtype=torch.int64, device="cpu").view(1, -1) + + cp_size = int(ps.get_context_parallel_world_size()) + if context_parallel_size > 1 and cp_size > 1: + from megatron.core.utils import get_batch_on_this_cp_rank + + local_uids = get_batch_on_this_cp_rank({"tokens": local_uids})["tokens"] + + tp_size = int(ps.get_tensor_model_parallel_world_size()) + tp_rank = int(ps.get_tensor_model_parallel_rank()) if tp_size > 1 else 0 + if sequence_parallel and tp_size > 1: + tokens_per_tp_rank = local_uids.shape[1] // tp_size + start = tp_rank * tokens_per_tp_rank + local_uids = local_uids[:, start : start + tokens_per_tp_rank] + + return local_uids.reshape(-1).contiguous() + + +def _patch_alltoall_dispatcher_preprocess() -> None: + try: + from megatron.core.transformer.moe.token_dispatcher import ( + MoEAlltoAllTokenDispatcher, + ) + except Exception: + return + + if hasattr(MoEAlltoAllTokenDispatcher, "_art_router_replay_preprocess_patched"): + return + + original_preprocess = MoEAlltoAllTokenDispatcher.preprocess + + def patched_preprocess( + self: Any, routing_map: torch.Tensor, *args: Any, **kwargs: Any + ): + result = original_preprocess(self, routing_map, *args, **kwargs) + if ( + not getattr(self, "drop_and_pad", False) + and getattr(self.config, "moe_expert_capacity_factor", None) is None + and not ( + getattr(self.config, "moe_router_padding_for_quantization", None) + or getattr(self.config, "moe_router_padding_for_fp8", None) + ) + ): + self.num_out_tokens = int(routing_map.sum().item()) + return result + + setattr(MoEAlltoAllTokenDispatcher, "preprocess", patched_preprocess) + setattr(MoEAlltoAllTokenDispatcher, "_art_router_replay_preprocess_patched", True) + + +class MoeRoutingReplayController: + def __init__( + self, + *, 
+ bundle: MoeRoutingReplayBundle, + strict: bool, + local_token_indexer: LocalTokenIndexer | None = None, + ) -> None: + self.bundle = bundle + self.strict = strict + self.local_token_indexer = ( + local_token_indexer or TopologyAwareLocalTokenIndexer() + ) + + self._active_step_index: int | None = None + self._active_sample_index: int | None = None + self._active_step_routes: StepRoutes | None = None + self._router_call_cursors: dict[str, int] = {} + self._global_uid_to_row_index: dict[int, int] = {} + self._local_router_keys: set[str] = set() + + self._patched_router_modules: list[dict[str, Any]] = [] + + def install_router_patches(self, model_chunks: list[Any]) -> None: + if self._patched_router_modules: + return + _patch_alltoall_dispatcher_preprocess() + + for chunk_index, chunk in enumerate(model_chunks): + for module_name, module in chunk.named_modules(): + if ROUTER_NAME_TOKEN not in module_name: + continue + if not hasattr(module, "routing"): + continue + router_key = build_router_key_from_module_name( + chunk_index=chunk_index, + module_name=module_name, + ) + if self.strict and router_key not in self.bundle.router_keys: + raise RuntimeError( + "Router key from model is missing in replay bundle: " + f"router_key='{router_key}'" + ) + + original_routing = module.routing + if getattr(module, "_art_router_replay_patched", False): + continue + + sequence_parallel = bool( + getattr(getattr(module, "config", None), "sequence_parallel", False) + ) + context_parallel_size = int( + getattr(getattr(module, "config", None), "context_parallel_size", 1) + ) + + def routing_wrapper( + _module: Any, + logits: torch.Tensor, + *args: Any, + _router_key: str = router_key, + _sequence_parallel: bool = sequence_parallel, + _context_parallel_size: int = context_parallel_size, + **kwargs: Any, + ) -> tuple[torch.Tensor, torch.Tensor]: + live_probs, live_routing_map = original_routing( + logits, *args, **kwargs + ) + replay_probs, replay_routing_map = self.get_route_for_router( 
+ router_key=_router_key, + logits=live_probs, + sequence_parallel=_sequence_parallel, + context_parallel_size=_context_parallel_size, + ) + # same result, but autograd goes through + probs = ( + live_probs + + ( + replay_probs.to( + device=live_probs.device, + dtype=live_probs.dtype, + ) + - live_probs + ).detach() + ) + routing_map = replay_routing_map.to( + device=live_routing_map.device, + dtype=live_routing_map.dtype, + ) + return probs, routing_map + + module.routing = types.MethodType(routing_wrapper, module) + module._art_router_replay_patched = True + self._local_router_keys.add(router_key) + self._patched_router_modules.append( + { + "module": module, + "router_key": router_key, + "original_routing": original_routing, + } + ) + + def remove_router_patches(self) -> None: + for item in self._patched_router_modules: + module = item["module"] + module.routing = item["original_routing"] + if hasattr(module, "_art_router_replay_patched"): + delattr(module, "_art_router_replay_patched") + self._patched_router_modules.clear() + self._local_router_keys.clear() + + def set_step(self, *, step_index: int, sample_index: int) -> None: + if step_index not in self.bundle.steps: + raise RuntimeError( + f"Replay bundle missing step_index={step_index}. 
" + f"Available steps={sorted(self.bundle.steps.keys())}" + ) + step_routes = self.bundle.steps[step_index] + self._active_step_index = step_index + self._active_sample_index = sample_index + self._active_step_routes = step_routes + for local_router_key in sorted(self._local_router_keys): + if local_router_key not in step_routes.routers: + raise RuntimeError( + "Replay bundle step is missing local router key: " + f"step={step_index}, router='{local_router_key}'" + ) + self._router_call_cursors = { + router_key: 0 for router_key in sorted(self._local_router_keys) + } + self._global_uid_to_row_index = { + int(uid.item()): row_index + for row_index, uid in enumerate(step_routes.global_token_uids) + } + + def finalize_step(self) -> None: + if self._active_step_routes is None: + raise RuntimeError("finalize_step called before set_step") + for router_key in sorted(self._local_router_keys): + router_routes = self._active_step_routes.routers[router_key] + consumed = self._router_call_cursors.get(router_key, 0) + expected = len(router_routes.calls) + if consumed != expected: + raise RuntimeError( + "Routing replay step consumption mismatch: " + f"step={self._active_step_index}, router='{router_key}', " + f"consumed={consumed}, expected={expected}" + ) + self._active_step_index = None + self._active_sample_index = None + self._active_step_routes = None + self._router_call_cursors = {} + self._global_uid_to_row_index = {} + + def get_route_for_router( + self, + *, + router_key: str, + logits: torch.Tensor, + sequence_parallel: bool, + context_parallel_size: int, + ) -> tuple[torch.Tensor, torch.Tensor]: + step_routes = self._active_step_routes + call_index = self._router_call_cursors.get(router_key, 0) + router_calls = step_routes.routers[router_key].calls + route = router_calls[call_index] + self._router_call_cursors[router_key] = call_index + 1 + + num_local_tokens = int(logits.shape[0]) + num_experts = int(logits.shape[1]) + + local_uids = 
self.local_token_indexer.build_local_token_uids( + global_token_uids=step_routes.global_token_uids, + num_local_tokens=num_local_tokens, + sequence_parallel=sequence_parallel, + context_parallel_size=context_parallel_size, + ) + row_index_tensor = torch.tensor( + [self._global_uid_to_row_index[int(uid)] for uid in local_uids.tolist()], + dtype=torch.int64, + ) + + local_indices = route.expert_indices.index_select(0, row_index_tensor) + local_probs = route.expert_probs.index_select(0, row_index_tensor) + local_mask = route.expert_mask.index_select(0, row_index_tensor) + + probs = torch.zeros( + (num_local_tokens, num_experts), + dtype=logits.dtype, + device=logits.device, + ) + routing_map = torch.zeros( + (num_local_tokens, num_experts), + dtype=torch.bool, + device=logits.device, + ) + + if local_indices.numel() > 0: + indices_device = local_indices.to(device=logits.device, dtype=torch.long) + probs_device = local_probs.to(device=logits.device, dtype=logits.dtype) + mask_device = local_mask.to(device=logits.device, dtype=torch.bool) + row_index_device = ( + torch.arange(num_local_tokens, device=logits.device) + .unsqueeze(1) + .expand_as(indices_device) + ) + + selected_rows = row_index_device[mask_device] + selected_cols = indices_device[mask_device] + selected_probs = probs_device[mask_device] + + if selected_rows.numel() > 0: + probs[selected_rows, selected_cols] = selected_probs + routing_map[selected_rows, selected_cols] = True + + return probs, routing_map + + +def _compact_route_from_dense( + probs_2d: torch.Tensor, + routing_map_2d: torch.Tensor, +) -> RouterCallRoute: + num_tokens, num_experts = probs_2d.shape + if num_tokens == 0: + return RouterCallRoute( + expert_indices=torch.zeros((0, 0), dtype=torch.int32), + expert_probs=torch.zeros((0, 0), dtype=torch.float32), + expert_mask=torch.zeros((0, 0), dtype=torch.bool), + num_experts=num_experts, + ) + + max_topk = int(routing_map_2d.sum(dim=1).max().item()) + expert_indices = torch.zeros((num_tokens, 
max_topk), dtype=torch.int32) + expert_probs = torch.zeros((num_tokens, max_topk), dtype=torch.float32) + expert_mask = torch.zeros((num_tokens, max_topk), dtype=torch.bool) + for token_index in range(num_tokens): + expert_ids = torch.nonzero( + routing_map_2d[token_index], as_tuple=False + ).flatten() + slot_count = int(expert_ids.numel()) + if slot_count == 0: + continue + expert_indices[token_index, :slot_count] = expert_ids.to(torch.int32) + expert_probs[token_index, :slot_count] = probs_2d[token_index, expert_ids].to( + torch.float32 + ) + expert_mask[token_index, :slot_count] = True + + return RouterCallRoute( + expert_indices=expert_indices, + expert_probs=expert_probs, + expert_mask=expert_mask, + num_experts=num_experts, + ) + + +def build_bundle_from_forward_trace_dir( + *, + traces_dir: str | Path, + num_steps: int, + topology: ParallelTopology, +) -> MoeRoutingReplayBundle: + """Build a replay bundle from saved forward traces for the correctness harness. + + This helper is intended for testing/oracle routing replay workflows and is not + part of inference routing capture/export. 
+ """ + trace_dir = Path(traces_dir) + steps: dict[int, StepRoutes] = {} + router_keys_union: set[str] = set() + max_topk = 0 + + for step_index in range(num_steps): + trace_path = trace_dir / f"forward_trace_step_{step_index:03d}.pt" + if not trace_path.exists(): + raise FileNotFoundError( + f"Missing forward trace for step={step_index}: {trace_path}" + ) + step_trace: dict[str, list[dict[str, Any]]] = torch.load( + trace_path, map_location="cpu", weights_only=False + ) + + step_routers: dict[str, StepRouterRoutes] = {} + step_global_tokens: int | None = None + for module_name in sorted(step_trace.keys()): + if ROUTER_NAME_TOKEN not in module_name: + continue + router_key = build_router_key_from_trace_name(module_name) + router_calls: dict[int, RouterCallRoute] = {} + for call_index, call_entry in enumerate(step_trace[module_name]): + output = call_entry.get("output") + probs_2d, routing_map_2d = _extract_router_output_tensors(output) + compact_route = _compact_route_from_dense(probs_2d, routing_map_2d) + router_calls[call_index] = compact_route + max_topk = max(max_topk, compact_route.max_topk) + token_count = compact_route.num_global_tokens + if step_global_tokens is None: + step_global_tokens = token_count + elif step_global_tokens != token_count: + raise RuntimeError( + "Inconsistent token count across routers within step: " + f"step={step_index}, expected={step_global_tokens}, got={token_count}, " + f"router='{router_key}', call={call_index}" + ) + + if not router_calls: + raise RuntimeError( + f"Router trace has no calls for module '{module_name}' at step={step_index}" + ) + step_routers[router_key] = StepRouterRoutes(calls=router_calls) + router_keys_union.add(router_key) + + if not step_routers: + raise RuntimeError( + f"No router traces found for step={step_index} in {trace_path}" + ) + if step_global_tokens is None: + raise RuntimeError( + f"Could not infer token count for step={step_index} from router traces" + ) + global_token_uids = 
torch.arange(step_global_tokens, dtype=torch.int64) + steps[step_index] = StepRoutes( + routers=step_routers, + global_token_uids=global_token_uids, + ) + + router_keys = sorted(router_keys_union) + for step_index, step_routes in steps.items(): + if set(step_routes.routers.keys()) != set(router_keys): + raise RuntimeError( + f"Step {step_index} router keys differ from global set: " + f"step_keys={sorted(step_routes.routers.keys())}, router_keys={router_keys}" + ) + + return MoeRoutingReplayBundle( + format_version=ROUTER_KEY_FORMAT_VERSION, + topology=topology, + num_steps=num_steps, + max_topk=max_topk, + router_keys=router_keys, + steps=steps, + ) diff --git a/tests/unit/test_moe_routing_replay.py b/tests/unit/test_moe_routing_replay.py new file mode 100644 index 00000000..980784c7 --- /dev/null +++ b/tests/unit/test_moe_routing_replay.py @@ -0,0 +1,189 @@ +from __future__ import annotations + +from pathlib import Path +import tempfile + +import pytest +import torch +from torch import nn + +from art.megatron.routing_replay import ( + MoeRoutingReplayBundle, + MoeRoutingReplayController, + ParallelTopology, + RouterCallRoute, + StepRouterRoutes, + StepRoutes, +) + + +def _dense_from_compact( + route: RouterCallRoute, + *, + dtype: torch.dtype, +) -> tuple[torch.Tensor, torch.Tensor]: + num_tokens = route.expert_indices.shape[0] + num_experts = route.num_experts + probs = torch.zeros((num_tokens, num_experts), dtype=dtype) + routing_map = torch.zeros((num_tokens, num_experts), dtype=torch.bool) + for token_idx in range(num_tokens): + for slot in range(route.expert_indices.shape[1]): + if not bool(route.expert_mask[token_idx, slot]): + continue + expert_idx = int(route.expert_indices[token_idx, slot].item()) + probs[token_idx, expert_idx] = route.expert_probs[token_idx, slot].to(dtype) + routing_map[token_idx, expert_idx] = True + return probs, routing_map + + +def _make_bundle() -> tuple[MoeRoutingReplayBundle, RouterCallRoute]: + router_key = 
"chunk_00.layer_0000.mlp.router" + route = RouterCallRoute( + expert_indices=torch.tensor( + [ + [0, 2], + [1, 0], + [2, 1], + [1, 0], + ], + dtype=torch.int32, + ), + expert_probs=torch.tensor( + [ + [0.70, 0.30], + [1.00, 0.00], + [0.65, 0.35], + [1.00, 0.00], + ], + dtype=torch.float32, + ), + expert_mask=torch.tensor( + [ + [True, True], + [True, False], + [True, True], + [True, False], + ], + dtype=torch.bool, + ), + num_experts=3, + ) + bundle = MoeRoutingReplayBundle( + topology=ParallelTopology(tp=1, ep=1, etp=1, dp=1, sp=False, cp=1, pp=1, vpp=1), + num_steps=1, + max_topk=2, + router_keys=[router_key], + steps={ + 0: StepRoutes( + routers={router_key: StepRouterRoutes(calls={0: route})}, + global_token_uids=torch.arange(4, dtype=torch.int64), + ) + }, + ) + return bundle, route + + +class _IdentityIndexer: + def build_local_token_uids( + self, + *, + global_token_uids: torch.Tensor, + num_local_tokens: int, + sequence_parallel: bool, + context_parallel_size: int, + ) -> torch.Tensor: + del sequence_parallel, context_parallel_size + if int(global_token_uids.numel()) < num_local_tokens: + raise RuntimeError("num_local_tokens exceeds global token count") + return global_token_uids[:num_local_tokens].clone() + + +class _FakeRouter(nn.Module): + def __init__(self) -> None: + super().__init__() + self.config = type( + "Config", + (), + {"sequence_parallel": False, "context_parallel_size": 1}, + )() + + def routing(self, logits: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor]: + probs = torch.softmax(logits, dim=-1) + routing_map = torch.zeros_like(logits, dtype=torch.bool) + return probs, routing_map + + +class _FakeMlp(nn.Module): + def __init__(self) -> None: + super().__init__() + self.router = _FakeRouter() + + +class _FakeLayer(nn.Module): + def __init__(self) -> None: + super().__init__() + self.mlp = _FakeMlp() + + +class _FakeDecoder(nn.Module): + def __init__(self) -> None: + super().__init__() + self.layers = nn.ModuleList([_FakeLayer()]) + + 
+class _FakeChunk(nn.Module): + def __init__(self) -> None: + super().__init__() + self.decoder = _FakeDecoder() + + +def test_bundle_roundtrip_disk() -> None: + bundle, route = _make_bundle() + with tempfile.TemporaryDirectory() as tmp_dir: + bundle_path = Path(tmp_dir) + bundle.to_dir(bundle_path) + loaded = MoeRoutingReplayBundle.from_dir(bundle_path) + + assert loaded.num_steps == 1 + assert loaded.max_topk == 2 + assert loaded.router_keys == bundle.router_keys + loaded_route = loaded.steps[0].routers[bundle.router_keys[0]].calls[0] + assert torch.equal(loaded_route.expert_indices, route.expert_indices) + assert torch.equal(loaded_route.expert_probs, route.expert_probs) + assert torch.equal(loaded_route.expert_mask, route.expert_mask) + + +def test_controller_patches_router_and_replays() -> None: + bundle, route = _make_bundle() + controller = MoeRoutingReplayController( + bundle=bundle, + strict=True, + local_token_indexer=_IdentityIndexer(), + ) + chunk = _FakeChunk() + controller.install_router_patches([chunk]) + controller.set_step(step_index=0, sample_index=0) + + logits = torch.randn((4, 3), dtype=torch.float32) + replay_probs, replay_map = chunk.decoder.layers[0].mlp.router.routing(logits) + expected_probs, expected_map = _dense_from_compact(route, dtype=logits.dtype) + + assert torch.equal(replay_map.cpu(), expected_map) + assert torch.allclose(replay_probs.cpu(), expected_probs, atol=0.0, rtol=0.0) + + controller.finalize_step() + controller.remove_router_patches() + + +def test_controller_finalize_fails_when_unconsumed_calls_remain() -> None: + bundle, _route = _make_bundle() + controller = MoeRoutingReplayController( + bundle=bundle, + strict=True, + local_token_indexer=_IdentityIndexer(), + ) + chunk = _FakeChunk() + controller.install_router_patches([chunk]) + controller.set_step(step_index=0, sample_index=0) + with pytest.raises(RuntimeError, match="consumption mismatch"): + controller.finalize_step() From bc5e7a48dfbd6b5d9c7ef2e6d57c500c6d3cc044 
Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Thu, 12 Mar 2026 09:40:39 +0000 Subject: [PATCH 07/28] megatron runtime/service: wire routing replay into training jobs --- src/art/dev/train.py | 8 +++- src/art/megatron/service.py | 9 ++++ src/art/megatron/train.py | 83 +++++++++++++++++++++++++++++++++++-- 3 files changed, 96 insertions(+), 4 deletions(-) diff --git a/src/art/dev/train.py b/src/art/dev/train.py index b0e232c5..0ada9ccb 100644 --- a/src/art/dev/train.py +++ b/src/art/dev/train.py @@ -1,7 +1,10 @@ -from typing import Literal +from typing import TYPE_CHECKING, Literal from typing_extensions import TypedDict +if TYPE_CHECKING: + from art.megatron.routing_replay import MoeRoutingReplayBundle + class TrainConfig(TypedDict, total=False): advantage_balance: float @@ -22,6 +25,9 @@ class TrainConfig(TypedDict, total=False): logprob_calculation_chunk_size: int mask_prob_ratio: bool max_negative_advantage_importance_sampling_weight: float + moe_routing_replay_bundle: "MoeRoutingReplayBundle | None" + moe_routing_replay_path: str | None + moe_routing_replay_strict: bool num_trajectories_learning_rate_multiplier_power: float plot_tensors: bool ppo: bool diff --git a/src/art/megatron/service.py b/src/art/megatron/service.py index e4c99a98..42ec4f9a 100644 --- a/src/art/megatron/service.py +++ b/src/art/megatron/service.py @@ -36,6 +36,8 @@ class MegatronTrainingJob(BaseModel): disk_packed_tensors: DiskPackedTensors config: types.TrainConfig experimental_config: dev.TrainConfig + moe_routing_replay_path: str | None = None + moe_routing_replay_strict: bool = True @dataclass @@ -241,12 +243,19 @@ async def train( for job_name in os.listdir(jobs_dir): if job_name.endswith(".json"): os.remove(os.path.join(jobs_dir, job_name)) + if _config.get("moe_routing_replay_bundle") is not None: + raise RuntimeError( + "moe_routing_replay_bundle is only supported for in-process/runtime APIs; " + "MegatronService subprocess jobs must use moe_routing_replay_path." 
+ ) job = MegatronTrainingJob( lora_path=lora_path, optimizer_state_path=self._optimizer_state_path, disk_packed_tensors=disk_packed_tensors, config=config, experimental_config=_config, + moe_routing_replay_path=_config.get("moe_routing_replay_path"), + moe_routing_replay_strict=_config.get("moe_routing_replay_strict", True), ) job_path = os.path.join(jobs_dir, f"{datetime.datetime.now().isoformat()}.json") with open(job_path, "w") as f: diff --git a/src/art/megatron/train.py b/src/art/megatron/train.py index ef58f5d0..33dc8172 100644 --- a/src/art/megatron/train.py +++ b/src/art/megatron/train.py @@ -39,6 +39,10 @@ def _set_cache_dir(env_var: str, default_path: str) -> None: from art.megatron.lora import apply_lora_adapters from art.megatron.offload import OffloadState, offload_to_cpu, reload_to_gpu from art.megatron.provider import get_provider +from art.megatron.routing_replay import ( + MoeRoutingReplayBundle, + MoeRoutingReplayController, +) from art.preprocessing.pack import ( DiskPackedTensors, PackedTensors, @@ -54,6 +58,8 @@ class TrainingJob(BaseModel): disk_packed_tensors: DiskPackedTensors config: types.TrainConfig experimental_config: dev.TrainConfig + moe_routing_replay_path: str | None = None + moe_routing_replay_strict: bool = True class TrainingRuntime(BaseModel): @@ -64,6 +70,7 @@ class TrainingRuntime(BaseModel): optimizer: Any rank: int world_size: int + moe_routing_replay_controller: MoeRoutingReplayController | None = None class TrainStepResult(BaseModel): @@ -129,11 +136,47 @@ def _default_optimizer_config() -> OptimizerConfig: ) +def configure_moe_routing_replay( + runtime: TrainingRuntime, + *, + replay_bundle_path: str | None = None, + replay_bundle: MoeRoutingReplayBundle | None = None, + strict: bool = True, +) -> None: + if runtime.moe_routing_replay_controller is not None: + runtime.moe_routing_replay_controller.remove_router_patches() + runtime.moe_routing_replay_controller = None + + if replay_bundle is not None and 
replay_bundle_path is not None: + raise RuntimeError( + "Provide either replay_bundle_path or replay_bundle, not both" + ) + if replay_bundle is None and replay_bundle_path is None: + return + + if replay_bundle is None: + if replay_bundle_path is None: + raise RuntimeError( + "replay_bundle_path is required when replay_bundle is None" + ) + replay_bundle = MoeRoutingReplayBundle.from_dir(replay_bundle_path) + + controller = MoeRoutingReplayController( + bundle=replay_bundle, + strict=strict, + ) + controller.install_router_patches(runtime.model) + runtime.moe_routing_replay_controller = controller + + def build_training_runtime( *, model_identifier: str | None = None, provider_configure: Callable[[Any], None] | None = None, optimizer_config: OptimizerConfig | None = None, + moe_routing_replay_path: str | None = None, + moe_routing_replay_bundle: MoeRoutingReplayBundle | None = None, + moe_routing_replay_strict: bool = True, print_env: bool = True, print_optimizer_stats: bool = True, ) -> TrainingRuntime: @@ -186,13 +229,20 @@ def build_training_runtime( percent = (num_params / total_params) * 100 if total_params > 0 else 0 print(f"Optimizer parameters as percent of total: {percent:0.2f}%") - return TrainingRuntime( + runtime = TrainingRuntime( provider=provider, model=model, optimizer=optimizer, rank=rank, world_size=world_size, ) + configure_moe_routing_replay( + runtime, + replay_bundle_path=moe_routing_replay_path, + replay_bundle=moe_routing_replay_bundle, + strict=moe_routing_replay_strict, + ) + return runtime def iter_modules(model_chunks: list[MegatronModule]) -> Any: @@ -204,12 +254,17 @@ def iter_modules(model_chunks: list[MegatronModule]) -> Any: def load_adapter_into_model( model_chunks: list[MegatronModule], adapter_model: dict[str, torch.Tensor], + optimizer: Any | None = None, ) -> None: with torch.no_grad(): for module in iter_modules(model_chunks): if hasattr(module, "load_lora"): module.load_lora(adapter_model) # type: ignore[attr-defined] + if 
optimizer is None: + return + optimizer.reload_model_params() + def collect_sharded_lora_state( model_chunks: list[MegatronModule], @@ -285,7 +340,17 @@ def run_training_step( config: types.TrainConfig, experimental_config: dev.TrainConfig, ref_logprobs: torch.Tensor | None = None, + step_index: int | None = None, + sample_index: int | None = None, + moe_routing_replay_controller: MoeRoutingReplayController | None = None, ) -> TrainStepResult: + if moe_routing_replay_controller is not None: + assert step_index is not None and sample_index is not None + moe_routing_replay_controller.set_step( + step_index=step_index, + sample_index=sample_index, + ) + device = next(model_chunks[0].parameters()).device _move_inputs_to_device(inputs, device) @@ -322,6 +387,9 @@ def run_training_step( ) reduced_loss = _reduce_loss(loss) + if moe_routing_replay_controller is not None: + moe_routing_replay_controller.finalize_step() + return TrainStepResult( reduced_loss=reduced_loss, probs_corr=float(loss_info.probs_corr.item()), @@ -360,6 +428,12 @@ def _run_service_loop(runtime: TrainingRuntime) -> None: config = job.config experimental_config = job.experimental_config + configure_moe_routing_replay( + runtime, + replay_bundle_path=job.moe_routing_replay_path, + strict=job.moe_routing_replay_strict, + ) + print0(runtime.rank, "Loaded job from", job_path) print0(runtime.rank, "Job:", job) @@ -368,7 +442,7 @@ def _run_service_loop(runtime: TrainingRuntime) -> None: raise FileNotFoundError(f"No adapter model found at {adapter_model_path}") print0(runtime.rank, "Loading adapter model from", adapter_model_path) adapter_model = load_file(adapter_model_path) - load_adapter_into_model(runtime.model, adapter_model) + load_adapter_into_model(runtime.model, adapter_model, runtime.optimizer) optimizer_shard_path = os.path.join( job.optimizer_state_path, @@ -401,7 +475,7 @@ def _run_service_loop(runtime: TrainingRuntime) -> None: repeat = math.ceil(num_indices / len(indices)) indices = (indices * 
repeat)[:num_indices] - for index in indices: + for step_index, index in enumerate(indices): inputs = select_indexed_inputs(packed_tensors, index) step_result = run_training_step( model_chunks=runtime.model, @@ -411,6 +485,9 @@ def _run_service_loop(runtime: TrainingRuntime) -> None: config=config, experimental_config=experimental_config, ref_logprobs=None, + step_index=step_index, + sample_index=index, + moe_routing_replay_controller=runtime.moe_routing_replay_controller, ) print0( runtime.rank, From c5e06d96fb7f378e65e553c8b88f1471ce83544c Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Thu, 12 Mar 2026 09:40:47 +0000 Subject: [PATCH 08/28] oracle worker/trace: capture forward traces and emit replay bundles --- tests/integration/megatron_forward_trace.py | 489 ++++++++++++++++++ tests/integration/megatron_oracle_worker.py | 521 ++++++++++++++++++++ 2 files changed, 1010 insertions(+) create mode 100644 tests/integration/megatron_forward_trace.py create mode 100644 tests/integration/megatron_oracle_worker.py diff --git a/tests/integration/megatron_forward_trace.py b/tests/integration/megatron_forward_trace.py new file mode 100644 index 00000000..2ca418aa --- /dev/null +++ b/tests/integration/megatron_forward_trace.py @@ -0,0 +1,489 @@ +from __future__ import annotations + +from pathlib import Path +from typing import Any, cast + +import torch + +CAPTURE_NAME_TOKENS = ( + ".self_attention.linear_qkv", + ".self_attention.linear_qkv.q_proj_lora", + ".self_attention.linear_qkv.k_proj_lora", + ".self_attention.linear_qkv.v_proj_lora", + ".self_attention.linear_proj", + ".self_attention.linear_proj.lora", + ".mlp.router", + ".mlp.experts.linear_fc1", + ".mlp.experts.linear_fc1.gate_lora", + ".mlp.experts.linear_fc1.up_lora", + ".mlp.experts.linear_fc2", + ".mlp.experts.linear_fc2.lora", +) +ROUTER_NAME_TOKEN = ".mlp.router" + + +def _safe_int(value: Any, default: int = 0) -> int: + """Coerces scalar values to int for trace metadata.""" + try: + return int(value) + except 
Exception: + return default + + +def _safe_ps_stat(name: str, default: int) -> int: + """Reads one Megatron parallel-state integer when available.""" + try: + from megatron.core import parallel_state as ps + + getter = getattr(ps, name) + return _safe_int(getter(), default) + except Exception: + return default + + +def _rank_metadata() -> dict[str, int]: + """Builds lightweight distributed metadata for one trace call.""" + rank = 0 + world_size = 1 + if torch.distributed.is_initialized(): + rank = _safe_int(torch.distributed.get_rank(), 0) + world_size = _safe_int(torch.distributed.get_world_size(), 1) + return { + "global_rank": rank, + "world_size": world_size, + "tp_rank": _safe_ps_stat("get_tensor_model_parallel_rank", 0), + "tp_world_size": _safe_ps_stat("get_tensor_model_parallel_world_size", 1), + "ep_rank": _safe_ps_stat("get_expert_model_parallel_rank", 0), + "ep_world_size": _safe_ps_stat("get_expert_model_parallel_world_size", 1), + "etp_rank": _safe_ps_stat("get_expert_tensor_parallel_rank", 0), + "etp_world_size": _safe_ps_stat("get_expert_tensor_parallel_world_size", 1), + "dp_rank": _safe_ps_stat("get_data_parallel_rank", 0), + "dp_world_size": _safe_ps_stat("get_data_parallel_world_size", 1), + "expert_dp_rank": _safe_ps_stat("get_expert_data_parallel_rank", 0), + "expert_dp_world_size": _safe_ps_stat("get_expert_data_parallel_world_size", 1), + } + + +def _shard_world_size_for_domain(domain: Any) -> int: + """Returns shard-group world size for one LoRA shard domain.""" + if domain == "tp": + return _safe_ps_stat("get_tensor_model_parallel_world_size", 1) + if domain == "expert_tp": + return _safe_ps_stat("get_expert_tensor_parallel_world_size", 1) + return 1 + + +def _extract_primary_tensor(value: Any) -> torch.Tensor | None: + if isinstance(value, torch.Tensor): + return value + if isinstance(value, dict): + for item in value.values(): + tensor = _extract_primary_tensor(item) + if tensor is not None: + return tensor + if isinstance(value, (list, 
tuple)): + for item in value: + tensor = _extract_primary_tensor(item) + if tensor is not None: + return tensor + return None + + +def _materialize_tensor(tensor: torch.Tensor) -> torch.Tensor: + if hasattr(tensor, "full_tensor"): + tensor = cast(torch.Tensor, tensor.full_tensor()) + elif hasattr(tensor, "to_local"): + tensor = cast(torch.Tensor, tensor.to_local()) + elif hasattr(tensor, "_local_tensor"): + tensor = cast(torch.Tensor, tensor._local_tensor) + return tensor.detach().cpu() + + +def _materialize_trace_value(value: Any) -> Any: + if isinstance(value, torch.Tensor): + return _materialize_tensor(value) + if isinstance(value, dict): + return {key: _materialize_trace_value(item) for key, item in value.items()} + if isinstance(value, list): + return [_materialize_trace_value(item) for item in value] + if isinstance(value, tuple): + return tuple(_materialize_trace_value(item) for item in value) + return value + + +def _extract_router_topk(output: Any) -> tuple[torch.Tensor, torch.Tensor] | None: + if not isinstance(output, tuple) or len(output) < 2: + return None + probs = output[0] + routing_map = output[1] + if not isinstance(probs, torch.Tensor) or not isinstance(routing_map, torch.Tensor): + return None + probs = _materialize_tensor(probs.float()) + routing_map = _materialize_tensor(routing_map) + topk = int(routing_map.sum(dim=-1).max().item()) + if topk < 0: + raise RuntimeError(f"Invalid router topk={topk}") + if topk == 0: + topk_scores = probs.new_zeros((probs.shape[0], 0)) + topk_ids = torch.zeros((probs.shape[0], 0), dtype=torch.int64) + else: + topk_scores, topk_ids = torch.topk(probs, k=topk, dim=-1) + return topk_ids.contiguous(), topk_scores.contiguous() + + +class ForwardTraceCapture: + def __init__( + self, + model_chunks: list[Any], + *, + enabled: bool, + capture_name_tokens: tuple[str, ...] 
= CAPTURE_NAME_TOKENS, + ) -> None: + self.enabled = enabled + self.capture_name_tokens = capture_name_tokens + self.current_step_index: int | None = None + self.current_step_trace: dict[str, list[dict[str, Any]]] = {} + self._hook_handles: list[Any] = [] + if not enabled: + return + self._register_hooks(model_chunks) + + def _register_hooks(self, model_chunks: list[Any]) -> None: + for chunk_index, chunk in enumerate(model_chunks): + for module_name, module in chunk.named_modules(): + trace_module_name = f"chunk{chunk_index}.{module_name}" + is_layer_output = ( + ".decoder.layers." in module_name + and module_name.rsplit(".", 1)[-1].isdigit() + ) + if not is_layer_output and not any( + module_name.endswith(token) for token in self.capture_name_tokens + ): + continue + self._hook_handles.append( + module.register_forward_hook( + self._make_hook(trace_module_name, module) + ) + ) + + @staticmethod + def _sequence_parallel_enabled(module: Any) -> bool: + """Returns sequence-parallel flag from module/provider/config when present.""" + for owner in ( + module, + getattr(module, "provider", None), + getattr(module, "config", None), + ): + if owner is None: + continue + value = getattr(owner, "sequence_parallel", None) + if isinstance(value, bool): + return value + return False + + @staticmethod + def _lora_primary_output_merge_hint(module: Any) -> dict[str, Any] | None: + """Infers the correct output merge op for LoRA modules.""" + if module.__class__.__name__ != "LoRA": + return None + lora_module = module + b_param = getattr(lora_module, "B_T", None) + if b_param is None: + return None + b_domain = getattr(b_param, "lora_shard_domain", None) + b_world_size = _shard_world_size_for_domain(b_domain) + if bool(getattr(b_param, "lora_tp_sharded", False)) and b_world_size > 1: + shard_dim = getattr(b_param, "lora_tp_shard_dim", None) + if isinstance(shard_dim, int): + return {"op": "concat", "dim": shard_dim} + a_param = getattr(lora_module, "A_T", None) + if a_param is 
None: + return None + a_domain = getattr(a_param, "lora_shard_domain", None) + a_world_size = _shard_world_size_for_domain(a_domain) + if bool(getattr(a_param, "lora_tp_sharded", False)) and a_world_size > 1: + return {"op": "sum"} + return None + + def _infer_primary_output_merge_hint( + self, name: str, module: Any + ) -> dict[str, Any] | None: + """Chooses canonical cross-rank concat axis for one module output.""" + if ROUTER_NAME_TOKEN in name: + return {"op": "concat", "dim": 0} + + lora_hint = self._lora_primary_output_merge_hint(module) + if lora_hint is not None: + return lora_hint + + gather_output = getattr(module, "gather_output", None) + if isinstance(gather_output, bool) and not gather_output: + return {"op": "concat", "dim": -1} + + if ".self_attention.linear_qkv" in name: + return {"op": "concat", "dim": -1} + + if ".mlp.experts." in name: + return {"op": "concat", "dim": 0} + + if bool( + getattr(module, "input_is_parallel", False) + ) and self._sequence_parallel_enabled(module): + return {"op": "concat", "dim": 0} + + return None + + def _build_merge_hints(self, name: str, module: Any) -> dict[str, dict[str, Any]]: + """Builds field-level tensor merge hints for one call record.""" + hints: dict[str, dict[str, Any]] = {} + primary_output_hint = self._infer_primary_output_merge_hint(name, module) + if primary_output_hint is not None: + hints["primary_output"] = primary_output_hint + if ROUTER_NAME_TOKEN in name: + concat_dim0 = {"op": "concat", "dim": 0} + hints["output"] = concat_dim0 + hints["router_topk_ids"] = concat_dim0 + hints["router_topk_scores"] = concat_dim0 + return hints + + def _make_hook(self, name: str, module: Any): + def _hook(_module: Any, inputs: Any, output: Any) -> None: + if self.current_step_index is None: + return + call_index = len(self.current_step_trace.get(name, [])) + trace_item: dict[str, Any] = { + "call_index": call_index, + "module_type": module.__class__.__name__, + "rank_meta": _rank_metadata(), + "merge_hints": 
                self._build_merge_hints(name, module),
                "inputs": _materialize_trace_value(inputs),
                "output": _materialize_trace_value(output),
                "primary_input": self.guess_primary_tensor(inputs),
                "primary_output": self.guess_primary_tensor(output),
            }
            # Router modules additionally record their top-k expert ids/scores
            # (when the output exposes them) so routing decisions can be
            # compared and replayed across ranks.
            if ROUTER_NAME_TOKEN in name:
                router_topk = _extract_router_topk(output)
                if router_topk is not None:
                    topk_ids, topk_scores = router_topk
                    trace_item["router_topk_ids"] = topk_ids
                    trace_item["router_topk_scores"] = topk_scores
            self.current_step_trace.setdefault(name, []).append(trace_item)

        return _hook

    @staticmethod
    def guess_primary_tensor(value: Any) -> torch.Tensor | None:
        """Extracts the "primary" tensor from an arbitrary hook value.

        Returns None when `_extract_primary_tensor` finds no tensor in the
        (possibly nested) value; otherwise the tensor is passed through
        `_materialize_tensor` before being returned.
        """
        tensor = _extract_primary_tensor(value)
        if tensor is None:
            return None
        return _materialize_tensor(tensor)

    def set_step(self, step_index: int) -> None:
        """Begins a new training step: records its index and clears the trace."""
        self.current_step_index = step_index
        self.current_step_trace = {}

    @classmethod
    def _merge_rank_values(
        cls,
        values_by_rank: list[Any],
        *,
        preferred_cat_dim: int | None = None,
        preferred_reduce: str | None = None,
    ) -> Any:
        """Merges one traced value gathered from every rank into a single value.

        Tensor merge priority: hinted sum-reduce, hinted concat dim, dedup of
        identical replicas, concat along dim 0 then dim -1, stack, and finally
        the raw per-rank list.  Dicts/lists/tuples recurse element-wise; any
        other value falls back to rank 0's copy.

        Raises:
            RuntimeError: if `values_by_rank` is empty.
        """
        if not values_by_rank:
            raise RuntimeError("Cannot merge empty rank value list")
        if all(isinstance(value, torch.Tensor) for value in values_by_rank):
            tensors = cast(list[torch.Tensor], values_by_rank)
            # Hinted reduction: element-wise sum requires identical shapes.
            if preferred_reduce == "sum" and all(
                tensors[0].shape == tensor.shape for tensor in tensors[1:]
            ):
                return torch.stack(tensors, dim=0).sum(dim=0)
            # Hinted concatenation along an explicit dimension.
            if (
                preferred_cat_dim is not None
                and all(tensor.ndim > 0 for tensor in tensors)
                and cls._can_cat_along_dim(tensors, dim=preferred_cat_dim)
            ):
                return torch.cat(tensors, dim=preferred_cat_dim)
            # Replicated tensors (identical on every rank) collapse to one copy.
            if all(
                tensors[0].shape == tensor.shape and torch.equal(tensors[0], tensor)
                for tensor in tensors[1:]
            ):
                return tensors[0]
            # Heuristic fallbacks: try sharded-concat on the leading dim, then
            # the trailing dim, before stacking same-shaped tensors.
            if all(tensor.ndim > 0 for tensor in tensors):
                if cls._can_cat_along_dim(tensors, dim=0):
                    return torch.cat(tensors, dim=0)
                if cls._can_cat_along_dim(tensors, dim=-1):
                    return torch.cat(tensors, dim=-1)
            if all(tensors[0].shape == tensor.shape for tensor in tensors[1:]):
                return torch.stack(tensors, dim=0)
            # Nothing fits: keep the raw per-rank list.
            return tensors
        if all(isinstance(value, dict) for value in values_by_rank):
            dicts = cast(list[dict[str, Any]], values_by_rank)
            # Union of keys: a key missing on some ranks merges only the ranks
            # that provided it.
            keys = sorted(set().union(*(value.keys() for value in dicts)))
            return {
                key: cls._merge_rank_values(
                    [value[key] for value in dicts if key in value],
                    preferred_cat_dim=preferred_cat_dim,
                    preferred_reduce=preferred_reduce,
                )
                for key in keys
            }
        if all(isinstance(value, list) for value in values_by_rank):
            lists = cast(list[list[Any]], values_by_rank)
            # Ragged lists cannot be merged position-wise; keep them as-is.
            if any(len(values) != len(lists[0]) for values in lists[1:]):
                return lists
            return [
                cls._merge_rank_values(
                    [value[index] for value in lists],
                    preferred_cat_dim=preferred_cat_dim,
                    preferred_reduce=preferred_reduce,
                )
                for index in range(len(lists[0]))
            ]
        if all(isinstance(value, tuple) for value in values_by_rank):
            tuples = cast(list[tuple[Any, ...]], values_by_rank)
            if any(len(values) != len(tuples[0]) for values in tuples[1:]):
                return tuples
            return tuple(
                cls._merge_rank_values(
                    [value[index] for value in tuples],
                    preferred_cat_dim=preferred_cat_dim,
                    preferred_reduce=preferred_reduce,
                )
                for index in range(len(tuples[0]))
            )
        # NOTE(review): both branches below return rank 0's value; the equality
        # check does not change the result, it only documents intent.
        if all(value == values_by_rank[0] for value in values_by_rank[1:]):
            return values_by_rank[0]
        return values_by_rank[0]

    @classmethod
    def _merge_rank_call_entries(
        cls,
        rank_call_entries: list[dict[str, Any]],
    ) -> dict[str, Any]:
        """Merges one module call across ranks using per-field merge hints."""
        merged_call: dict[str, Any] = {}
        keys = sorted(set().union(*(entry.keys() for entry in rank_call_entries)))
        for key in keys:
            values = [entry[key] for entry in rank_call_entries if key in entry]
            # rank_meta is intentionally kept per-rank (never merged).
            if key == "rank_meta":
                merged_call[key] = values
                continue
            preferred_cat_dim: int | None = None
            preferred_reduce: str | None = None
            # Bookkeeping fields never carry hints; everything else consults
            # the first rank whose merge_hints declare an op for this key.
            if values and key not in {"merge_hints", "call_index", "module_type"}:
                hint_values = [
                    cast(dict[str, Any], entry["merge_hints"]).get(key)
                    for entry in rank_call_entries
                    if isinstance(entry.get("merge_hints"), dict)
                ]
                op_hints = [
                    hint
                    for hint in hint_values
                    if isinstance(hint, dict) and isinstance(hint.get("op"), str)
                ]
                if op_hints:
                    selected_hint = op_hints[0]
                    op = selected_hint.get("op")
                    if op == "concat":
                        dim = selected_hint.get("dim")
                        if isinstance(dim, int):
                            preferred_cat_dim = dim
                    elif op == "sum":
                        preferred_reduce = "sum"
            # When tensors will be row-concatenated (dim 0), remember each
            # rank's row count so the merged tensor can be re-split later.
            if (
                preferred_reduce is None
                and preferred_cat_dim == 0
                and all(isinstance(value, torch.Tensor) for value in values)
            ):
                merged_call[f"{key}__row_splits"] = [
                    int(cast(torch.Tensor, value).shape[0]) for value in values
                ]
            merged_call[key] = cls._merge_rank_values(
                values,
                preferred_cat_dim=preferred_cat_dim,
                preferred_reduce=preferred_reduce,
            )
        return merged_call

    @staticmethod
    def _can_cat_along_dim(tensors: list[torch.Tensor], dim: int) -> bool:
        """Returns True when all tensors agree on every dimension except `dim`."""
        if not tensors:
            return False
        if tensors[0].ndim == 0:
            return False
        ndim = tensors[0].ndim
        # Normalize a negative dim and reject out-of-range axes.
        axis = dim if dim >= 0 else ndim + dim
        if axis < 0 or axis >= ndim:
            return False
        if any(tensor.ndim != ndim for tensor in tensors[1:]):
            return False
        for dim_index in range(ndim):
            if dim_index == axis:
                continue
            dim_size = tensors[0].shape[dim_index]
            if any(tensor.shape[dim_index] != dim_size for tensor in tensors[1:]):
                return False
        return True

    @classmethod
    def _merge_rank_traces(
        cls,
        rank_traces: list[dict[str, list[dict[str, Any]]]],
    ) -> dict[str, list[dict[str, Any]]]:
        """Merges per-rank step traces into one module-name -> calls mapping."""
        if len(rank_traces) == 1:
            return rank_traces[0]
        merged: dict[str, list[dict[str, Any]]] = {}
        module_names = sorted(set().union(*(trace.keys() for trace in rank_traces)))
        for module_name in module_names:
            # Ranks may have observed different call counts for a module;
            # merge each call index across whichever ranks recorded it.
            call_count = max(len(trace.get(module_name, [])) for trace in rank_traces)
            module_calls: list[dict[str, Any]] = []
            for call_index in range(call_count):
                rank_values = [
                    trace[module_name][call_index]
                    for trace in rank_traces
                    if module_name in trace and call_index < len(trace[module_name])
                ]
                if not rank_values:
                    continue
                module_calls.append(cls._merge_rank_call_entries(rank_values))
            merged[module_name] = module_calls
        return merged

    @staticmethod
    def _gather_rank_traces(
        local_trace: dict[str, list[dict[str, Any]]],
    ) -> list[dict[str, list[dict[str, Any]]]] | None:
        """Gathers every rank's trace; returns them on rank 0, None elsewhere."""
        if (
            not torch.distributed.is_initialized()
            or torch.distributed.get_world_size() == 1
        ):
            return [local_trace]
        gathered: list[dict[str, list[dict[str, Any]]] | None] = [
            None
        ] * torch.distributed.get_world_size()
        torch.distributed.all_gather_object(gathered, local_trace)
        if torch.distributed.get_rank() != 0:
            return None
        return cast(list[dict[str, list[dict[str, Any]]]], gathered)

    def save_current_step(self, traces_dir: Path) -> Path | None:
        """Merges and saves the current step's trace; returns the path on rank 0.

        Returns None when capture is disabled, no step is active, or this is a
        non-zero rank (only rank 0 writes the merged trace file).
        """
        if not self.enabled or self.current_step_index is None:
            return None
        gathered_traces = self._gather_rank_traces(self.current_step_trace)
        if gathered_traces is None:
            return None
        merged_trace = self._merge_rank_traces(gathered_traces)
        traces_dir.mkdir(parents=True, exist_ok=True)
        trace_path = traces_dir / f"forward_trace_step_{self.current_step_index:03d}.pt"
        torch.save(merged_trace, trace_path)
        return trace_path

    @staticmethod
    def load_trace(trace_path: Path) -> dict[str, list[dict[str, Any]]]:
        """Loads a saved merged trace onto CPU (trusted file: weights_only=False)."""
        return torch.load(trace_path, map_location="cpu", weights_only=False)

    def close(self) -> None:
        """Removes every registered forward hook and drops the handles."""
        for handle in self._hook_handles:
            handle.remove()
        self._hook_handles.clear()
from __future__ import annotations

import argparse
from contextlib import contextmanager
import os
from pathlib import Path
import random
import subprocess
import sys
from typing import Any, Callable

import numpy as np

from art.megatron.routing_replay import (
    ParallelTopology as ReplayParallelTopology,
)
from art.megatron.routing_replay import (
    build_bundle_from_forward_trace_dir,
)

from .megatron_forward_trace import ForwardTraceCapture
from .megatron_oracle_harness import (
    OracleCaseConfig,
    RunManifest,
    SensitivityMutation,
    StepTrace,
    Topology,
    WorkerRunRequest,
    _read_json,
    _require_not_none,
    _write_json,
)


def run_worker_subprocess(
    request: WorkerRunRequest,
    topology_dir: Path,
    *,
    repo_root: Path,
) -> None:
    """Runs one distributed worker subprocess and stores combined logs.

    Serializes the request to `run_request.json` inside `topology_dir`, then
    re-invokes this module under `torch.distributed.run` with one process per
    topology rank.  Stdout+stderr are written to `worker.log`.

    Raises:
        RuntimeError: when the subprocess exits non-zero (message includes the
            last 80 log lines).
    """
    request_path = topology_dir / "run_request.json"
    _write_json(request_path, request.model_dump(mode="json"))
    worker_module = "integration.megatron_oracle_worker"
    # The worker module is resolved relative to tests/, hence the cwd override.
    worker_cwd = repo_root / "tests"

    command = [
        sys.executable,
        "-m",
        "torch.distributed.run",
        "--standalone",
        "--nproc_per_node",
        str(request.topology.world_size()),
        "-m",
        worker_module,
        "--worker-run",
        "--run-request",
        str(request_path),
    ]
    run = subprocess.run(
        command,
        cwd=str(worker_cwd),
        env={**os.environ, "PYTHONUNBUFFERED": "1"},
        capture_output=True,
        text=True,
        check=False,
    )
    combined_output = f"{run.stdout}\n{run.stderr}".strip()
    (topology_dir / "worker.log").write_text(combined_output + "\n", encoding="utf-8")
    if run.returncode != 0:
        tail = "\n".join(combined_output.splitlines()[-80:])
        raise RuntimeError(
            f"Topology run failed for {request.topology.slug()} with exit code "
            f"{run.returncode}.\n{tail}"
        )


def _set_deterministic_seed(seed: int) -> None:
    """Seeds python/numpy/torch RNGs and pins cuDNN to deterministic kernels."""
    import torch

    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False


def _merge_sharded_dicts(shards_by_rank: list[dict[str, Any]]) -> dict[str, Any]:
    """Merges rank-sharded LoRA tensors into a full state dict on rank 0.

    Shards with the same key are concatenated; lora_A shards along dim 1,
    everything else along dim 0 (matching how the adapters are sharded across
    tensor-parallel ranks — TODO confirm against the LoRA sharding scheme).
    """
    import torch

    merged: dict[str, list[Any]] = {}
    for rank_shards in shards_by_rank:
        for key, tensor in rank_shards.items():
            merged.setdefault(key, []).append(tensor.detach().cpu())
    full_state: dict[str, Any] = {}
    for key, shards in merged.items():
        if len(shards) == 1:
            full_state[key] = shards[0].contiguous()
            continue
        concat_dim = 1 if ".lora_A." in key else 0
        full_state[key] = torch.cat(shards, dim=concat_dim).contiguous()
    return full_state


def _gather_full_state(local_state: dict[str, Any]) -> dict[str, Any] | None:
    """Gathers local state dicts to rank 0 and merges them.

    Returns the merged dict on rank 0 and None on all other ranks.
    """
    import torch

    rank = torch.distributed.get_rank()
    world_size = torch.distributed.get_world_size()
    gathered = [None for _ in range(world_size)] if rank == 0 else None
    torch.distributed.gather_object(local_state, gathered, dst=0)
    if rank != 0:
        return None
    assert gathered is not None
    entries = [entry for entry in gathered if entry is not None]
    return _merge_sharded_dicts(entries)


def _collect_lora_state(model_chunks: list[Any]) -> dict[str, Any] | None:
    """Collects full LoRA adapter state for validation and delta computation.

    Raises:
        RuntimeError: if two modules export the same LoRA key locally.
    """
    local_state: dict[str, Any] = {}
    for chunk in model_chunks:
        for module in chunk.modules():
            if not hasattr(module, "sharded_lora_state_dict"):
                continue
            module_state = module.sharded_lora_state_dict()
            for key, value in module_state.items():
                if key in local_state:
                    raise RuntimeError(
                        f"Duplicate LoRA key while collecting state: {key}"
                    )
                local_state[key] = value.detach().cpu()
    return _gather_full_state(local_state)


def _collect_lora_grads(model_chunks: list[Any]) -> dict[str, Any] | None:
    """Collects full LoRA gradient tensors across all ranks.

    Reads each LoRA param's `main_grad` (unwrapping DTensor local shards),
    selects the expert slice when present, and transposes to the export
    layout before gathering to rank 0.

    Raises:
        RuntimeError: if a LoRA param has no `main_grad` or it is None.
    """
    from art.megatron.lora import LoRA

    local_grads: dict[str, Any] = {}
    for chunk in model_chunks:
        for module in chunk.modules():
            if not isinstance(module, LoRA):
                continue
            for key, param, expert in module._export_items():  # type: ignore[attr-defined]
                if not hasattr(param, "main_grad"):
                    raise RuntimeError(
                        f"LoRA param missing main_grad attribute for key '{key}'"
                    )
                grad = param.main_grad
                if grad is None:
                    raise RuntimeError(f"LoRA param main_grad is None for key '{key}'")
                # DTensor grads expose the local shard via _local_tensor.
                if hasattr(grad, "_local_tensor"):
                    grad = grad._local_tensor
                local_grads[key] = (
                    grad[expert].detach().cpu().T
                    if expert is not None
                    else grad.detach().cpu().T
                )
    return _gather_full_state(local_grads)


def _validate_loaded_state_matches_adapter(
    loaded_state: dict[str, Any],
    adapter_model: dict[str, Any],
) -> None:
    """Checks loaded model LoRA state exactly matches adapter tensors and keys."""
    import torch

    for key in sorted(adapter_model.keys()):
        assert torch.equal(loaded_state[key].cpu(), adapter_model[key].cpu()), (
            f"Loaded LoRA state mismatch for key '{key}'"
        )


def _configure_provider(
    provider: Any,
    topology: Topology,
    case_config: OracleCaseConfig,
) -> None:
    """Applies deterministic topology/model overrides to provider config."""
    provider.tensor_model_parallel_size = topology.tp
    provider.expert_model_parallel_size = topology.ep
    provider.expert_tensor_parallel_size = topology.etp
    # These are intentionally pinned to 1 for now; switching to topology-driven
    # values is the single lever to start CP/PP coverage in the harness.
    provider.pipeline_model_parallel_size = 1
    provider.context_parallel_size = 1
    provider.sequence_parallel = topology.sp
    provider.num_layers = case_config.num_layers
    # Zero out dropout so repeated runs are bit-comparable.
    if hasattr(provider, "attention_dropout"):
        provider.attention_dropout = 0.0
    if hasattr(provider, "hidden_dropout"):
        provider.hidden_dropout = 0.0


def _build_optimizer_config(case_config: OracleCaseConfig):
    """Builds Megatron optimizer settings for deterministic harness runs."""
    from megatron.core.optimizer import OptimizerConfig

    optimizer_kwargs = dict(
        lr=case_config.learning_rate,
        adam_beta1=0.9,
        adam_beta2=0.99,
        clip_grad=0.1,
        weight_decay=0.1,
    )
    return OptimizerConfig(
        bf16=True,
        **optimizer_kwargs,
    )


def _assert_runtime_configuration(
    model_chunks: list[Any],
    case_config: OracleCaseConfig,
) -> None:
    """Validates runtime model depth equals requested oracle case config.

    Raises:
        RuntimeError: when any chunk's observed num_layers differs from the
            requested value.
    """
    observed_num_layers: set[int] = set()

    for chunk in model_chunks:
        # Unwrap DDP/Float16 wrappers until we reach the inner module.
        module: Any = chunk
        while hasattr(module, "module"):
            module = module.module
        config = getattr(module, "config", None)
        if config is not None and hasattr(config, "num_layers"):
            observed_num_layers.add(int(config.num_layers))

    if observed_num_layers != {case_config.num_layers}:
        raise RuntimeError(
            "Runtime num_layers mismatch: "
            f"requested={case_config.num_layers}, observed={sorted(observed_num_layers)}"
        )


def _delta_state(
    initial_state: dict[str, Any],
    current_state: dict[str, Any],
) -> dict[str, Any]:
    """Computes LoRA parameter deltas while enforcing stable key sets.

    Raises:
        KeyError: when the two state dicts do not share identical key sets.
    """
    initial_keys = set(initial_state.keys())
    current_keys = set(current_state.keys())
    if initial_keys != current_keys:
        missing = sorted(initial_keys - current_keys)
        extra = sorted(current_keys - initial_keys)
        raise KeyError(
            f"LoRA state keys changed during training: missing={missing[:3]} extra={extra[:3]}"
        )
    return {
        key: current_state[key].detach().cpu() - initial_state[key].detach().cpu()
        for key in sorted(initial_keys)
    }


@contextmanager
def _mutation_hook(
    megatron_train_module: Any,
    mutation: SensitivityMutation | None,
    pre_optimizer_step_hook: Callable[[], None] | None = None,
    loss_scale: float = 1.0,
):
    """Applies optional sensitivity mutation hooks around training steps.

    Temporarily patches `_finalize_grads`, `_optimizer_step`, and/or `loss_fn`
    on `megatron_train_module`, always restoring the originals on exit.

    Raises:
        ValueError: for an unsupported mutation or a non-positive loss_scale.
    """
    # Validate everything BEFORE patching so an invalid call can never leave
    # the module half-patched (previously loss_scale was only checked after
    # `_finalize_grads` had already been swapped out, with no restoration).
    if loss_scale <= 0:
        raise ValueError(f"loss_scale must be > 0, got {loss_scale}")
    if mutation is not None and mutation != "drop_finalize":
        raise ValueError(f"Unsupported mutation: {mutation}")

    original_finalize = megatron_train_module._finalize_grads
    original_optimizer_step = megatron_train_module._optimizer_step
    original_loss_fn = megatron_train_module.loss_fn

    if mutation == "drop_finalize":
        # Sensitivity probe: skipping grad finalization must visibly change results.
        megatron_train_module._finalize_grads = lambda _model: None

    if pre_optimizer_step_hook is not None:

        def _patched_optimizer_step(optimizer: Any, learning_rate: float):
            pre_optimizer_step_hook()
            return original_optimizer_step(optimizer, learning_rate)

        megatron_train_module._optimizer_step = _patched_optimizer_step

    if loss_scale != 1.0:

        def _scaled_loss_fn(*args: Any, **kwargs: Any):
            loss = original_loss_fn(*args, **kwargs)
            return loss.model_copy(
                update={
                    "mean_policy_loss": loss.mean_policy_loss * loss_scale,
                    "mean_kl": loss.mean_kl * loss_scale,
                    "policy_loss_sum": loss.policy_loss_sum * loss_scale,
                }
            )

        megatron_train_module.loss_fn = _scaled_loss_fn

    try:
        yield
    finally:
        # Restoring the originals is a no-op when nothing was patched, so the
        # unpatched fast path no longer needs a separate early return.
        megatron_train_module._finalize_grads = original_finalize
        megatron_train_module._optimizer_step = original_optimizer_step
        megatron_train_module.loss_fn = original_loss_fn


def _worker_run(request: WorkerRunRequest) -> None:
    """Executes one full distributed training trace generation worker run."""
    from safetensors.torch import load_file, save_file
    import torch

    from art import dev, types
    from art.megatron import train as megatron_train
    from art.preprocessing.pack import packed_tensors_from_dir

    local_rank = int(os.environ["LOCAL_RANK"])
    torch.cuda.set_device(local_rank)
    torch.distributed.init_process_group(backend="nccl")
    _set_deterministic_seed(request.case_config.seed)

    runtime = megatron_train.build_training_runtime(
        model_identifier=request.case_config.base_model,
        provider_configure=lambda provider: _configure_provider(
            provider, request.topology, request.case_config
        ),
        optimizer_config=_build_optimizer_config(request.case_config),
        print_env=False,
        print_optimizer_stats=False,
    )
    model_chunks = runtime.model
    optimizer = runtime.optimizer
    megatron_train.configure_moe_routing_replay(
        runtime,
        replay_bundle_path=request.moe_routing_replay_path,
        strict=request.moe_routing_replay_strict,
    )
    _assert_runtime_configuration(model_chunks, request.case_config)

    topology_dir = Path(request.topology_dir)
    traces_dir = topology_dir / "traces"
    traces_dir.mkdir(parents=True, exist_ok=True)

    # Set up the shared initial LoRA: the first topology to run creates it so
    # every variant starts from identical adapter weights.
    shared_init_path = Path(request.shared_init_adapter_path)
    if not shared_init_path.exists():
        initial_state = _collect_lora_state(model_chunks)
        if torch.distributed.get_rank() == 0:
            shared_init_path.parent.mkdir(parents=True, exist_ok=True)
            save_file(
                _require_not_none(initial_state, "initial_state"),
                str(shared_init_path),
            )
    torch.distributed.barrier()

    # Load the shared initial LoRA into the model and validate we can collect
    # it back unchanged.
    adapter_model = load_file(str(shared_init_path))
    megatron_train.load_adapter_into_model(model_chunks, adapter_model, optimizer)
    loaded_state = _collect_lora_state(model_chunks)
    if torch.distributed.get_rank() == 0:
        _validate_loaded_state_matches_adapter(
            _require_not_none(loaded_state, "loaded_state"), adapter_model
        )
    torch.distributed.barrier()

    # Load the inputs.
    packed_tensors = packed_tensors_from_dir(
        **request.packed_tensors.model_dump(exclude_none=True)
    )
    initial_lora_state = loaded_state

    train_config = types.TrainConfig(
        learning_rate=request.case_config.learning_rate,
        beta=request.case_config.beta,
        kl_penalty_coef=0.0,
    )
    experimental_config: dev.TrainConfig = {}
    step_traces: list[StepTrace] = []
    captured_grads: dict[str, Any] | None = None
    forward_trace_capture = ForwardTraceCapture(model_chunks, enabled=True)

    def _capture_lora_grads() -> None:
        # Runs just before the optimizer step so grads reflect the finalized
        # (reduced) values.
        nonlocal captured_grads
        captured_grads = _collect_lora_grads(model_chunks)

    with _mutation_hook(
        megatron_train,
        request.mutation,
        pre_optimizer_step_hook=_capture_lora_grads,
        loss_scale=request.case_config.loss_scale,
    ):
        for step_index in range(request.case_config.num_steps):
            forward_trace_capture.set_step(step_index)
            sample_index = step_index % request.packed_tensors.num_sequences
            inputs = megatron_train.select_indexed_inputs(packed_tensors, sample_index)
            captured_grads = None

            step_result = megatron_train.run_training_step(
                model_chunks=model_chunks,
                optimizer=optimizer,
                learning_rate=train_config.learning_rate,
                inputs=inputs,
                config=train_config,
                experimental_config=experimental_config,
                ref_logprobs=None,
                step_index=step_index,
                sample_index=sample_index,
                moe_routing_replay_controller=runtime.moe_routing_replay_controller,
            )
            forward_trace_capture.save_current_step(traces_dir)
            torch.distributed.barrier()
            current_lora_state = _collect_lora_state(model_chunks)

            if torch.distributed.get_rank() == 0:
                # Save artifacts (outputs, grads, LoRA deltas, current LoRA).
                grads = _require_not_none(captured_grads, "captured_grads")
                initial_state = _require_not_none(
                    initial_lora_state, "initial_lora_state"
                )
                current_state = _require_not_none(
                    current_lora_state, "current_lora_state"
                )
                deltas = _delta_state(initial_state, current_state)

                output_rel = Path("traces") / f"output_step_{step_index:03d}.pt"
                grads_rel = Path("traces") / f"grads_step_{step_index:03d}.safetensors"
                deltas_rel = (
                    Path("traces") / f"deltas_step_{step_index:03d}.safetensors"
                )
                lora_rel = Path(f"lora_step_{step_index:03d}.safetensors")

                torch.save(
                    step_result.new_logprobs.detach().cpu().float(),
                    topology_dir / output_rel,
                )
                save_file(grads, str(topology_dir / grads_rel))
                save_file(deltas, str(topology_dir / deltas_rel))
                save_file(current_state, str(topology_dir / lora_rel))

                # Build and append the step trace; the reported loss is
                # de-scaled so it is comparable across loss_scale settings.
                step_traces.append(
                    StepTrace(
                        step_index=step_index,
                        loss=float(
                            step_result.reduced_loss.item()
                            / request.case_config.loss_scale
                        ),
                        probs_corr=step_result.probs_corr,
                        output_file=str(output_rel),
                        grads_file=str(grads_rel),
                        deltas_file=str(deltas_rel),
                        lora_file=str(lora_rel),
                    )
                )
            torch.distributed.barrier()

    forward_trace_capture.close()

    if torch.distributed.get_rank() == 0:
        # Build and save the MoE routing replay bundle when requested.
        if request.capture_moe_routing_bundle_path is not None:
            replay_bundle = build_bundle_from_forward_trace_dir(
                traces_dir=traces_dir,
                num_steps=request.case_config.num_steps,
                topology=ReplayParallelTopology.model_validate(
                    request.topology.model_dump(
                        include={"tp", "ep", "etp", "dp", "sp", "cp", "pp", "vpp"},
                        mode="python",
                    )
                ),
            )
            replay_bundle.to_dir(request.capture_moe_routing_bundle_path)

        # Build and save the run manifest.
        manifest = RunManifest(
            case_id=request.case_id,
            base_model=request.case_config.base_model,
            num_layers=request.case_config.num_layers,
            topology=request.topology.slug(),
            world_size=request.topology.world_size(),
            seed=request.case_config.seed,
            num_steps=request.case_config.num_steps,
            packed_tensors=request.packed_tensors,
            tolerances=request.case_config.tolerances,
            steps=step_traces,
        )
        _write_json(topology_dir / "manifest.json", manifest.model_dump(mode="json"))
    torch.distributed.barrier()
    torch.distributed.destroy_process_group()


def run_worker_cli(run_request_path: Path) -> None:
    """Loads a worker request and dispatches worker execution."""
    request = WorkerRunRequest.model_validate(_read_json(run_request_path))
    _worker_run(request)


def _parse_args(argv: list[str]) -> argparse.Namespace:
    """Parses worker CLI arguments."""
    parser = argparse.ArgumentParser(description="Megatron oracle harness worker")
    parser.add_argument("--worker-run", action="store_true")
    parser.add_argument("--run-request", type=Path)
    return parser.parse_args(argv)


def _main(argv: list[str]) -> int:
    """CLI entry for worker-only execution mode."""
    args = _parse_args(argv)
    if not args.worker_run:
        raise SystemExit("This module is intended for test imports or --worker-run")
    if args.run_request is None:
        raise SystemExit("--run-request is required with --worker-run")
    run_worker_cli(args.run_request)
    return 0


if __name__ == "__main__":
    raise SystemExit(_main(sys.argv[1:]))
-import numpy as np from pydantic import BaseModel, ConfigDict, Field +from rich import box +from rich.console import Console +from rich.table import Table +import torch + +from .megatron_forward_trace import ForwardTraceCapture REPO_ROOT = Path(__file__).resolve().parents[2] -ARTIFACT_ROOT = Path(REPO_ROOT / ".local/megatron_lora_oracles") +ARTIFACT_ROOT = Path(REPO_ROOT / ".local/megatron_lora_correctness") +ORACLE_MOE_ROUTING_BUNDLE_DIRNAME = "oracle_moe_routing_replay" +ORACLE_REPLAY_TOPOLOGY_SUFFIX = "oracle_replay" REGENERATE_ENV = "ART_REGENERATE_MEGATRON_ORACLE" -BASE_MODEL_ENV = "ART_MEGATRON_ORACLE_BASE_MODEL" -DP_SUPPORT_ENV = "ART_MEGATRON_ORACLE_ENABLE_DP_PHASE_B" +EXTENDED_TOPOLOGIES_ENV = "ART_MEGATRON_ORACLE_ENABLE_EXTENDED_TOPOLOGIES" SENSITIVITY_MUTATION_ENV = "ART_MEGATRON_ORACLE_MUTATION" -SensitivityMutation = Literal["drop_finalize"] +DEFAULT_SENSITIVITY_MUTATION = "drop_finalize" +SUPPORTED_SENSITIVITY_MUTATIONS = (DEFAULT_SENSITIVITY_MUTATION,) +SensitivityMutation = str REQUIRED_PACKED_TENSOR_FILES = ( "tokens.pt", @@ -35,26 +41,68 @@ "advantages.pt", "weights.pt", ) +NON_FINITE_METRIC_VALUE = 1e30 +EXPERT_TABLE_ROW_LIMIT = 8 +EXPERT_TRIPLET_PARAM_RE = re.compile( + r"layers\.(?P\d+)\.mlp\.experts\.(?P\d+)\." + r"(?Pgate_proj|up_proj|down_proj)\." 
+) +PHASE_PRINT_ORDER = { + "forward": 0, + "router_scores": 1, + "router_topk_ids": 2, + "outputs": 3, + "losses": 4, + "grads": 5, + "deltas": 6, +} class Topology(BaseModel): + """Defines distributed topology settings for one Megatron run variant.""" + model_config = ConfigDict(frozen=True) tp: int ep: int etp: int = 1 dp: int = 1 - sp: int = 0 - phase: Literal["A", "B"] = "A" + sp: bool = False + cp: int = 1 + pp: int = 1 + vpp: int = 1 + + def resolved_expert_dp(self) -> int: + """Derives expert data parallel size from topology/world-size constraints.""" + attention_world = self.tp * self.cp * self.pp * self.dp + expert_divisor = self.etp * self.ep * self.pp + if attention_world % expert_divisor != 0: + raise ValueError( + "Invalid topology for Megatron expert parallelism: " + f"world_size={attention_world} is not divisible by " + f"etp*ep*pp={expert_divisor}." + ) + return attention_world // expert_divisor def slug(self) -> str: - return f"tp{self.tp}_ep{self.ep}_etp{self.etp}_dp{self.dp}_sp{self.sp}" + return ( + f"tp{self.tp}_ep{self.ep}_etp{self.etp}" + f"_dp{self.dp}_edp{self.resolved_expert_dp()}" + f"_cp{self.cp}_pp{self.pp}_vpp{self.vpp}_sp{int(self.sp)}" + ) def world_size(self) -> int: - return self.tp * self.ep * self.etp * self.dp + # Mirrors Megatron parallel-state sizing: + # attention side: world = tp * pp * cp * dp + # expert side must also divide this world size (validated in resolved_expert_dp()). 
+ attention_world = self.tp * self.cp * self.pp * self.dp + self.resolved_expert_dp() + return attention_world class PackedTensorConfig(BaseModel): + """Controls synthetic packed tensor generation used by oracle harness runs.""" + num_sequences: int = 8 sequence_length: int = 256 prefill_tokens: int = 64 @@ -63,6 +111,8 @@ class PackedTensorConfig(BaseModel): class LoraConfig(BaseModel): + """Configures LoRA adapter dimensions and targeted module families.""" + rank: int = 1 alpha: int = 32 target_modules: list[str] = Field( @@ -79,28 +129,30 @@ class LoraConfig(BaseModel): class ToleranceProfile(BaseModel): - outputs_abs: float = 1e-2 - outputs_rel: float = 1e-2 - losses_abs: float = 1e-4 - losses_rel: float = 1e-4 - grads_abs: float = 1e-2 - grads_rel: float = 1e-2 - deltas_abs: float = 1e-2 - deltas_rel: float = 1e-2 + """Defines row-level pass/fail thresholds for variant comparison phases.""" + + relative_l2: float = 1e-2 + mean_abs_pct: float = 1.0 class OracleCaseConfig(BaseModel): + """Contains all deterministic run parameters for one oracle case.""" + base_model: str + num_layers: int = 4 seed: int = 20260305 - num_steps: int = 3 - learning_rate: float = 5e-6 + num_steps: int = 2 + learning_rate: float = 1e-3 beta: float = 0.0 + loss_scale: float = 1e4 packed_tensors: PackedTensorConfig = Field(default_factory=PackedTensorConfig) lora: LoraConfig = Field(default_factory=LoraConfig) tolerances: ToleranceProfile = Field(default_factory=ToleranceProfile) class DiskPackedTensorsSpec(BaseModel): + """Describes packed tensor artifacts persisted on disk for reuse.""" + dir: str num_sequences: int sequence_length: int @@ -109,6 +161,8 @@ class DiskPackedTensorsSpec(BaseModel): class CaseArtifacts(BaseModel): + """Holds stable case-level artifact paths used by all variants.""" + case_id: str case_dir: str packed_tensors: DiskPackedTensorsSpec @@ -116,17 +170,23 @@ class CaseArtifacts(BaseModel): class WorkerRunRequest(BaseModel): + """Defines one distributed worker 
class MetricRow(BaseModel):
    """Represents one comparable unit (param/module/global) for one phase and step."""

    case_id: str
    variant: str
    topology: str
    oracle_topology: str
    step_index: int
    phase: str
    param: str
    numel: float
    mean_abs_diff: float
    relative_l2: float
    typical_abs_scale: float
    mean_abs_pct: float
    topk_mismatch_fraction: float | None = None
    top1_mismatch_fraction: float | None = None
    thresholds: dict[str, float] = Field(default_factory=dict)
    pass_signal: bool = True
    failure_reasons: list[str] = Field(default_factory=list)


class VariantSpec(BaseModel):
    """Declares how to execute and evaluate one candidate variant against the oracle."""

    name: str
    topology: Topology
    thresholds_by_phase: dict[str, dict[str, float]]
    output_slug: str | None = None
    reference_slug: str | None = None
    mutation: SensitivityMutation | None = None
    expected_signal: Literal["pass", "fail"] = "pass"

    def resolved_output_slug(self) -> str:
        """Returns the explicit output slug, or derives one from topology+mutation."""
        if self.output_slug is not None:
            return self.output_slug
        return _topology_output_slug(self.topology, self.mutation)

    def resolved_reference_slug(self) -> str:
        """Returns the explicit reference slug, defaulting to the oracle topology."""
        if self.reference_slug is not None:
            return self.reference_slug
        return ORACLE_TOPOLOGY.slug()


class VariantReport(BaseModel):
    """Captures full comparison output for one variant run."""

    case_id: str
    variant: str
    topology: str
    reference_topology: str
    expected_signal: Literal["pass", "fail"]
    signal: Literal["pass", "fail"]
    pass_count: int
    fail_count: int
    step_summaries: dict[int, dict[str, Any]]
    metrics: list[MetricRow]


class DiffAccumulator:
    """Accumulates diff statistics across tensors and router-id mismatch counters."""

    def __init__(self) -> None:
        self.numel = 0
        self.abs_sum = 0.0
        self.diff_sq_sum = 0.0
        self.ref_sq_sum = 0.0
        self.ref_abs_sum = 0.0
        self.router_topk_total = 0
        self.router_topk_mismatch = 0
        self.router_top1_total = 0
        self.router_top1_mismatch = 0

    def update(self, reference, candidate) -> None:  # type: ignore[no-untyped-def]
        """Adds one tensor pair into the accumulator."""
        ref = reference.detach().float()
        cand = candidate.detach().float()
        # Compute the delta once (the previous version re-evaluated cand - ref
        # three times: for abs, numel, and square).
        delta = cand - ref
        if delta.numel() == 0:
            return
        self.numel += int(delta.numel())
        self.abs_sum += float(delta.abs().sum().item())
        self.diff_sq_sum += float(delta.square().sum().item())
        self.ref_sq_sum += float(ref.square().sum().item())
        self.ref_abs_sum += float(ref.abs().sum().item())

    def update_router_ids(self, reference_ids, candidate_ids) -> None:  # type: ignore[no-untyped-def]
        """Adds router top-k id mismatch counts into the accumulator."""
        self.router_topk_total += int(reference_ids.numel())
        self.router_topk_mismatch += int((reference_ids != candidate_ids).sum().item())
        # Top-1 stats only make sense when a per-row expert ranking exists.
        if reference_ids.ndim >= 2 and reference_ids.shape[1] > 0:
            self.router_top1_total += int(reference_ids.shape[0])
            self.router_top1_mismatch += int(
                (reference_ids[:, 0] != candidate_ids[:, 0]).sum().item()
            )

    def as_summary(self) -> dict[str, float]:
        """Returns normalized summary values for one row."""
        if self.numel == 0:
            # No tensor elements were accumulated: report an all-zero row.
            # Router fractions are intentionally zeroed too, preserving the
            # empty-accumulator contract of the original implementation.
            return {
                "numel": 0.0,
                "mean_abs_diff": 0.0,
                "relative_l2": 0.0,
                "typical_abs_scale": 0.0,
                "mean_abs_pct": 0.0,
                "topk_mismatch_fraction": 0.0,
                "top1_mismatch_fraction": 0.0,
            }
        topk_fraction = (
            self.router_topk_mismatch / self.router_topk_total
            if self.router_topk_total > 0
            else 0.0
        )
        top1_fraction = (
            self.router_top1_mismatch / self.router_top1_total
            if self.router_top1_total > 0
            else 0.0
        )
        mean_abs = self.abs_sum / self.numel
        typical_abs = self.ref_abs_sum / self.numel
        mean_abs_pct = (mean_abs / (typical_abs + 1e-12)) * 100.0
        return {
            "numel": _finite_metric(float(self.numel), default=0.0),
            "mean_abs_diff": _finite_metric(mean_abs),
            "relative_l2": _finite_metric(
                (self.diff_sq_sum**0.5) / max(self.ref_sq_sum**0.5, 1e-12)
            ),
            "typical_abs_scale": _finite_metric(typical_abs, default=0.0),
            "mean_abs_pct": _finite_metric(mean_abs_pct),
            "topk_mismatch_fraction": _finite_metric(topk_fraction, default=1.0),
            "top1_mismatch_fraction": _finite_metric(top1_fraction, default=1.0),
        }


TOPOLOGIES = [
    Topology(tp=1, ep=1, etp=1, dp=1, sp=False),
    Topology(tp=2, ep=1, etp=1, dp=1, sp=True),
    Topology(tp=1, ep=2, etp=1, dp=2, sp=False),
    Topology(tp=2, ep=2, etp=1, dp=2, sp=True),
]
[ - Topology(tp=1, ep=1, etp=1, dp=2, sp=0, phase="B"), - Topology(tp=2, ep=1, etp=1, dp=2, sp=1, phase="B"), +EXTENDED_TOPOLOGIES = [ + Topology(tp=1, ep=1, etp=1, dp=2, sp=False), + Topology(tp=2, ep=1, etp=1, dp=2, sp=True), ] -ORACLE_TOPOLOGY = PHASE_A_TOPOLOGIES[0] -SENSITIVITY_TOPOLOGY = PHASE_A_TOPOLOGIES[1] +ORACLE_TOPOLOGY = TOPOLOGIES[0] +SENSITIVITY_TOPOLOGY = TOPOLOGIES[1] def _truthy(value: str | None) -> bool: @@ -191,69 +378,57 @@ def _truthy(value: str | None) -> bool: return value.strip().lower() in {"1", "true", "yes", "on"} -def sensitivity_mutation() -> SensitivityMutation | None: +def sensitivity_mutations() -> list[SensitivityMutation]: + """Parses sensitivity mutation selectors from env as a CSV list.""" raw = os.environ.get(SENSITIVITY_MUTATION_ENV) if raw is None or raw.strip() == "": - return None + return [] normalized = raw.strip().lower() if normalized in {"1", "true", "yes", "on"}: - return "drop_finalize" - if normalized == "drop_finalize": - return "drop_finalize" + return [DEFAULT_SENSITIVITY_MUTATION] + mutations = [item.strip().lower() for item in raw.split(",") if item.strip()] + unsupported = [ + mutation + for mutation in mutations + if mutation not in SUPPORTED_SENSITIVITY_MUTATIONS + ] + if not unsupported: + return mutations + supported = ", ".join(SUPPORTED_SENSITIVITY_MUTATIONS) raise ValueError( f"Unsupported {SENSITIVITY_MUTATION_ENV} value '{raw}'. " - "Supported values: drop_finalize, 1/true/yes/on." + f"Supported values: {supported}, CSV of supported values, 1/true/yes/on." 
) def sensitivity_enabled() -> bool: - return sensitivity_mutation() is not None + return bool(sensitivity_mutations()) -def phase_b_dp_enabled() -> bool: - return _truthy(os.environ.get(DP_SUPPORT_ENV)) +def extended_topologies_enabled() -> bool: + """Returns whether extended topologies are enabled for the suite.""" + return _truthy(os.environ.get(EXTENDED_TOPOLOGIES_ENV)) def regenerate_requested() -> bool: return _truthy(os.environ.get(REGENERATE_ENV)) -def default_case_config() -> OracleCaseConfig: - def _env_float(name: str, default: str) -> float: - return float(os.environ.get(name, default)) - - tolerances = ToleranceProfile( - outputs_abs=_env_float("ART_MEGATRON_ORACLE_OUTPUTS_ABS_TOL", "1e-2"), - outputs_rel=_env_float("ART_MEGATRON_ORACLE_OUTPUTS_REL_TOL", "1e-2"), - losses_abs=_env_float("ART_MEGATRON_ORACLE_LOSSES_ABS_TOL", "1e-4"), - losses_rel=_env_float("ART_MEGATRON_ORACLE_LOSSES_REL_TOL", "1e-4"), - grads_abs=_env_float("ART_MEGATRON_ORACLE_GRADS_ABS_TOL", "1e-2"), - grads_rel=_env_float("ART_MEGATRON_ORACLE_GRADS_REL_TOL", "1e-2"), - deltas_abs=_env_float("ART_MEGATRON_ORACLE_DELTAS_ABS_TOL", "1e-2"), - deltas_rel=_env_float("ART_MEGATRON_ORACLE_DELTAS_REL_TOL", "1e-2"), - ) - return OracleCaseConfig( - base_model=os.environ.get( - BASE_MODEL_ENV, - "Qwen/Qwen3-30B-A3B-Instruct-2507", - ), - seed=int(os.environ.get("ART_MEGATRON_ORACLE_SEED", "20260305")), - num_steps=int(os.environ.get("ART_MEGATRON_ORACLE_NUM_STEPS", "3")), - learning_rate=float(os.environ.get("ART_MEGATRON_ORACLE_LR", "5e-6")), - beta=float(os.environ.get("ART_MEGATRON_ORACLE_BETA", "0.0")), - tolerances=tolerances, - ) +def case_config( + base_model: str = "Qwen/Qwen3-30B-A3B-Instruct-2507", +) -> OracleCaseConfig: + """Builds the deterministic default oracle case config.""" + return OracleCaseConfig(base_model=base_model) def available_gpu_count() -> int: import torch - if not torch.cuda.is_available(): - return 0 return int(torch.cuda.device_count()) def 
stable_case_id(case_config: OracleCaseConfig) -> str: + """Builds a deterministic case id from case config contents.""" payload = case_config.model_dump(mode="json") encoded = json.dumps(payload, sort_keys=True, separators=(",", ":")) digest = hashlib.sha256(encoded.encode("utf-8")).hexdigest()[:16] @@ -269,7 +444,7 @@ def stable_case_id(case_config: OracleCaseConfig) -> str: def _write_json(path: Path, payload: Any) -> None: path.parent.mkdir(parents=True, exist_ok=True) with path.open("w", encoding="utf-8") as handle: - json.dump(payload, handle, indent=2, sort_keys=True) + json.dump(payload, handle, indent=2, sort_keys=True, allow_nan=False) def _read_json(path: Path) -> dict[str, Any]: @@ -281,6 +456,7 @@ def _build_packed_tensors( config: PackedTensorConfig, seed: int, ) -> dict[str, Any]: + """Generates deterministic synthetic packed tensors used in integration runs.""" import torch if config.num_sequences <= 1: @@ -351,6 +527,7 @@ def _create_packed_tensors( case_config: OracleCaseConfig, packed_dir: Path, ) -> DiskPackedTensorsSpec: + """Persists packed tensors to disk and returns their descriptor.""" from art.preprocessing.pack import PackedTensors, packed_tensors_to_dir packed_tensors = cast( @@ -361,15 +538,8 @@ def _create_packed_tensors( return DiskPackedTensorsSpec.model_validate(descriptor) -def _validate_packed_tensor_files(spec: DiskPackedTensorsSpec) -> None: - tensor_dir = Path(spec.dir) - for filename in REQUIRED_PACKED_TENSOR_FILES: - file_path = tensor_dir / filename - if not file_path.exists(): - raise FileNotFoundError(f"Missing packed tensor file: {file_path}") - - def ensure_case_artifacts(case_config: OracleCaseConfig) -> CaseArtifacts: + """Ensures stable case-level artifacts (input tensors) are present and reusable.""" case_id = stable_case_id(case_config) case_dir = ARTIFACT_ROOT / case_id case_dir.mkdir(parents=True, exist_ok=True) @@ -378,7 +548,6 @@ def ensure_case_artifacts(case_config: OracleCaseConfig) -> CaseArtifacts: 
descriptor_path = case_dir / "packed_tensors.json" if descriptor_path.exists(): packed_spec = DiskPackedTensorsSpec.model_validate(_read_json(descriptor_path)) - _validate_packed_tensor_files(packed_spec) else: packed_spec = _create_packed_tensors(case_config, case_dir / "packed_tensors") _write_json(descriptor_path, packed_spec.model_dump(mode="json")) @@ -394,809 +563,822 @@ def ensure_case_artifacts(case_config: OracleCaseConfig) -> CaseArtifacts: def _replace_topology_dir(path: Path) -> None: + """Resets one topology output directory before regeneration.""" if path.exists(): shutil.rmtree(path) path.mkdir(parents=True, exist_ok=True) (path / "traces").mkdir(parents=True, exist_ok=True) -def _run_worker_subprocess(request: WorkerRunRequest, topology_dir: Path) -> None: - request_path = topology_dir / "run_request.json" - _write_json(request_path, request.model_dump(mode="json")) - - command = [ - sys.executable, - "-m", - "torch.distributed.run", - "--standalone", - "--nproc_per_node", - str(request.topology.world_size()), - str(Path(__file__).resolve()), - "--worker-run", - "--run-request", - str(request_path), - ] - run = subprocess.run( - command, - cwd=str(REPO_ROOT), - env={**os.environ, "PYTHONUNBUFFERED": "1"}, - capture_output=True, - text=True, - check=False, - ) - combined_output = f"{run.stdout}\n{run.stderr}".strip() - (topology_dir / "worker.log").write_text(combined_output + "\n", encoding="utf-8") - if run.returncode != 0: - tail = "\n".join(combined_output.splitlines()[-80:]) - raise RuntimeError( - f"Topology run failed for {request.topology.slug()} with exit code " - f"{run.returncode}.\n{tail}" - ) - - -def ensure_topology_artifacts( - case_config: OracleCaseConfig, +def _topology_output_slug( topology: Topology, - *, - regenerate: bool = False, mutation: SensitivityMutation | None = None, -) -> Path: - case_artifacts = ensure_case_artifacts(case_config) - case_dir = Path(case_artifacts.case_dir) - topology_dir = case_dir / topology.slug() - 
manifest_path = topology_dir / "manifest.json" - if manifest_path.exists() and not regenerate: - return topology_dir - - _replace_topology_dir(topology_dir) - shared_init_path = Path(case_artifacts.shared_init_adapter_path) - allow_create_shared_init = topology.slug() == ORACLE_TOPOLOGY.slug() - if not allow_create_shared_init and not shared_init_path.exists(): - ensure_topology_artifacts( - case_config=case_config, - topology=ORACLE_TOPOLOGY, - regenerate=False, - mutation=None, - ) - if not allow_create_shared_init and not shared_init_path.exists(): - raise FileNotFoundError( - f"Oracle shared adapter missing after oracle generation: {shared_init_path}" - ) - if mutation is not None and topology.slug() == ORACLE_TOPOLOGY.slug(): - raise RuntimeError("Sensitivity mutation cannot be applied to oracle topology") - - request = WorkerRunRequest( - case_id=case_artifacts.case_id, - case_config=case_config, - topology=topology, - topology_dir=str(topology_dir), - packed_tensors=case_artifacts.packed_tensors, - shared_init_adapter_path=str(shared_init_path), - allow_create_shared_init=allow_create_shared_init, - mutation=mutation, - ) - _run_worker_subprocess(request, topology_dir) - if not manifest_path.exists(): - raise RuntimeError(f"Missing manifest after run: {manifest_path}") - return topology_dir - - -def ensure_oracle_reference_artifacts( - *, - case_config: OracleCaseConfig, - regenerate: bool = False, -) -> Path: - return ensure_topology_artifacts( - case_config=case_config, - topology=ORACLE_TOPOLOGY, - regenerate=regenerate, - mutation=None, - ) +) -> str: + """Builds output slug for a topology and optional mutation variant.""" + return topology.slug() if mutation is None else f"{topology.slug()}__{mutation}" def _load_manifest(topology_dir: Path) -> RunManifest: + """Loads one run manifest for a topology output directory.""" manifest_path = topology_dir / "manifest.json" - if not manifest_path.exists(): - raise FileNotFoundError(f"Missing topology manifest: 
{manifest_path}") return RunManifest.model_validate(_read_json(manifest_path)) def _load_output_tensor(topology_dir: Path, step: StepTrace): + """Loads one output trace tensor referenced by a step trace entry.""" import torch path = topology_dir / step.output_file - if not path.exists(): - raise FileNotFoundError(f"Missing output trace: {path}") return torch.load(path, map_location="cpu") def _load_safetensor_map(path: Path) -> dict[str, Any]: + """Loads one safetensor map from disk.""" from safetensors.torch import load_file - if not path.exists(): - raise FileNotFoundError(f"Missing safetensor trace: {path}") return load_file(str(path)) -def _tensor_error(reference, candidate) -> tuple[float, float]: - ref = reference.detach().float() - cand = candidate.detach().float() - if ref.shape != cand.shape: - return float("inf"), float("inf") - if ref.numel() == 0: - return 0.0, 0.0 - diff = (cand - ref).abs() - max_abs = float(diff.max().item()) - max_rel = float((diff / ref.abs().clamp_min(1e-12)).max().item()) - return max_abs, max_rel +def _align_sequence_parallel(reference, candidate): # type: ignore[no-untyped-def] + """Aligns sequence-parallel-shaped tensors so diff computation is topology-agnostic.""" + if reference.shape == candidate.shape: + return candidate + if ( + candidate.ndim == reference.ndim + 1 + and candidate.shape[0] * candidate.shape[1] == reference.shape[0] + and tuple(candidate.shape[2:]) == tuple(reference.shape[1:]) + ): + return candidate.reshape(reference.shape) + return None -def _build_failure( - *, - case_id: str, - topology: str, - metric: Literal["outputs", "losses", "grads", "lora_deltas"], - step_index: int, - key: str, - max_abs_error: float, - max_rel_error: float, - abs_tolerance: float, - rel_tolerance: float, - message: str, -) -> ComparisonFailure: - return ComparisonFailure( - case_id=case_id, - topology=topology, - oracle_topology=ORACLE_TOPOLOGY.slug(), - metric=metric, - step_index=step_index, - key=key, - 
max_abs_error=max_abs_error, - max_rel_error=max_rel_error, - abs_tolerance=abs_tolerance, - rel_tolerance=rel_tolerance, - message=message, - ) +def _is_moe_base_forward_param(name: str) -> bool: + """Returns whether this forward param is a base MoE expert internal tensor.""" + if ".mlp.experts." not in name: + return False + if any(token in name for token in (".router", ".gate_lora", ".up_lora", ".lora")): + return False + return ".linear_fc1" in name or ".linear_fc2" in name -def _compare_tensor_pair( - *, - case_id: str, - topology: str, - metric: Literal["outputs", "losses", "grads", "lora_deltas"], - step_index: int, - key: str, - reference, - candidate, - abs_tolerance: float, - rel_tolerance: float, -) -> ComparisonFailure | None: - max_abs, max_rel = _tensor_error(reference, candidate) - if max_abs <= abs_tolerance or max_rel <= rel_tolerance: +def _lookup_call_by_index( + trace: dict[str, list[dict[str, Any]]], + module_name: str, + call_index: int, +) -> dict[str, Any] | None: + calls = trace.get(module_name) + if calls is None: return None - return _build_failure( - case_id=case_id, - topology=topology, - metric=metric, - step_index=step_index, - key=key, - max_abs_error=max_abs, - max_rel_error=max_rel, - abs_tolerance=abs_tolerance, - rel_tolerance=rel_tolerance, - message=f"{metric} mismatch at step {step_index}, key '{key}'", - ) - - -def _compare_tensor_maps( - *, - case_id: str, - topology: str, - metric: Literal["grads", "lora_deltas"], - step_index: int, - reference: dict[str, Any], - candidate: dict[str, Any], - abs_tolerance: float, - rel_tolerance: float, -) -> ComparisonFailure | None: - ref_keys = set(reference.keys()) - cand_keys = set(candidate.keys()) - if ref_keys != cand_keys: - missing = sorted(ref_keys - cand_keys) - extra = sorted(cand_keys - ref_keys) - return _build_failure( - case_id=case_id, - topology=topology, - metric=metric, - step_index=step_index, - key="__keys__", - max_abs_error=float("inf"), - 
max_rel_error=float("inf"), - abs_tolerance=abs_tolerance, - rel_tolerance=rel_tolerance, - message=( - f"{metric} key mismatch at step {step_index}; " - f"missing={missing[:3]}, extra={extra[:3]}" - ), - ) - for key in sorted(ref_keys): - failure = _compare_tensor_pair( - case_id=case_id, - topology=topology, - metric=metric, - step_index=step_index, - key=key, - reference=reference[key], - candidate=candidate[key], - abs_tolerance=abs_tolerance, - rel_tolerance=rel_tolerance, - ) - if failure is not None: - return failure + for call in calls: + if int(call.get("call_index", -1)) == call_index: + return call + if 0 <= call_index < len(calls): + return calls[call_index] return None -def _write_failure_report(topology_dir: Path, failure: ComparisonFailure) -> None: - _write_json(topology_dir / "failure_report.json", failure.model_dump(mode="json")) +def _router_module_name_for_expert_module(module_name: str) -> str | None: + if ".mlp.experts.linear_fc1" in module_name: + return module_name.replace(".mlp.experts.linear_fc1", ".mlp.router") + if ".mlp.experts.linear_fc2" in module_name: + return module_name.replace(".mlp.experts.linear_fc2", ".mlp.router") + return None -def compare_topology_to_oracle( +def _build_moe_row_identities( *, - case_config: OracleCaseConfig, - topology: Topology, -) -> ComparisonFailure | None: - if topology.slug() == ORACLE_TOPOLOGY.slug(): + module_name: str, + call_index: int, + trace: dict[str, list[dict[str, Any]]], + row_splits: list[int] | None, +) -> list[tuple[int, int, int]] | None: + router_module_name = _router_module_name_for_expert_module(module_name) + if router_module_name is None: return None - - case_id = stable_case_id(case_config) - case_dir = ARTIFACT_ROOT / case_id - oracle_dir = case_dir / ORACLE_TOPOLOGY.slug() - topology_dir = case_dir / topology.slug() - - oracle_manifest = _load_manifest(oracle_dir) - topology_manifest = _load_manifest(topology_dir) - if len(oracle_manifest.steps) != len(topology_manifest.steps): 
- return _build_failure( - case_id=case_id, - topology=topology.slug(), - metric="losses", - step_index=0, - key="__step_count__", - max_abs_error=float("inf"), - max_rel_error=float("inf"), - abs_tolerance=case_config.tolerances.losses_abs, - rel_tolerance=case_config.tolerances.losses_rel, - message=( - "Step count mismatch: " - f"oracle={len(oracle_manifest.steps)} vs " - f"topology={len(topology_manifest.steps)}" - ), - ) - - import torch - - for oracle_step, topology_step in zip( - oracle_manifest.steps, topology_manifest.steps - ): - step_index = oracle_step.step_index - oracle_outputs = _load_output_tensor(oracle_dir, oracle_step) - topology_outputs = _load_output_tensor(topology_dir, topology_step) - failure = _compare_tensor_pair( - case_id=case_id, - topology=topology.slug(), - metric="outputs", - step_index=step_index, - key="logprobs", - reference=oracle_outputs, - candidate=topology_outputs, - abs_tolerance=case_config.tolerances.outputs_abs, - rel_tolerance=case_config.tolerances.outputs_rel, - ) - if failure is not None: - return failure - - oracle_loss = torch.tensor([oracle_step.loss], dtype=torch.float32) - topology_loss = torch.tensor([topology_step.loss], dtype=torch.float32) - failure = _compare_tensor_pair( - case_id=case_id, - topology=topology.slug(), - metric="losses", - step_index=step_index, - key="loss", - reference=oracle_loss, - candidate=topology_loss, - abs_tolerance=case_config.tolerances.losses_abs, - rel_tolerance=case_config.tolerances.losses_rel, - ) - if failure is not None: - return failure - - for metric, oracle_file, topo_file, abs_tol, rel_tol in ( - ( - "grads", - oracle_step.grads_file, - topology_step.grads_file, - case_config.tolerances.grads_abs, - case_config.tolerances.grads_rel, - ), - ( - "lora_deltas", - oracle_step.deltas_file, - topology_step.deltas_file, - case_config.tolerances.deltas_abs, - case_config.tolerances.deltas_rel, - ), - ): - failure = _compare_tensor_maps( - case_id=case_id, - 
topology=topology.slug(), - metric=metric, - step_index=step_index, - reference=_load_safetensor_map(oracle_dir / oracle_file), - candidate=_load_safetensor_map(topology_dir / topo_file), - abs_tolerance=abs_tol, - rel_tolerance=rel_tol, - ) - if failure is not None: - return failure - return None + router_call = _lookup_call_by_index(trace, router_module_name, call_index) + if router_call is None: + return None + router_topk_ids = router_call.get("router_topk_ids") + if not isinstance(router_topk_ids, torch.Tensor) or router_topk_ids.ndim != 2: + return None + token_splits_raw = router_call.get("router_topk_ids__row_splits") + if row_splits is None: + if isinstance(token_splits_raw, list): + row_splits = [ + int(v) * int(router_topk_ids.shape[1]) for v in token_splits_raw + ] + else: + row_splits = [int(router_topk_ids.numel())] + if isinstance(token_splits_raw, list): + token_splits = [int(v) for v in token_splits_raw] + else: + topk = int(router_topk_ids.shape[1]) + token_splits = [int(v) // topk for v in row_splits] + if len(row_splits) != len(token_splits): + return None + row_cursor = 0 + token_cursor = 0 + identities: list[tuple[int, int, int]] = [] + for row_count, token_count in zip(row_splits, token_splits): + local_ids = router_topk_ids[token_cursor : token_cursor + token_count] + token_cursor += token_count + local_identities: list[tuple[int, int, int]] = [] + max_expert = int(local_ids.max().item()) if local_ids.numel() > 0 else -1 + for expert_id in range(max_expert + 1): + expert_rows = (local_ids == expert_id).nonzero(as_tuple=False) + for token_offset, slot_index in expert_rows.tolist(): + local_identities.append( + (expert_id, token_cursor - token_count + token_offset, slot_index) + ) + if len(local_identities) != row_count: + return None + identities.extend(local_identities) + row_cursor += row_count + if row_cursor != sum(row_splits): + return None + return identities -def run_and_compare_topology( +def _canonicalize_moe_base_forward_tensor( *, 
- case_config: OracleCaseConfig, - topology: Topology, - regenerate: bool = False, -) -> None: - ensure_oracle_reference_artifacts( - case_config=case_config, - regenerate=regenerate and topology.slug() == ORACLE_TOPOLOGY.slug(), + module_name: str, + call_index: int, + tensor: torch.Tensor, + trace: dict[str, list[dict[str, Any]]], + call: dict[str, Any], +) -> torch.Tensor: + if not _is_moe_base_forward_param(module_name): + return tensor + if tensor.ndim != 2: + return tensor + row_splits_raw = call.get("primary_output__row_splits") + row_splits = ( + [int(v) for v in row_splits_raw] if isinstance(row_splits_raw, list) else None ) - ensure_topology_artifacts( - case_config=case_config, - topology=topology, - regenerate=regenerate, - mutation=None, + identities = _build_moe_row_identities( + module_name=module_name, + call_index=call_index, + trace=trace, + row_splits=row_splits, ) - failure = compare_topology_to_oracle(case_config=case_config, topology=topology) - if failure is None: - return - topology_dir = ARTIFACT_ROOT / failure.case_id / topology.slug() - _write_failure_report(topology_dir, failure) - raise AssertionError( - "Megatron oracle mismatch: " - f"topology={failure.topology}, metric={failure.metric}, " - f"step={failure.step_index}, key={failure.key}, " - f"max_abs={failure.max_abs_error:.6g}, " - f"max_rel={failure.max_rel_error:.6g}, " - f"tol_abs={failure.abs_tolerance:.6g}, " - f"tol_rel={failure.rel_tolerance:.6g}" - ) - + if identities is None or len(identities) != int(tensor.shape[0]): + return tensor + order = sorted(range(len(identities)), key=lambda index: identities[index]) + return tensor[order] -def run_sensitivity_check( - *, - case_config: OracleCaseConfig, - regenerate: bool = False, -) -> None: - mutation = sensitivity_mutation() - if mutation is None: - raise RuntimeError( - f"Sensitivity check requires {SENSITIVITY_MUTATION_ENV} to be set" - ) - ensure_oracle_reference_artifacts( - case_config=case_config, - 
regenerate=regenerate, - ) - ensure_topology_artifacts( - case_config=case_config, - topology=SENSITIVITY_TOPOLOGY, - regenerate=True, - mutation=mutation, - ) - failure = compare_topology_to_oracle( - case_config=case_config, - topology=SENSITIVITY_TOPOLOGY, +def _minimal_param_name(name: str) -> str: + """Returns a shorter but 1:1 param/module identifier for report readability.""" + return name.removeprefix("base_model.model.model.").replace( + "module.module.decoder.", "" ) - if failure is None: - raise AssertionError( - "Sensitivity mutation did not produce an oracle mismatch. " - f"mutation={mutation}, topology={SENSITIVITY_TOPOLOGY.slug()}" - ) - -def _set_deterministic_seed(seed: int) -> None: - import torch - random.seed(seed) - np.random.seed(seed) - torch.manual_seed(seed) - torch.cuda.manual_seed_all(seed) - torch.backends.cudnn.deterministic = True - torch.backends.cudnn.benchmark = False +def _load_forward_trace( + topology_dir: Path, step_index: int +) -> dict[str, list[dict[str, Any]]]: + """Loads one merged forward-trace file for a given step.""" + trace_path = topology_dir / "traces" / f"forward_trace_step_{step_index:03d}.pt" + return ForwardTraceCapture.load_trace(trace_path) -def _merge_sharded_dicts(shards_by_rank: list[dict[str, Any]]) -> dict[str, Any]: - import torch +def _threshold_string(thresholds: dict[str, float]) -> str: + """Formats threshold dicts into compact table cells.""" + if not thresholds: + return "-" + return ", ".join(f"{key}<={value:.3g}" for key, value in sorted(thresholds.items())) - merged: dict[str, list[Any]] = {} - for rank_shards in shards_by_rank: - for key, tensor in rank_shards.items(): - merged.setdefault(key, []).append(tensor.detach().cpu()) - full_state: dict[str, Any] = {} - for key, shards in merged.items(): - if len(shards) == 1: - full_state[key] = shards[0].contiguous() - continue - concat_dim = 1 if ".lora_A." 
in key else 0 - full_state[key] = torch.cat(shards, dim=concat_dim).contiguous() - return full_state - - -def _gather_full_state(local_state: dict[str, Any]) -> dict[str, Any] | None: - import torch - rank = torch.distributed.get_rank() - world_size = torch.distributed.get_world_size() - gathered = [None for _ in range(world_size)] if rank == 0 else None - torch.distributed.gather_object(local_state, gathered, dst=0) - if rank != 0: - return None - assert gathered is not None - entries = [entry for entry in gathered if entry is not None] - return _merge_sharded_dicts(entries) +def _finite_metric(value: float, *, default: float = NON_FINITE_METRIC_VALUE) -> float: + """Maps NaN/Inf metric values to a large finite sentinel for JSON-safe reports.""" + value_f = float(value) + if math.isnan(value_f): + return default + if math.isinf(value_f): + return default if value_f > 0 else -default + return value_f -def _collect_lora_state(model_chunks: list[Any]) -> dict[str, Any] | None: - local_state: dict[str, Any] = {} - for chunk in model_chunks: - for module in chunk.modules(): - if not hasattr(module, "sharded_lora_state_dict"): - continue - module_state = module.sharded_lora_state_dict() - for key, value in module_state.items(): - if key in local_state: - raise RuntimeError( - f"Duplicate LoRA key while collecting state: {key}" - ) - local_state[key] = value.detach().cpu() - return _gather_full_state(local_state) - +def _triplet_expert_key(param: str) -> tuple[int, int] | None: + """Returns (layer, expert_id) for expert up/gate/down params.""" + match = EXPERT_TRIPLET_PARAM_RE.search(param) + if match is None: + return None + return int(match.group("layer")), int(match.group("expert")) + + +class VariantRunner: + """Runs oracle/candidate variants and emits row-level comparison reports.""" + + def __init__( + self, + *, + case_config: OracleCaseConfig, + console: Console | None = None, + ) -> None: + self.case_config = case_config + self.case_artifacts = 
ensure_case_artifacts(case_config) + self.case_id = self.case_artifacts.case_id + self.case_dir = Path(self.case_artifacts.case_dir) + self.oracle_slug = ORACLE_TOPOLOGY.slug() + self.oracle_dir = self.case_dir / self.oracle_slug + self.oracle_routing_bundle_dir = ( + self.case_dir / ORACLE_MOE_ROUTING_BUNDLE_DIRNAME + ) + self.shared_init_path = Path(self.case_artifacts.shared_init_adapter_path) + self.console = console or Console(width=160) + self._oracle_initialized = False + self._oracle_regenerated = False + + def _run_topology( + self, + *, + topology: Topology, + output_slug: str, + mutation: SensitivityMutation | None, + replay_bundle_dir: Path | None, + capture_bundle_dir: Path | None, + regenerate: bool, + ) -> Path: + """Executes one topology worker run and returns its output directory.""" + topology_dir = self.case_dir / output_slug + manifest_path = topology_dir / "manifest.json" + if manifest_path.exists() and not regenerate: + return topology_dir + _replace_topology_dir(topology_dir) + request = WorkerRunRequest( + case_id=self.case_id, + case_config=self.case_config, + topology=topology, + topology_dir=str(topology_dir), + packed_tensors=self.case_artifacts.packed_tensors, + shared_init_adapter_path=str(self.shared_init_path), + mutation=mutation, + moe_routing_replay_path=( + None if replay_bundle_dir is None else str(replay_bundle_dir) + ), + moe_routing_replay_strict=True, + capture_moe_routing_bundle_path=( + None if capture_bundle_dir is None else str(capture_bundle_dir) + ), + ) + from .megatron_oracle_worker import run_worker_subprocess -def _collect_lora_grads(model_chunks: list[Any]) -> dict[str, Any] | None: - from megatron.core import parallel_state as ps + run_worker_subprocess(request, topology_dir, repo_root=REPO_ROOT) + return topology_dir - from art.megatron.lora import LoRA + def ensure_oracle(self) -> Path: + """Ensures oracle capture and canonical replay artifacts exist exactly once per session.""" + regenerate = 
regenerate_requested() + if self._oracle_initialized and (not regenerate or self._oracle_regenerated): + return self.oracle_dir + if regenerate and self.shared_init_path.exists(): + self.shared_init_path.unlink() + bundle_manifest = self.oracle_routing_bundle_dir / "manifest.json" + oracle_manifest = self.oracle_dir / "manifest.json" + need_capture = ( + regenerate + or not bundle_manifest.exists() + or not self.shared_init_path.exists() + ) + run_oracle_topology = partial( + self._run_topology, + topology=ORACLE_TOPOLOGY, + mutation=None, + regenerate=True, + ) + if need_capture: + run_oracle_topology( + output_slug=f"{self.oracle_slug}__oracle_capture", + replay_bundle_dir=None, + capture_bundle_dir=self.oracle_routing_bundle_dir, + ) + if regenerate or not oracle_manifest.exists(): + run_oracle_topology( + output_slug=self.oracle_slug, + replay_bundle_dir=self.oracle_routing_bundle_dir, + capture_bundle_dir=None, + ) + self._oracle_initialized = True + self._oracle_regenerated = self._oracle_regenerated or regenerate + return self.oracle_dir + + def ensure_variant_artifacts( + self, + variant: VariantSpec, + ) -> Path: + """Ensures oracle prerequisites and candidate artifacts for one variant.""" + self.ensure_oracle() + output_slug = variant.resolved_output_slug() + if output_slug == self.oracle_slug and variant.mutation is None: + return self.oracle_dir + return self._run_topology( + topology=variant.topology, + output_slug=output_slug, + mutation=variant.mutation, + replay_bundle_dir=self.oracle_routing_bundle_dir, + capture_bundle_dir=None, + regenerate=True, + ) - local_grads: dict[str, Any] = {} - for chunk in model_chunks: - for module in chunk.modules(): - if not isinstance(module, LoRA): + @staticmethod + def _apply_thresholds(row: MetricRow, thresholds: dict[str, float]) -> None: + """Evaluates row thresholds using AND semantics over all configured keys.""" + row.thresholds = dict(thresholds) + if not thresholds: + row.pass_signal = True + 
row.failure_reasons = [] + return + payload = row.model_dump(mode="python") + reasons: list[str] = [] + for key, limit in sorted(thresholds.items()): + value = payload.get(key) + if not isinstance(value, (int, float)): + reasons.append(f"{key}=missing") continue - grad_a = ( - module.A_T.grad - if module.A_T.grad is not None - else module.A_T.new_zeros(module.A_T.shape) - ) - grad_b = ( - module.B_T.grad - if module.B_T.grad is not None - else module.B_T.new_zeros(module.B_T.shape) + if float(value) > float(limit): + reasons.append(f"{key}={float(value):.6g}>{float(limit):.6g}") + row.pass_signal = len(reasons) == 0 + row.failure_reasons = reasons + + @staticmethod + def _inf_summary() -> dict[str, float]: + """Builds a large-error finite summary for structural mismatches.""" + return { + "numel": 0.0, + "mean_abs_diff": NON_FINITE_METRIC_VALUE, + "relative_l2": NON_FINITE_METRIC_VALUE, + "typical_abs_scale": 0.0, + "mean_abs_pct": NON_FINITE_METRIC_VALUE, + "topk_mismatch_fraction": 1.0, + "top1_mismatch_fraction": 1.0, + } + + def _build_metric_row( + self, + *, + variant: VariantSpec, + step_index: int, + phase: str, + param: str, + summary: dict[str, float], + structural_failure: str | None = None, + ) -> MetricRow: + """Builds one metric row and applies per-phase thresholds.""" + row = MetricRow( + case_id=self.case_id, + variant=variant.name, + topology=variant.resolved_output_slug(), + oracle_topology=variant.resolved_reference_slug(), + step_index=step_index, + phase=phase, + param=param, + numel=summary["numel"], + mean_abs_diff=summary["mean_abs_diff"], + relative_l2=summary["relative_l2"], + typical_abs_scale=summary["typical_abs_scale"], + mean_abs_pct=summary["mean_abs_pct"], + topk_mismatch_fraction=summary.get("topk_mismatch_fraction"), + top1_mismatch_fraction=summary.get("top1_mismatch_fraction"), + ) + self._apply_thresholds(row, variant.thresholds_by_phase.get(phase, {})) + if structural_failure is not None: + row.pass_signal = False + 
row.failure_reasons = [structural_failure, *row.failure_reasons] + return row + + def _build_metric_rows_from_tensor_pairs( + self, + *, + variant: VariantSpec, + step_index: int, + phase: str, + pairs: list[tuple[str, Any, Any]], + router_ids: bool = False, + ) -> list[MetricRow]: + """Builds rows from named tensor pairs with one shared diff path.""" + rows: list[MetricRow] = [] + for name, reference, candidate in pairs: + shared_kwargs = { + "variant": variant, + "step_index": step_index, + "phase": phase, + "param": _minimal_param_name(name), + } + reference_aligned = reference + candidate_aligned = candidate + aligned_candidate = _align_sequence_parallel( + reference_aligned, candidate_aligned ) - if module.num_local_experts > 1: - if ps.get_expert_data_parallel_rank() != 0: - continue - for expert in range(module.num_local_experts): - prefix = module.adapter_model_prefix.format( - expert=expert + module._expert_offset - ) - local_grads[f"{prefix}.lora_A.weight"] = ( - grad_a[expert].detach().cpu().T - ) - local_grads[f"{prefix}.lora_B.weight"] = ( - grad_b[expert].detach().cpu().T + if aligned_candidate is None: + rows.append( + self._build_metric_row( + summary=self._inf_summary(), + structural_failure="shape mismatch", + **shared_kwargs, ) + ) + continue + accumulator = DiffAccumulator() + if router_ids: + accumulator.update_router_ids(reference_aligned, aligned_candidate) else: - if ps.get_data_parallel_rank() != 0: - continue - local_grads[f"{module.adapter_model_prefix}.lora_A.weight"] = ( - grad_a.detach().cpu().T + accumulator.update(reference_aligned, aligned_candidate) + rows.append( + self._build_metric_row( + summary=accumulator.as_summary(), **shared_kwargs ) - local_grads[f"{module.adapter_model_prefix}.lora_B.weight"] = ( - grad_b.detach().cpu().T + ) + return rows + + def _check_matching_keys( + self, + reference: dict[str, Any], + candidate: dict[str, Any], + variant: VariantSpec, + step_index: int, + phase: str, + ) -> tuple[bool, 
list[MetricRow] | None]: + """Checks if the keys of two tensor maps match and builds a metric row if they don't.""" + reference_keys = set(reference.keys()) + candidate_keys = set(candidate.keys()) + if reference_keys != candidate_keys: + missing = sorted(reference_keys - candidate_keys) + extra = sorted(candidate_keys - reference_keys) + return False, [ + self._build_metric_row( + variant=variant, + step_index=step_index, + phase=phase, + param="__keys__", + summary=self._inf_summary(), + structural_failure=f"missing={missing[:5]} extra={extra[:5]}", ) - return _gather_full_state(local_grads) - - -def _validate_adapter_exact( - expected_state: dict[str, Any], - adapter_model: dict[str, Any], -) -> None: - expected_keys = set(expected_state.keys()) - adapter_keys = set(adapter_model.keys()) - missing = sorted(expected_keys - adapter_keys) - extra = sorted(adapter_keys - expected_keys) - if missing or extra: - raise KeyError( - f"Adapter keys mismatch: missing={missing[:5]} extra={extra[:5]}" + ] + return True, None + + def _build_metric_rows_from_tensor_maps( + self, + *, + variant: VariantSpec, + step_index: int, + phase: str, + reference: dict[str, Any], + candidate: dict[str, Any], + router_ids: bool = False, + ) -> list[MetricRow]: + """Builds rows from two keyed tensor maps through a unified compare path.""" + matching, rows = self._check_matching_keys( + reference, candidate, variant, step_index, phase + ) + if not matching: + return rows + pairs = [ + (key, reference[key], candidate[key]) + for key in sorted(set(reference.keys())) + ] + return self._build_metric_rows_from_tensor_pairs( + variant=variant, + step_index=step_index, + phase=phase, + pairs=pairs, + router_ids=router_ids, ) - -def _validate_loaded_state_matches_adapter( - loaded_state: dict[str, Any], - adapter_model: dict[str, Any], -) -> None: - import torch - - for key in sorted(adapter_model.keys()): - if key not in loaded_state: - raise KeyError(f"Loaded LoRA state missing key: {key}") - if 
not torch.equal(loaded_state[key].cpu(), adapter_model[key].cpu()): - max_abs, max_rel = _tensor_error(adapter_model[key], loaded_state[key]) - raise RuntimeError( - f"Loaded LoRA state mismatch for key '{key}' " - f"(max_abs={max_abs:.6g}, max_rel={max_rel:.6g})" + @staticmethod + def _flatten_forward_trace_tensors( + trace: dict[str, list[dict[str, Any]]], + *, + value_key: str, + ) -> dict[str, Any]: + """Flattens per-module forward trace calls into a deterministic tensor map.""" + flattened: dict[str, Any] = {} + for module_name in sorted(trace.keys()): + for call_offset, call in enumerate(trace[module_name]): + tensor = call.get(value_key) + if tensor is None: + continue + call_index = call.get("call_index", call_offset) + if value_key == "primary_output" and isinstance(tensor, torch.Tensor): + tensor = _canonicalize_moe_base_forward_tensor( + module_name=module_name, + call_index=int(call_index), + tensor=tensor, + trace=trace, + call=call, + ) + flattened[f"{module_name}.call_{call_index}"] = tensor + return flattened + + @staticmethod + def _build_step_summaries(rows: list[MetricRow]) -> dict[int, dict[str, Any]]: + """Builds step-indexed payloads directly from row model dumps.""" + step_summaries: dict[int, dict[str, Any]] = {} + for row in rows: + step_entry = step_summaries.setdefault(row.step_index, {}) + phase_entry = cast(dict[str, Any], step_entry.setdefault(row.phase, {})) + phase_entry[row.param] = row.model_dump(mode="json") + return step_summaries + + def compare_variant(self, variant: VariantSpec) -> VariantReport: + """Compares one candidate variant against its reference topology.""" + reference_slug = variant.resolved_reference_slug() + topology_slug = variant.resolved_output_slug() + reference_dir = self.case_dir / reference_slug + topology_dir = self.case_dir / topology_slug + reference_manifest = _load_manifest(reference_dir) + topology_manifest = _load_manifest(topology_dir) + rows: list[MetricRow] = [] + if len(reference_manifest.steps) 
!= len(topology_manifest.steps): + rows.append( + self._build_metric_row( + variant=variant, + step_index=0, + phase="step_count", + param="__step_count__", + summary=self._inf_summary(), + structural_failure=( + f"reference={len(reference_manifest.steps)} " + f"candidate={len(topology_manifest.steps)}" + ), + ) ) + import torch -def _configure_provider(provider: Any, topology: Topology) -> None: - provider.tensor_model_parallel_size = topology.tp - provider.expert_model_parallel_size = topology.ep - provider.expert_tensor_parallel_size = topology.etp - provider.pipeline_model_parallel_size = 1 - provider.context_parallel_size = 1 - provider.sequence_parallel = bool(topology.sp) - if hasattr(provider, "attention_dropout"): - provider.attention_dropout = 0.0 - if hasattr(provider, "hidden_dropout"): - provider.hidden_dropout = 0.0 - - -def _delta_state( - initial_state: dict[str, Any], - current_state: dict[str, Any], -) -> dict[str, Any]: - initial_keys = set(initial_state.keys()) - current_keys = set(current_state.keys()) - if initial_keys != current_keys: - missing = sorted(initial_keys - current_keys) - extra = sorted(current_keys - initial_keys) - raise KeyError( - f"LoRA state keys changed during training: missing={missing[:3]} extra={extra[:3]}" + for reference_step, topology_step in zip( + reference_manifest.steps, topology_manifest.steps + ): + step_index = reference_step.step_index + reference_trace = _load_forward_trace(reference_dir, step_index) + topology_trace = _load_forward_trace(topology_dir, step_index) + map_phase_inputs = [ + ( + "outputs", + {"logprobs": _load_output_tensor(reference_dir, reference_step)}, + {"logprobs": _load_output_tensor(topology_dir, topology_step)}, + False, + ), + ( + "losses", + {"loss": torch.tensor([reference_step.loss], dtype=torch.float32)}, + {"loss": torch.tensor([topology_step.loss], dtype=torch.float32)}, + False, + ), + ( + "grads", + _load_safetensor_map(reference_dir / reference_step.grads_file), + 
_load_safetensor_map(topology_dir / topology_step.grads_file), + False, + ), + ( + "deltas", + _load_safetensor_map(reference_dir / reference_step.deltas_file), + _load_safetensor_map(topology_dir / topology_step.deltas_file), + False, + ), + *[ + ( + phase, + self._flatten_forward_trace_tensors( + reference_trace, + value_key=value_key, + ), + self._flatten_forward_trace_tensors( + topology_trace, + value_key=value_key, + ), + phase == "router_topk_ids", + ) + for phase, value_key in ( + ("forward", "primary_output"), + ("router_scores", "router_topk_scores"), + ("router_topk_ids", "router_topk_ids"), + ) + ], + ] + for phase, reference_map, candidate_map, router_ids in map_phase_inputs: + rows.extend( + self._build_metric_rows_from_tensor_maps( + variant=variant, + step_index=step_index, + phase=phase, + reference=reference_map, + candidate=candidate_map, + router_ids=router_ids, + ) + ) + pass_count = sum(1 for row in rows if row.pass_signal) + fail_count = len(rows) - pass_count + signal: Literal["pass", "fail"] = "pass" if fail_count == 0 else "fail" + return VariantReport( + case_id=self.case_id, + variant=variant.name, + topology=topology_slug, + reference_topology=reference_slug, + expected_signal=variant.expected_signal, + signal=signal, + pass_count=pass_count, + fail_count=fail_count, + step_summaries=self._build_step_summaries(rows), + metrics=rows, ) - return { - key: current_state[key].detach().cpu() - initial_state[key].detach().cpu() - for key in sorted(initial_keys) - } - - -@contextmanager -def _mutation_hook( - megatron_train_module: Any, - mutation: SensitivityMutation | None, - pre_optimizer_step_hook: Callable[[], None] | None = None, -): - original_finalize = megatron_train_module._finalize_grads - original_optimizer_step = megatron_train_module._optimizer_step - - if mutation == "drop_finalize": - megatron_train_module._finalize_grads = lambda _model: None - elif mutation is not None: - raise ValueError(f"Unsupported mutation: {mutation}") - 
if pre_optimizer_step_hook is not None: - - def _patched_optimizer_step(optimizer: Any, learning_rate: float): - pre_optimizer_step_hook() - return original_optimizer_step(optimizer, learning_rate) - - megatron_train_module._optimizer_step = _patched_optimizer_step - - if mutation is None: - if pre_optimizer_step_hook is None: - yield + @staticmethod + def assert_expected_signal(report: VariantReport, context: str) -> None: + """Raises when observed run signal diverges from variant expectation.""" + if report.signal == report.expected_signal: return - try: - yield - finally: - megatron_train_module._finalize_grads = original_finalize - megatron_train_module._optimizer_step = original_optimizer_step - + if report.signal == "fail": + first_failure = next(row for row in report.metrics if not row.pass_signal) + raise AssertionError( + f"{context}: topology={report.topology} phase={first_failure.phase} " + f"step={first_failure.step_index} param={first_failure.param} " + f"reasons={'; '.join(first_failure.failure_reasons)}" + ) + raise AssertionError( + f"{context}: expected_signal={report.expected_signal} " + f"observed_signal={report.signal} topology={report.topology}" + ) -def _worker_run(request: WorkerRunRequest) -> None: - from megatron.core.optimizer import OptimizerConfig - from safetensors.torch import load_file, save_file - import torch + def _write_variant_report(self, topology_dir: Path, report: VariantReport) -> None: + """Persists full variant report JSON for debugging and regression inspection.""" + _write_json( + topology_dir / "variant_report.json", report.model_dump(mode="json") + ) - from art import dev, types - from art.megatron import train as megatron_train - from art.preprocessing.pack import packed_tensors_from_dir + def print_report(self, report: VariantReport) -> None: + """Prints a row-level table with expert rows subsampled by highest relative_l2.""" + non_expert_rows: list[MetricRow] = [] + triplet_rows: list[tuple[tuple[int, int], 
MetricRow]] = [] + for row in report.metrics: + expert_key = _triplet_expert_key(row.param) + if expert_key is None: + non_expert_rows.append(row) + continue + triplet_rows.append((expert_key, row)) - local_rank = int(os.environ["LOCAL_RANK"]) - torch.cuda.set_device(local_rank) - torch.distributed.init_process_group(backend="nccl") - _set_deterministic_seed(request.case_config.seed) + scores_by_layer: dict[int, dict[int, float]] = {} + for (layer, expert_id), row in triplet_rows: + layer_scores = scores_by_layer.setdefault(layer, {}) + layer_scores[expert_id] = max( + layer_scores.get(expert_id, float("-inf")), row.relative_l2 + ) - world_size = torch.distributed.get_world_size() - if world_size != request.topology.world_size(): - raise RuntimeError( - f"World size mismatch: expected {request.topology.world_size()}, got {world_size}" + selected_experts: set[tuple[int, int]] = set() + for layer, expert_scores in scores_by_layer.items(): + top_experts = sorted( + expert_scores.items(), + key=lambda item: item[1], + reverse=True, + )[:EXPERT_TABLE_ROW_LIMIT] + for expert_id, _score in top_experts: + selected_experts.add((layer, expert_id)) + + selected_triplet_rows = [ + row for expert_key, row in triplet_rows if expert_key in selected_experts + ] + table_rows = non_expert_rows + selected_triplet_rows + detail_table = Table( + title=( + f"Variant Report | variant={report.variant} " + f"| topology={report.topology} | signal={report.signal} " + f"| selected_experts={len(selected_experts)} " + f"(top {EXPERT_TABLE_ROW_LIMIT} per layer)" + ), + box=box.SIMPLE_HEAVY, + show_lines=False, ) - - runtime = megatron_train.build_training_runtime( - model_identifier=request.case_config.base_model, - provider_configure=lambda provider: _configure_provider( - provider, request.topology - ), - optimizer_config=OptimizerConfig( - bf16=True, - lr=request.case_config.learning_rate, - adam_beta1=0.9, - adam_beta2=0.99, - clip_grad=0.1, - weight_decay=0.1, - ), - print_env=False, - 
print_optimizer_stats=False, - ) - model_chunks = runtime.model - optimizer = runtime.optimizer - - topology_dir = Path(request.topology_dir) - traces_dir = topology_dir / "traces" - traces_dir.mkdir(parents=True, exist_ok=True) - - shared_init_path = Path(request.shared_init_adapter_path) - if not shared_init_path.exists(): - if not request.allow_create_shared_init: - raise FileNotFoundError( - f"Missing oracle shared adapter at {shared_init_path}" + detail_table.add_column("Step", justify="right") + detail_table.add_column("Phase", style="cyan") + detail_table.add_column("Param") + detail_table.add_column("Status") + detail_table.add_column("relative_l2", justify="right") + detail_table.add_column("mean_abs_pct", justify="right") + detail_table.add_column("typical_abs", justify="right") + # detail_table.add_column("Thresholds") + detail_table.add_column("Failure") + sorted_rows = sorted( + table_rows, + key=lambda row: ( + row.step_index, + PHASE_PRINT_ORDER.get(row.phase, 999), + row.param, + row.pass_signal, + ), + ) + for row in sorted_rows: + status_text = ( + "[green]PASS[/green]" if row.pass_signal else "[red]FAIL[/red]" ) - initial_state = _collect_lora_state(model_chunks) - if torch.distributed.get_rank() == 0: - assert initial_state is not None - shared_init_path.parent.mkdir(parents=True, exist_ok=True) - save_file(initial_state, str(shared_init_path)) - torch.distributed.barrier() - if not shared_init_path.exists(): - raise FileNotFoundError(f"Shared init adapter not created: {shared_init_path}") - - adapter_model = load_file(str(shared_init_path)) - expected_state = _collect_lora_state(model_chunks) - if torch.distributed.get_rank() == 0: - assert expected_state is not None - _validate_adapter_exact(expected_state, adapter_model) - torch.distributed.barrier() - - megatron_train.load_adapter_into_model(model_chunks, adapter_model) - loaded_state = _collect_lora_state(model_chunks) - if torch.distributed.get_rank() == 0: - assert loaded_state is not 
None - _validate_loaded_state_matches_adapter(loaded_state, adapter_model) - torch.distributed.barrier() - - packed_tensors = packed_tensors_from_dir( - **request.packed_tensors.model_dump(exclude_none=True) - ) - initial_lora_state = _collect_lora_state(model_chunks) - if torch.distributed.get_rank() == 0 and initial_lora_state is None: - raise RuntimeError("Failed to collect initial LoRA state on rank 0") - - train_config = types.TrainConfig( - learning_rate=request.case_config.learning_rate, - beta=request.case_config.beta, - kl_penalty_coef=0.0, - ) - experimental_config: dev.TrainConfig = {} - step_traces: list[StepTrace] = [] - captured_grads: dict[str, Any] | None = None - - def _capture_lora_grads() -> None: - nonlocal captured_grads - captured_grads = _collect_lora_grads(model_chunks) - - with _mutation_hook( - megatron_train, - request.mutation, - pre_optimizer_step_hook=_capture_lora_grads, - ): - for step_index in range(request.case_config.num_steps): - sample_index = step_index % request.packed_tensors.num_sequences - inputs = megatron_train.select_indexed_inputs(packed_tensors, sample_index) - captured_grads = None - - step_result = megatron_train.run_training_step( - model_chunks=model_chunks, - optimizer=optimizer, - learning_rate=train_config.learning_rate, - inputs=inputs, - config=train_config, - experimental_config=experimental_config, - ref_logprobs=None, + failure_text = "" if row.pass_signal else "; ".join(row.failure_reasons) + detail_table.add_row( + str(row.step_index), + row.phase, + row.param, + status_text, + f"{row.relative_l2:.6g}", + f"{row.mean_abs_pct:.6g}%", + f"{row.typical_abs_scale:.6g}", + # _threshold_string(row.thresholds), # disabled for now to avoid clutter, neat to keep though + failure_text, ) - if torch.distributed.get_rank() == 0 and captured_grads is None: - raise RuntimeError("Failed to collect LoRA grads on rank 0") - - current_lora_state = _collect_lora_state(model_chunks) - if torch.distributed.get_rank() == 0 and 
current_lora_state is None: - raise RuntimeError("Failed to collect current LoRA state on rank 0") - - if torch.distributed.get_rank() == 0: - grads = _require_not_none(captured_grads, "captured_grads") - initial_state = _require_not_none( - initial_lora_state, "initial_lora_state" - ) - current_state = _require_not_none( - current_lora_state, "current_lora_state" - ) - output_rel = Path("traces") / f"output_step_{step_index:03d}.pt" - grads_rel = Path("traces") / f"grads_step_{step_index:03d}.safetensors" - deltas_rel = ( - Path("traces") / f"deltas_step_{step_index:03d}.safetensors" - ) - lora_rel = Path(f"lora_step_{step_index:03d}.safetensors") + self.console.print(detail_table) + + def run_variant( + self, + variant: VariantSpec, + ) -> VariantReport: + """Runs a variant end-to-end, writes JSON report, and prints row table.""" + topology_dir = self.ensure_variant_artifacts(variant) + report = self.compare_variant(variant) + self._write_variant_report(topology_dir, report) + self.print_report(report) + return report + + def run_suite( + self, + variants: list[VariantSpec], + ) -> list[VariantReport]: + """Runs variants in order and stops at the first unexpected signal.""" + reports: list[VariantReport] = [] + for variant in variants: + report = self.run_variant(variant) + reports.append(report) + self.assert_expected_signal(report, "Megatron oracle suite mismatch") + return reports + + +def _default_phase_thresholds( + case_cfg: OracleCaseConfig, +) -> dict[str, dict[str, float]]: + """Builds default per-phase (fwd, grad, outputs, losses, deltas) threshold dictionaries.""" + default = { + "relative_l2": case_cfg.tolerances.relative_l2, + "mean_abs_pct": case_cfg.tolerances.mean_abs_pct, + } + return { + key: default for key in ["outputs", "losses", "grads", "deltas", "forward"] + } | { + "router_scores": {"mean_abs_pct": 0.0}, + "router_topk_ids": { + "topk_mismatch_fraction": 0.0, + "top1_mismatch_fraction": 0.0, + }, + } - torch.save( - 
step_result.new_logprobs.detach().cpu().float(), - topology_dir / output_rel, - ) - save_file(grads, str(topology_dir / grads_rel)) - deltas = _delta_state(initial_state, current_state) - save_file(deltas, str(topology_dir / deltas_rel)) - save_file(current_state, str(topology_dir / lora_rel)) - step_traces.append( - StepTrace( - step_index=step_index, - loss=float(step_result.reduced_loss.item()), - probs_corr=step_result.probs_corr, - output_file=str(output_rel), - grads_file=str(grads_rel), - deltas_file=str(deltas_rel), - lora_file=str(lora_rel), - ) - ) - torch.distributed.barrier() - - if torch.distributed.get_rank() == 0: - manifest = RunManifest( - case_id=request.case_id, - base_model=request.case_config.base_model, - topology=request.topology.slug(), - world_size=request.topology.world_size(), - seed=request.case_config.seed, - num_steps=request.case_config.num_steps, - packed_tensors=request.packed_tensors, - tolerances=request.case_config.tolerances, - steps=step_traces, +def _suite_variants(case_cfg: OracleCaseConfig) -> list[VariantSpec]: + """Builds the standard oracle suite variant ordering.""" + thresholds = _default_phase_thresholds(case_cfg) + variants = [ + VariantSpec( + name="oracle_replay_parity", + topology=ORACLE_TOPOLOGY, + output_slug=_topology_output_slug( + ORACLE_TOPOLOGY, ORACLE_REPLAY_TOPOLOGY_SUFFIX + ), + thresholds_by_phase=thresholds, ) - _write_json(topology_dir / "manifest.json", manifest.model_dump(mode="json")) - torch.distributed.barrier() - torch.distributed.destroy_process_group() - - -def _run_worker_cli(run_request_path: Path) -> None: - request = WorkerRunRequest.model_validate(_read_json(run_request_path)) - _worker_run(request) - - -def _parse_args(argv: list[str]) -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Megatron oracle harness worker") - parser.add_argument("--worker-run", action="store_true") - parser.add_argument("--run-request", type=Path) - return parser.parse_args(argv) + ] + for 
topology in TOPOLOGIES[1:] + ( + EXTENDED_TOPOLOGIES if extended_topologies_enabled() else [] + ): + variants.append( + VariantSpec( + name=f"topology_{topology.slug()}", + topology=topology, + thresholds_by_phase=thresholds, + ) + ) + return variants -def _main(argv: list[str]) -> int: - args = _parse_args(argv) - if not args.worker_run: - raise SystemExit("This module is intended for test imports or --worker-run") - if args.run_request is None: - raise SystemExit("--run-request is required with --worker-run") - _run_worker_cli(args.run_request) - return 0 +def run_suite( + *, + case_config: OracleCaseConfig, +) -> list[VariantReport]: + """Runs replay parity and topology variants with fail-fast assertions.""" + runner = VariantRunner(case_config=case_config) + return runner.run_suite(_suite_variants(case_config)) -if __name__ == "__main__": - raise SystemExit(_main(sys.argv[1:])) +def run_sensitivity_suite( + *, + case_config: OracleCaseConfig, + mutations: list[SensitivityMutation], +) -> list[VariantReport]: + """Runs a list of sensitivity mutations and expects each to fail.""" + runner = VariantRunner(case_config=case_config) + thresholds = _default_phase_thresholds(case_config) + variants = [ + VariantSpec( + name=f"sensitivity_{mutation}", + topology=SENSITIVITY_TOPOLOGY, + mutation=mutation, + expected_signal="fail", + thresholds_by_phase=thresholds, + ) + for mutation in mutations + ] + return runner.run_suite(variants) diff --git a/tests/integration/test_megatron_lora_oracle_correctness.py b/tests/integration/test_megatron_lora_oracle_correctness.py index f6949b81..f7e4dbbc 100644 --- a/tests/integration/test_megatron_lora_oracle_correctness.py +++ b/tests/integration/test_megatron_lora_oracle_correctness.py @@ -1,19 +1,17 @@ import pytest from .megatron_oracle_harness import ( - ORACLE_TOPOLOGY, - PHASE_A_TOPOLOGIES, - PHASE_B_TOPOLOGIES, + EXTENDED_TOPOLOGIES, SENSITIVITY_MUTATION_ENV, SENSITIVITY_TOPOLOGY, + TOPOLOGIES, available_gpu_count, - 
default_case_config,
-    ensure_oracle_reference_artifacts,
-    phase_b_dp_enabled,
-    regenerate_requested,
-    run_and_compare_topology,
-    run_sensitivity_check,
+    case_config,
+    extended_topologies_enabled,
+    run_sensitivity_suite,
+    run_suite,
     sensitivity_enabled,
+    sensitivity_mutations,
 )
 
 
@@ -25,76 +23,38 @@ def _require_gpus_for(topology_world_size: int) -> None:
     )
 
 
-def _skip_if_sensitivity_mode() -> None:
-    if sensitivity_enabled():
-        pytest.skip(
-            f"{SENSITIVITY_MUTATION_ENV} is enabled; running sensitivity check only."
-        )
+def _suite_world_size() -> int:
+    suite_topologies = list(TOPOLOGIES)
+    if extended_topologies_enabled():
+        suite_topologies.extend(EXTENDED_TOPOLOGIES)
+    return max(topology.world_size() for topology in suite_topologies)
 
 
-def _run_topology_case(  # type: ignore[no-untyped-def]
-    topology,
-    case_config,
-    *,
-    regenerate: bool,
-) -> None:
-    _require_gpus_for(topology.world_size())
-    run_and_compare_topology(
-        case_config=case_config,
-        topology=topology,
-        regenerate=regenerate,
-    )
-
+def test_megatron_lora_diff_sensitivity() -> None:
+    """
+    Runs each of the sensitivity mutations (e.g. drop megatron finalize grads)
+    and expects each to fail (numerical differences larger than our thresholds)
 
-def test_000_megatron_lora_oracle_sensitivity_check() -> None:
+    This test ensures we can catch errors we know of (implying we will be able to catch unknown errors as well)
+    """
    if not sensitivity_enabled():
         pytest.skip(
-            f"Set {SENSITIVITY_MUTATION_ENV}=drop_finalize to enable sensitivity check."
+            f"Set {SENSITIVITY_MUTATION_ENV}=drop_finalize (or CSV) to enable sensitivity check."
         
) _require_gpus_for(SENSITIVITY_TOPOLOGY.world_size()) - run_sensitivity_check( - case_config=default_case_config(), - regenerate=regenerate_requested(), + mutations = sensitivity_mutations() + assert mutations + run_sensitivity_suite( + case_config=case_config(), + mutations=mutations, ) -def test_megatron_lora_oracle_phase_a_matrix() -> None: - _skip_if_sensitivity_mode() - case_config = default_case_config() - regenerate = regenerate_requested() - _require_gpus_for(ORACLE_TOPOLOGY.world_size()) - ensure_oracle_reference_artifacts( - case_config=case_config, - regenerate=regenerate, - ) - for topology in PHASE_A_TOPOLOGIES: - _run_topology_case( - topology, - case_config, - regenerate=regenerate and topology.slug() != ORACLE_TOPOLOGY.slug(), - ) - - -@pytest.mark.parametrize( - "topology_index", - range(len(PHASE_B_TOPOLOGIES)), - ids=[topology.slug() for topology in PHASE_B_TOPOLOGIES], -) -def test_megatron_lora_oracle_phase_b_dp_matrix(topology_index: int) -> None: - _skip_if_sensitivity_mode() - if not phase_b_dp_enabled(): - pytest.xfail( - "DP matrix currently blocked until Megatron backend DP support is enabled" - ) - case_config = default_case_config() - regenerate = regenerate_requested() - _require_gpus_for(ORACLE_TOPOLOGY.world_size()) - ensure_oracle_reference_artifacts( - case_config=case_config, - regenerate=regenerate, - ) - _run_topology_case( - PHASE_B_TOPOLOGIES[topology_index], - case_config, - regenerate=regenerate, +def test_megatron_lora_topology_suite() -> None: + """ + Runs the suite of topologies and expects each to pass (numerical differences within our thresholds) + """ + _require_gpus_for(_suite_world_size()) + run_suite( + case_config=case_config(), ) From ec8371629d06ed8f9fe7d3f3d1ea08084ff861d6 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Thu, 12 Mar 2026 09:41:59 +0000 Subject: [PATCH 10/28] typing: clear blocking ty errors in oracle replay and LoRA paths --- src/art/megatron/lora.py | 7 ++++--- 
tests/integration/megatron_forward_trace.py | 17 +++++++++++------ tests/integration/megatron_oracle_harness.py | 20 +++++++++++--------- tests/integration/megatron_oracle_worker.py | 7 ++----- tests/unit/test_moe_routing_replay.py | 4 +++- 5 files changed, 31 insertions(+), 24 deletions(-) diff --git a/src/art/megatron/lora.py b/src/art/megatron/lora.py index b594bf18..ca578e5f 100644 --- a/src/art/megatron/lora.py +++ b/src/art/megatron/lora.py @@ -119,10 +119,11 @@ def _set_lora_parallel_metadata( # Megatron optimizer and checkpoint logic rely on tensor model-parallel metadata # to distinguish true shards from TP-duplicate params. if parallel_spec.sharded: + shard_dim = parallel_spec.shard_dim + if shard_dim is None: + raise ValueError("LoRAParallelSpec.shard_dim must be set when sharded=True") setattr(param, "tensor_model_parallel", True) - setattr( - param, "partition_dim", _normalize_axis(parallel_spec.shard_dim, param.ndim) - ) + setattr(param, "partition_dim", _normalize_axis(shard_dim, param.ndim)) # stride > 1 means the dim is split into blocks and each tp rank holds a shard of the block # this might happen for fused e.g. 
gate_(up|proj), but loras are individual per module setattr(param, "partition_stride", 1) diff --git a/tests/integration/megatron_forward_trace.py b/tests/integration/megatron_forward_trace.py index 2ca418aa..5da6f0d6 100644 --- a/tests/integration/megatron_forward_trace.py +++ b/tests/integration/megatron_forward_trace.py @@ -90,12 +90,17 @@ def _extract_primary_tensor(value: Any) -> torch.Tensor | None: def _materialize_tensor(tensor: torch.Tensor) -> torch.Tensor: - if hasattr(tensor, "full_tensor"): - tensor = cast(torch.Tensor, tensor.full_tensor()) - elif hasattr(tensor, "to_local"): - tensor = cast(torch.Tensor, tensor.to_local()) - elif hasattr(tensor, "_local_tensor"): - tensor = cast(torch.Tensor, tensor._local_tensor) + full_tensor = getattr(tensor, "full_tensor", None) + if callable(full_tensor): + tensor = cast(torch.Tensor, full_tensor()) + else: + to_local = getattr(tensor, "to_local", None) + if callable(to_local): + tensor = cast(torch.Tensor, to_local()) + else: + local_tensor = getattr(tensor, "_local_tensor", None) + if isinstance(local_tensor, torch.Tensor): + tensor = local_tensor return tensor.detach().cpu() diff --git a/tests/integration/megatron_oracle_harness.py b/tests/integration/megatron_oracle_harness.py index a66385b0..4e6567f9 100644 --- a/tests/integration/megatron_oracle_harness.py +++ b/tests/integration/megatron_oracle_harness.py @@ -959,12 +959,7 @@ def _build_metric_rows_from_tensor_pairs( """Builds rows from named tensor pairs with one shared diff path.""" rows: list[MetricRow] = [] for name, reference, candidate in pairs: - shared_kwargs = { - "variant": variant, - "step_index": step_index, - "phase": phase, - "param": _minimal_param_name(name), - } + param_name = _minimal_param_name(name) reference_aligned = reference candidate_aligned = candidate aligned_candidate = _align_sequence_parallel( @@ -973,9 +968,12 @@ def _build_metric_rows_from_tensor_pairs( if aligned_candidate is None: rows.append( self._build_metric_row( + 
variant=variant, + step_index=step_index, + phase=phase, + param=param_name, summary=self._inf_summary(), structural_failure="shape mismatch", - **shared_kwargs, ) ) continue @@ -986,7 +984,11 @@ def _build_metric_rows_from_tensor_pairs( accumulator.update(reference_aligned, aligned_candidate) rows.append( self._build_metric_row( - summary=accumulator.as_summary(), **shared_kwargs + variant=variant, + step_index=step_index, + phase=phase, + param=param_name, + summary=accumulator.as_summary(), ) ) return rows @@ -1032,7 +1034,7 @@ def _build_metric_rows_from_tensor_maps( reference, candidate, variant, step_index, phase ) if not matching: - return rows + return rows if rows is not None else [] pairs = [ (key, reference[key], candidate[key]) for key in sorted(set(reference.keys())) diff --git a/tests/integration/megatron_oracle_worker.py b/tests/integration/megatron_oracle_worker.py index 91c7647d..a5a3ed66 100644 --- a/tests/integration/megatron_oracle_worker.py +++ b/tests/integration/megatron_oracle_worker.py @@ -201,17 +201,14 @@ def _build_optimizer_config(case_config: OracleCaseConfig): """Builds Megatron optimizer settings for deterministic harness runs.""" from megatron.core.optimizer import OptimizerConfig - optimizer_kwargs = dict( + return OptimizerConfig( + bf16=True, lr=case_config.learning_rate, adam_beta1=0.9, adam_beta2=0.99, clip_grad=0.1, weight_decay=0.1, ) - return OptimizerConfig( - bf16=True, - **optimizer_kwargs, - ) def _assert_runtime_configuration( diff --git a/tests/unit/test_moe_routing_replay.py b/tests/unit/test_moe_routing_replay.py index 980784c7..15d1ebc6 100644 --- a/tests/unit/test_moe_routing_replay.py +++ b/tests/unit/test_moe_routing_replay.py @@ -2,6 +2,7 @@ from pathlib import Path import tempfile +from typing import cast import pytest import torch @@ -165,7 +166,8 @@ def test_controller_patches_router_and_replays() -> None: controller.set_step(step_index=0, sample_index=0) logits = torch.randn((4, 3), dtype=torch.float32) - 
replay_probs, replay_map = chunk.decoder.layers[0].mlp.router.routing(logits) + router = cast(_FakeRouter, chunk.decoder.layers[0].mlp.router) + replay_probs, replay_map = router.routing(logits) expected_probs, expected_map = _dense_from_compact(route, dtype=logits.dtype) assert torch.equal(replay_map.cpu(), expected_map) From 83d871bcb75610518e224dd0253ad270aa623191 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Sat, 14 Mar 2026 08:20:41 +0000 Subject: [PATCH 11/28] megatron: reduce oracle variance with sequence grad accumulation Use per-step micro-accumulation over multiple packed sequences so updates are less sensitive to sparse expert token assignment. Also make backend progress accounting accumulation-aware. --- src/art/local/backend.py | 5 +- src/art/megatron/train.py | 109 ++++++++++++++++++++++++++------------ src/art/types.py | 1 + 3 files changed, 79 insertions(+), 36 deletions(-) diff --git a/src/art/local/backend.py b/src/art/local/backend.py index b74c0b05..f75a77a2 100644 --- a/src/art/local/backend.py +++ b/src/art/local/backend.py @@ -696,7 +696,10 @@ async def _train_model( ) # Note: scale_learning_rate_by_reward_std_dev is now handled by the frontend (Model.train()) results: list[dict[str, float]] = [] - estimated_gradient_steps = disk_packed_tensors["num_sequences"] + grad_accumulation_sequences = max(1, int(config.grad_accumulation_sequences)) + estimated_gradient_steps = math.ceil( + disk_packed_tensors["num_sequences"] / grad_accumulation_sequences + ) pbar = tqdm.tqdm(total=estimated_gradient_steps, desc="train") async for result in service.train( disk_packed_tensors, config, dev_config, verbose diff --git a/src/art/megatron/train.py b/src/art/megatron/train.py index 33dc8172..a66156bc 100644 --- a/src/art/megatron/train.py +++ b/src/art/megatron/train.py @@ -133,6 +133,7 @@ def _default_optimizer_config() -> OptimizerConfig: adam_beta2=0.99, clip_grad=0.1, weight_decay=0.1, + adam_eps=1e-13, ) @@ -336,63 +337,94 @@ def run_training_step( 
model_chunks: list[MegatronModule], optimizer: Any, learning_rate: float, - inputs: PackedTensors, + inputs: PackedTensors | list[PackedTensors], config: types.TrainConfig, experimental_config: dev.TrainConfig, ref_logprobs: torch.Tensor | None = None, step_index: int | None = None, - sample_index: int | None = None, + sample_index: int | list[int] | None = None, moe_routing_replay_controller: MoeRoutingReplayController | None = None, ) -> TrainStepResult: + micro_inputs = inputs if isinstance(inputs, list) else [inputs] + if not micro_inputs: + raise ValueError("run_training_step requires at least one packed sequence") + + if isinstance(sample_index, list): + if len(sample_index) != len(micro_inputs): + raise ValueError( + "sample_index list length must match number of micro inputs: " + f"{len(sample_index)} != {len(micro_inputs)}" + ) + micro_sample_indices = sample_index + elif sample_index is None: + micro_sample_indices = [0] * len(micro_inputs) + else: + micro_sample_indices = [sample_index] * len(micro_inputs) + if moe_routing_replay_controller is not None: - assert step_index is not None and sample_index is not None + assert step_index is not None moe_routing_replay_controller.set_step( step_index=step_index, - sample_index=sample_index, + sample_index=micro_sample_indices[0], ) device = next(model_chunks[0].parameters()).device - _move_inputs_to_device(inputs, device) - - attention_state = create_shared_prefix_attention_state( - group_ids=inputs["group_ids"], - parent_ids=inputs["parent_ids"], - ) - attention_mask = torch.zeros((1, 1, 1, 1), dtype=torch.bool, device=device) for chunk in model_chunks: chunk.zero_grad_buffer() # ty: ignore[call-non-callable] - new_logprobs: torch.Tensor = -model_chunks[0]( - input_ids=inputs["tokens"], - position_ids=inputs["input_pos"], - attention_mask=attention_mask, - labels=shift_tensor(inputs["tokens"], 0), - extra_block_kwargs={"attention_bias": attention_state}, - ) + micro_count = len(micro_inputs) + loss_sum: 
torch.Tensor | None = None + probs_corr_sum = 0.0 + new_logprobs: torch.Tensor | None = None + + for micro in micro_inputs: + _move_inputs_to_device(micro, device) + attention_state = create_shared_prefix_attention_state( + group_ids=micro["group_ids"], + parent_ids=micro["parent_ids"], + ) + attention_mask = torch.zeros((1, 1, 1, 1), dtype=torch.bool, device=device) + + new_logprobs = -model_chunks[0]( + input_ids=micro["tokens"], + position_ids=micro["input_pos"], + attention_mask=attention_mask, + labels=shift_tensor(micro["tokens"], 0), + extra_block_kwargs={"attention_bias": attention_state}, + ) + + loss_info = loss_fn( + micro, # ty: ignore[invalid-argument-type] + new_logprobs, + ref_logprobs, + None, + experimental_config, + ) + micro_loss = loss_info.mean_policy_loss + config.beta * loss_info.mean_kl + (micro_loss / micro_count).backward() + probs_corr_sum += float(loss_info.probs_corr.item()) + if loss_sum is None: + loss_sum = micro_loss.detach() + else: + loss_sum = loss_sum + micro_loss.detach() + + if new_logprobs is None or loss_sum is None: + raise RuntimeError("run_training_step did not produce outputs") - loss_info = loss_fn( - inputs, # ty: ignore[invalid-argument-type] - new_logprobs, - ref_logprobs, - None, - experimental_config, - ) - loss = loss_info.mean_policy_loss + config.beta * loss_info.mean_kl - loss.backward() _finalize_grads(model_chunks) update_successful, grad_norm, num_zeros_in_grad = _optimizer_step( optimizer, learning_rate, ) - reduced_loss = _reduce_loss(loss) + reduced_loss = _reduce_loss(loss_sum / micro_count) if moe_routing_replay_controller is not None: moe_routing_replay_controller.finalize_step() return TrainStepResult( reduced_loss=reduced_loss, - probs_corr=float(loss_info.probs_corr.item()), + probs_corr=probs_corr_sum / micro_count, new_logprobs=new_logprobs, update_successful=update_successful, grad_norm=grad_norm, @@ -475,18 +507,25 @@ def _run_service_loop(runtime: TrainingRuntime) -> None: repeat = 
math.ceil(num_indices / len(indices)) indices = (indices * repeat)[:num_indices] - for step_index, index in enumerate(indices): - inputs = select_indexed_inputs(packed_tensors, index) + grad_accumulation_sequences = max(1, int(config.grad_accumulation_sequences)) + for step_index, start in enumerate( + range(0, len(indices), grad_accumulation_sequences) + ): + micro_indices = indices[start : start + grad_accumulation_sequences] + micro_inputs = [ + select_indexed_inputs(packed_tensors, sample_index) + for sample_index in micro_indices + ] step_result = run_training_step( model_chunks=runtime.model, optimizer=runtime.optimizer, learning_rate=config.learning_rate, - inputs=inputs, + inputs=micro_inputs, config=config, experimental_config=experimental_config, ref_logprobs=None, step_index=step_index, - sample_index=index, + sample_index=micro_indices, moe_routing_replay_controller=runtime.moe_routing_replay_controller, ) print0( @@ -535,8 +574,8 @@ def _run_service_loop(runtime: TrainingRuntime) -> None: del packed_tensors del adapter_model - if "inputs" in locals(): - del inputs + if "micro_inputs" in locals(): + del micro_inputs gc.collect() torch.cuda.empty_cache() diff --git a/src/art/types.py b/src/art/types.py index 017f05c7..be31db23 100644 --- a/src/art/types.py +++ b/src/art/types.py @@ -18,6 +18,7 @@ class TrainConfig(pydantic.BaseModel): learning_rate: float = 5e-6 beta: float = 0.0 kl_penalty_coef: float = 0.0 + grad_accumulation_sequences: int = pydantic.Field(default=1, ge=1) class TrainSFTConfig(pydantic.BaseModel): From 84e2ea76d8811754d75ec7ca4eddd02897e3a78b Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Sat, 14 Mar 2026 08:21:17 +0000 Subject: [PATCH 12/28] megatron lora: fix TP/EP export participation rules Correct LoRA shard export behavior so non-zero TP ranks in EP/ETP topologies contribute when required, while still filtering replicated-only entries. 
--- src/art/megatron/lora.py | 53 +++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 30 deletions(-) diff --git a/src/art/megatron/lora.py b/src/art/megatron/lora.py index ca578e5f..247389c3 100644 --- a/src/art/megatron/lora.py +++ b/src/art/megatron/lora.py @@ -182,9 +182,9 @@ def num_local_experts(self) -> int: return self.A_T.shape[0] if self.A_T.ndim == 3 else 1 def _broadcast_if_replicated(self, param: torch.nn.Parameter) -> None: - if not getattr(param, "lora_tp_replicated", False): + if not param.lora_tp_replicated: return - domain = getattr(param, "lora_shard_domain") + domain = param.lora_shard_domain world_size = _get_shard_world_size(domain) if world_size <= 1: return @@ -252,14 +252,9 @@ def load_weights( self.load_weight(weight, into=into) def load_weight(self, weight: torch.Tensor, *, into: torch.nn.Parameter) -> None: - domain = getattr(into, "lora_shard_domain") - sharded = bool(getattr(into, "lora_tp_sharded")) - if sharded: - axis = getattr(into, "lora_tp_shard_dim") - if axis is None: - raise RuntimeError( - f"{self.adapter_model_prefix}: missing shard axis for sharded parameter" - ) + domain = into.lora_shard_domain + if into.lora_tp_sharded: + axis = into.lora_tp_shard_dim axis = _normalize_axis(axis, weight.ndim) world_size = _get_shard_world_size(domain) rank = _get_shard_rank(domain) @@ -283,37 +278,35 @@ def load_weight(self, weight: torch.Tensor, *, into: torch.nn.Parameter) -> None into.requires_grad = True def _should_export_parameter(self, param: torch.nn.Parameter) -> bool: + """ + Determine if the given LoRA param should be exported in the sharded LoRA state dict + (drop replicated ranks/params). + """ if self.num_local_experts > 1: # self is a MoE layer if ps.get_expert_data_parallel_rank() != 0: return False else: # self is a non-MoE layer - if ps.get_data_parallel_rank() != 0: - return False - # Non-MoE layers are replicated across expert-model-parallel ranks. 
- if ( - ps.get_expert_model_parallel_world_size() > 1 - and ps.get_expert_model_parallel_rank() != 0 - ): + # dp x cp rank 0 participates + if ps.get_data_parallel_rank(with_context_parallel=True) != 0: return False - if getattr(param, "lora_tp_sharded", False): - # this param is fully sharded, all shard ranks participate + # this param is fully sharded, all shard ranks participate + if param.lora_tp_sharded: return True - - domain = getattr(param, "lora_shard_domain") # param is replicated, tp rank 0 or etp rank 0 participates - return _get_shard_rank(domain) == 0 + return _get_shard_rank(param.lora_shard_domain) == 0 def _manifest_for_param(self, param: torch.nn.Parameter) -> dict[str, Any]: - domain = getattr(param, "lora_shard_domain") - sharded = bool(getattr(param, "lora_tp_sharded", False)) - shard_dim = getattr(param, "lora_tp_shard_dim", None) return { - "domain": domain, - "sharded": sharded, - "shard_dim": shard_dim, - "shard_world_size": _get_shard_world_size(domain) if sharded else 1, - "shard_rank": _get_shard_rank(domain) if sharded else 0, + "domain": param.lora_shard_domain, + "sharded": param.lora_tp_sharded, + "shard_dim": param.lora_tp_shard_dim, + "shard_world_size": _get_shard_world_size(param.lora_shard_domain) + if param.lora_tp_sharded + else 1, + "shard_rank": _get_shard_rank(param.lora_shard_domain) + if param.lora_tp_sharded + else 0, } def _lora_params(self) -> list[tuple[str, torch.nn.Parameter]]: From 0bc99194d162a8a803a1cb684a2d9d799722fa8e Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Sat, 14 Mar 2026 08:21:25 +0000 Subject: [PATCH 13/28] oracle trace: canonicalize MoE outputs across arbitrary topologies Move normalization logic into ForwardTraceCapture so saved traces are canonicalized toward world-size-1 semantics (expert row identity/order and ETP fc1 layout). 
--- tests/integration/megatron_forward_trace.py | 272 +++++++++++++++++++- 1 file changed, 268 insertions(+), 4 deletions(-) diff --git a/tests/integration/megatron_forward_trace.py b/tests/integration/megatron_forward_trace.py index 5da6f0d6..d3befd9f 100644 --- a/tests/integration/megatron_forward_trace.py +++ b/tests/integration/megatron_forward_trace.py @@ -20,6 +20,7 @@ ".mlp.experts.linear_fc2.lora", ) ROUTER_NAME_TOKEN = ".mlp.router" +PRIMARY_OUTPUT_CANONICAL_KEY = "primary_output__is_canonical" def _safe_int(value: Any, default: int = 0) -> int: @@ -221,6 +222,25 @@ def _infer_primary_output_merge_hint( if lora_hint is not None: return lora_hint + # Base MoE expert linears need expert-TP aware merge semantics. + # With etp>1: + # - FC1 (column-parallel) shards output features -> concat on feature dim. + # - FC2 (row-parallel) emits partial output contributions -> sum across ranks. + # With etp==1, keep the existing token-row concat behavior. + etp_world_size = _safe_ps_stat("get_expert_tensor_parallel_world_size", 1) + if ".mlp.experts.linear_fc1" in name and ".lora" not in name: + if etp_world_size > 1: + return { + "op": "concat", + "dim": -1, + "layout": "gate_up_rank_interleaved", + } + return {"op": "concat", "dim": 0} + if ".mlp.experts.linear_fc2" in name and ".lora" not in name: + if etp_world_size > 1: + return {"op": "sum"} + return {"op": "concat", "dim": 0} + gather_output = getattr(module, "gather_output", None) if isinstance(gather_output, bool) and not gather_output: return {"op": "concat", "dim": -1} @@ -287,6 +307,249 @@ def set_step(self, step_index: int) -> None: self.current_step_index = step_index self.current_step_trace = {} + @staticmethod + def _is_moe_expert_forward_module(module_name: str) -> bool: + """Returns whether one module emits MoE expert forward outputs.""" + if ".mlp.experts." 
not in module_name: + return False + if ".mlp.router" in module_name: + return False + return ".linear_fc1" in module_name or ".linear_fc2" in module_name + + @staticmethod + def _primary_output_merge_hint(call: dict[str, Any]) -> dict[str, Any] | None: + """Reads primary-output merge metadata from one call payload.""" + merge_hints = call.get("merge_hints") + if not isinstance(merge_hints, dict): + return None + primary_hint = merge_hints.get("primary_output") + if not isinstance(primary_hint, dict): + return None + return primary_hint + + @classmethod + def _lookup_call_by_index( + cls, + trace: dict[str, list[dict[str, Any]]], + module_name: str, + call_index: int, + ) -> dict[str, Any] | None: + """Finds one call entry by call-index with positional fallback.""" + calls = trace.get(module_name) + if calls is None: + return None + for call in calls: + if int(call.get("call_index", -1)) == call_index: + return call + if 0 <= call_index < len(calls): + return calls[call_index] + return None + + @staticmethod + def _router_module_name_for_expert_module(module_name: str) -> str | None: + """Maps one expert module name to its layer router module name.""" + for token in (".mlp.experts.linear_fc1", ".mlp.experts.linear_fc2"): + token_index = module_name.find(token) + if token_index != -1: + return f"{module_name[:token_index]}.mlp.router" + return None + + @classmethod + def _build_moe_row_identities( + cls, + *, + module_name: str, + call_index: int, + trace: dict[str, list[dict[str, Any]]], + row_splits: list[int] | None, + ) -> list[tuple[int, int, int]] | None: + """Builds stable `(expert_id, token_index, topk_slot)` identities for MoE rows.""" + router_module_name = cls._router_module_name_for_expert_module(module_name) + if router_module_name is None: + return None + router_call = cls._lookup_call_by_index(trace, router_module_name, call_index) + if router_call is None: + return None + router_topk_ids = router_call.get("router_topk_ids") + if not 
isinstance(router_topk_ids, torch.Tensor) or router_topk_ids.ndim != 2: + return None + token_splits_raw = router_call.get("router_topk_ids__row_splits") + if row_splits is None: + if isinstance(token_splits_raw, list): + row_splits = [ + int(v) * int(router_topk_ids.shape[1]) for v in token_splits_raw + ] + else: + row_splits = [int(router_topk_ids.numel())] + if isinstance(token_splits_raw, list): + token_splits = [int(v) for v in token_splits_raw] + else: + topk = int(router_topk_ids.shape[1]) + token_splits = [int(v) // topk for v in row_splits] + if len(row_splits) != len(token_splits): + return None + row_cursor = 0 + token_cursor = 0 + identities: list[tuple[int, int, int]] = [] + for row_count, token_count in zip(row_splits, token_splits): + local_ids = router_topk_ids[token_cursor : token_cursor + token_count] + token_cursor += token_count + local_identities: list[tuple[int, int, int]] = [] + max_expert = int(local_ids.max().item()) if local_ids.numel() > 0 else -1 + for expert_id in range(max_expert + 1): + expert_rows = (local_ids == expert_id).nonzero(as_tuple=False) + for token_offset, slot_index in expert_rows.tolist(): + local_identities.append( + (expert_id, token_cursor - token_count + token_offset, slot_index) + ) + if len(local_identities) != row_count: + return None + identities.extend(local_identities) + row_cursor += row_count + if row_cursor != sum(row_splits): + return None + return identities + + @classmethod + def _canonicalize_etp_fc1_feature_layout( + cls, + *, + module_name: str, + tensor: torch.Tensor, + call: dict[str, Any], + ) -> torch.Tensor: + """Normalizes expert-TP fc1 feature order to a topology-independent layout.""" + if ".mlp.experts.linear_fc1" not in module_name or ".lora" in module_name: + return tensor + if tensor.ndim != 2: + return tensor + primary_hint = cls._primary_output_merge_hint(call) + if not isinstance(primary_hint, dict): + return tensor + if primary_hint.get("layout") != "gate_up_rank_interleaved": + return 
tensor + rank_meta = call.get("rank_meta") + etp_world_size = None + if isinstance(rank_meta, list) and rank_meta: + first_meta = rank_meta[0] + if isinstance(first_meta, dict): + etp_world_size = first_meta.get("etp_world_size") + elif isinstance(rank_meta, dict): + etp_world_size = rank_meta.get("etp_world_size") + if not isinstance(etp_world_size, int) or etp_world_size <= 1: + return tensor + block_count = 2 * etp_world_size + if tensor.shape[1] % block_count != 0: + return tensor + blocks = torch.chunk(tensor, block_count, dim=1) + reordered = [blocks[index] for index in range(0, block_count, 2)] + [ + blocks[index] for index in range(1, block_count, 2) + ] + return torch.cat(reordered, dim=1).contiguous() + + @classmethod + def _canonicalize_moe_expert_row_order( + cls, + *, + module_name: str, + call_index: int, + tensor: torch.Tensor, + trace: dict[str, list[dict[str, Any]]], + call: dict[str, Any], + ) -> torch.Tensor: + """Canonicalizes MoE expert-row ordering using router replay identities.""" + if not cls._is_moe_expert_forward_module(module_name): + return tensor + if tensor.ndim != 2: + return tensor + primary_hint = cls._primary_output_merge_hint(call) + if isinstance(primary_hint, dict) and ( + primary_hint.get("op") != "concat" or primary_hint.get("dim") != 0 + ): + return tensor + row_splits_raw = call.get("primary_output__row_splits") + row_splits = ( + [int(v) for v in row_splits_raw] if isinstance(row_splits_raw, list) else None + ) + identities = cls._build_moe_row_identities( + module_name=module_name, + call_index=call_index, + trace=trace, + row_splits=row_splits, + ) + if identities is None or len(identities) != int(tensor.shape[0]): + return tensor + order = sorted(range(len(identities)), key=lambda index: identities[index]) + return tensor[order] + + @classmethod + def _canonicalize_primary_output_tensor( + cls, + *, + module_name: str, + call_index: int, + tensor: torch.Tensor, + trace: dict[str, list[dict[str, Any]]], + call: dict[str, 
Any], + ) -> torch.Tensor: + """Runs all primary-output canonicalization passes for one call tensor.""" + tensor = cls._canonicalize_etp_fc1_feature_layout( + module_name=module_name, + tensor=tensor, + call=call, + ) + return cls._canonicalize_moe_expert_row_order( + module_name=module_name, + call_index=call_index, + tensor=tensor, + trace=trace, + call=call, + ) + + @classmethod + def canonicalize_trace( + cls, + trace: dict[str, list[dict[str, Any]]], + ) -> dict[str, list[dict[str, Any]]]: + """Canonicalizes topology-dependent trace outputs in place.""" + for module_name in sorted(trace.keys()): + calls = trace[module_name] + for call_offset, call in enumerate(calls): + if bool(call.get(PRIMARY_OUTPUT_CANONICAL_KEY)): + continue + call_index = int(call.get("call_index", call_offset)) + tensor = call.get("primary_output") + if isinstance(tensor, torch.Tensor): + call["primary_output"] = cls._canonicalize_primary_output_tensor( + module_name=module_name, + call_index=call_index, + tensor=tensor, + trace=trace, + call=call, + ) + call[PRIMARY_OUTPUT_CANONICAL_KEY] = True + return trace + + @classmethod + def flatten_trace_tensors( + cls, + trace: dict[str, list[dict[str, Any]]], + *, + value_key: str, + ) -> dict[str, Any]: + """Flattens trace calls into deterministic key->value tensor maps.""" + if value_key == "primary_output": + cls.canonicalize_trace(trace) + flattened: dict[str, Any] = {} + for module_name in sorted(trace.keys()): + for call_offset, call in enumerate(trace[module_name]): + tensor = call.get(value_key) + if tensor is None: + continue + call_index = call.get("call_index", call_offset) + flattened[f"{module_name}.call_{call_index}"] = tensor + return flattened + @classmethod def _merge_rank_values( cls, @@ -478,15 +741,16 @@ def save_current_step(self, traces_dir: Path) -> Path | None: gathered_traces = self._gather_rank_traces(self.current_step_trace) if gathered_traces is None: return None - merged_trace = 
self._merge_rank_traces(gathered_traces) + merged_trace = self.canonicalize_trace(self._merge_rank_traces(gathered_traces)) traces_dir.mkdir(parents=True, exist_ok=True) trace_path = traces_dir / f"forward_trace_step_{self.current_step_index:03d}.pt" torch.save(merged_trace, trace_path) return trace_path - @staticmethod - def load_trace(trace_path: Path) -> dict[str, list[dict[str, Any]]]: - return torch.load(trace_path, map_location="cpu", weights_only=False) + @classmethod + def load_trace(cls, trace_path: Path) -> dict[str, list[dict[str, Any]]]: + trace = torch.load(trace_path, map_location="cpu", weights_only=False) + return cls.canonicalize_trace(trace) def close(self) -> None: for handle in self._hook_handles: From 8370c7d912a182e892ca092770f3de3d2dfdb81c Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Sat, 14 Mar 2026 08:21:31 +0000 Subject: [PATCH 14/28] oracle harness: stabilize scoring and expand sensitivity mutations Rework oracle pass/fail evaluation with per-phase functions, layer-averaged metrics, deterministic init, expanded sensitivity mutations, and smaller Adam epsilon for tiny-gradient regimes. 
--- tests/integration/megatron_oracle_harness.py | 499 +++++++++---------- tests/integration/megatron_oracle_worker.py | 299 +++++++++-- 2 files changed, 518 insertions(+), 280 deletions(-) diff --git a/tests/integration/megatron_oracle_harness.py b/tests/integration/megatron_oracle_harness.py index 4e6567f9..785e8618 100644 --- a/tests/integration/megatron_oracle_harness.py +++ b/tests/integration/megatron_oracle_harness.py @@ -8,7 +8,7 @@ from pathlib import Path import re import shutil -from typing import Any, Literal, TypeVar, cast +from typing import Any, Callable, Literal, TypeVar, cast from pydantic import BaseModel, ConfigDict, Field from rich import box @@ -23,12 +23,21 @@ ORACLE_MOE_ROUTING_BUNDLE_DIRNAME = "oracle_moe_routing_replay" ORACLE_REPLAY_TOPOLOGY_SUFFIX = "oracle_replay" -REGENERATE_ENV = "ART_REGENERATE_MEGATRON_ORACLE" -EXTENDED_TOPOLOGIES_ENV = "ART_MEGATRON_ORACLE_ENABLE_EXTENDED_TOPOLOGIES" -SENSITIVITY_MUTATION_ENV = "ART_MEGATRON_ORACLE_MUTATION" - -DEFAULT_SENSITIVITY_MUTATION = "drop_finalize" -SUPPORTED_SENSITIVITY_MUTATIONS = (DEFAULT_SENSITIVITY_MUTATION,) +REGENERATE_ENV = "ART_REGENERATE_ORACLE" +EXTENDED_TOPOLOGIES_ENV = "ART_ENABLE_EXTENDED_TOPOLOGIES" +SENSITIVITY_MUTATION_ENV = "ART_SENSITIVITY_MUTATIONS" + +DEFAULT_SENSITIVITY_MUTATION = "skip_finalize" +SUPPORTED_SENSITIVITY_MUTATIONS = ( + DEFAULT_SENSITIVITY_MUTATION, + "fwd_skip_o_proj_tp_reduce", + "fwd_o_proj_tp_reduce_avg_not_sum", + "bwd_skip_sync_qkv_a", + "bwd_skip_sync_o_proj_b", + "bwd_skip_sync_fc1_a", + "save_drop_nonzero_ranked_tp_shards", + "save_duplicate_replicated_entries", +) SensitivityMutation = str REQUIRED_PACKED_TENSOR_FILES = ( @@ -44,9 +53,10 @@ NON_FINITE_METRIC_VALUE = 1e30 EXPERT_TABLE_ROW_LIMIT = 8 EXPERT_TRIPLET_PARAM_RE = re.compile( - r"layers\.(?P\d+)\.mlp\.experts\.(?P\d+)\." + r"layers\.(?P\d+|__layer_avg__)\.mlp\.experts\.(?P\d+)\." r"(?Pgate_proj|up_proj|down_proj)\." 
) +LAYER_INDEX_RE = re.compile(r"layers\.(\d+)\.") PHASE_PRINT_ORDER = { "forward": 0, "router_scores": 1, @@ -85,6 +95,7 @@ def resolved_expert_dp(self) -> int: return attention_world // expert_divisor def slug(self) -> str: + """Builds a deterministic topology identifier used for output directories.""" return ( f"tp{self.tp}_ep{self.ep}_etp{self.etp}" f"_dp{self.dp}_edp{self.resolved_expert_dp()}" @@ -103,7 +114,7 @@ def world_size(self) -> int: class PackedTensorConfig(BaseModel): """Controls synthetic packed tensor generation used by oracle harness runs.""" - num_sequences: int = 8 + num_sequences: int = 4 sequence_length: int = 256 prefill_tokens: int = 64 decode_tokens: int = 64 @@ -128,11 +139,30 @@ class LoraConfig(BaseModel): ) -class ToleranceProfile(BaseModel): - """Defines row-level pass/fail thresholds for variant comparison phases.""" +MetricSummary = dict[str, float] +PhasePassFn = Callable[[MetricSummary], bool] - relative_l2: float = 1e-2 - mean_abs_pct: float = 1.0 + +class MetricThresholdRule(BaseModel): + """Callable row pass rule that AND-checks configured metric upper bounds.""" + + limits: dict[str, float] = Field(default_factory=dict) + + def failure_reasons(self, summary: MetricSummary) -> list[str]: + """Builds readable failure reasons for this threshold rule.""" + reasons: list[str] = [] + for key, limit in sorted(self.limits.items()): + value = summary.get(key) + if not isinstance(value, (int, float)): + reasons.append(f"{key}=missing") + continue + if float(value) > float(limit): + reasons.append(f"{key}={float(value):.6g}>{float(limit):.6g}") + return reasons + + def __call__(self, summary: MetricSummary) -> bool: + """Evaluates whether the summary satisfies all configured bounds.""" + return len(self.failure_reasons(summary)) == 0 class OracleCaseConfig(BaseModel): @@ -141,13 +171,13 @@ class OracleCaseConfig(BaseModel): base_model: str num_layers: int = 4 seed: int = 20260305 - num_steps: int = 2 - learning_rate: float = 1e-3 + 
num_steps: int = 1 + grad_accumulation_sequences: int = Field(default=4, ge=1) + learning_rate: float = 5e-6 beta: float = 0.0 - loss_scale: float = 1e4 + loss_scale: float = 1 packed_tensors: PackedTensorConfig = Field(default_factory=PackedTensorConfig) lora: LoraConfig = Field(default_factory=LoraConfig) - tolerances: ToleranceProfile = Field(default_factory=ToleranceProfile) class DiskPackedTensorsSpec(BaseModel): @@ -207,7 +237,6 @@ class RunManifest(BaseModel): seed: int num_steps: int packed_tensors: DiskPackedTensorsSpec - tolerances: ToleranceProfile steps: list[StepTrace] @@ -228,7 +257,6 @@ class MetricRow(BaseModel): mean_abs_pct: float topk_mismatch_fraction: float | None = None top1_mismatch_fraction: float | None = None - thresholds: dict[str, float] = Field(default_factory=dict) pass_signal: bool = True failure_reasons: list[str] = Field(default_factory=list) @@ -238,18 +266,25 @@ class VariantSpec(BaseModel): name: str topology: Topology - thresholds_by_phase: dict[str, dict[str, float]] + pass_fn_by_phase: dict[str, PhasePassFn] = Field( + default_factory=dict, + repr=False, + exclude=True, + ) output_slug: str | None = None reference_slug: str | None = None mutation: SensitivityMutation | None = None expected_signal: Literal["pass", "fail"] = "pass" + force_regenerate: bool = True def resolved_output_slug(self) -> str: + """Resolves the artifact slug for this run, including mutation suffix when present.""" if self.output_slug is not None: return self.output_slug return _topology_output_slug(self.topology, self.mutation) def resolved_reference_slug(self) -> str: + """Resolves which topology slug should be treated as the comparison oracle.""" if self.reference_slug is not None: return self.reference_slug return ORACLE_TOPOLOGY.slug() @@ -266,8 +301,8 @@ class VariantReport(BaseModel): signal: Literal["pass", "fail"] pass_count: int fail_count: int - step_summaries: dict[int, dict[str, Any]] - metrics: list[MetricRow] + step_summaries: dict[int, 
dict[str, Any]] = Field(repr=False) + metrics: list[MetricRow] = Field(repr=False) class DiffAccumulator: @@ -297,6 +332,20 @@ def update(self, reference, candidate) -> None: # type: ignore[no-untyped-def] self.ref_sq_sum += float(ref.square().sum().item()) self.ref_abs_sum += float(ref.abs().sum().item()) + @staticmethod + def layer_averaged_summary(reference_stack, candidate_stack) -> dict[str, float]: # type: ignore[no-untyped-def] + """Computes normal per-layer summaries, then averages those summaries.""" + ref = reference_stack.detach().float() + cand = candidate_stack.detach().float() + layer_count = int(ref.shape[0]) + metrics = {k: 0.0 for k in ["numel", "mean_abs_diff", "relative_l2", "typical_abs_scale", "mean_abs_pct"]} + for layer_index in range(layer_count): + layer_accumulator = DiffAccumulator() + layer_accumulator.update(ref[layer_index], cand[layer_index]) + layer_summary = layer_accumulator.as_summary() + metrics = {k: metrics[k] + layer_summary[k] for k in metrics.keys()} + return {k: _finite_metric(metrics[k] / layer_count) for k in metrics.keys()} + def update_router_ids(self, reference_ids, candidate_ids) -> None: # type: ignore[no-untyped-def] """Adds router top-k id mismatch counts into the accumulator.""" self.router_topk_total += int(reference_ids.numel()) @@ -353,6 +402,7 @@ def as_summary(self) -> dict[str, float]: def _require_not_none(value: T | None, name: str) -> T: + """Asserts non-None values for required artifacts and raises a named runtime error.""" if value is None: raise RuntimeError(f"{name} is None") return value @@ -361,18 +411,23 @@ def _require_not_none(value: T | None, name: str) -> T: TOPOLOGIES = [ Topology(tp=1, ep=1, etp=1, dp=1, sp=False), Topology(tp=2, ep=1, etp=1, dp=1, sp=True), - Topology(tp=1, ep=2, etp=1, dp=2, sp=False), - Topology(tp=2, ep=2, etp=1, dp=2, sp=True), + Topology(tp=2, ep=2, etp=1, dp=1, sp=True), + Topology(tp=2, ep=1, etp=2, dp=1, sp=True), ] EXTENDED_TOPOLOGIES = [ Topology(tp=1, ep=1, etp=1, 
dp=2, sp=False), - Topology(tp=2, ep=1, etp=1, dp=2, sp=True), + Topology(tp=1, ep=2, etp=1, dp=2, sp=True), ] ORACLE_TOPOLOGY = TOPOLOGIES[0] -SENSITIVITY_TOPOLOGY = TOPOLOGIES[1] +SENSITIVITY_TOPOLOGY = Topology(tp=2, ep=2, etp=1, dp=1, sp=True) +SENSITIVITY_TOPOLOGY_BY_MUTATION: dict[SensitivityMutation, Topology] = { + mutation: SENSITIVITY_TOPOLOGY for mutation in SUPPORTED_SENSITIVITY_MUTATIONS +} +SENSITIVITY_TOPOLOGY_BY_MUTATION["bwd_skip_sync_fc1_a"] = Topology(tp=2, ep=1, etp=2, dp=1, sp=True) def _truthy(value: str | None) -> bool: + """Parses env-var style booleans using a small accepted truthy set.""" if value is None: return False return value.strip().lower() in {"1", "true", "yes", "on"} @@ -384,6 +439,8 @@ def sensitivity_mutations() -> list[SensitivityMutation]: if raw is None or raw.strip() == "": return [] normalized = raw.strip().lower() + if normalized == "all": + return list(SUPPORTED_SENSITIVITY_MUTATIONS) if normalized in {"1", "true", "yes", "on"}: return [DEFAULT_SENSITIVITY_MUTATION] mutations = [item.strip().lower() for item in raw.split(",") if item.strip()] @@ -397,20 +454,35 @@ def sensitivity_mutations() -> list[SensitivityMutation]: supported = ", ".join(SUPPORTED_SENSITIVITY_MUTATIONS) raise ValueError( f"Unsupported {SENSITIVITY_MUTATION_ENV} value '{raw}'. " - f"Supported values: {supported}, CSV of supported values, 1/true/yes/on." + f"Supported values: {supported}, CSV of supported values, all, 1/true/yes/on." 
) def sensitivity_enabled() -> bool: + """Returns whether any sensitivity mutation has been requested via environment.""" return bool(sensitivity_mutations()) +def sensitivity_topology_for_mutation(mutation: SensitivityMutation) -> Topology: + """Returns the sensitivity topology required for one mutation.""" + return SENSITIVITY_TOPOLOGY_BY_MUTATION[mutation] + + +def sensitivity_required_world_size(mutations: list[SensitivityMutation]) -> int: + """Returns the max world-size required by a selected mutation set.""" + return max( + sensitivity_topology_for_mutation(mutation).world_size() + for mutation in mutations + ) + + def extended_topologies_enabled() -> bool: """Returns whether extended topologies are enabled for the suite.""" return _truthy(os.environ.get(EXTENDED_TOPOLOGIES_ENV)) def regenerate_requested() -> bool: + """Returns whether regeneration mode is enabled for oracle artifacts.""" return _truthy(os.environ.get(REGENERATE_ENV)) @@ -422,6 +494,7 @@ def case_config( def available_gpu_count() -> int: + """Reports visible CUDA device count for topology scheduling and test skips.""" import torch return int(torch.cuda.device_count()) @@ -442,12 +515,14 @@ def stable_case_id(case_config: OracleCaseConfig) -> str: def _write_json(path: Path, payload: Any) -> None: + """Writes canonical pretty JSON to disk, creating parent directories as needed.""" path.parent.mkdir(parents=True, exist_ok=True) with path.open("w", encoding="utf-8") as handle: json.dump(payload, handle, indent=2, sort_keys=True, allow_nan=False) def _read_json(path: Path) -> dict[str, Any]: + """Loads a JSON object from disk.""" with path.open("r", encoding="utf-8") as handle: return json.load(handle) @@ -612,128 +687,6 @@ def _align_sequence_parallel(reference, candidate): # type: ignore[no-untyped-d return None -def _is_moe_base_forward_param(name: str) -> bool: - """Returns whether this forward param is a base MoE expert internal tensor.""" - if ".mlp.experts." 
not in name: - return False - if any(token in name for token in (".router", ".gate_lora", ".up_lora", ".lora")): - return False - return ".linear_fc1" in name or ".linear_fc2" in name - - -def _lookup_call_by_index( - trace: dict[str, list[dict[str, Any]]], - module_name: str, - call_index: int, -) -> dict[str, Any] | None: - calls = trace.get(module_name) - if calls is None: - return None - for call in calls: - if int(call.get("call_index", -1)) == call_index: - return call - if 0 <= call_index < len(calls): - return calls[call_index] - return None - - -def _router_module_name_for_expert_module(module_name: str) -> str | None: - if ".mlp.experts.linear_fc1" in module_name: - return module_name.replace(".mlp.experts.linear_fc1", ".mlp.router") - if ".mlp.experts.linear_fc2" in module_name: - return module_name.replace(".mlp.experts.linear_fc2", ".mlp.router") - return None - - -def _build_moe_row_identities( - *, - module_name: str, - call_index: int, - trace: dict[str, list[dict[str, Any]]], - row_splits: list[int] | None, -) -> list[tuple[int, int, int]] | None: - router_module_name = _router_module_name_for_expert_module(module_name) - if router_module_name is None: - return None - router_call = _lookup_call_by_index(trace, router_module_name, call_index) - if router_call is None: - return None - router_topk_ids = router_call.get("router_topk_ids") - if not isinstance(router_topk_ids, torch.Tensor) or router_topk_ids.ndim != 2: - return None - token_splits_raw = router_call.get("router_topk_ids__row_splits") - if row_splits is None: - if isinstance(token_splits_raw, list): - row_splits = [ - int(v) * int(router_topk_ids.shape[1]) for v in token_splits_raw - ] - else: - row_splits = [int(router_topk_ids.numel())] - if isinstance(token_splits_raw, list): - token_splits = [int(v) for v in token_splits_raw] - else: - topk = int(router_topk_ids.shape[1]) - token_splits = [int(v) // topk for v in row_splits] - if len(row_splits) != len(token_splits): - return None - 
row_cursor = 0 - token_cursor = 0 - identities: list[tuple[int, int, int]] = [] - for row_count, token_count in zip(row_splits, token_splits): - local_ids = router_topk_ids[token_cursor : token_cursor + token_count] - token_cursor += token_count - local_identities: list[tuple[int, int, int]] = [] - max_expert = int(local_ids.max().item()) if local_ids.numel() > 0 else -1 - for expert_id in range(max_expert + 1): - expert_rows = (local_ids == expert_id).nonzero(as_tuple=False) - for token_offset, slot_index in expert_rows.tolist(): - local_identities.append( - (expert_id, token_cursor - token_count + token_offset, slot_index) - ) - if len(local_identities) != row_count: - return None - identities.extend(local_identities) - row_cursor += row_count - if row_cursor != sum(row_splits): - return None - return identities - - -def _canonicalize_moe_base_forward_tensor( - *, - module_name: str, - call_index: int, - tensor: torch.Tensor, - trace: dict[str, list[dict[str, Any]]], - call: dict[str, Any], -) -> torch.Tensor: - if not _is_moe_base_forward_param(module_name): - return tensor - if tensor.ndim != 2: - return tensor - row_splits_raw = call.get("primary_output__row_splits") - row_splits = ( - [int(v) for v in row_splits_raw] if isinstance(row_splits_raw, list) else None - ) - identities = _build_moe_row_identities( - module_name=module_name, - call_index=call_index, - trace=trace, - row_splits=row_splits, - ) - if identities is None or len(identities) != int(tensor.shape[0]): - return tensor - order = sorted(range(len(identities)), key=lambda index: identities[index]) - return tensor[order] - - -def _minimal_param_name(name: str) -> str: - """Returns a shorter but 1:1 param/module identifier for report readability.""" - return name.removeprefix("base_model.model.model.").replace( - "module.module.decoder.", "" - ) - - def _load_forward_trace( topology_dir: Path, step_index: int ) -> dict[str, list[dict[str, Any]]]: @@ -742,13 +695,6 @@ def _load_forward_trace( return 
ForwardTraceCapture.load_trace(trace_path) -def _threshold_string(thresholds: dict[str, float]) -> str: - """Formats threshold dicts into compact table cells.""" - if not thresholds: - return "-" - return ", ".join(f"{key}<={value:.3g}" for key, value in sorted(thresholds.items())) - - def _finite_metric(value: float, *, default: float = NON_FINITE_METRIC_VALUE) -> float: """Maps NaN/Inf metric values to a large finite sentinel for JSON-safe reports.""" value_f = float(value) @@ -759,12 +705,50 @@ def _finite_metric(value: float, *, default: float = NON_FINITE_METRIC_VALUE) -> return value_f -def _triplet_expert_key(param: str) -> tuple[int, int] | None: - """Returns (layer, expert_id) for expert up/gate/down params.""" +def _triplet_expert_key(param: str) -> tuple[str, int] | None: + """Returns (projection, expert_id) for expert gate/up/down params.""" match = EXPERT_TRIPLET_PARAM_RE.search(param) if match is None: return None - return int(match.group("layer")), int(match.group("expert")) + return match.group("proj"), int(match.group("expert")) + + +def _layer_agnostic_param_key(param: str) -> str | None: + """Normalizes one parameter name by stripping the explicit layer index.""" + if LAYER_INDEX_RE.search(param) is None: + return None + return LAYER_INDEX_RE.sub("layers.__layer_avg__.", param, count=1) + + +def _stacked_layers( + pairs: list[tuple[str, Any, Any]], +) -> list[tuple[str, Any, Any]]: + """Builds layer-stacked tensor pairs keyed without explicit layer index.""" + import torch + + grouped: dict[str, list[tuple[Any, Any]]] = {} + for name, reference, candidate in pairs: + normalized = _layer_agnostic_param_key(name) + if normalized is None: + raise RuntimeError( + "Expected all compared params to include a layer index, " + f"got '{name}'." 
+ ) + grouped.setdefault(normalized, []).append( + (reference.detach().float(), candidate.detach().float()) + ) + + stacked_pairs: list[tuple[str, Any, Any]] = [] + for normalized in sorted(grouped): + group = grouped[normalized] + stacked_pairs.append( + ( + normalized, + torch.stack([reference for reference, _ in group], dim=0), + torch.stack([candidate for _, candidate in group], dim=0), + ) + ) + return stacked_pairs class VariantRunner: @@ -806,9 +790,10 @@ def _run_topology( if manifest_path.exists() and not regenerate: return topology_dir _replace_topology_dir(topology_dir) + run_case_config = self.case_config request = WorkerRunRequest( case_id=self.case_id, - case_config=self.case_config, + case_config=run_case_config, topology=topology, topology_dir=str(topology_dir), packed_tensors=self.case_artifacts.packed_tensors, @@ -878,28 +863,33 @@ def ensure_variant_artifacts( mutation=variant.mutation, replay_bundle_dir=self.oracle_routing_bundle_dir, capture_bundle_dir=None, - regenerate=True, + regenerate=variant.force_regenerate, ) @staticmethod - def _apply_thresholds(row: MetricRow, thresholds: dict[str, float]) -> None: - """Evaluates row thresholds using AND semantics over all configured keys.""" - row.thresholds = dict(thresholds) - if not thresholds: + def _apply_phase_pass( + *, + row: MetricRow, + phase: str, + summary: MetricSummary, + pass_fn_by_phase: dict[str, PhasePassFn], + ) -> None: + """Evaluates a per-phase pass function against one summary payload.""" + pass_fn = pass_fn_by_phase.get(phase) + if pass_fn is None: row.pass_signal = True row.failure_reasons = [] return - payload = row.model_dump(mode="python") - reasons: list[str] = [] - for key, limit in sorted(thresholds.items()): - value = payload.get(key) - if not isinstance(value, (int, float)): - reasons.append(f"{key}=missing") - continue - if float(value) > float(limit): - reasons.append(f"{key}={float(value):.6g}>{float(limit):.6g}") - row.pass_signal = len(reasons) == 0 - 
row.failure_reasons = reasons + row.pass_signal = bool(pass_fn(summary)) + if row.pass_signal: + row.failure_reasons = [] + return + explain = getattr(pass_fn, "failure_reasons", None) + if callable(explain): + reasons = explain(summary) + row.failure_reasons = reasons if reasons else ["phase pass function returned false"] + return + row.failure_reasons = ["phase pass function returned false"] @staticmethod def _inf_summary() -> dict[str, float]: @@ -924,7 +914,7 @@ def _build_metric_row( summary: dict[str, float], structural_failure: str | None = None, ) -> MetricRow: - """Builds one metric row and applies per-phase thresholds.""" + """Builds one metric row and applies per-phase pass evaluation.""" row = MetricRow( case_id=self.case_id, variant=variant.name, @@ -941,7 +931,12 @@ def _build_metric_row( topk_mismatch_fraction=summary.get("topk_mismatch_fraction"), top1_mismatch_fraction=summary.get("top1_mismatch_fraction"), ) - self._apply_thresholds(row, variant.thresholds_by_phase.get(phase, {})) + self._apply_phase_pass( + row=row, + phase=phase, + summary=summary, + pass_fn_by_phase=variant.pass_fn_by_phase, + ) if structural_failure is not None: row.pass_signal = False row.failure_reasons = [structural_failure, *row.failure_reasons] @@ -955,11 +950,11 @@ def _build_metric_rows_from_tensor_pairs( phase: str, pairs: list[tuple[str, Any, Any]], router_ids: bool = False, + layer_averaged: bool = False, ) -> list[MetricRow]: """Builds rows from named tensor pairs with one shared diff path.""" rows: list[MetricRow] = [] for name, reference, candidate in pairs: - param_name = _minimal_param_name(name) reference_aligned = reference candidate_aligned = candidate aligned_candidate = _align_sequence_parallel( @@ -971,24 +966,30 @@ def _build_metric_rows_from_tensor_pairs( variant=variant, step_index=step_index, phase=phase, - param=param_name, + param=name, summary=self._inf_summary(), structural_failure="shape mismatch", ) ) continue - accumulator = DiffAccumulator() + 
summary: dict[str, float] if router_ids: + accumulator = DiffAccumulator() accumulator.update_router_ids(reference_aligned, aligned_candidate) + summary = accumulator.as_summary() + elif layer_averaged: + summary = DiffAccumulator.layer_averaged_summary(reference_aligned, aligned_candidate) else: + accumulator = DiffAccumulator() accumulator.update(reference_aligned, aligned_candidate) + summary = accumulator.as_summary() rows.append( self._build_metric_row( variant=variant, step_index=step_index, phase=phase, - param=param_name, - summary=accumulator.as_summary(), + param=name, + summary=summary, ) ) return rows @@ -1039,39 +1040,17 @@ def _build_metric_rows_from_tensor_maps( (key, reference[key], candidate[key]) for key in sorted(set(reference.keys())) ] + if phase in {"forward", "grads", "deltas"}: + pairs = _stacked_layers(pairs) return self._build_metric_rows_from_tensor_pairs( variant=variant, step_index=step_index, phase=phase, pairs=pairs, router_ids=router_ids, + layer_averaged=phase in {"forward", "grads", "deltas"}, ) - @staticmethod - def _flatten_forward_trace_tensors( - trace: dict[str, list[dict[str, Any]]], - *, - value_key: str, - ) -> dict[str, Any]: - """Flattens per-module forward trace calls into a deterministic tensor map.""" - flattened: dict[str, Any] = {} - for module_name in sorted(trace.keys()): - for call_offset, call in enumerate(trace[module_name]): - tensor = call.get(value_key) - if tensor is None: - continue - call_index = call.get("call_index", call_offset) - if value_key == "primary_output" and isinstance(tensor, torch.Tensor): - tensor = _canonicalize_moe_base_forward_tensor( - module_name=module_name, - call_index=int(call_index), - tensor=tensor, - trace=trace, - call=call, - ) - flattened[f"{module_name}.call_{call_index}"] = tensor - return flattened - @staticmethod def _build_step_summaries(rows: list[MetricRow]) -> dict[int, dict[str, Any]]: """Builds step-indexed payloads directly from row model dumps.""" @@ -1142,11 
+1121,11 @@ def compare_variant(self, variant: VariantSpec) -> VariantReport: *[ ( phase, - self._flatten_forward_trace_tensors( + ForwardTraceCapture.flatten_trace_tensors( reference_trace, value_key=value_key, ), - self._flatten_forward_trace_tensors( + ForwardTraceCapture.flatten_trace_tensors( topology_trace, value_key=value_key, ), @@ -1187,7 +1166,12 @@ def compare_variant(self, variant: VariantSpec) -> VariantReport: ) @staticmethod - def assert_expected_signal(report: VariantReport, context: str) -> None: + def assert_expected_signal( + report: VariantReport, + context: str, + *, + report_path: Path, + ) -> None: """Raises when observed run signal diverges from variant expectation.""" if report.signal == report.expected_signal: return @@ -1196,11 +1180,13 @@ def assert_expected_signal(report: VariantReport, context: str) -> None: raise AssertionError( f"{context}: topology={report.topology} phase={first_failure.phase} " f"step={first_failure.step_index} param={first_failure.param} " - f"reasons={'; '.join(first_failure.failure_reasons)}" + f"reasons={'; '.join(first_failure.failure_reasons)} " + f"report={report_path}" ) raise AssertionError( f"{context}: expected_signal={report.expected_signal} " - f"observed_signal={report.signal} topology={report.topology}" + f"observed_signal={report.signal} topology={report.topology} " + f"report={report_path}" ) def _write_variant_report(self, topology_dir: Path, report: VariantReport) -> None: @@ -1210,9 +1196,9 @@ def _write_variant_report(self, topology_dir: Path, report: VariantReport) -> No ) def print_report(self, report: VariantReport) -> None: - """Prints a row-level table with expert rows subsampled by highest relative_l2.""" + """Prints a row-level table with expert rows subsampled by highest mean_abs_pct.""" non_expert_rows: list[MetricRow] = [] - triplet_rows: list[tuple[tuple[int, int], MetricRow]] = [] + triplet_rows: list[tuple[tuple[str, int], MetricRow]] = [] for row in report.metrics: expert_key = 
_triplet_expert_key(row.param) if expert_key is None: @@ -1220,22 +1206,22 @@ def print_report(self, report: VariantReport) -> None: continue triplet_rows.append((expert_key, row)) - scores_by_layer: dict[int, dict[int, float]] = {} - for (layer, expert_id), row in triplet_rows: - layer_scores = scores_by_layer.setdefault(layer, {}) - layer_scores[expert_id] = max( - layer_scores.get(expert_id, float("-inf")), row.relative_l2 + scores_by_proj: dict[str, dict[int, float]] = {} + for (projection, expert_id), row in triplet_rows: + projection_scores = scores_by_proj.setdefault(projection, {}) + projection_scores[expert_id] = max( + projection_scores.get(expert_id, float("-inf")), row.mean_abs_pct ) - selected_experts: set[tuple[int, int]] = set() - for layer, expert_scores in scores_by_layer.items(): + selected_experts: set[tuple[str, int]] = set() + for projection, expert_scores in scores_by_proj.items(): top_experts = sorted( expert_scores.items(), key=lambda item: item[1], reverse=True, )[:EXPERT_TABLE_ROW_LIMIT] for expert_id, _score in top_experts: - selected_experts.add((layer, expert_id)) + selected_experts.add((projection, expert_id)) selected_triplet_rows = [ row for expert_key, row in triplet_rows if expert_key in selected_experts @@ -1244,20 +1230,20 @@ def print_report(self, report: VariantReport) -> None: detail_table = Table( title=( f"Variant Report | variant={report.variant} " - f"| topology={report.topology} | signal={report.signal} " f"| selected_experts={len(selected_experts)} " - f"(top {EXPERT_TABLE_ROW_LIMIT} per layer)" + f"(top {EXPERT_TABLE_ROW_LIMIT} per projection by mean_abs_pct)" ), box=box.SIMPLE_HEAVY, show_lines=False, ) detail_table.add_column("Step", justify="right") detail_table.add_column("Phase", style="cyan") - detail_table.add_column("Param") + detail_table.add_column("Param", overflow="fold") detail_table.add_column("Status") detail_table.add_column("relative_l2", justify="right") detail_table.add_column("mean_abs_pct", 
justify="right") detail_table.add_column("typical_abs", justify="right") + detail_table.add_column("mean_abs_diff", justify="right") # detail_table.add_column("Thresholds") detail_table.add_column("Failure") sorted_rows = sorted( @@ -1282,7 +1268,7 @@ def print_report(self, report: VariantReport) -> None: f"{row.relative_l2:.6g}", f"{row.mean_abs_pct:.6g}%", f"{row.typical_abs_scale:.6g}", - # _threshold_string(row.thresholds), # disabled for now to avoid clutter, neat to keep though + f"{row.mean_abs_diff:.6g}", failure_text, ) self.console.print(detail_table) @@ -1307,32 +1293,42 @@ def run_suite( for variant in variants: report = self.run_variant(variant) reports.append(report) - self.assert_expected_signal(report, "Megatron oracle suite mismatch") + self.assert_expected_signal( + report, + "Megatron correctness suite mismatch", + report_path=self.case_dir + / variant.resolved_output_slug() + / "variant_report.json", + ) return reports -def _default_phase_thresholds( - case_cfg: OracleCaseConfig, -) -> dict[str, dict[str, float]]: - """Builds default per-phase (fwd, grad, outputs, losses, deltas) threshold dictionaries.""" - default = { - "relative_l2": case_cfg.tolerances.relative_l2, - "mean_abs_pct": case_cfg.tolerances.mean_abs_pct, - } - return { - key: default for key in ["outputs", "losses", "grads", "deltas", "forward"] - } | { - "router_scores": {"mean_abs_pct": 0.0}, - "router_topk_ids": { +def _default_phase_pass_fns() -> dict[str, PhasePassFn]: + """Builds default per-phase pass functions over diff summaries.""" + # note the metrics get averaged across layers to reduce noise + # we don't expect particular layers to see errors as opposed to the others so this is helpful + fwd_out_loss = MetricThresholdRule(limits={"relative_l2": 3e-2, "mean_abs_pct": 3.0}) + grads = lambda summary: ( + summary["mean_abs_pct"] < 5.0 + or (summary["typical_abs_scale"] < 1e-6 and summary["mean_abs_diff"] < 2e-8 and summary["relative_l2"] < 1.0) + ) + deltas = lambda 
summary: ( + summary["mean_abs_pct"] < 15.0 + ) + router_topk_rule = MetricThresholdRule( # should be no mismatch due to router replay + limits={ "topk_mismatch_fraction": 0.0, "top1_mismatch_fraction": 0.0, - }, - } + } + ) + return { + key: fwd_out_loss for key in ["forward", "outputs", "losses"] + } | {"grads": grads, "deltas": deltas, "router_topk_ids": router_topk_rule} -def _suite_variants(case_cfg: OracleCaseConfig) -> list[VariantSpec]: +def _suite_variants() -> list[VariantSpec]: """Builds the standard oracle suite variant ordering.""" - thresholds = _default_phase_thresholds(case_cfg) + phase_pass = _default_phase_pass_fns() variants = [ VariantSpec( name="oracle_replay_parity", @@ -1340,7 +1336,8 @@ def _suite_variants(case_cfg: OracleCaseConfig) -> list[VariantSpec]: output_slug=_topology_output_slug( ORACLE_TOPOLOGY, ORACLE_REPLAY_TOPOLOGY_SUFFIX ), - thresholds_by_phase=thresholds, + pass_fn_by_phase=phase_pass, + force_regenerate=regenerate_requested(), ) ] for topology in TOPOLOGIES[1:] + ( @@ -1350,7 +1347,7 @@ def _suite_variants(case_cfg: OracleCaseConfig) -> list[VariantSpec]: VariantSpec( name=f"topology_{topology.slug()}", topology=topology, - thresholds_by_phase=thresholds, + pass_fn_by_phase=phase_pass, ) ) return variants @@ -1362,7 +1359,7 @@ def run_suite( ) -> list[VariantReport]: """Runs replay parity and topology variants with fail-fast assertions.""" runner = VariantRunner(case_config=case_config) - return runner.run_suite(_suite_variants(case_config)) + return runner.run_suite(_suite_variants()) def run_sensitivity_suite( @@ -1372,14 +1369,14 @@ def run_sensitivity_suite( ) -> list[VariantReport]: """Runs a list of sensitivity mutations and expects each to fail.""" runner = VariantRunner(case_config=case_config) - thresholds = _default_phase_thresholds(case_config) + phase_pass = _default_phase_pass_fns() variants = [ VariantSpec( name=f"sensitivity_{mutation}", - topology=SENSITIVITY_TOPOLOGY, + 
topology=sensitivity_topology_for_mutation(mutation), mutation=mutation, expected_signal="fail", - thresholds_by_phase=thresholds, + pass_fn_by_phase=phase_pass, ) for mutation in mutations ] diff --git a/tests/integration/megatron_oracle_worker.py b/tests/integration/megatron_oracle_worker.py index a5a3ed66..1d734d91 100644 --- a/tests/integration/megatron_oracle_worker.py +++ b/tests/integration/megatron_oracle_worker.py @@ -1,15 +1,18 @@ from __future__ import annotations import argparse -from contextlib import contextmanager +from contextlib import ExitStack, contextmanager +import hashlib import os from pathlib import Path import random import subprocess import sys +from types import MethodType from typing import Any, Callable import numpy as np +import torch from art.megatron.routing_replay import ( ParallelTopology as ReplayParallelTopology, @@ -20,6 +23,7 @@ from .megatron_forward_trace import ForwardTraceCapture from .megatron_oracle_harness import ( + SUPPORTED_SENSITIVITY_MUTATIONS, OracleCaseConfig, RunManifest, SensitivityMutation, @@ -104,7 +108,9 @@ def _merge_sharded_dicts(shards_by_rank: list[dict[str, Any]]) -> dict[str, Any] return full_state -def _gather_full_state(local_state: dict[str, Any]) -> dict[str, Any] | None: +def _gather_full_state( + local_state: dict[str, Any], +) -> dict[str, Any] | None: """Gathers local state dicts to rank 0 and merges them.""" import torch @@ -119,7 +125,9 @@ def _gather_full_state(local_state: dict[str, Any]) -> dict[str, Any] | None: return _merge_sharded_dicts(entries) -def _collect_lora_state(model_chunks: list[Any]) -> dict[str, Any] | None: +def _collect_lora_state( + model_chunks: list[Any], +) -> dict[str, Any] | None: """Collects full LoRA adapter state for validation and delta computation.""" local_state: dict[str, Any] = {} for chunk in model_chunks: @@ -163,6 +171,49 @@ def _collect_lora_grads(model_chunks: list[Any]) -> dict[str, Any] | None: return _gather_full_state(local_grads) +def 
_apply_save_mutation_to_tensor_map( + tensor_map: dict[str, Any], + *, + mutation: SensitivityMutation | None, +) -> dict[str, Any]: + """Applies save-only mutation transforms to already-collected full tensor maps.""" + if mutation == "save_drop_nonzero_ranked_tp_shards": + mutated: dict[str, Any] = {} + for key, value in tensor_map.items(): + if not isinstance(value, torch.Tensor): + mutated[key] = value + continue + if ".lora_A." in key and value.ndim >= 2 and value.shape[1] > 1: + keep = max(1, value.shape[1] // 2) + mutated[key] = value.narrow(1, 0, keep).contiguous() + continue + if ".lora_B." in key and value.ndim >= 2 and value.shape[0] > 1: + keep = max(1, value.shape[0] // 2) + mutated[key] = value.narrow(0, 0, keep).contiguous() + continue + mutated[key] = value + return mutated + + if mutation == "save_duplicate_replicated_entries": + mutated = dict(tensor_map) + source_by_bucket: dict[tuple[tuple[int, ...], str], torch.Tensor] = {} + for key in sorted(mutated.keys()): + value = mutated[key] + if not isinstance(value, torch.Tensor): + continue + if not key.endswith(".weight"): + continue + bucket = (tuple(value.shape), str(value.dtype)) + source = source_by_bucket.get(bucket) + if source is None: + source_by_bucket[bucket] = value + continue + mutated[key] = source.clone().contiguous() + return mutated + + return tensor_map + + def _validate_loaded_state_matches_adapter( loaded_state: dict[str, Any], adapter_model: dict[str, Any], @@ -176,6 +227,29 @@ def _validate_loaded_state_matches_adapter( ) +def _build_deterministic_shared_init( + initial_state: dict[str, Any], + *, + seed: int, +) -> dict[str, Any]: + """Builds deterministic nonzero LoRA init values for both A and B tensors.""" + initialized: dict[str, Any] = {} + for key in sorted(initial_state.keys()): + value = initial_state[key] + if not isinstance(value, torch.Tensor): + raise TypeError(f"Expected tensor value for key '{key}', got {type(value)}") + digest = 
hashlib.sha256(f"{seed}:{key}".encode("utf-8")).digest() + key_seed = int.from_bytes(digest[:8], "little") % (2**31) + generator = torch.Generator(device="cpu").manual_seed(key_seed) + random_values = torch.randn( + value.shape, + generator=generator, + dtype=torch.float32, + ) + initialized[key] = (0.01 * random_values).to(dtype=value.dtype).contiguous() + return initialized + + def _configure_provider( provider: Any, topology: Topology, @@ -207,7 +281,8 @@ def _build_optimizer_config(case_config: OracleCaseConfig): adam_beta1=0.9, adam_beta2=0.99, clip_grad=0.1, - weight_decay=0.1, + weight_decay=0.0, + adam_eps=1e-13, ) @@ -252,9 +327,144 @@ def _delta_state( } +def _iter_named_unique_parameters( + model_chunks: list[Any], +) -> list[tuple[str, torch.nn.Parameter]]: + seen: set[int] = set() + params: list[tuple[str, torch.nn.Parameter]] = [] + for chunk_index, chunk in enumerate(model_chunks): + for name, param in chunk.named_parameters(): + param_id = id(param) + if param_id in seen: + continue + seen.add(param_id) + params.append((f"chunk{chunk_index}.{name}", param)) + return params + + +def _matches_grad_sync_skip_mutation(param_name: str, mutation: SensitivityMutation) -> bool: + if mutation == "bwd_skip_sync_qkv_a": + return any( + token in param_name + for token in ( + ".self_attention.linear_qkv.q_proj_lora.A_T", + ".self_attention.linear_qkv.k_proj_lora.A_T", + ".self_attention.linear_qkv.v_proj_lora.A_T", + ) + ) + if mutation == "bwd_skip_sync_o_proj_b": + return ".self_attention.linear_proj.lora.B_T" in param_name + if mutation == "bwd_skip_sync_fc1_a": + return ( + ".mlp.experts.linear_fc1.gate_lora.A_T" in param_name + or ".mlp.experts.linear_fc1.up_lora.A_T" in param_name + ) + return False + + +@contextmanager +def _apply_grad_sync_skip_mutation( + model_chunks: list[Any], + mutation: SensitivityMutation | None, +): + if mutation not in { + "bwd_skip_sync_qkv_a", + "bwd_skip_sync_o_proj_b", + "bwd_skip_sync_fc1_a", + }: + yield + return + + 
saved_attrs: list[tuple[Any, str, Any]] = [] + for param_name, param in _iter_named_unique_parameters(model_chunks): + # this only passes lora params atm, so we assume lora params below + if not _matches_grad_sync_skip_mutation(param_name, mutation): + continue + if ( + mutation == "bwd_skip_sync_fc1_a" + and param.grad_sync_domain != "expert_tp" + ): + continue + + # For fc1 A params, extended finalize handles expert-TP sync via grad_sync_op. + saved_attrs.append((param, "grad_sync_op", param.grad_sync_op)) + param.grad_sync_op = "none" + + # Megatron native TP finalize uses this only for tp_default-domain params. + if param.average_gradients_across_tp_domain and param.grad_sync_domain == "tp_default": + saved_attrs.append((param, "average_gradients_across_tp_domain", param.average_gradients_across_tp_domain)) + param.average_gradients_across_tp_domain = False + try: + yield + finally: + for param, attr, value in reversed(saved_attrs): + setattr(param, attr, value) + + +@contextmanager +def _apply_o_proj_forward_mutation( + model_chunks: list[Any], + mutation: SensitivityMutation | None, +): + if mutation not in { + "fwd_skip_o_proj_tp_reduce", + "fwd_o_proj_tp_reduce_avg_not_sum", + }: + yield + return + + from megatron.core import parallel_state as ps + from megatron.core.tensor_parallel.mappings import ( + reduce_from_tensor_model_parallel_region, + reduce_scatter_to_sequence_parallel_region, + ) + + from art.megatron.lora import SelfAttentionLinearProjLoRA + + original_forwards: list[tuple[Any, Any]] = [] + for chunk in model_chunks: + for module in chunk.modules(): + if not isinstance(module, SelfAttentionLinearProjLoRA): + continue + original_forwards.append((module, module.forward)) + + def _mutated_forward(self: Any, x: Any): + base_output, bias_output = self.linear_proj(x) + lora_output = self.lora(x) + tp_size = self.provider.tensor_model_parallel_size + if tp_size > 1: + if mutation == "fwd_o_proj_tp_reduce_avg_not_sum": + if 
self.provider.sequence_parallel: + lora_output = reduce_scatter_to_sequence_parallel_region( + lora_output + ) + else: + lora_output = reduce_from_tensor_model_parallel_region( + lora_output + ) + lora_output = lora_output / tp_size + elif mutation == "fwd_skip_o_proj_tp_reduce": + if self.provider.sequence_parallel: + seq_per_rank = lora_output.shape[0] // tp_size + tp_rank = ps.get_tensor_model_parallel_rank() + lora_output = lora_output.narrow( + 0, tp_rank * seq_per_rank, seq_per_rank + ) + return base_output + lora_output, bias_output + + module.forward = MethodType(_mutated_forward, module) + + try: + yield + finally: + for module, original_forward in reversed(original_forwards): + module.forward = original_forward + + @contextmanager def _mutation_hook( megatron_train_module: Any, + model_chunks: list[Any], mutation: SensitivityMutation | None, pre_optimizer_step_hook: Callable[[], None] | None = None, loss_scale: float = 1.0, @@ -264,45 +474,54 @@ def _mutation_hook( original_optimizer_step = megatron_train_module._optimizer_step original_loss_fn = megatron_train_module.loss_fn - if mutation == "drop_finalize": - megatron_train_module._finalize_grads = lambda _model: None - elif mutation is not None: + known_mutations = {None, *SUPPORTED_SENSITIVITY_MUTATIONS} + if mutation not in known_mutations: raise ValueError(f"Unsupported mutation: {mutation}") + if mutation == "skip_finalize": + megatron_train_module._finalize_grads = lambda _model: None + if pre_optimizer_step_hook is not None: def _patched_optimizer_step(optimizer: Any, learning_rate: float): - pre_optimizer_step_hook() + if pre_optimizer_step_hook is not None: + pre_optimizer_step_hook() return original_optimizer_step(optimizer, learning_rate) megatron_train_module._optimizer_step = _patched_optimizer_step - if loss_scale <= 0: - raise ValueError(f"loss_scale must be > 0, got {loss_scale}") - if loss_scale != 1.0: + effective_loss_scale = loss_scale + if effective_loss_scale <= 0: + raise 
ValueError( + f"effective_loss_scale must be > 0, got {effective_loss_scale}" + ) + if effective_loss_scale != 1.0: def _scaled_loss_fn(*args: Any, **kwargs: Any): loss = original_loss_fn(*args, **kwargs) return loss.model_copy( update={ - "mean_policy_loss": loss.mean_policy_loss * loss_scale, - "mean_kl": loss.mean_kl * loss_scale, - "policy_loss_sum": loss.policy_loss_sum * loss_scale, + "mean_policy_loss": loss.mean_policy_loss * effective_loss_scale, + "mean_kl": loss.mean_kl * effective_loss_scale, + "policy_loss_sum": loss.policy_loss_sum * effective_loss_scale, } ) megatron_train_module.loss_fn = _scaled_loss_fn if mutation is None: - if pre_optimizer_step_hook is None and loss_scale == 1.0: + if pre_optimizer_step_hook is None and effective_loss_scale == 1.0: yield return - try: - yield - finally: - megatron_train_module._finalize_grads = original_finalize - megatron_train_module._optimizer_step = original_optimizer_step - megatron_train_module.loss_fn = original_loss_fn + with ExitStack() as stack: + stack.enter_context(_apply_o_proj_forward_mutation(model_chunks, mutation)) + stack.enter_context(_apply_grad_sync_skip_mutation(model_chunks, mutation)) + try: + yield + finally: + megatron_train_module._finalize_grads = original_finalize + megatron_train_module._optimizer_step = original_optimizer_step + megatron_train_module.loss_fn = original_loss_fn def _worker_run(request: WorkerRunRequest) -> None: @@ -347,8 +566,12 @@ def _worker_run(request: WorkerRunRequest) -> None: initial_state = _collect_lora_state(model_chunks) if torch.distributed.get_rank() == 0: shared_init_path.parent.mkdir(parents=True, exist_ok=True) - save_file( + deterministic_init = _build_deterministic_shared_init( _require_not_none(initial_state, "initial_state"), + seed=request.case_config.seed, + ) + save_file( + deterministic_init, str(shared_init_path), ) torch.distributed.barrier() @@ -373,6 +596,7 @@ def _worker_run(request: WorkerRunRequest) -> None: 
learning_rate=request.case_config.learning_rate, beta=request.case_config.beta, kl_penalty_coef=0.0, + grad_accumulation_sequences=request.case_config.grad_accumulation_sequences, ) experimental_config: dev.TrainConfig = {} step_traces: list[StepTrace] = [] @@ -385,26 +609,36 @@ def _capture_lora_grads() -> None: with _mutation_hook( megatron_train, + model_chunks, request.mutation, pre_optimizer_step_hook=_capture_lora_grads, loss_scale=request.case_config.loss_scale, ): for step_index in range(request.case_config.num_steps): forward_trace_capture.set_step(step_index) - sample_index = step_index % request.packed_tensors.num_sequences - inputs = megatron_train.select_indexed_inputs(packed_tensors, sample_index) + base_sample_index = ( + step_index * request.case_config.grad_accumulation_sequences + ) + micro_sample_indices = [ + (base_sample_index + offset) % request.packed_tensors.num_sequences + for offset in range(request.case_config.grad_accumulation_sequences) + ] + micro_inputs = [ + megatron_train.select_indexed_inputs(packed_tensors, sample_index) + for sample_index in micro_sample_indices + ] captured_grads = None step_result = megatron_train.run_training_step( model_chunks=model_chunks, optimizer=optimizer, learning_rate=train_config.learning_rate, - inputs=inputs, + inputs=micro_inputs, config=train_config, experimental_config=experimental_config, ref_logprobs=None, step_index=step_index, - sample_index=sample_index, + sample_index=micro_sample_indices, moe_routing_replay_controller=runtime.moe_routing_replay_controller, ) forward_trace_capture.save_current_step(traces_dir) @@ -421,6 +655,14 @@ def _capture_lora_grads() -> None: current_lora_state, "current_lora_state" ) deltas = _delta_state(initial_state, current_state) + saved_deltas = _apply_save_mutation_to_tensor_map( + deltas, + mutation=request.mutation, + ) + saved_current_state = _apply_save_mutation_to_tensor_map( + current_state, + mutation=request.mutation, + ) output_rel = Path("traces") / 
f"output_step_{step_index:03d}.pt" grads_rel = Path("traces") / f"grads_step_{step_index:03d}.safetensors" @@ -434,8 +676,8 @@ def _capture_lora_grads() -> None: topology_dir / output_rel, ) save_file(grads, str(topology_dir / grads_rel)) - save_file(deltas, str(topology_dir / deltas_rel)) - save_file(current_state, str(topology_dir / lora_rel)) + save_file(saved_deltas, str(topology_dir / deltas_rel)) + save_file(saved_current_state, str(topology_dir / lora_rel)) # build and append the step trace step_traces.append( @@ -481,7 +723,6 @@ def _capture_lora_grads() -> None: seed=request.case_config.seed, num_steps=request.case_config.num_steps, packed_tensors=request.packed_tensors, - tolerances=request.case_config.tolerances, steps=step_traces, ) _write_json(topology_dir / "manifest.json", manifest.model_dump(mode="json")) From d396bfd60bad639fc5124db046b3d6e7640508f6 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Sat, 14 Mar 2026 08:21:40 +0000 Subject: [PATCH 15/28] oracle tests: write suite output tables to log files Redirect suite stdout/stderr into local correctness/sensitivity logs and make skip/report messaging point to those artifacts instead of terminal output. 
--- src/art/megatron/lora.py | 28 +++--- tests/integration/megatron_oracle_worker.py | 25 ++++-- .../test_megatron_lora_oracle_correctness.py | 88 ++++++++++++++++--- 3 files changed, 108 insertions(+), 33 deletions(-) diff --git a/src/art/megatron/lora.py b/src/art/megatron/lora.py index 247389c3..63f10c85 100644 --- a/src/art/megatron/lora.py +++ b/src/art/megatron/lora.py @@ -182,9 +182,9 @@ def num_local_experts(self) -> int: return self.A_T.shape[0] if self.A_T.ndim == 3 else 1 def _broadcast_if_replicated(self, param: torch.nn.Parameter) -> None: - if not param.lora_tp_replicated: + if not param.lora_tp_replicated: # ty: ignore[unresolved-attribute] return - domain = param.lora_shard_domain + domain = param.lora_shard_domain # ty: ignore[unresolved-attribute] world_size = _get_shard_world_size(domain) if world_size <= 1: return @@ -252,9 +252,9 @@ def load_weights( self.load_weight(weight, into=into) def load_weight(self, weight: torch.Tensor, *, into: torch.nn.Parameter) -> None: - domain = into.lora_shard_domain - if into.lora_tp_sharded: - axis = into.lora_tp_shard_dim + domain = into.lora_shard_domain # ty: ignore[unresolved-attribute] + if into.lora_tp_sharded: # ty: ignore[unresolved-attribute] + axis = into.lora_tp_shard_dim # ty: ignore[unresolved-attribute] axis = _normalize_axis(axis, weight.ndim) world_size = _get_shard_world_size(domain) rank = _get_shard_rank(domain) @@ -291,21 +291,21 @@ def _should_export_parameter(self, param: torch.nn.Parameter) -> bool: return False # this param is fully sharded, all shard ranks participate - if param.lora_tp_sharded: + if param.lora_tp_sharded: # ty: ignore[unresolved-attribute] return True # param is replicated, tp rank 0 or etp rank 0 participates - return _get_shard_rank(param.lora_shard_domain) == 0 + return _get_shard_rank(param.lora_shard_domain) == 0 # ty: ignore[unresolved-attribute] def _manifest_for_param(self, param: torch.nn.Parameter) -> dict[str, Any]: return { - "domain": 
param.lora_shard_domain, - "sharded": param.lora_tp_sharded, - "shard_dim": param.lora_tp_shard_dim, - "shard_world_size": _get_shard_world_size(param.lora_shard_domain) - if param.lora_tp_sharded + "domain": param.lora_shard_domain, # ty: ignore[unresolved-attribute] + "sharded": param.lora_tp_sharded, # ty: ignore[unresolved-attribute] + "shard_dim": param.lora_tp_shard_dim, # ty: ignore[unresolved-attribute] + "shard_world_size": _get_shard_world_size(param.lora_shard_domain) # ty: ignore[unresolved-attribute] + if param.lora_tp_sharded # ty: ignore[unresolved-attribute] else 1, - "shard_rank": _get_shard_rank(param.lora_shard_domain) - if param.lora_tp_sharded + "shard_rank": _get_shard_rank(param.lora_shard_domain) # ty: ignore[unresolved-attribute] + if param.lora_tp_sharded # ty: ignore[unresolved-attribute] else 0, } diff --git a/tests/integration/megatron_oracle_worker.py b/tests/integration/megatron_oracle_worker.py index 1d734d91..316d39d8 100644 --- a/tests/integration/megatron_oracle_worker.py +++ b/tests/integration/megatron_oracle_worker.py @@ -342,7 +342,9 @@ def _iter_named_unique_parameters( return params -def _matches_grad_sync_skip_mutation(param_name: str, mutation: SensitivityMutation) -> bool: +def _matches_grad_sync_skip_mutation( + param_name: str, mutation: SensitivityMutation +) -> bool: if mutation == "bwd_skip_sync_qkv_a": return any( token in param_name @@ -381,19 +383,26 @@ def _apply_grad_sync_skip_mutation( if not _matches_grad_sync_skip_mutation(param_name, mutation): continue if ( - mutation == "bwd_skip_sync_fc1_a" - and param.grad_sync_domain != "expert_tp" + mutation == "bwd_skip_sync_fc1_a" and param.grad_sync_domain != "expert_tp" # ty: ignore[unresolved-attribute] ): continue # For fc1 A params, extended finalize handles expert-TP sync via grad_sync_op. 
- saved_attrs.append((param, "grad_sync_op", param.grad_sync_op)) - param.grad_sync_op = "none" + saved_attrs.append((param, "grad_sync_op", param.grad_sync_op)) # ty: ignore[unresolved-attribute] + param.grad_sync_op = "none" # ty: ignore[unresolved-attribute] # Megatron native TP finalize uses this only for tp_default-domain params. - if param.average_gradients_across_tp_domain and param.grad_sync_domain == "tp_default": - saved_attrs.append((param, "average_gradients_across_tp_domain", param.average_gradients_across_tp_domain)) - param.average_gradients_across_tp_domain = False + average_gradients_across_tp_domain = param.average_gradients_across_tp_domain # ty: ignore[unresolved-attribute] + grad_sync_domain = param.grad_sync_domain # ty: ignore[unresolved-attribute] + if average_gradients_across_tp_domain and grad_sync_domain == "tp_default": + saved_attrs.append( + ( + param, + "average_gradients_across_tp_domain", + average_gradients_across_tp_domain, + ) + ) + param.average_gradients_across_tp_domain = False # ty: ignore[unresolved-attribute] try: yield finally: diff --git a/tests/integration/test_megatron_lora_oracle_correctness.py b/tests/integration/test_megatron_lora_oracle_correctness.py index f7e4dbbc..67c35adb 100644 --- a/tests/integration/test_megatron_lora_oracle_correctness.py +++ b/tests/integration/test_megatron_lora_oracle_correctness.py @@ -1,9 +1,12 @@ +from contextlib import redirect_stderr, redirect_stdout +from pathlib import Path +from typing import Callable + import pytest from .megatron_oracle_harness import ( EXTENDED_TOPOLOGIES, SENSITIVITY_MUTATION_ENV, - SENSITIVITY_TOPOLOGY, TOPOLOGIES, available_gpu_count, case_config, @@ -12,8 +15,33 @@ run_suite, sensitivity_enabled, sensitivity_mutations, + sensitivity_required_world_size, ) +REPO_ROOT = Path(__file__).resolve().parents[2] +CORRECTNESS_LOG_PATH = REPO_ROOT / ".local" / "correctness.log" +SENSITIVITY_LOG_PATH = REPO_ROOT / ".local" / "sensitivity.log" + + +def 
_run_suite_with_log( + *, + log_path: Path, + run: Callable[[], object], +) -> None: + log_path.parent.mkdir(parents=True, exist_ok=True) + with log_path.open("w", encoding="utf-8") as log_file: + with redirect_stdout(log_file), redirect_stderr(log_file): + run() + + +def _announce_report_log( + *, + log_path: Path, + capsys: pytest.CaptureFixture[str], +) -> None: + with capsys.disabled(): + print(f"\nMegatron LoRA oracle report log: {log_path}", flush=True) + def _require_gpus_for(topology_world_size: int) -> None: gpu_count = available_gpu_count() @@ -30,31 +58,69 @@ def _suite_world_size() -> int: return max(topology.world_size() for topology in suite_topologies) -def test_megatron_lora_diff_sensitivity() -> None: +def test_megatron_lora_diff_sensitivity(capsys: pytest.CaptureFixture[str]) -> None: """ Runs a each of the sensitivity mutations (e.g. drop megatron finalize grads) and expects each to fail (numerical differences larger than our thresholds) This test ensures we can catch errors we know of (implying we will be able to catch unknown errors as well) """ + _announce_report_log(log_path=SENSITIVITY_LOG_PATH, capsys=capsys) if not sensitivity_enabled(): + SENSITIVITY_LOG_PATH.parent.mkdir(parents=True, exist_ok=True) + SENSITIVITY_LOG_PATH.write_text( + ( + "Sensitivity suite skipped. " + f"Set {SENSITIVITY_MUTATION_ENV}=all (or one mutation / CSV).\n" + ), + encoding="utf-8", + ) pytest.skip( - f"Set {SENSITIVITY_MUTATION_ENV}=drop_finalize (or CSV) to enable sensitivity check." + f"Set {SENSITIVITY_MUTATION_ENV}=all (or one mutation / CSV) to enable sensitivity check." 
) - _require_gpus_for(SENSITIVITY_TOPOLOGY.world_size()) mutations = sensitivity_mutations() assert mutations - run_sensitivity_suite( - case_config=case_config(), - mutations=mutations, + sensitivity_world_size = sensitivity_required_world_size(mutations) + gpu_count = available_gpu_count() + if gpu_count < sensitivity_world_size: + SENSITIVITY_LOG_PATH.parent.mkdir(parents=True, exist_ok=True) + SENSITIVITY_LOG_PATH.write_text( + ( + "Sensitivity suite skipped. " + f"Need {sensitivity_world_size} GPUs, found {gpu_count}.\n" + ), + encoding="utf-8", + ) + _require_gpus_for(sensitivity_world_size) + _run_suite_with_log( + log_path=SENSITIVITY_LOG_PATH, + run=lambda: run_sensitivity_suite( + case_config=case_config(), + mutations=mutations, + ), ) -def test_megatron_lora_topology_suite() -> None: +def test_megatron_lora_topology_suite(capsys: pytest.CaptureFixture[str]) -> None: """ Runs the suite of topologies and expects each to pass (numerical differences within our thresholds) """ - _require_gpus_for(_suite_world_size()) - run_suite( - case_config=case_config(), + _announce_report_log(log_path=CORRECTNESS_LOG_PATH, capsys=capsys) + suite_world_size = _suite_world_size() + gpu_count = available_gpu_count() + if gpu_count < suite_world_size: + CORRECTNESS_LOG_PATH.parent.mkdir(parents=True, exist_ok=True) + CORRECTNESS_LOG_PATH.write_text( + ( + "Topology suite skipped. " + f"Need {suite_world_size} GPUs, found {gpu_count}.\n" + ), + encoding="utf-8", + ) + _require_gpus_for(suite_world_size) + _run_suite_with_log( + log_path=CORRECTNESS_LOG_PATH, + run=lambda: run_suite( + case_config=case_config(), + ), ) From 5385fbbc1f6959d6d5341f8fa304cef57b6370b8 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Mon, 16 Mar 2026 18:40:13 +0000 Subject: [PATCH 16/28] Add correct data parallelism. 
--- src/art/loss.py | 14 +- src/art/megatron/routing_replay.py | 42 ++++- src/art/megatron/train.py | 180 +++++++++++++++---- tests/integration/megatron_forward_trace.py | 17 +- tests/integration/megatron_oracle_harness.py | 146 +++++++++++---- tests/integration/megatron_oracle_worker.py | 70 ++++++-- 6 files changed, 373 insertions(+), 96 deletions(-) diff --git a/src/art/loss.py b/src/art/loss.py index a22cca3f..7d25a8eb 100644 --- a/src/art/loss.py +++ b/src/art/loss.py @@ -1,4 +1,4 @@ -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Literal from pydantic import BaseModel, ConfigDict import torch @@ -27,6 +27,7 @@ def loss_fn( ref_logprobs: torch.Tensor | None, entropies: torch.Tensor | None, experimental_config: dev.TrainConfig, + reduction: Literal["mean", "sum"] = "mean", ) -> Loss: old_logprobs = shift_tensor(inputs["logprobs"], float("nan")) advantages = shift_tensor(inputs["advantages"], 0.0) @@ -132,14 +133,15 @@ def loss_fn( kl_div = torch.zeros_like(policy_loss) policy_loss = policy_loss * weights * assistant_mask kl_div = kl_div * weights * assistant_mask - mean_policy_loss = policy_loss.sum() / (assistant_mask.sum() + 1e-6) - mean_kl = kl_div.sum() / (assistant_mask.sum() + 1e-6) + denominator = assistant_mask.sum() + 1e-6 if reduction == "mean" else 1.0 + mean_policy_loss = policy_loss.sum() / denominator + mean_kl = kl_div.sum() / denominator # Compute mean entropy for the current step if entropies is not None: shifted_entropies = shift_tensor(entropies, 0.0) - mean_entropy = (shifted_entropies * weights * assistant_mask).sum() / ( - assistant_mask.sum() + 1e-6 - ) + mean_entropy = ( + shifted_entropies * weights * assistant_mask + ).sum() / denominator else: mean_entropy = None return Loss( diff --git a/src/art/megatron/routing_replay.py b/src/art/megatron/routing_replay.py index 91865b80..463e5258 100644 --- a/src/art/megatron/routing_replay.py +++ b/src/art/megatron/routing_replay.py @@ -507,6 +507,7 @@ def __init__( 
self._active_sample_index: int | None = None self._active_step_routes: StepRoutes | None = None self._router_call_cursors: dict[str, int] = {} + self._router_call_limits: dict[str, int] = {} self._global_uid_to_row_index: dict[int, int] = {} self._local_router_keys: set[str] = set() @@ -600,6 +601,8 @@ def remove_router_patches(self) -> None: self._local_router_keys.clear() def set_step(self, *, step_index: int, sample_index: int) -> None: + from megatron.core import parallel_state as ps + if step_index not in self.bundle.steps: raise RuntimeError( f"Replay bundle missing step_index={step_index}. " @@ -615,9 +618,26 @@ def set_step(self, *, step_index: int, sample_index: int) -> None: "Replay bundle step is missing local router key: " f"step={step_index}, router='{local_router_key}'" ) - self._router_call_cursors = { - router_key: 0 for router_key in sorted(self._local_router_keys) - } + dp_world_size = int(ps.get_data_parallel_world_size(with_context_parallel=True)) + dp_rank = int(ps.get_data_parallel_rank(with_context_parallel=True)) + self._router_call_cursors = {} + self._router_call_limits = {} + for router_key in sorted(self._local_router_keys): + total_calls = len(step_routes.routers[router_key].calls) + call_start = 0 + call_limit = total_calls + if dp_world_size > 1: + if total_calls % dp_world_size != 0: + raise RuntimeError( + "Replay router call count is not divisible by DP world size: " + f"step={step_index}, router='{router_key}', " + f"calls={total_calls}, dp_world_size={dp_world_size}" + ) + calls_per_dp_rank = total_calls // dp_world_size + call_start = dp_rank * calls_per_dp_rank + call_limit = call_start + calls_per_dp_rank + self._router_call_cursors[router_key] = call_start + self._router_call_limits[router_key] = call_limit self._global_uid_to_row_index = { int(uid.item()): row_index for row_index, uid in enumerate(step_routes.global_token_uids) @@ -627,9 +647,13 @@ def finalize_step(self) -> None: if self._active_step_routes is None: raise 
RuntimeError("finalize_step called before set_step") for router_key in sorted(self._local_router_keys): - router_routes = self._active_step_routes.routers[router_key] consumed = self._router_call_cursors.get(router_key, 0) - expected = len(router_routes.calls) + expected = self._router_call_limits.get(router_key) + if expected is None: + raise RuntimeError( + "Routing replay call limits missing for router key: " + f"step={self._active_step_index}, router='{router_key}'" + ) if consumed != expected: raise RuntimeError( "Routing replay step consumption mismatch: " @@ -640,6 +664,7 @@ def finalize_step(self) -> None: self._active_sample_index = None self._active_step_routes = None self._router_call_cursors = {} + self._router_call_limits = {} self._global_uid_to_row_index = {} def get_route_for_router( @@ -652,7 +677,14 @@ def get_route_for_router( ) -> tuple[torch.Tensor, torch.Tensor]: step_routes = self._active_step_routes call_index = self._router_call_cursors.get(router_key, 0) + call_limit = self._router_call_limits.get(router_key) router_calls = step_routes.routers[router_key].calls + if call_limit is not None and call_index >= call_limit: + raise RuntimeError( + "Routing replay call cursor exceeded local call range: " + f"step={self._active_step_index}, router='{router_key}', " + f"call_index={call_index}, limit={call_limit}" + ) route = router_calls[call_index] self._router_call_cursors[router_key] = call_index + 1 diff --git a/src/art/megatron/train.py b/src/art/megatron/train.py index a66156bc..c08394e1 100644 --- a/src/art/megatron/train.py +++ b/src/art/megatron/train.py @@ -291,6 +291,7 @@ def collect_sharded_lora_state( return sharded_state_dict, sharded_state_manifest +@torch.no_grad() def select_indexed_inputs(packed_tensors: PackedTensors, index: int) -> PackedTensors: return PackedTensors( # type: ignore[call-arg] **{ @@ -303,6 +304,76 @@ def select_indexed_inputs(packed_tensors: PackedTensors, index: int) -> PackedTe ) +@torch.no_grad() +def 
_clone_packed_tensors(inputs: PackedTensors) -> PackedTensors: + return PackedTensors( # type: ignore[call-arg] + **{ + key: value.clone() + for key, value in inputs.items() + if isinstance(value, torch.Tensor) + }, + pixel_values=[None], + image_grid_thw=[None], + ) + + +@torch.no_grad() +def _zero_contribution_inputs(template: PackedTensors) -> PackedTensors: + dummy = _clone_packed_tensors(template) + dummy["assistant_mask"].zero_() + return dummy + + +def resolve_local_grad_accumulation_sequences( + global_grad_accumulation_sequences: int, +) -> int: + dp_world_size = ps.get_data_parallel_world_size(with_context_parallel=True) + if ( + global_grad_accumulation_sequences <= 0 + or global_grad_accumulation_sequences % dp_world_size != 0 + ): + raise RuntimeError( + "Invalid global grad accumulation / DP world size combination: " + f"global_grad_accumulation_sequences={global_grad_accumulation_sequences}, " + f"dp_world_size={dp_world_size}" + ) + return global_grad_accumulation_sequences // dp_world_size + + +def build_micro_sample_indices( + step_index: int, + num_sequences: int, + global_grad_accumulation_sequences: int, +) -> list[int | None]: + dp_rank = ps.get_data_parallel_rank(with_context_parallel=True) + local_grad_accumulation_sequences = resolve_local_grad_accumulation_sequences( + global_grad_accumulation_sequences=global_grad_accumulation_sequences, + ) + base_global_sample_index = step_index * global_grad_accumulation_sequences + global_step_indices: list[int | None] = [] + for offset in range(global_grad_accumulation_sequences): + global_sample_index = base_global_sample_index + offset + global_step_indices.append( + global_sample_index if global_sample_index < num_sequences else None + ) + rank_start = dp_rank * local_grad_accumulation_sequences + rank_end = rank_start + local_grad_accumulation_sequences + return global_step_indices[rank_start:rank_end] + + +def select_micro_inputs( + packed_tensors: PackedTensors, + sample_indices: list[int | 
None], + zero_template: PackedTensors, +) -> list[PackedTensors]: + return [ + _clone_packed_tensors(zero_template) + if sample_index is None + else select_indexed_inputs(packed_tensors, sample_index) + for sample_index in sample_indices + ] + + def _move_inputs_to_device(inputs: PackedTensors, device: torch.device) -> None: for key, value in inputs.items(): if isinstance(value, torch.Tensor): @@ -332,6 +403,46 @@ def _reduce_loss(loss: torch.Tensor) -> torch.Tensor: return reduced_loss +def _count_trainable_tokens(inputs: PackedTensors) -> float: + assistant_mask = shift_tensor(inputs["assistant_mask"], False) + return float(assistant_mask.sum().item()) + + +def _global_token_normalization_scale( + micro_inputs: list[PackedTensors], + device: torch.device, +) -> float: + """ + Data parallel grad normalization scale + dp_world_size / global_micro_batch_token_count, where dp_world_size cancels out + the dp grad averaging, since we divide by global rather than local token count. + Using reduction="sum" and dividing by global token count means each rank is normalized + correctly. 
+ """ + local_token_total = sum(_count_trainable_tokens(micro) for micro in micro_inputs) + dp_world_size = 1 + global_token_total = local_token_total + + dp_world_size = ps.get_data_parallel_world_size(with_context_parallel=True) + if dp_world_size > 1: + dp_group = ps.get_data_parallel_group(with_context_parallel=True) + + global_token_tensor = torch.tensor( + [local_token_total], device=device, dtype=torch.float32 + ) + torch.distributed.all_reduce( + global_token_tensor, + op=torch.distributed.ReduceOp.SUM, + group=dp_group, + ) + global_token_total = float(global_token_tensor.item()) + + if global_token_total <= 0.0: + return 0.0 + + return float(dp_world_size) / global_token_total + + def run_training_step( *, model_chunks: list[MegatronModule], @@ -340,9 +451,9 @@ def run_training_step( inputs: PackedTensors | list[PackedTensors], config: types.TrainConfig, experimental_config: dev.TrainConfig, + step_index: int, + sample_index: int | list[int | None], ref_logprobs: torch.Tensor | None = None, - step_index: int | None = None, - sample_index: int | list[int] | None = None, moe_routing_replay_controller: MoeRoutingReplayController | None = None, ) -> TrainStepResult: micro_inputs = inputs if isinstance(inputs, list) else [inputs] @@ -356,16 +467,18 @@ def run_training_step( f"{len(sample_index)} != {len(micro_inputs)}" ) micro_sample_indices = sample_index - elif sample_index is None: - micro_sample_indices = [0] * len(micro_inputs) else: - micro_sample_indices = [sample_index] * len(micro_inputs) + assert len(micro_inputs) == 1 + micro_sample_indices = [sample_index] if moe_routing_replay_controller is not None: - assert step_index is not None + step_sample_index = next( + (index for index in micro_sample_indices if index is not None), + 0, + ) moe_routing_replay_controller.set_step( step_index=step_index, - sample_index=micro_sample_indices[0], + sample_index=step_sample_index, ) device = next(model_chunks[0].parameters()).device @@ -374,7 +487,8 @@ def 
run_training_step( chunk.zero_grad_buffer() # ty: ignore[call-non-callable] micro_count = len(micro_inputs) - loss_sum: torch.Tensor | None = None + normalization_scale = _global_token_normalization_scale(micro_inputs, device=device) + normalized_loss: torch.Tensor | None = None probs_corr_sum = 0.0 new_logprobs: torch.Tensor | None = None @@ -400,16 +514,20 @@ def run_training_step( ref_logprobs, None, experimental_config, + reduction="sum", ) - micro_loss = loss_info.mean_policy_loss + config.beta * loss_info.mean_kl - (micro_loss / micro_count).backward() + micro_loss = ( + loss_info.mean_policy_loss + config.beta * loss_info.mean_kl + ) * normalization_scale + micro_loss.backward() probs_corr_sum += float(loss_info.probs_corr.item()) - if loss_sum is None: - loss_sum = micro_loss.detach() + detached_micro_loss = micro_loss.detach() + if normalized_loss is None: + normalized_loss = detached_micro_loss else: - loss_sum = loss_sum + micro_loss.detach() + normalized_loss = normalized_loss + detached_micro_loss - if new_logprobs is None or loss_sum is None: + if new_logprobs is None or normalized_loss is None: raise RuntimeError("run_training_step did not produce outputs") _finalize_grads(model_chunks) @@ -417,7 +535,7 @@ def run_training_step( optimizer, learning_rate, ) - reduced_loss = _reduce_loss(loss_sum / micro_count) + reduced_loss = _reduce_loss(normalized_loss) if moe_routing_replay_controller is not None: moe_routing_replay_controller.finalize_step() @@ -496,26 +614,20 @@ def _run_service_loop(runtime: TrainingRuntime) -> None: runtime.rank, "Loading packed tensors from", job.disk_packed_tensors["dir"] ) packed_tensors = packed_tensors_from_dir(**job.disk_packed_tensors) + template = select_indexed_inputs(packed_tensors, 0) + zero_template = _zero_contribution_inputs(template) num_sequences = job.disk_packed_tensors["num_sequences"] - - dp_rank = ps.get_data_parallel_rank() - dp_world_size = ps.get_data_parallel_world_size() - num_indices = 
math.ceil(num_sequences / dp_world_size) - indices = list(range(dp_rank, num_sequences, dp_world_size)) - if not indices: - indices = [dp_rank % num_sequences] - repeat = math.ceil(num_indices / len(indices)) - indices = (indices * repeat)[:num_indices] - - grad_accumulation_sequences = max(1, int(config.grad_accumulation_sequences)) - for step_index, start in enumerate( - range(0, len(indices), grad_accumulation_sequences) - ): - micro_indices = indices[start : start + grad_accumulation_sequences] - micro_inputs = [ - select_indexed_inputs(packed_tensors, sample_index) - for sample_index in micro_indices - ] + global_grad_accumulation_sequences = config.grad_accumulation_sequences + num_steps = math.ceil(num_sequences / global_grad_accumulation_sequences) + for step_index in range(num_steps): + micro_indices = build_micro_sample_indices( + step_index=step_index, + num_sequences=num_sequences, + global_grad_accumulation_sequences=global_grad_accumulation_sequences, + ) + micro_inputs = select_micro_inputs( + packed_tensors, micro_indices, zero_template + ) step_result = run_training_step( model_chunks=runtime.model, optimizer=runtime.optimizer, diff --git a/tests/integration/megatron_forward_trace.py b/tests/integration/megatron_forward_trace.py index d3befd9f..90c4a3af 100644 --- a/tests/integration/megatron_forward_trace.py +++ b/tests/integration/megatron_forward_trace.py @@ -400,7 +400,11 @@ def _build_moe_row_identities( expert_rows = (local_ids == expert_id).nonzero(as_tuple=False) for token_offset, slot_index in expert_rows.tolist(): local_identities.append( - (expert_id, token_cursor - token_count + token_offset, slot_index) + ( + expert_id, + token_cursor - token_count + token_offset, + slot_index, + ) ) if len(local_identities) != row_count: return None @@ -469,7 +473,9 @@ def _canonicalize_moe_expert_row_order( return tensor row_splits_raw = call.get("primary_output__row_splits") row_splits = ( - [int(v) for v in row_splits_raw] if 
isinstance(row_splits_raw, list) else None + [int(v) for v in row_splits_raw] + if isinstance(row_splits_raw, list) + else None ) identities = cls._build_moe_row_identities( module_name=module_name, @@ -572,11 +578,6 @@ def _merge_rank_values( and cls._can_cat_along_dim(tensors, dim=preferred_cat_dim) ): return torch.cat(tensors, dim=preferred_cat_dim) - if all( - tensors[0].shape == tensor.shape and torch.equal(tensors[0], tensor) - for tensor in tensors[1:] - ): - return tensors[0] if all(tensor.ndim > 0 for tensor in tensors): if cls._can_cat_along_dim(tensors, dim=0): return torch.cat(tensors, dim=0) @@ -622,7 +623,7 @@ def _merge_rank_values( ) if all(value == values_by_rank[0] for value in values_by_rank[1:]): return values_by_rank[0] - return values_by_rank[0] + return values_by_rank @classmethod def _merge_rank_call_entries( diff --git a/tests/integration/megatron_oracle_harness.py b/tests/integration/megatron_oracle_harness.py index 785e8618..ad19c194 100644 --- a/tests/integration/megatron_oracle_harness.py +++ b/tests/integration/megatron_oracle_harness.py @@ -37,6 +37,8 @@ "bwd_skip_sync_fc1_a", "save_drop_nonzero_ranked_tp_shards", "save_duplicate_replicated_entries", + "dp_grad_accumulation_seqs", + "dp_local_token_normalization", ) SensitivityMutation = str @@ -118,6 +120,7 @@ class PackedTensorConfig(BaseModel): sequence_length: int = 256 prefill_tokens: int = 64 decode_tokens: int = 64 + decode_tokens_jitter: int = Field(default=32, ge=0) vocab_high: int = 8192 @@ -170,7 +173,7 @@ class OracleCaseConfig(BaseModel): base_model: str num_layers: int = 4 - seed: int = 20260305 + seed: int = 20260304 num_steps: int = 1 grad_accumulation_sequences: int = Field(default=4, ge=1) learning_rate: float = 5e-6 @@ -338,7 +341,16 @@ def layer_averaged_summary(reference_stack, candidate_stack) -> dict[str, float] ref = reference_stack.detach().float() cand = candidate_stack.detach().float() layer_count = int(ref.shape[0]) - metrics = {k: 0.0 for k in ["numel", 
"mean_abs_diff", "relative_l2", "typical_abs_scale", "mean_abs_pct"]} + metrics = { + k: 0.0 + for k in [ + "numel", + "mean_abs_diff", + "relative_l2", + "typical_abs_scale", + "mean_abs_pct", + ] + } for layer_index in range(layer_count): layer_accumulator = DiffAccumulator() layer_accumulator.update(ref[layer_index], cand[layer_index]) @@ -423,7 +435,13 @@ def _require_not_none(value: T | None, name: str) -> T: SENSITIVITY_TOPOLOGY_BY_MUTATION: dict[SensitivityMutation, Topology] = { mutation: SENSITIVITY_TOPOLOGY for mutation in SUPPORTED_SENSITIVITY_MUTATIONS } -SENSITIVITY_TOPOLOGY_BY_MUTATION["bwd_skip_sync_fc1_a"] = Topology(tp=2, ep=1, etp=2, dp=1, sp=True) +SENSITIVITY_TOPOLOGY_BY_MUTATION["bwd_skip_sync_fc1_a"] = Topology( + tp=2, ep=1, etp=2, dp=1, sp=True +) +SENSITIVITY_TOPOLOGY_BY_MUTATION |= { + k: Topology(tp=1, ep=2, etp=1, dp=2, sp=False) + for k in ["dp_grad_accumulation_seqs", "dp_local_token_normalization"] +} def _truthy(value: str | None) -> bool: @@ -545,6 +563,15 @@ def _build_packed_tensors( dtype=torch.long, generator=generator, ) + # Ensure paired cross-DP rows are never token-identical. 
+ half = config.num_sequences // 2 + if half > 0 and config.num_sequences % 2 == 0: + for pair_index in range(half): + left_index = pair_index + right_index = pair_index + half + if torch.equal(tokens[left_index], tokens[right_index]): + token_span = max(1, config.vocab_high - 10) + tokens[right_index] = ((tokens[right_index] - 10 + 1) % token_span) + 10 group_ids = torch.zeros(shape, dtype=torch.long) parent_ids = torch.full(shape, -1, dtype=torch.long) input_pos = ( @@ -554,17 +581,57 @@ def _build_packed_tensors( .clone() ) prefix_length = max(1, min(config.sequence_length - 1, config.prefill_tokens)) - decode_span = max(1, config.decode_tokens) - cursor = prefix_length - branch = 1 - while cursor < config.sequence_length: - end = min(config.sequence_length, cursor + decode_span) - group_ids[:, cursor:end] = branch - parent_ids[:, cursor:end] = 0 - cursor = end - branch += 1 assistant_mask = torch.zeros(shape, dtype=torch.bool) - assistant_mask[:, prefix_length:] = True + max_decode_tokens = max(1, config.sequence_length - prefix_length) + base_decode_tokens = max(1, min(config.decode_tokens, max_decode_tokens)) + jitter_width = min(config.decode_tokens_jitter, max_decode_tokens - 1) + candidate_decode_lengths: list[int] = [] + for _ in range(config.num_sequences): + if jitter_width > 0: + jitter = int( + torch.randint( + low=-jitter_width, + high=jitter_width + 1, + size=(1,), + generator=generator, + dtype=torch.long, + ).item() + ) + else: + jitter = 0 + decode_length = max( + 1, + min(max_decode_tokens, base_decode_tokens + jitter), + ) + candidate_decode_lengths.append(decode_length) + # Keep jitter local around the configured decode length, but force pairwise + # differences across halves so default DP rank shards see different lengths. 
+ if half > 0 and config.num_sequences % 2 == 0: + for pair_index in range(half): + left_index = pair_index + right_index = pair_index + half + if ( + candidate_decode_lengths[left_index] + != candidate_decode_lengths[right_index] + ): + continue + if candidate_decode_lengths[right_index] < max_decode_tokens: + candidate_decode_lengths[right_index] += 1 + elif candidate_decode_lengths[right_index] > 1: + candidate_decode_lengths[right_index] -= 1 + + for sequence_index, decode_length in enumerate(candidate_decode_lengths): + active_stop = prefix_length + decode_length + assistant_mask[sequence_index, prefix_length:active_stop] = True + decode_span = max(1, min(config.decode_tokens, decode_length)) + cursor = prefix_length + branch = 1 + while cursor < active_stop: + end = min(active_stop, cursor + decode_span) + group_ids[sequence_index, cursor:end] = branch + parent_ids[sequence_index, cursor:end] = 0 + cursor = end + branch += 1 logprobs = ( torch.randn( shape, @@ -619,12 +686,16 @@ def ensure_case_artifacts(case_config: OracleCaseConfig) -> CaseArtifacts: case_dir = ARTIFACT_ROOT / case_id case_dir.mkdir(parents=True, exist_ok=True) _write_json(case_dir / "case_config.json", case_config.model_dump(mode="json")) + regenerate = regenerate_requested() descriptor_path = case_dir / "packed_tensors.json" - if descriptor_path.exists(): + packed_dir = case_dir / "packed_tensors" + if descriptor_path.exists() and not regenerate: packed_spec = DiskPackedTensorsSpec.model_validate(_read_json(descriptor_path)) else: - packed_spec = _create_packed_tensors(case_config, case_dir / "packed_tensors") + if packed_dir.exists(): + shutil.rmtree(packed_dir) + packed_spec = _create_packed_tensors(case_config, packed_dir) _write_json(descriptor_path, packed_spec.model_dump(mode="json")) shared_init_path = case_dir / "shared_init" / "adapter_model.safetensors" @@ -731,8 +802,7 @@ def _stacked_layers( normalized = _layer_agnostic_param_key(name) if normalized is None: raise 
RuntimeError( - "Expected all compared params to include a layer index, " - f"got '{name}'." + f"Expected all compared params to include a layer index, got '{name}'." ) grouped.setdefault(normalized, []).append( (reference.detach().float(), candidate.detach().float()) @@ -887,7 +957,9 @@ def _apply_phase_pass( explain = getattr(pass_fn, "failure_reasons", None) if callable(explain): reasons = explain(summary) - row.failure_reasons = reasons if reasons else ["phase pass function returned false"] + row.failure_reasons = ( + reasons if reasons else ["phase pass function returned false"] + ) return row.failure_reasons = ["phase pass function returned false"] @@ -978,7 +1050,9 @@ def _build_metric_rows_from_tensor_pairs( accumulator.update_router_ids(reference_aligned, aligned_candidate) summary = accumulator.as_summary() elif layer_averaged: - summary = DiffAccumulator.layer_averaged_summary(reference_aligned, aligned_candidate) + summary = DiffAccumulator.layer_averaged_summary( + reference_aligned, aligned_candidate + ) else: accumulator = DiffAccumulator() accumulator.update(reference_aligned, aligned_candidate) @@ -1307,23 +1381,31 @@ def _default_phase_pass_fns() -> dict[str, PhasePassFn]: """Builds default per-phase pass functions over diff summaries.""" # note the metrics get averaged across layers to reduce noise # we don't expect particular layers to see errors as opposed to the others so this is helpful - fwd_out_loss = MetricThresholdRule(limits={"relative_l2": 3e-2, "mean_abs_pct": 3.0}) + fwd_out_loss = MetricThresholdRule( + limits={"relative_l2": 3e-2, "mean_abs_pct": 3.0} + ) grads = lambda summary: ( summary["mean_abs_pct"] < 5.0 - or (summary["typical_abs_scale"] < 1e-6 and summary["mean_abs_diff"] < 2e-8 and summary["relative_l2"] < 1.0) - ) - deltas = lambda summary: ( - summary["mean_abs_pct"] < 15.0 + or ( + summary["typical_abs_scale"] < 1e-6 + and summary["mean_abs_diff"] < 2e-8 + and summary["relative_l2"] < 1.0 + ) ) - router_topk_rule = 
MetricThresholdRule( # should be no mismatch due to router replay - limits={ - "topk_mismatch_fraction": 0.0, - "top1_mismatch_fraction": 0.0, - } + deltas = lambda summary: summary["mean_abs_pct"] < 15.0 + router_topk_rule = ( + MetricThresholdRule( # should be no mismatch due to router replay + limits={ + "topk_mismatch_fraction": 0.0, + "top1_mismatch_fraction": 0.0, + } + ) ) - return { - key: fwd_out_loss for key in ["forward", "outputs", "losses"] - } | {"grads": grads, "deltas": deltas, "router_topk_ids": router_topk_rule} + return {key: fwd_out_loss for key in ["forward", "outputs", "losses"]} | { + "grads": grads, + "deltas": deltas, + "router_topk_ids": router_topk_rule, + } def _suite_variants() -> list[VariantSpec]: diff --git a/tests/integration/megatron_oracle_worker.py b/tests/integration/megatron_oracle_worker.py index 316d39d8..d3c5b836 100644 --- a/tests/integration/megatron_oracle_worker.py +++ b/tests/integration/megatron_oracle_worker.py @@ -475,6 +475,7 @@ def _mutation_hook( megatron_train_module: Any, model_chunks: list[Any], mutation: SensitivityMutation | None, + topology: Topology, pre_optimizer_step_hook: Callable[[], None] | None = None, loss_scale: float = 1.0, ): @@ -482,6 +483,9 @@ def _mutation_hook( original_finalize = megatron_train_module._finalize_grads original_optimizer_step = megatron_train_module._optimizer_step original_loss_fn = megatron_train_module.loss_fn + original_token_normalization_scale = ( + megatron_train_module._global_token_normalization_scale + ) known_mutations = {None, *SUPPORTED_SENSITIVITY_MUTATIONS} if mutation not in known_mutations: @@ -490,6 +494,46 @@ def _mutation_hook( if mutation == "skip_finalize": megatron_train_module._finalize_grads = lambda _model: None + if mutation == "dp_local_token_normalization": + + def _wrong_local_token_normalization_scale( + micro_inputs: list[Any], + device: torch.device, + ) -> float: + del device + local_token_total = sum( + 
megatron_train_module._count_trainable_tokens(micro) + for micro in micro_inputs + ) + if local_token_total <= 0.0: + return 0.0 + # Intentionally wrong normalization: use only local token total. + dp_world_size = int( + megatron_train_module.ps.get_data_parallel_world_size( + with_context_parallel=True + ) + ) + return float(dp_world_size) / float(local_token_total) + + megatron_train_module._global_token_normalization_scale = ( + _wrong_local_token_normalization_scale + ) + + if mutation == "dp_grad_accumulation_seqs": + + def _wrong_resolve_local_grad_accumulation_sequences( + global_grad_accumulation_sequences: int, + ) -> int: + return megatron_train_module.resolve_local_grad_accumulation_sequences( + global_grad_accumulation_sequences=( + topology.dp * global_grad_accumulation_sequences + ) + ) + + megatron_train_module.resolve_local_grad_accumulation_sequences = ( + _wrong_resolve_local_grad_accumulation_sequences + ) + if pre_optimizer_step_hook is not None: def _patched_optimizer_step(optimizer: Any, learning_rate: float): @@ -531,6 +575,9 @@ def _scaled_loss_fn(*args: Any, **kwargs: Any): megatron_train_module._finalize_grads = original_finalize megatron_train_module._optimizer_step = original_optimizer_step megatron_train_module.loss_fn = original_loss_fn + megatron_train_module._global_token_normalization_scale = ( + original_token_normalization_scale + ) def _worker_run(request: WorkerRunRequest) -> None: @@ -599,13 +646,16 @@ def _worker_run(request: WorkerRunRequest) -> None: packed_tensors = packed_tensors_from_dir( **request.packed_tensors.model_dump(exclude_none=True) ) + template = megatron_train.select_indexed_inputs(packed_tensors, 0) + zero_template = megatron_train._zero_contribution_inputs(template) initial_lora_state = loaded_state + global_grad_accumulation_sequences = request.case_config.grad_accumulation_sequences train_config = types.TrainConfig( learning_rate=request.case_config.learning_rate, beta=request.case_config.beta, 
kl_penalty_coef=0.0, - grad_accumulation_sequences=request.case_config.grad_accumulation_sequences, + grad_accumulation_sequences=global_grad_accumulation_sequences, ) experimental_config: dev.TrainConfig = {} step_traces: list[StepTrace] = [] @@ -620,22 +670,20 @@ def _capture_lora_grads() -> None: megatron_train, model_chunks, request.mutation, + request.topology, pre_optimizer_step_hook=_capture_lora_grads, loss_scale=request.case_config.loss_scale, ): for step_index in range(request.case_config.num_steps): forward_trace_capture.set_step(step_index) - base_sample_index = ( - step_index * request.case_config.grad_accumulation_sequences + micro_sample_indices = megatron_train.build_micro_sample_indices( + step_index=step_index, + num_sequences=request.packed_tensors.num_sequences, + global_grad_accumulation_sequences=global_grad_accumulation_sequences, + ) + micro_inputs = megatron_train.select_micro_inputs( + packed_tensors, micro_sample_indices, zero_template ) - micro_sample_indices = [ - (base_sample_index + offset) % request.packed_tensors.num_sequences - for offset in range(request.case_config.grad_accumulation_sequences) - ] - micro_inputs = [ - megatron_train.select_indexed_inputs(packed_tensors, sample_index) - for sample_index in micro_sample_indices - ] captured_grads = None step_result = megatron_train.run_training_step( From 75255671712a4e07442e21932933adc3e652d49c Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Tue, 17 Mar 2026 20:33:44 +0000 Subject: [PATCH 17/28] Fix per-token DP normalization in Megatron training --- src/art/loss.py | 26 ++++---- src/art/megatron/finalize_grads.py | 18 ++++-- src/art/megatron/lora.py | 2 + src/art/megatron/provider.py | 48 +++++++++++++- src/art/megatron/train.py | 100 ++++++++++++----------------- src/art/tinker/service.py | 2 +- src/art/unsloth/train.py | 10 +-- 7 files changed, 122 insertions(+), 84 deletions(-) diff --git a/src/art/loss.py b/src/art/loss.py index 7d25a8eb..0aab6084 100644 --- a/src/art/loss.py 
+++ b/src/art/loss.py @@ -13,9 +13,10 @@ class Loss(BaseModel): model_config = ConfigDict(arbitrary_types_allowed=True) - mean_policy_loss: torch.Tensor - mean_kl: torch.Tensor - mean_entropy: torch.Tensor | None + reduction: Literal["mean", "sum"] + policy_loss: torch.Tensor + kl: torch.Tensor + entropy: torch.Tensor | None policy_loss_sum: torch.Tensor probs_corr: torch.Tensor kl_policy_ref: torch.Tensor | None = None @@ -134,20 +135,19 @@ def loss_fn( policy_loss = policy_loss * weights * assistant_mask kl_div = kl_div * weights * assistant_mask denominator = assistant_mask.sum() + 1e-6 if reduction == "mean" else 1.0 - mean_policy_loss = policy_loss.sum() / denominator - mean_kl = kl_div.sum() / denominator - # Compute mean entropy for the current step + reduced_policy_loss = policy_loss.sum() / denominator + kl = kl_div.sum() / denominator + # Compute reduced entropy for the current step. if entropies is not None: shifted_entropies = shift_tensor(entropies, 0.0) - mean_entropy = ( - shifted_entropies * weights * assistant_mask - ).sum() / denominator + entropy = (shifted_entropies * weights * assistant_mask).sum() / denominator else: - mean_entropy = None + entropy = None return Loss( - mean_policy_loss=mean_policy_loss, - mean_kl=mean_kl, - mean_entropy=mean_entropy, + reduction=reduction, + policy_loss=reduced_policy_loss, + kl=kl, + entropy=entropy, policy_loss_sum=policy_loss.sum(), probs_corr=probs_corr, kl_policy_ref=kl_policy_ref, diff --git a/src/art/megatron/finalize_grads.py b/src/art/megatron/finalize_grads.py index 83e8cc4f..6fce32c3 100644 --- a/src/art/megatron/finalize_grads.py +++ b/src/art/megatron/finalize_grads.py @@ -59,7 +59,10 @@ def _resolve_reduce_op(op: GradSyncOp) -> Any: raise RuntimeError(f"Unknown grad sync op: {op}") -def finalize_model_grads_extended(model: list[torch.nn.Module]) -> None: +def finalize_model_grads_extended( + model: list[torch.nn.Module], + num_tokens: torch.Tensor | None = None, +) -> None: """Run Megatron 
finalize, then apply extra LoRA grad-sync reductions. Megatron finalize handles DP/CP(via `param.allreduce=True`)(and expert-DP via `param.allreduce=False`) internally. @@ -68,7 +71,7 @@ def finalize_model_grads_extended(model: list[torch.nn.Module]) -> None: """ # All-reduce all model grads across DP replicas, layernorm grads for sequence parallelism, # embedding grads across first and last pipeline stages (if not tied) - finalize_model_grads(model) + finalize_model_grads(model, num_tokens=num_tokens) buckets: dict[ tuple[GradSyncDomain, GradSyncOp, torch.dtype, torch.device], @@ -111,6 +114,13 @@ def finalize_model_grads_extended(model: list[torch.nn.Module]) -> None: grads = [grad for _name, grad in entries] coalesced = _flatten_dense_tensors(grads) - torch.distributed.all_reduce(coalesced, op=_resolve_reduce_op(op), group=group) - for grad, synced in zip(grads, _unflatten_dense_tensors(coalesced, grads)): + reduced = ( + coalesced.float() + if torch.is_floating_point(coalesced) and coalesced.dtype != torch.float32 + else coalesced + ) + torch.distributed.all_reduce(reduced, op=_resolve_reduce_op(op), group=group) + if reduced is not coalesced: + reduced = reduced.to(dtype=coalesced.dtype) + for grad, synced in zip(grads, _unflatten_dense_tensors(reduced, grads)): grad.copy_(synced) diff --git a/src/art/megatron/lora.py b/src/art/megatron/lora.py index 63f10c85..fd62a249 100644 --- a/src/art/megatron/lora.py +++ b/src/art/megatron/lora.py @@ -684,6 +684,8 @@ def forward( ) -> tuple[torch.Tensor, torch.Tensor | None]: base_out, bias_out = self.linear_fc2(x, tokens_per_expert) adapter_out = self.lora(x, tokens_per_expert=tokens_per_expert) + # the reason there is no TP comm here is because the MoE token routing handles + # expert TP comm externally return base_out + adapter_out, bias_out diff --git a/src/art/megatron/provider.py b/src/art/megatron/provider.py index d1029b35..acd2eda1 100644 --- a/src/art/megatron/provider.py +++ b/src/art/megatron/provider.py @@ 
-5,6 +5,11 @@ from megatron.bridge import AutoBridge from megatron.bridge.models.gpt_provider import GPTModelProvider +from megatron.bridge.models.hf_pretrained.state import ( + SafeTensorsStateSource, + StateDict, + StateSource, +) from megatron.bridge.models.qwen.qwen3_moe_bridge import Qwen3MoEBridge from megatron.core.transformer.enums import AttnBackend from megatron.core.transformer.spec_utils import ModuleSpec @@ -28,15 +33,49 @@ def _resolve_layer_spec( return base_layer_spec(config, **kwargs) -def get_provider(model: str) -> GPTModelProvider: +class _CastingStateSource(StateSource): + def __init__(self, source: StateSource, *, dtype: torch.dtype): + self._source = source + self._dtype = dtype + + def get_all_keys(self) -> list[str]: + return self._source.get_all_keys() + + def load_tensors(self, keys: list[str]) -> dict[str, torch.Tensor]: + loaded = self._source.load_tensors(keys) + return { + key: ( + value.to(dtype=self._dtype) + if torch.is_floating_point(value) and value.dtype != self._dtype + else value + ) + for key, value in loaded.items() + } + + def has_glob(self, pattern: str) -> bool: + return self._source.has_glob(pattern) + + +def get_provider( + model: str, + *, + torch_dtype: torch.dtype = torch.bfloat16, +) -> GPTModelProvider: bridge = AutoBridge.from_hf_pretrained( model, - torch_dtype=torch.bfloat16, + dtype=torch_dtype, trust_remote_code=True, ) assert isinstance(bridge._model_bridge, Qwen3MoEBridge), ( "Only Qwen3 MoE models are supported" ) + if torch_dtype != torch.bfloat16: + bridge.hf_pretrained._state_dict_accessor = StateDict( + _CastingStateSource( + SafeTensorsStateSource(bridge.hf_pretrained.model_name_or_path), + dtype=torch_dtype, + ) + ) provider = bridge.to_megatron_provider() base_layer_spec = provider.transformer_layer_spec @@ -62,6 +101,11 @@ def _flex_attention_layer_spec( provider.expert_tensor_parallel_size = 1 provider.moe_shared_expert_overlap = True provider.moe_router_dtype = "fp32" + # params are disabled 
anyways, but should know about this if we switch to full FT + # because DP 'dummy' microbatches will unintentionally have loss for this + provider.moe_aux_loss_coeff = 0.0 + # effectively just a flag modifying finalize_model_grads behavior for DPxCP + provider.calculate_per_token_loss = True if provider.tensor_model_parallel_size > 1: provider.sequence_parallel = True return provider diff --git a/src/art/megatron/train.py b/src/art/megatron/train.py index c08394e1..a67b6eea 100644 --- a/src/art/megatron/train.py +++ b/src/art/megatron/train.py @@ -173,6 +173,7 @@ def configure_moe_routing_replay( def build_training_runtime( *, model_identifier: str | None = None, + provider_torch_dtype: torch.dtype = torch.bfloat16, provider_configure: Callable[[Any], None] | None = None, optimizer_config: OptimizerConfig | None = None, moe_routing_replay_path: str | None = None, @@ -182,7 +183,9 @@ def build_training_runtime( print_optimizer_stats: bool = True, ) -> TrainingRuntime: provider = get_provider( - model_identifier or os.environ.get("MODEL_IDENTIFIER", DEFAULT_MODEL_IDENTIFIER) + model_identifier + or os.environ.get("MODEL_IDENTIFIER", DEFAULT_MODEL_IDENTIFIER), + torch_dtype=provider_torch_dtype, ) if provider_configure is not None: provider_configure(provider) @@ -194,7 +197,11 @@ def build_training_runtime( model = cast( list[MegatronModule], provider.provide_distributed_model( - ddp_config=DistributedDataParallelConfig(), + ddp_config=DistributedDataParallelConfig( + # memory and comm for this should be small anyways cause lora + grad_reduce_in_fp32=True, + average_in_collective=False, + ), data_parallel_random_init=False, ), ) @@ -327,7 +334,7 @@ def _zero_contribution_inputs(template: PackedTensors) -> PackedTensors: def resolve_local_grad_accumulation_sequences( global_grad_accumulation_sequences: int, ) -> int: - dp_world_size = ps.get_data_parallel_world_size(with_context_parallel=True) + dp_world_size = ps.get_data_parallel_world_size() if ( 
global_grad_accumulation_sequences <= 0 or global_grad_accumulation_sequences % dp_world_size != 0 @@ -345,7 +352,8 @@ def build_micro_sample_indices( num_sequences: int, global_grad_accumulation_sequences: int, ) -> list[int | None]: - dp_rank = ps.get_data_parallel_rank(with_context_parallel=True) + dp_rank = ps.get_data_parallel_rank() + dp_world_size = ps.get_data_parallel_world_size() local_grad_accumulation_sequences = resolve_local_grad_accumulation_sequences( global_grad_accumulation_sequences=global_grad_accumulation_sequences, ) @@ -356,9 +364,10 @@ def build_micro_sample_indices( global_step_indices.append( global_sample_index if global_sample_index < num_sequences else None ) - rank_start = dp_rank * local_grad_accumulation_sequences - rank_end = rank_start + local_grad_accumulation_sequences - return global_step_indices[rank_start:rank_end] + return [ + global_step_indices[offset * dp_world_size + dp_rank] + for offset in range(local_grad_accumulation_sequences) + ] def select_micro_inputs( @@ -380,10 +389,6 @@ def _move_inputs_to_device(inputs: PackedTensors, device: torch.device) -> None: inputs[key] = value.to(device) # type: ignore[index] -def _finalize_grads(model_chunks: list[MegatronModule]) -> None: - finalize_model_grads_extended(cast(list[torch.nn.Module], model_chunks)) - - def _optimizer_step( optimizer: Any, learning_rate: float, @@ -397,9 +402,13 @@ def _optimizer_step( return update_successful, grad_norm, num_zeros_in_grad -def _reduce_loss(loss: torch.Tensor) -> torch.Tensor: +def _reduce_loss( + loss: torch.Tensor, + op: torch.distributed.ReduceOp.RedOpType = torch.distributed.ReduceOp.AVG, + group: torch.distributed.ProcessGroup | None = None, +) -> torch.Tensor: reduced_loss = loss.detach().clone() - torch.distributed.all_reduce(reduced_loss, op=torch.distributed.ReduceOp.AVG) + torch.distributed.all_reduce(reduced_loss, op=op, group=group) return reduced_loss @@ -408,39 +417,12 @@ def _count_trainable_tokens(inputs: PackedTensors) 
-> float: return float(assistant_mask.sum().item()) -def _global_token_normalization_scale( +def _local_trainable_token_count_tensor( micro_inputs: list[PackedTensors], device: torch.device, -) -> float: - """ - Data parallel grad normalization scale - dp_world_size / global_micro_batch_token_count, where dp_world_size cancels out - the dp grad averaging, since we divide by global rather than local token count. - Using reduction="sum" and dividing by global token count means each rank is normalized - correctly. - """ +) -> torch.Tensor: local_token_total = sum(_count_trainable_tokens(micro) for micro in micro_inputs) - dp_world_size = 1 - global_token_total = local_token_total - - dp_world_size = ps.get_data_parallel_world_size(with_context_parallel=True) - if dp_world_size > 1: - dp_group = ps.get_data_parallel_group(with_context_parallel=True) - - global_token_tensor = torch.tensor( - [local_token_total], device=device, dtype=torch.float32 - ) - torch.distributed.all_reduce( - global_token_tensor, - op=torch.distributed.ReduceOp.SUM, - group=dp_group, - ) - global_token_total = float(global_token_tensor.item()) - - if global_token_total <= 0.0: - return 0.0 - - return float(dp_world_size) / global_token_total + return torch.tensor([local_token_total], device=device, dtype=torch.float32) def run_training_step( @@ -472,13 +454,10 @@ def run_training_step( micro_sample_indices = [sample_index] if moe_routing_replay_controller is not None: - step_sample_index = next( - (index for index in micro_sample_indices if index is not None), - 0, - ) moe_routing_replay_controller.set_step( step_index=step_index, - sample_index=step_sample_index, + sample_index=micro_sample_indices, + global_grad_accumulation_sequences=config.grad_accumulation_sequences, ) device = next(model_chunks[0].parameters()).device @@ -487,8 +466,8 @@ def run_training_step( chunk.zero_grad_buffer() # ty: ignore[call-non-callable] micro_count = len(micro_inputs) - normalization_scale = 
_global_token_normalization_scale(micro_inputs, device=device) - normalized_loss: torch.Tensor | None = None + raw_loss_sum: torch.Tensor | None = None + num_tokens = _local_trainable_token_count_tensor(micro_inputs, device=device) probs_corr_sum = 0.0 new_logprobs: torch.Tensor | None = None @@ -516,26 +495,29 @@ def run_training_step( experimental_config, reduction="sum", ) - micro_loss = ( - loss_info.mean_policy_loss + config.beta * loss_info.mean_kl - ) * normalization_scale + micro_loss = loss_info.policy_loss + config.beta * loss_info.kl micro_loss.backward() probs_corr_sum += float(loss_info.probs_corr.item()) detached_micro_loss = micro_loss.detach() - if normalized_loss is None: - normalized_loss = detached_micro_loss + if raw_loss_sum is None: + raw_loss_sum = detached_micro_loss else: - normalized_loss = normalized_loss + detached_micro_loss + raw_loss_sum = raw_loss_sum + detached_micro_loss - if new_logprobs is None or normalized_loss is None: + if new_logprobs is None or raw_loss_sum is None: raise RuntimeError("run_training_step did not produce outputs") - _finalize_grads(model_chunks) + finalize_model_grads_extended(model_chunks, num_tokens=num_tokens) update_successful, grad_norm, num_zeros_in_grad = _optimizer_step( optimizer, learning_rate, ) - reduced_loss = _reduce_loss(normalized_loss) + global_num_tokens = max(num_tokens.item(), 1.0) + reduced_loss = _reduce_loss( + raw_loss_sum / global_num_tokens, + op=torch.distributed.ReduceOp.SUM, + group=ps.get_data_parallel_group(with_context_parallel=True), + ) if moe_routing_replay_controller is not None: moe_routing_replay_controller.finalize_step() diff --git a/src/art/tinker/service.py b/src/art/tinker/service.py index ba6768eb..d1bc7444 100644 --- a/src/art/tinker/service.py +++ b/src/art/tinker/service.py @@ -80,7 +80,7 @@ def custom_loss_fn( for mask, lp in zip(masks, logprobs_list): logprobs[mask] = lp loss = loss_fn(inputs, logprobs.unsqueeze(0), None, None, _config) - return 
loss.mean_policy_loss, {"policy_loss": loss.mean_policy_loss.item()} + return loss.policy_loss, {"policy_loss": loss.policy_loss.item()} shifted_tokens = shift_tensor(packed_tensors["tokens"], 0) diff --git a/src/art/unsloth/train.py b/src/art/unsloth/train.py index 34dbc5cd..91d76be3 100644 --- a/src/art/unsloth/train.py +++ b/src/art/unsloth/train.py @@ -170,14 +170,14 @@ def compute_loss( ) trainer._metrics["train"]["learning_rate"].append(config.learning_rate) - trainer._metrics["train"]["policy_loss"].append(loss.mean_policy_loss.item()) - if loss.mean_entropy is not None: - trainer._metrics["train"]["entropy"].append(loss.mean_entropy.item()) + trainer._metrics["train"]["policy_loss"].append(loss.policy_loss.item()) + if loss.entropy is not None: + trainer._metrics["train"]["entropy"].append(loss.entropy.item()) if config.beta > 0.0: - trainer._metrics["train"]["kl_div"].append(loss.mean_kl.item()) + trainer._metrics["train"]["kl_div"].append(loss.kl.item()) if loss.kl_policy_ref is not None: trainer._metrics["train"]["kl_policy_ref"].append(loss.kl_policy_ref.item()) - return loss.mean_policy_loss + config.beta * loss.mean_kl + return loss.policy_loss + config.beta * loss.kl return compute_loss From 7eb96e562acb81badf95888a00f2bacdbac52bd7 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Tue, 17 Mar 2026 20:35:23 +0000 Subject: [PATCH 18/28] Expand the oracle harness for DP correctness checks --- src/art/megatron/routing_replay.py | 661 ++++++++++++++++++- tests/integration/megatron_forward_trace.py | 491 ++++++++++---- tests/integration/megatron_oracle_harness.py | 78 +-- tests/integration/megatron_oracle_worker.py | 227 +++++-- 4 files changed, 1209 insertions(+), 248 deletions(-) diff --git a/src/art/megatron/routing_replay.py b/src/art/megatron/routing_replay.py index 463e5258..104fe185 100644 --- a/src/art/megatron/routing_replay.py +++ b/src/art/megatron/routing_replay.py @@ -1,11 +1,17 @@ from __future__ import annotations +from collections import 
defaultdict import json from pathlib import Path import re import types from typing import Any, Protocol +from megatron.core.tensor_parallel import ( + all_to_all, + gather_from_sequence_parallel_region, +) +from megatron.core.transformer.moe.moe_utils import permute, sort_chunks_by_idxs from pydantic import BaseModel, ConfigDict, model_validator from safetensors.torch import load_file, save_file import torch @@ -13,6 +19,8 @@ ROUTER_NAME_TOKEN = ".mlp.router" ROUTER_KEY_FORMAT_VERSION = "moe_routing_replay_v1" GLOBAL_TOKEN_UIDS_KEY = "global_token_uids" +TRACE_ROW_TOKEN_UIDS_ATTR = "_art_trace_row_token_uids" +TRACE_UID_SPAN_ATTR = "_art_trace_uid_span" _ROUTER_LAYER_PATTERN = re.compile(r"decoder\.layers\.(?P\d+)\.mlp\.router$") _TRACE_CHUNK_PREFIX_PATTERN = re.compile(r"^chunk(?P\d+)\.(?P.+)$") @@ -69,6 +77,40 @@ def _extract_router_output_tensors(output: Any) -> tuple[torch.Tensor, torch.Ten return probs_2d, routing_map_2d +def _extract_dp_slot_from_rank_meta(rank_meta: Any) -> tuple[int, int] | None: + if isinstance(rank_meta, dict): + rank_meta = [rank_meta] + if not isinstance(rank_meta, list) or not rank_meta: + return None + dp_ranks = { + int(item["dp_rank"]) + for item in rank_meta + if isinstance(item, dict) and "dp_rank" in item + } + dp_world_sizes = { + int(item["dp_world_size"]) + for item in rank_meta + if isinstance(item, dict) and "dp_world_size" in item + } + if len(dp_ranks) != 1 or len(dp_world_sizes) != 1: + return None + return next(iter(dp_ranks)), next(iter(dp_world_sizes)) + + +def _trace_call_route_metadata( + call_entry: dict[str, Any], +) -> tuple[int | None, int | None]: + sample_index = call_entry.get("micro_sample_index") + if isinstance(sample_index, int): + return int(sample_index), None + dp_slot = _extract_dp_slot_from_rank_meta(call_entry.get("rank_meta")) + micro_order = int(call_entry.get("micro_order", 0)) + if dp_slot is None: + return None, micro_order + dp_rank, dp_world_size = dp_slot + return None, micro_order * 
dp_world_size + dp_rank + + def build_router_key_from_module_name(*, chunk_index: int, module_name: str) -> str: match = _ROUTER_LAYER_PATTERN.search(module_name) if match is None: @@ -114,6 +156,8 @@ class RouterCallRoute(BaseModel): expert_mask: torch.Tensor routing_map: torch.Tensor | None = None num_experts: int + sample_index: int | None = None + micro_slot: int | None = None @model_validator(mode="after") def _validate(self) -> "RouterCallRoute": @@ -146,6 +190,10 @@ def _validate(self) -> "RouterCallRoute": ) if self.num_experts <= 0: raise RuntimeError(f"num_experts must be >0, got {self.num_experts}") + if self.sample_index is not None: + self.sample_index = int(self.sample_index) + if self.micro_slot is not None: + self.micro_slot = int(self.micro_slot) if self.routing_map is not None: expected = (self.expert_indices.shape[0], self.num_experts) if tuple(self.routing_map.shape) != expected: @@ -331,6 +379,8 @@ def from_dir(cls, bundle_dir: str | Path) -> "MoeRoutingReplayBundle": expert_mask=step_tensors[expert_mask_key], routing_map=routing_map, num_experts=int(call_manifest["num_experts"]), + sample_index=call_manifest.get("sample_index"), + micro_slot=call_manifest.get("micro_slot"), ) routers[router_key] = StepRouterRoutes(calls=calls) steps[step_index] = StepRoutes( @@ -385,7 +435,12 @@ def to_dir(self, bundle_dir: str | Path) -> None: ] = _to_tensor_cpu_contiguous( route.routing_map, dtype=torch.bool ) - call_manifest[str(call_index)] = {"num_experts": route.num_experts} + call_entry: dict[str, int] = {"num_experts": route.num_experts} + if route.sample_index is not None: + call_entry["sample_index"] = int(route.sample_index) + if route.micro_slot is not None: + call_entry["micro_slot"] = int(route.micro_slot) + call_manifest[str(call_index)] = call_entry step_manifest_routers[router_key] = call_manifest save_file(step_tensors, str(step_file_path)) manifest_steps[str(step_index)] = { @@ -457,11 +512,182 @@ def build_local_token_uids( return 
local_uids.reshape(-1).contiguous() +_ACTIVE_ROUTING_REPLAY_CONTROLLER: MoeRoutingReplayController | None = None + + +def _active_routing_replay_controller() -> MoeRoutingReplayController | None: + return _ACTIVE_ROUTING_REPLAY_CONTROLLER + + +def _dispatcher_local_token_uids( + controller: MoeRoutingReplayController, + dispatcher: Any, + *, + num_local_tokens: int, +) -> torch.Tensor: + step_routes = controller._active_step_routes + if step_routes is None: + raise RuntimeError("Routing replay dispatcher used without an active step") + local_uids = controller.local_token_indexer.build_local_token_uids( + global_token_uids=step_routes.global_token_uids, + num_local_tokens=num_local_tokens, + sequence_parallel=bool( + getattr(getattr(dispatcher, "config", None), "sequence_parallel", False) + ), + context_parallel_size=int( + getattr(getattr(dispatcher, "config", None), "context_parallel_size", 1) + ), + ) + if int(local_uids.numel()) != num_local_tokens: + raise RuntimeError( + "Local routing replay uid count mismatch: " + f"expected={num_local_tokens}, got={int(local_uids.numel())}" + ) + sample_index = getattr(controller, "_active_sample_index", None) + uid_span = int(step_routes.global_token_uids.numel()) + if isinstance(sample_index, int) and sample_index >= 0 and uid_span > 0: + local_uids = local_uids + sample_index * uid_span + return local_uids + + +def _trace_row_uids_from_source(source: Any) -> tuple[torch.Tensor | None, int | None]: + row_token_uids = getattr(source, TRACE_ROW_TOKEN_UIDS_ATTR, None) + if not isinstance(row_token_uids, torch.Tensor): + return None, None + uid_span = getattr(source, TRACE_UID_SPAN_ATTR, None) + uid_span_int = uid_span if isinstance(uid_span, int) and uid_span > 0 else None + return row_token_uids, uid_span_int + + +def _attach_trace_row_uids( + target: Any, + *, + row_token_uids: torch.Tensor, + uid_span: int | None, +) -> None: + setattr( + target, + TRACE_ROW_TOKEN_UIDS_ATTR, + row_token_uids.detach().to(device="cpu", 
dtype=torch.int64).reshape(-1), + ) + setattr(target, TRACE_UID_SPAN_ATTR, uid_span) + + +def _canonicalize_expert_token_order( + expert_inputs: torch.Tensor, + expert_probs: torch.Tensor, + expert_token_uids: torch.Tensor, + *, + tokens_per_expert: torch.Tensor | list[int], +) -> tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: + if isinstance(tokens_per_expert, torch.Tensor): + counts = [int(count) for count in tokens_per_expert.tolist()] + else: + counts = [int(count) for count in tokens_per_expert] + + if sum(counts) != int(expert_token_uids.numel()): + raise RuntimeError( + "Expert token uid count mismatch after dispatch: " + f"uids={int(expert_token_uids.numel())}, " + f"tokens_per_expert_sum={sum(counts)}" + ) + + order_segments: list[torch.Tensor] = [] + cursor = 0 + for count in counts: + if count <= 1: + order_segments.append( + torch.arange(cursor, cursor + count, dtype=torch.long) + ) + cursor += count + continue + segment_uids = expert_token_uids[cursor : cursor + count].to(device="cpu") + segment_order = torch.argsort(segment_uids, stable=True) + cursor + order_segments.append(segment_order) + cursor += count + + if not order_segments: + empty = torch.empty(0, dtype=torch.long) + return expert_inputs, expert_probs, expert_token_uids, empty + + canonical_order_cpu = torch.cat(order_segments, dim=0) + inverse_order_cpu = torch.empty_like(canonical_order_cpu) + inverse_order_cpu[canonical_order_cpu] = torch.arange( + canonical_order_cpu.numel(), dtype=torch.long + ) + + canonical_order = canonical_order_cpu.to( + device=expert_inputs.device, dtype=torch.long + ) + reordered_inputs = expert_inputs.index_select(0, canonical_order) + reordered_probs = expert_probs.index_select(0, canonical_order) + reordered_uids = expert_token_uids.index_select( + 0, + canonical_order_cpu.to(device=expert_token_uids.device, dtype=torch.long), + ) + return ( + reordered_inputs, + reordered_probs, + reordered_uids, + inverse_order_cpu, + ) + + +def 
_canonical_trace_row_uids( + expert_token_uids: torch.Tensor, + *, + tokens_per_expert: torch.Tensor | list[int], + local_expert_indices: list[int] | tuple[int, ...] | None, + sample_uid_span: int, + num_experts: int, +) -> tuple[torch.Tensor, int]: + if isinstance(tokens_per_expert, torch.Tensor): + counts = [int(count) for count in tokens_per_expert.tolist()] + else: + counts = [int(count) for count in tokens_per_expert] + + expert_indices = ( + [int(expert_index) for expert_index in local_expert_indices] + if local_expert_indices is not None + else list(range(len(counts))) + ) + if len(expert_indices) != len(counts): + raise RuntimeError( + "Local expert index metadata mismatch: " + f"num_expert_indices={len(expert_indices)}, num_counts={len(counts)}" + ) + row_uid_span = sample_uid_span * max(int(num_experts), 1) + row_uid_chunks: list[torch.Tensor] = [] + cursor = 0 + for global_expert_id, count in zip(expert_indices, counts): + count_int = int(count) + segment = expert_token_uids[cursor : cursor + count_int].to(dtype=torch.int64) + sample_ids = torch.div(segment, sample_uid_span, rounding_mode="floor") + local_token_ids = torch.remainder(segment, sample_uid_span) + row_uid_chunks.append( + sample_ids * row_uid_span + + int(global_expert_id) * sample_uid_span + + local_token_ids + ) + cursor += count_int + if cursor != int(expert_token_uids.numel()): + raise RuntimeError( + "Canonical trace row uid construction did not consume all expert rows: " + f"consumed={cursor}, total={int(expert_token_uids.numel())}" + ) + if not row_uid_chunks: + return expert_token_uids.new_empty((0,), dtype=torch.int64), row_uid_span + return torch.cat(row_uid_chunks, dim=0).contiguous(), row_uid_span + + def _patch_alltoall_dispatcher_preprocess() -> None: try: + from megatron.core.transformer.moe.experts import TEGroupedMLP from megatron.core.transformer.moe.token_dispatcher import ( MoEAlltoAllTokenDispatcher, ) + + from art.megatron.lora import MLPExpertsLinearFC2LoRA except 
Exception: return @@ -469,6 +695,12 @@ def _patch_alltoall_dispatcher_preprocess() -> None: return original_preprocess = MoEAlltoAllTokenDispatcher.preprocess + original_dispatch_preprocess = MoEAlltoAllTokenDispatcher.dispatch_preprocess + original_token_dispatch = MoEAlltoAllTokenDispatcher.token_dispatch + original_dispatch_postprocess = MoEAlltoAllTokenDispatcher.dispatch_postprocess + original_combine_preprocess = MoEAlltoAllTokenDispatcher.combine_preprocess + original_te_grouped_mlp_forward = TEGroupedMLP.forward + original_fc2_forward = MLPExpertsLinearFC2LoRA.forward def patched_preprocess( self: Any, routing_map: torch.Tensor, *args: Any, **kwargs: Any @@ -485,7 +717,212 @@ def patched_preprocess( self.num_out_tokens = int(routing_map.sum().item()) return result + def patched_dispatch_preprocess( + self: Any, + hidden_states: torch.Tensor, + routing_map: torch.Tensor, + probs: torch.Tensor, + ): + result = original_dispatch_preprocess(self, hidden_states, routing_map, probs) + self._art_replay_permuted_local_token_uids = None + self._art_replay_global_input_token_uids = None + self._art_replay_expert_input_inverse_permutation = None + + controller = _active_routing_replay_controller() + if controller is None: + return result + + local_token_uids = _dispatcher_local_token_uids( + controller, + self, + num_local_tokens=int(routing_map.shape[0]), + ) + permuted_local_uids, _, _ = permute( + local_token_uids.to( + device=hidden_states.device, dtype=torch.int64 + ).unsqueeze(-1), + self.routing_map, + num_out_tokens=self.num_out_tokens, + fused=False, + drop_and_pad=self.drop_and_pad, + ) + self._art_replay_permuted_local_token_uids = permuted_local_uids.reshape( + -1 + ).contiguous() + return result + + def patched_token_dispatch( + self: Any, + permutated_local_input_tokens: torch.Tensor, + permuted_probs: torch.Tensor, + ): + result = original_token_dispatch( + self, + permutated_local_input_tokens, + permuted_probs, + ) + controller = 
_active_routing_replay_controller() + permuted_local_token_uids = getattr( + self, "_art_replay_permuted_local_token_uids", None + ) + if controller is None or permuted_local_token_uids is None: + return result + + global_token_uids = permuted_local_token_uids.to( + device=permutated_local_input_tokens.device, dtype=torch.int64 + ).unsqueeze(-1) + if self.ep_size > 1: + global_token_uids = all_to_all( + self.ep_group, + global_token_uids, + self.output_splits, + self.input_splits, + ) + if self.tp_size > 1: + output_split_sizes = ( + None + if self.output_splits_tp is None + else self.output_splits_tp.tolist() + ) + global_token_uids = gather_from_sequence_parallel_region( + global_token_uids, + group=self.tp_group, + output_split_sizes=output_split_sizes, + ) + self._art_replay_global_input_token_uids = global_token_uids.reshape( + -1 + ).contiguous() + return result + + def patched_dispatch_postprocess( + self: Any, + global_input_tokens: torch.Tensor, + global_probs: torch.Tensor, + ): + expert_inputs, tokens_per_expert, expert_probs = original_dispatch_postprocess( + self, + global_input_tokens, + global_probs, + ) + controller = _active_routing_replay_controller() + global_input_token_uids = getattr( + self, "_art_replay_global_input_token_uids", None + ) + if controller is None or global_input_token_uids is None or self.drop_and_pad: + return expert_inputs, tokens_per_expert, expert_probs + + expert_token_uids = global_input_token_uids + if self.num_local_experts > 1: + sorted_token_uids, _ = sort_chunks_by_idxs( + expert_token_uids.unsqueeze(-1), + self.num_global_tokens_per_local_expert.ravel(), + self.sort_input_by_local_experts, + fused=False, + ) + expert_token_uids = sorted_token_uids.reshape(-1).contiguous() + + ( + expert_inputs, + expert_probs, + canonical_expert_token_uids, + inverse_order_cpu, + ) = _canonicalize_expert_token_order( + expert_inputs, + expert_probs, + expert_token_uids, + tokens_per_expert=tokens_per_expert, + ) + 
self._art_replay_expert_input_inverse_permutation = inverse_order_cpu + trace_row_uids, trace_uid_span = _canonical_trace_row_uids( + canonical_expert_token_uids, + tokens_per_expert=tokens_per_expert, + local_expert_indices=getattr(self, "local_expert_indices", None), + sample_uid_span=int( + controller._active_step_routes.global_token_uids.numel() + ), + num_experts=int(getattr(self, "num_experts", 1)), + ) + _attach_trace_row_uids( + expert_inputs, + row_token_uids=trace_row_uids, + uid_span=trace_uid_span, + ) + return expert_inputs, tokens_per_expert, expert_probs + + def patched_combine_preprocess(self: Any, hidden_states: torch.Tensor): + inverse_order_cpu = getattr( + self, "_art_replay_expert_input_inverse_permutation", None + ) + if inverse_order_cpu is not None and inverse_order_cpu.numel() > 0: + hidden_states = hidden_states.index_select( + 0, + inverse_order_cpu.to(device=hidden_states.device, dtype=torch.long), + ) + self._art_replay_expert_input_inverse_permutation = None + return original_combine_preprocess(self, hidden_states) + + def patched_te_grouped_mlp_forward( + self: Any, + permuted_local_hidden_states: torch.Tensor, + tokens_per_expert: torch.Tensor, + permuted_probs: torch.Tensor, + ): + row_token_uids, uid_span = _trace_row_uids_from_source( + permuted_local_hidden_states + ) + if row_token_uids is not None: + _attach_trace_row_uids( + self.linear_fc2, + row_token_uids=row_token_uids, + uid_span=uid_span, + ) + return original_te_grouped_mlp_forward( + self, + permuted_local_hidden_states, + tokens_per_expert, + permuted_probs, + ) + + def patched_fc2_forward( + self: Any, + x: torch.Tensor, + tokens_per_expert: list[int] | torch.Tensor, + ) -> tuple[torch.Tensor, torch.Tensor | None]: + row_token_uids, uid_span = _trace_row_uids_from_source(x) + if row_token_uids is None: + row_token_uids, uid_span = _trace_row_uids_from_source(self) + if row_token_uids is not None: + _attach_trace_row_uids( + self.linear_fc2, + 
row_token_uids=row_token_uids, + uid_span=uid_span, + ) + _attach_trace_row_uids( + self.lora, + row_token_uids=row_token_uids, + uid_span=uid_span, + ) + return original_fc2_forward(self, x, tokens_per_expert) + setattr(MoEAlltoAllTokenDispatcher, "preprocess", patched_preprocess) + setattr( + MoEAlltoAllTokenDispatcher, + "dispatch_preprocess", + patched_dispatch_preprocess, + ) + setattr(MoEAlltoAllTokenDispatcher, "token_dispatch", patched_token_dispatch) + setattr( + MoEAlltoAllTokenDispatcher, + "dispatch_postprocess", + patched_dispatch_postprocess, + ) + setattr( + MoEAlltoAllTokenDispatcher, + "combine_preprocess", + patched_combine_preprocess, + ) + setattr(TEGroupedMLP, "forward", patched_te_grouped_mlp_forward) + setattr(MLPExpertsLinearFC2LoRA, "forward", patched_fc2_forward) setattr(MoEAlltoAllTokenDispatcher, "_art_router_replay_preprocess_patched", True) @@ -507,9 +944,10 @@ def __init__( self._active_sample_index: int | None = None self._active_step_routes: StepRoutes | None = None self._router_call_cursors: dict[str, int] = {} - self._router_call_limits: dict[str, int] = {} + self._router_call_sequences: dict[str, list[int]] = {} self._global_uid_to_row_index: dict[int, int] = {} self._local_router_keys: set[str] = set() + self._active_micro_order: int | None = None self._patched_router_modules: list[dict[str, Any]] = [] @@ -592,6 +1030,7 @@ def routing_wrapper( ) def remove_router_patches(self) -> None: + global _ACTIVE_ROUTING_REPLAY_CONTROLLER for item in self._patched_router_modules: module = item["module"] module.routing = item["original_routing"] @@ -599,9 +1038,21 @@ def remove_router_patches(self) -> None: delattr(module, "_art_router_replay_patched") self._patched_router_modules.clear() self._local_router_keys.clear() + if _ACTIVE_ROUTING_REPLAY_CONTROLLER is self: + _ACTIVE_ROUTING_REPLAY_CONTROLLER = None - def set_step(self, *, step_index: int, sample_index: int) -> None: - from megatron.core import parallel_state as ps + def 
begin_micro(self, sample_index: int | None, micro_order: int) -> None: + self._active_sample_index = sample_index + self._active_micro_order = micro_order + + def set_step( + self, + *, + step_index: int, + sample_index: int | list[int | None], + global_grad_accumulation_sequences: int | None = None, + ) -> None: + global _ACTIVE_ROUTING_REPLAY_CONTROLLER if step_index not in self.bundle.steps: raise RuntimeError( @@ -610,7 +1061,14 @@ def set_step(self, *, step_index: int, sample_index: int) -> None: ) step_routes = self.bundle.steps[step_index] self._active_step_index = step_index - self._active_sample_index = sample_index + if isinstance(sample_index, list): + self._active_sample_index = next( + (index for index in sample_index if index is not None), + None, + ) + else: + self._active_sample_index = sample_index + self._active_micro_order = None self._active_step_routes = step_routes for local_router_key in sorted(self._local_router_keys): if local_router_key not in step_routes.routers: @@ -618,54 +1076,177 @@ def set_step(self, *, step_index: int, sample_index: int) -> None: "Replay bundle step is missing local router key: " f"step={step_index}, router='{local_router_key}'" ) - dp_world_size = int(ps.get_data_parallel_world_size(with_context_parallel=True)) - dp_rank = int(ps.get_data_parallel_rank(with_context_parallel=True)) self._router_call_cursors = {} - self._router_call_limits = {} + self._router_call_sequences = {} + local_call_keys = self._build_local_call_keys( + sample_index=sample_index, + ) for router_key in sorted(self._local_router_keys): - total_calls = len(step_routes.routers[router_key].calls) - call_start = 0 - call_limit = total_calls - if dp_world_size > 1: - if total_calls % dp_world_size != 0: - raise RuntimeError( - "Replay router call count is not divisible by DP world size: " - f"step={step_index}, router='{router_key}', " - f"calls={total_calls}, dp_world_size={dp_world_size}" - ) - calls_per_dp_rank = total_calls // dp_world_size - 
call_start = dp_rank * calls_per_dp_rank - call_limit = call_start + calls_per_dp_rank - self._router_call_cursors[router_key] = call_start - self._router_call_limits[router_key] = call_limit + router_calls = step_routes.routers[router_key].calls + if all( + self._router_call_key(route) is not None + for route in router_calls.values() + ): + calls_by_key: dict[tuple[str, int], list[int]] = defaultdict(list) + for call_index, route in sorted(router_calls.items()): + call_key = self._router_call_key(route) + assert call_key is not None + calls_by_key[call_key].append(call_index) + call_sequence = [] + for call_key in local_call_keys: + if call_key is None: + continue + matching_call_indices = calls_by_key.get(call_key) + if not matching_call_indices: + raise RuntimeError( + "Replay router call sequence is missing local micro metadata: " + f"step={step_index}, router='{router_key}', call_key={call_key}" + ) + call_sequence.extend(matching_call_indices) + else: + call_sequence = self._legacy_router_call_sequence( + step_index=step_index, + router_key=router_key, + sample_index=sample_index, + global_grad_accumulation_sequences=global_grad_accumulation_sequences, + total_calls=len(router_calls), + ) + self._router_call_cursors[router_key] = 0 + self._router_call_sequences[router_key] = call_sequence self._global_uid_to_row_index = { int(uid.item()): row_index for row_index, uid in enumerate(step_routes.global_token_uids) } + _ACTIVE_ROUTING_REPLAY_CONTROLLER = self + + def _build_local_call_keys( + self, + *, + sample_index: int | list[int | None], + ) -> list[tuple[str, int] | None]: + if not isinstance(sample_index, list): + if sample_index is None: + return [self._dummy_micro_call_key(local_micro_index=0)] + return [("sample", int(sample_index))] + return [ + self._sample_or_dummy_call_key( + global_sample_index=global_sample_index, + local_micro_index=local_micro_index, + ) + for local_micro_index, global_sample_index in enumerate(sample_index) + ] + + def 
_sample_or_dummy_call_key( + self, + *, + global_sample_index: int | None, + local_micro_index: int, + ) -> tuple[str, int] | None: + if global_sample_index is not None: + return ("sample", int(global_sample_index)) + return self._dummy_micro_call_key(local_micro_index=local_micro_index) + + def _dummy_micro_call_key( + self, + *, + local_micro_index: int, + ) -> tuple[str, int]: + from megatron.core import parallel_state as ps + + dp_rank = int(ps.get_data_parallel_rank()) + dp_world_size = int(ps.get_data_parallel_world_size()) + micro_slot = local_micro_index * dp_world_size + dp_rank + return ("dummy_micro_slot", micro_slot) + + @staticmethod + def _router_call_key(route: RouterCallRoute) -> tuple[str, int] | None: + if route.sample_index is not None: + return ("sample", int(route.sample_index)) + if route.micro_slot is not None: + return ("dummy_micro_slot", int(route.micro_slot)) + return None + + @staticmethod + def _legacy_router_call_sequence( + *, + step_index: int, + router_key: str, + sample_index: int | list[int | None], + global_grad_accumulation_sequences: int | None, + total_calls: int, + ) -> list[int]: + step_sample_count = global_grad_accumulation_sequences + if step_sample_count is None: + if isinstance(sample_index, list): + step_sample_count = len( + [index for index in sample_index if index is not None] + ) + else: + step_sample_count = 1 + if step_sample_count <= 0 or total_calls % step_sample_count != 0: + raise RuntimeError( + "Replay router call count is not divisible by step sample count: " + f"step={step_index}, router='{router_key}', " + f"total_calls={total_calls}, step_sample_count={step_sample_count}" + ) + calls_per_sample = total_calls // step_sample_count + step_base_sample_index = step_index * step_sample_count + if isinstance(sample_index, list): + call_sequence: list[int] = [] + for global_sample_index in sample_index: + if global_sample_index is None: + continue + sample_offset = int(global_sample_index) - 
step_base_sample_index + if sample_offset < 0 or sample_offset >= step_sample_count: + raise RuntimeError( + "Replay router call index is outside the step-local range: " + f"step={step_index}, router='{router_key}', " + f"global_sample_index={global_sample_index}, " + f"step_base_sample_index={step_base_sample_index}, " + f"step_sample_count={step_sample_count}" + ) + call_start = sample_offset * calls_per_sample + call_sequence.extend(range(call_start, call_start + calls_per_sample)) + return call_sequence + + sample_offset = int(sample_index) - step_base_sample_index + if sample_offset < 0 or sample_offset >= step_sample_count: + raise RuntimeError( + "Replay router call index is outside the step-local range: " + f"step={step_index}, router='{router_key}', " + f"sample_index={sample_index}, " + f"step_sample_count={step_sample_count}" + ) + call_start = sample_offset * calls_per_sample + return list(range(call_start, call_start + calls_per_sample)) def finalize_step(self) -> None: + global _ACTIVE_ROUTING_REPLAY_CONTROLLER if self._active_step_routes is None: raise RuntimeError("finalize_step called before set_step") for router_key in sorted(self._local_router_keys): consumed = self._router_call_cursors.get(router_key, 0) - expected = self._router_call_limits.get(router_key) - if expected is None: + call_sequence = self._router_call_sequences.get(router_key) + if call_sequence is None: raise RuntimeError( - "Routing replay call limits missing for router key: " + "Routing replay call sequence missing for router key: " f"step={self._active_step_index}, router='{router_key}'" ) - if consumed != expected: + if consumed != len(call_sequence): raise RuntimeError( "Routing replay step consumption mismatch: " f"step={self._active_step_index}, router='{router_key}', " - f"consumed={consumed}, expected={expected}" + f"consumed={consumed}, expected={len(call_sequence)}" ) self._active_step_index = None self._active_sample_index = None self._active_step_routes = None 
self._router_call_cursors = {} - self._router_call_limits = {} + self._router_call_sequences = {} self._global_uid_to_row_index = {} + self._active_micro_order = None + if _ACTIVE_ROUTING_REPLAY_CONTROLLER is self: + _ACTIVE_ROUTING_REPLAY_CONTROLLER = None def get_route_for_router( self, @@ -676,17 +1257,22 @@ def get_route_for_router( context_parallel_size: int, ) -> tuple[torch.Tensor, torch.Tensor]: step_routes = self._active_step_routes - call_index = self._router_call_cursors.get(router_key, 0) - call_limit = self._router_call_limits.get(router_key) + call_cursor = self._router_call_cursors.get(router_key, 0) + call_sequence = self._router_call_sequences.get(router_key) + if call_sequence is None: + raise RuntimeError( + "Routing replay call sequence missing for router key: " + f"step={self._active_step_index}, router='{router_key}'" + ) router_calls = step_routes.routers[router_key].calls - if call_limit is not None and call_index >= call_limit: + if call_cursor >= len(call_sequence): raise RuntimeError( - "Routing replay call cursor exceeded local call range: " + "Routing replay call cursor exceeded local call sequence: " f"step={self._active_step_index}, router='{router_key}', " - f"call_index={call_index}, limit={call_limit}" + f"call_cursor={call_cursor}, sequence_length={len(call_sequence)}" ) - route = router_calls[call_index] - self._router_call_cursors[router_key] = call_index + 1 + route = router_calls[call_sequence[call_cursor]] + self._router_call_cursors[router_key] = call_cursor + 1 num_local_tokens = int(logits.shape[0]) num_experts = int(logits.shape[1]) @@ -813,6 +1399,9 @@ def build_bundle_from_forward_trace_dir( output = call_entry.get("output") probs_2d, routing_map_2d = _extract_router_output_tensors(output) compact_route = _compact_route_from_dense(probs_2d, routing_map_2d) + sample_index, micro_slot = _trace_call_route_metadata(call_entry) + compact_route.sample_index = sample_index + compact_route.micro_slot = micro_slot 
router_calls[call_index] = compact_route max_topk = max(max_topk, compact_route.max_topk) token_count = compact_route.num_global_tokens diff --git a/tests/integration/megatron_forward_trace.py b/tests/integration/megatron_forward_trace.py index 90c4a3af..5e36fc87 100644 --- a/tests/integration/megatron_forward_trace.py +++ b/tests/integration/megatron_forward_trace.py @@ -1,7 +1,8 @@ from __future__ import annotations +import os from pathlib import Path -from typing import Any, cast +from typing import Any, Callable, cast import torch @@ -65,6 +66,58 @@ def _rank_metadata() -> dict[str, int]: } +def _extract_dp_slot_from_rank_meta(rank_meta: Any) -> tuple[int, int] | None: + """Returns one stable `(dp_rank, dp_world_size)` pair from merged rank metadata.""" + if isinstance(rank_meta, dict): + rank_meta = [rank_meta] + if not isinstance(rank_meta, list) or not rank_meta: + return None + dp_ranks = { + _safe_int(item.get("dp_rank"), 0) + for item in rank_meta + if isinstance(item, dict) and "dp_rank" in item + } + dp_world_sizes = { + _safe_int(item.get("dp_world_size"), 1) + for item in rank_meta + if isinstance(item, dict) and "dp_world_size" in item + } + if len(dp_ranks) != 1 or len(dp_world_sizes) != 1: + return None + return next(iter(dp_ranks)), next(iter(dp_world_sizes)) + + +def _trace_call_sort_key(call: dict[str, Any]) -> tuple[int, int]: + """Builds a stable micro identity for merged trace ordering.""" + sample_index = call.get("micro_sample_index") + if isinstance(sample_index, int): + return 0, int(sample_index) + micro_order = _safe_int(call.get("micro_order"), 0) + dp_slot = _extract_dp_slot_from_rank_meta(call.get("rank_meta")) + if dp_slot is None: + return 1, micro_order + dp_rank, dp_world_size = dp_slot + return 1, micro_order * dp_world_size + dp_rank + + +def _local_dummy_micro_slot(micro_order: int) -> int: + """Builds the stable dummy-micro slot used when one micro has no sample id.""" + dp_rank = _safe_ps_stat("get_data_parallel_rank", 0) + 
dp_world_size = _safe_ps_stat("get_data_parallel_world_size", 1) + return micro_order * dp_world_size + dp_rank + + +def _captured_output_sort_key( + sample_index: int | None, + micro_order: int, + micro_slot: int | None, +) -> tuple[int, int, int]: + """Builds the deterministic ordering used for captured root outputs.""" + if isinstance(sample_index, int): + return 0, int(sample_index), micro_order + return 1, _safe_int(micro_slot, micro_order), 0 + + def _shard_world_size_for_domain(domain: Any) -> int: """Returns shard-group world size for one LoRA shard domain.""" if domain == "tp": @@ -117,6 +170,22 @@ def _materialize_trace_value(value: Any) -> Any: return value +def _extract_tensor_attr(value: Any, attr_name: str) -> Any: + if isinstance(value, torch.Tensor): + return getattr(value, attr_name, None) + if isinstance(value, dict): + for item in value.values(): + attr_value = _extract_tensor_attr(item, attr_name) + if attr_value is not None: + return attr_value + if isinstance(value, (list, tuple)): + for item in value: + attr_value = _extract_tensor_attr(item, attr_name) + if attr_value is not None: + return attr_value + return None + + def _extract_router_topk(output: Any) -> tuple[torch.Tensor, torch.Tensor] | None: if not isinstance(output, tuple) or len(output) < 2: return None @@ -144,17 +213,36 @@ def __init__( *, enabled: bool, capture_name_tokens: tuple[str, ...] 
= CAPTURE_NAME_TOKENS, + micro_start_callback: Callable[[int | None, int], None] | None = None, ) -> None: self.enabled = enabled self.capture_name_tokens = capture_name_tokens + self.micro_start_callback = micro_start_callback self.current_step_index: int | None = None self.current_step_trace: dict[str, list[dict[str, Any]]] = {} + self.current_micro_sample_index: int | None = None + self.current_micro_order = 0 + self.current_micro_module_call_counts: dict[str, int] = {} + self.current_step_sample_indices: list[int | None] = [] + self.current_step_outputs: list[ + tuple[int | None, int, int | None, torch.Tensor] + ] = [] + self._next_micro_order = 0 self._hook_handles: list[Any] = [] if not enabled: return self._register_hooks(model_chunks) def _register_hooks(self, model_chunks: list[Any]) -> None: + if not model_chunks: + raise RuntimeError("Expected at least one model chunk for forward tracing") + root_module = model_chunks[0] + self._hook_handles.append( + root_module.register_forward_pre_hook(self._root_pre_hook) + ) + self._hook_handles.append( + root_module.register_forward_hook(self._root_post_hook) + ) for chunk_index, chunk in enumerate(model_chunks): for module_name, module in chunk.named_modules(): trace_module_name = f"chunk{chunk_index}.{module_name}" @@ -275,9 +363,12 @@ def _make_hook(self, name: str, module: Any): def _hook(_module: Any, inputs: Any, output: Any) -> None: if self.current_step_index is None: return - call_index = len(self.current_step_trace.get(name, [])) + micro_call_index = self.current_micro_module_call_counts.get(name, 0) + self.current_micro_module_call_counts[name] = micro_call_index + 1 trace_item: dict[str, Any] = { - "call_index": call_index, + "micro_call_index": micro_call_index, + "micro_order": self.current_micro_order, + "micro_sample_index": self.current_micro_sample_index, "module_type": module.__class__.__name__, "rank_meta": _rank_metadata(), "merge_hints": self._build_merge_hints(name, module), @@ -292,7 +383,16 
@@ def _hook(_module: Any, inputs: Any, output: Any) -> None: topk_ids, topk_scores = router_topk trace_item["router_topk_ids"] = topk_ids trace_item["router_topk_scores"] = topk_scores - self.current_step_trace.setdefault(name, []).append(trace_item) + trace_items = self._split_expert_trace_items( + module_name=name, + module=module, + inputs=inputs, + trace_item=trace_item, + ) + trace_calls = self.current_step_trace.setdefault(name, []) + for split_item in trace_items: + split_item["call_index"] = len(trace_calls) + trace_calls.append(split_item) return _hook @@ -303,9 +403,185 @@ def guess_primary_tensor(value: Any) -> torch.Tensor | None: return None return _materialize_tensor(tensor) - def set_step(self, step_index: int) -> None: + def _sample_index_for_micro(self, micro_order: int) -> int | None: + if micro_order < len(self.current_step_sample_indices): + return self.current_step_sample_indices[micro_order] + return None + + def _root_pre_hook(self, _module: Any, _args: Any) -> None: + if self.current_step_index is None: + return + micro_order = self._next_micro_order + sample_index = self._sample_index_for_micro(micro_order) + self.begin_micro(sample_index=sample_index, micro_order=micro_order) + + def _root_post_hook(self, _module: Any, _inputs: Any, output: Any) -> None: + if self.current_step_index is None: + return + output_tensor = self.guess_primary_tensor(output) + if output_tensor is None: + raise RuntimeError( + f"Expected root forward output to contain a tensor, got {type(output)}" + ) + sample_index = self.current_micro_sample_index + micro_order = self.current_micro_order + self.current_step_outputs.append( + ( + sample_index, + micro_order, + None + if sample_index is not None + else _local_dummy_micro_slot(micro_order), + output_tensor.float(), + ) + ) + self._next_micro_order = micro_order + 1 + + def set_step( + self, + step_index: int, + sample_indices: list[int | None] | None = None, + ) -> None: self.current_step_index = step_index 
self.current_step_trace = {} + self.current_step_sample_indices = list(sample_indices or []) + self.current_step_outputs = [] + self.current_micro_sample_index = None + self.current_micro_order = 0 + self.current_micro_module_call_counts = {} + self._next_micro_order = 0 + + def begin_micro(self, sample_index: int | None, micro_order: int) -> None: + self.current_micro_sample_index = sample_index + self.current_micro_order = micro_order + self.current_micro_module_call_counts = {} + if self.micro_start_callback is not None: + self.micro_start_callback(sample_index, micro_order) + + @staticmethod + def _row_token_uids_for_trace( + *, + inputs: Any, + module: Any, + ) -> tuple[torch.Tensor | None, int | None]: + row_token_uids = _extract_tensor_attr(inputs, "_art_trace_row_token_uids") + if row_token_uids is None: + row_token_uids = getattr(module, "_art_trace_row_token_uids", None) + if not isinstance(row_token_uids, torch.Tensor): + return None, None + + uid_span = _extract_tensor_attr(inputs, "_art_trace_uid_span") + if uid_span is None: + uid_span = getattr(module, "_art_trace_uid_span", None) + uid_span_int = uid_span if isinstance(uid_span, int) and uid_span > 0 else None + return ( + row_token_uids.detach().to(device="cpu", dtype=torch.int64).reshape(-1), + uid_span_int, + ) + + @classmethod + def _slice_row_aligned_value( + cls, + value: Any, + *, + row_indices: torch.Tensor, + total_rows: int, + ) -> Any: + if isinstance(value, torch.Tensor): + if value.ndim > 0 and int(value.shape[0]) == total_rows: + return value.index_select(0, row_indices) + return value + if isinstance(value, dict): + return { + key: cls._slice_row_aligned_value( + item, + row_indices=row_indices, + total_rows=total_rows, + ) + for key, item in value.items() + } + if isinstance(value, list): + return [ + cls._slice_row_aligned_value( + item, + row_indices=row_indices, + total_rows=total_rows, + ) + for item in value + ] + if isinstance(value, tuple): + return tuple( + 
cls._slice_row_aligned_value( + item, + row_indices=row_indices, + total_rows=total_rows, + ) + for item in value + ) + return value + + @classmethod + def _split_expert_trace_items( + cls, + *, + module_name: str, + module: Any, + inputs: Any, + trace_item: dict[str, Any], + ) -> list[dict[str, Any]]: + if not cls._is_moe_expert_forward_module(module_name): + return [trace_item] + + primary_output = trace_item.get("primary_output") + if not isinstance(primary_output, torch.Tensor) or primary_output.ndim == 0: + return [trace_item] + + row_token_uids, uid_span = cls._row_token_uids_for_trace( + inputs=inputs, + module=module, + ) + if row_token_uids is None: + return [trace_item] + + total_rows = int(row_token_uids.numel()) + if total_rows == 0 or int(primary_output.shape[0]) != total_rows: + return [trace_item] + + trace_item["row_token_uids"] = row_token_uids + if uid_span is None: + return [trace_item] + + sample_ids = torch.div(row_token_uids, uid_span, rounding_mode="floor") + ordered_sample_ids: list[int] = [] + seen_sample_ids: set[int] = set() + for sample_id in sample_ids.tolist(): + sample_id_int = int(sample_id) + if sample_id_int in seen_sample_ids: + continue + seen_sample_ids.add(sample_id_int) + ordered_sample_ids.append(sample_id_int) + + if len(ordered_sample_ids) <= 1: + if ordered_sample_ids: + trace_item["micro_sample_index"] = ordered_sample_ids[0] + return [trace_item] + + split_items: list[dict[str, Any]] = [] + for sample_id in ordered_sample_ids: + row_indices = (sample_ids == sample_id).nonzero(as_tuple=False).reshape(-1) + split_item = { + key: cls._slice_row_aligned_value( + value, + row_indices=row_indices, + total_rows=total_rows, + ) + for key, value in trace_item.items() + if key not in {"call_index", "micro_sample_index", "row_token_uids"} + } + split_item["micro_sample_index"] = sample_id + split_item["row_token_uids"] = row_token_uids.index_select(0, row_indices) + split_items.append(split_item) + return split_items @staticmethod 
def _is_moe_expert_forward_module(module_name: str) -> bool: @@ -327,93 +603,6 @@ def _primary_output_merge_hint(call: dict[str, Any]) -> dict[str, Any] | None: return None return primary_hint - @classmethod - def _lookup_call_by_index( - cls, - trace: dict[str, list[dict[str, Any]]], - module_name: str, - call_index: int, - ) -> dict[str, Any] | None: - """Finds one call entry by call-index with positional fallback.""" - calls = trace.get(module_name) - if calls is None: - return None - for call in calls: - if int(call.get("call_index", -1)) == call_index: - return call - if 0 <= call_index < len(calls): - return calls[call_index] - return None - - @staticmethod - def _router_module_name_for_expert_module(module_name: str) -> str | None: - """Maps one expert module name to its layer router module name.""" - for token in (".mlp.experts.linear_fc1", ".mlp.experts.linear_fc2"): - token_index = module_name.find(token) - if token_index != -1: - return f"{module_name[:token_index]}.mlp.router" - return None - - @classmethod - def _build_moe_row_identities( - cls, - *, - module_name: str, - call_index: int, - trace: dict[str, list[dict[str, Any]]], - row_splits: list[int] | None, - ) -> list[tuple[int, int, int]] | None: - """Builds stable `(expert_id, token_index, topk_slot)` identities for MoE rows.""" - router_module_name = cls._router_module_name_for_expert_module(module_name) - if router_module_name is None: - return None - router_call = cls._lookup_call_by_index(trace, router_module_name, call_index) - if router_call is None: - return None - router_topk_ids = router_call.get("router_topk_ids") - if not isinstance(router_topk_ids, torch.Tensor) or router_topk_ids.ndim != 2: - return None - token_splits_raw = router_call.get("router_topk_ids__row_splits") - if row_splits is None: - if isinstance(token_splits_raw, list): - row_splits = [ - int(v) * int(router_topk_ids.shape[1]) for v in token_splits_raw - ] - else: - row_splits = [int(router_topk_ids.numel())] - if 
isinstance(token_splits_raw, list): - token_splits = [int(v) for v in token_splits_raw] - else: - topk = int(router_topk_ids.shape[1]) - token_splits = [int(v) // topk for v in row_splits] - if len(row_splits) != len(token_splits): - return None - row_cursor = 0 - token_cursor = 0 - identities: list[tuple[int, int, int]] = [] - for row_count, token_count in zip(row_splits, token_splits): - local_ids = router_topk_ids[token_cursor : token_cursor + token_count] - token_cursor += token_count - local_identities: list[tuple[int, int, int]] = [] - max_expert = int(local_ids.max().item()) if local_ids.numel() > 0 else -1 - for expert_id in range(max_expert + 1): - expert_rows = (local_ids == expert_id).nonzero(as_tuple=False) - for token_offset, slot_index in expert_rows.tolist(): - local_identities.append( - ( - expert_id, - token_cursor - token_count + token_offset, - slot_index, - ) - ) - if len(local_identities) != row_count: - return None - identities.extend(local_identities) - row_cursor += row_count - if row_cursor != sum(row_splits): - return None - return identities - @classmethod def _canonicalize_etp_fc1_feature_layout( cls, @@ -456,12 +645,10 @@ def _canonicalize_moe_expert_row_order( cls, *, module_name: str, - call_index: int, tensor: torch.Tensor, - trace: dict[str, list[dict[str, Any]]], call: dict[str, Any], ) -> torch.Tensor: - """Canonicalizes MoE expert-row ordering using router replay identities.""" + """Canonicalizes MoE expert rows using dispatch-time UID identities.""" if not cls._is_moe_expert_forward_module(module_name): return tensor if tensor.ndim != 2: @@ -471,34 +658,23 @@ def _canonicalize_moe_expert_row_order( primary_hint.get("op") != "concat" or primary_hint.get("dim") != 0 ): return tensor - row_splits_raw = call.get("primary_output__row_splits") - row_splits = ( - [int(v) for v in row_splits_raw] - if isinstance(row_splits_raw, list) - else None - ) - identities = cls._build_moe_row_identities( - module_name=module_name, - 
call_index=call_index, - trace=trace, - row_splits=row_splits, - ) - if identities is None or len(identities) != int(tensor.shape[0]): + row_token_uids = call.get("row_token_uids") + if not isinstance(row_token_uids, torch.Tensor): return tensor - order = sorted(range(len(identities)), key=lambda index: identities[index]) - return tensor[order] + if int(row_token_uids.numel()) != int(tensor.shape[0]): + return tensor + order = torch.argsort(row_token_uids, stable=True) + return tensor.index_select(0, order) @classmethod def _canonicalize_primary_output_tensor( cls, *, module_name: str, - call_index: int, tensor: torch.Tensor, - trace: dict[str, list[dict[str, Any]]], call: dict[str, Any], ) -> torch.Tensor: - """Runs all primary-output canonicalization passes for one call tensor.""" + """Runs all remaining primary-output canonicalization passes for one call.""" tensor = cls._canonicalize_etp_fc1_feature_layout( module_name=module_name, tensor=tensor, @@ -506,9 +682,7 @@ def _canonicalize_primary_output_tensor( ) return cls._canonicalize_moe_expert_row_order( module_name=module_name, - call_index=call_index, tensor=tensor, - trace=trace, call=call, ) @@ -528,9 +702,7 @@ def canonicalize_trace( if isinstance(tensor, torch.Tensor): call["primary_output"] = cls._canonicalize_primary_output_tensor( module_name=module_name, - call_index=call_index, tensor=tensor, - trace=trace, call=call, ) call[PRIMARY_OUTPUT_CANONICAL_KEY] = True @@ -705,17 +877,25 @@ def _merge_rank_traces( merged: dict[str, list[dict[str, Any]]] = {} module_names = sorted(set().union(*(trace.keys() for trace in rank_traces))) for module_name in module_names: - call_count = max(len(trace.get(module_name, [])) for trace in rank_traces) module_calls: list[dict[str, Any]] = [] - for call_index in range(call_count): - rank_values = [ - trace[module_name][call_index] - for trace in rank_traces - if module_name in trace and call_index < len(trace[module_name]) - ] - if not rank_values: - continue - 
module_calls.append(cls._merge_rank_call_entries(rank_values)) + grouped_calls: dict[ + tuple[int, int, int, int], + list[dict[str, Any]], + ] = {} + for trace in rank_traces: + for call in trace.get(module_name, []): + sample_kind, sample_sort_index = _trace_call_sort_key(call) + merge_key = ( + sample_kind, + sample_sort_index, + int(call.get("micro_order", 0)), + int(call.get("micro_call_index", call.get("call_index", 0))), + ) + grouped_calls.setdefault(merge_key, []).append(call) + for merged_index, merge_key in enumerate(sorted(grouped_calls)): + merged_call = cls._merge_rank_call_entries(grouped_calls[merge_key]) + merged_call["call_index"] = merged_index + module_calls.append(merged_call) merged[module_name] = module_calls return merged @@ -736,6 +916,59 @@ def _gather_rank_traces( return None return cast(list[dict[str, list[dict[str, Any]]]], gathered) + @staticmethod + def _merge_group_tensor(tensors: list[torch.Tensor]) -> torch.Tensor: + if len(tensors) == 1: + return tensors[0] + first = tensors[0] + if all(tensor.shape == first.shape for tensor in tensors[1:]) and all( + torch.equal(first, tensor) for tensor in tensors[1:] + ): + return first + raise RuntimeError( + "Mismatched output captures for the same micro output across non-DP ranks" + ) + + @staticmethod + def _gather_rank_outputs( + local_outputs: list[tuple[int | None, int, int | None, torch.Tensor]], + ) -> list[list[tuple[int | None, int, int | None, torch.Tensor]]] | None: + if ( + not torch.distributed.is_initialized() + or torch.distributed.get_world_size() == 1 + ): + return [local_outputs] + gathered: list[ + list[tuple[int | None, int, int | None, torch.Tensor]] | None + ] = [None] * torch.distributed.get_world_size() + torch.distributed.all_gather_object(gathered, local_outputs) + if torch.distributed.get_rank() != 0: + return None + return cast( + list[list[tuple[int | None, int, int | None, torch.Tensor]]], + gathered, + ) + + def ordered_step_outputs(self) -> list[torch.Tensor] | 
None: + if not self.enabled: + return None + gathered_outputs = self._gather_rank_outputs(self.current_step_outputs) + if gathered_outputs is None: + return None + grouped: dict[tuple[int | None, int | None, int], list[torch.Tensor]] = {} + for rank_outputs in gathered_outputs: + for sample_index, micro_order, micro_slot, tensor in rank_outputs: + group_key = (sample_index, micro_slot, micro_order) + grouped.setdefault(group_key, []).append(tensor) + ordered_group_keys = sorted( + grouped, + key=lambda item: _captured_output_sort_key(item[0], item[2], item[1]), + ) + return [ + self._merge_group_tensor(grouped[group_key]) + for group_key in ordered_group_keys + ] + def save_current_step(self, traces_dir: Path) -> Path | None: if not self.enabled or self.current_step_index is None: return None @@ -745,7 +978,9 @@ def save_current_step(self, traces_dir: Path) -> Path | None: merged_trace = self.canonicalize_trace(self._merge_rank_traces(gathered_traces)) traces_dir.mkdir(parents=True, exist_ok=True) trace_path = traces_dir / f"forward_trace_step_{self.current_step_index:03d}.pt" - torch.save(merged_trace, trace_path) + tmp_trace_path = trace_path.with_suffix(f"{trace_path.suffix}.tmp") + torch.save(merged_trace, tmp_trace_path) + os.replace(tmp_trace_path, trace_path) return trace_path @classmethod diff --git a/tests/integration/megatron_oracle_harness.py b/tests/integration/megatron_oracle_harness.py index ad19c194..033cd5b9 100644 --- a/tests/integration/megatron_oracle_harness.py +++ b/tests/integration/megatron_oracle_harness.py @@ -113,6 +113,31 @@ def world_size(self) -> int: return attention_world +TOPOLOGIES = [ + Topology(tp=1, ep=1, etp=1, dp=1, sp=False), + Topology(tp=2, ep=1, etp=1, dp=1, sp=True), + Topology(tp=2, ep=2, etp=1, dp=1, sp=True), + Topology(tp=2, ep=1, etp=2, dp=1, sp=True), +] +EXTENDED_TOPOLOGIES = [ + Topology(tp=1, ep=1, etp=1, dp=2, sp=False), + Topology(tp=1, ep=2, etp=1, dp=2, sp=False), + Topology(tp=1, ep=1, etp=2, dp=2, sp=True), 
+] +ORACLE_TOPOLOGY = TOPOLOGIES[0] +SENSITIVITY_TOPOLOGY = Topology(tp=2, ep=2, etp=1, dp=1, sp=True) +SENSITIVITY_TOPOLOGY_BY_MUTATION: dict[SensitivityMutation, Topology] = { + mutation: SENSITIVITY_TOPOLOGY for mutation in SUPPORTED_SENSITIVITY_MUTATIONS +} +SENSITIVITY_TOPOLOGY_BY_MUTATION["bwd_skip_sync_fc1_a"] = Topology( + tp=2, ep=1, etp=2, dp=1, sp=True +) +SENSITIVITY_TOPOLOGY_BY_MUTATION |= { + k: Topology(tp=1, ep=2, etp=1, dp=2, sp=False) + for k in ["dp_grad_accumulation_seqs", "dp_local_token_normalization"] +} + + class PackedTensorConfig(BaseModel): """Controls synthetic packed tensor generation used by oracle harness runs.""" @@ -172,6 +197,7 @@ class OracleCaseConfig(BaseModel): """Contains all deterministic run parameters for one oracle case.""" base_model: str + precision: Literal["bf16", "fp32"] = "fp32" num_layers: int = 4 seed: int = 20260304 num_steps: int = 1 @@ -420,30 +446,6 @@ def _require_not_none(value: T | None, name: str) -> T: return value -TOPOLOGIES = [ - Topology(tp=1, ep=1, etp=1, dp=1, sp=False), - Topology(tp=2, ep=1, etp=1, dp=1, sp=True), - Topology(tp=2, ep=2, etp=1, dp=1, sp=True), - Topology(tp=2, ep=1, etp=2, dp=1, sp=True), -] -EXTENDED_TOPOLOGIES = [ - Topology(tp=1, ep=1, etp=1, dp=2, sp=False), - Topology(tp=1, ep=2, etp=1, dp=2, sp=True), -] -ORACLE_TOPOLOGY = TOPOLOGIES[0] -SENSITIVITY_TOPOLOGY = Topology(tp=2, ep=2, etp=1, dp=1, sp=True) -SENSITIVITY_TOPOLOGY_BY_MUTATION: dict[SensitivityMutation, Topology] = { - mutation: SENSITIVITY_TOPOLOGY for mutation in SUPPORTED_SENSITIVITY_MUTATIONS -} -SENSITIVITY_TOPOLOGY_BY_MUTATION["bwd_skip_sync_fc1_a"] = Topology( - tp=2, ep=1, etp=2, dp=1, sp=True -) -SENSITIVITY_TOPOLOGY_BY_MUTATION |= { - k: Topology(tp=1, ep=2, etp=1, dp=2, sp=False) - for k in ["dp_grad_accumulation_seqs", "dp_local_token_normalization"] -} - - def _truthy(value: str | None) -> bool: """Parses env-var style booleans using a small accepted truthy set.""" if value is None: @@ -798,6 +800,7 @@ 
def _stacked_layers( import torch grouped: dict[str, list[tuple[Any, Any]]] = {} + original_names_by_group: dict[str, list[str]] = {} for name, reference, candidate in pairs: normalized = _layer_agnostic_param_key(name) if normalized is None: @@ -807,10 +810,18 @@ def _stacked_layers( grouped.setdefault(normalized, []).append( (reference.detach().float(), candidate.detach().float()) ) + original_names_by_group.setdefault(normalized, []).append(name) stacked_pairs: list[tuple[str, Any, Any]] = [] for normalized in sorted(grouped): group = grouped[normalized] + reference_shapes = {tuple(reference.shape) for reference, _ in group} + candidate_shapes = {tuple(candidate.shape) for _, candidate in group} + if len(reference_shapes) != 1 or len(candidate_shapes) != 1: + original_names = original_names_by_group[normalized] + for original_name, (reference, candidate) in zip(original_names, group): + stacked_pairs.append((original_name, reference, candidate)) + continue stacked_pairs.append( ( normalized, @@ -840,7 +851,7 @@ def __init__( self.case_dir / ORACLE_MOE_ROUTING_BUNDLE_DIRNAME ) self.shared_init_path = Path(self.case_artifacts.shared_init_adapter_path) - self.console = console or Console(width=160) + self.console = console or Console(width=140) self._oracle_initialized = False self._oracle_regenerated = False @@ -1318,7 +1329,6 @@ def print_report(self, report: VariantReport) -> None: detail_table.add_column("mean_abs_pct", justify="right") detail_table.add_column("typical_abs", justify="right") detail_table.add_column("mean_abs_diff", justify="right") - # detail_table.add_column("Thresholds") detail_table.add_column("Failure") sorted_rows = sorted( table_rows, @@ -1382,17 +1392,9 @@ def _default_phase_pass_fns() -> dict[str, PhasePassFn]: # note the metrics get averaged across layers to reduce noise # we don't expect particular layers to see errors as opposed to the others so this is helpful fwd_out_loss = MetricThresholdRule( - limits={"relative_l2": 3e-2, 
"mean_abs_pct": 3.0} - ) - grads = lambda summary: ( - summary["mean_abs_pct"] < 5.0 - or ( - summary["typical_abs_scale"] < 1e-6 - and summary["mean_abs_diff"] < 2e-8 - and summary["relative_l2"] < 1.0 - ) + limits={"relative_l2": 1e-2, "mean_abs_pct": 1.0} ) - deltas = lambda summary: summary["mean_abs_pct"] < 15.0 + grads_deltas = MetricThresholdRule(limits={"mean_abs_pct": 10.0}) router_topk_rule = ( MetricThresholdRule( # should be no mismatch due to router replay limits={ @@ -1402,8 +1404,8 @@ def _default_phase_pass_fns() -> dict[str, PhasePassFn]: ) ) return {key: fwd_out_loss for key in ["forward", "outputs", "losses"]} | { - "grads": grads, - "deltas": deltas, + "grads": grads_deltas, + "deltas": grads_deltas, "router_topk_ids": router_topk_rule, } diff --git a/tests/integration/megatron_oracle_worker.py b/tests/integration/megatron_oracle_worker.py index d3c5b836..33f3c08a 100644 --- a/tests/integration/megatron_oracle_worker.py +++ b/tests/integration/megatron_oracle_worker.py @@ -144,7 +144,9 @@ def _collect_lora_state( return _gather_full_state(local_state) -def _collect_lora_grads(model_chunks: list[Any]) -> dict[str, Any] | None: +def _collect_lora_grads( + model_chunks: list[Any], +) -> dict[str, Any] | None: """Collects full LoRA gradient tensors across all ranks.""" from art.megatron.lora import LoRA @@ -163,11 +165,8 @@ def _collect_lora_grads(model_chunks: list[Any]) -> dict[str, Any] | None: raise RuntimeError(f"LoRA param main_grad is None for key '{key}'") if hasattr(grad, "_local_tensor"): grad = grad._local_tensor - local_grads[key] = ( - grad[expert].detach().cpu().T - if expert is not None - else grad.detach().cpu().T - ) + captured_grad = grad[expert] if expert is not None else grad + local_grads[key] = captured_grad.detach().cpu().T return _gather_full_state(local_grads) @@ -259,12 +258,20 @@ def _configure_provider( provider.tensor_model_parallel_size = topology.tp provider.expert_model_parallel_size = topology.ep 
provider.expert_tensor_parallel_size = topology.etp - # These are intentionally pinned to 1 for now; switching to topology-driven - # values is the single lever to start CP/PP coverage in the harness. + # These are intentionally pinned to 1 for now provider.pipeline_model_parallel_size = 1 provider.context_parallel_size = 1 provider.sequence_parallel = topology.sp provider.num_layers = case_config.num_layers + if case_config.precision == "fp32": + provider.bf16 = False + provider.fp16 = False + provider.params_dtype = torch.float32 + provider.pipeline_dtype = torch.float32 + provider.enable_autocast = False + provider.autocast_dtype = None + provider.attention_softmax_in_fp32 = True + provider.fp32_residual_connection = True if hasattr(provider, "attention_dropout"): provider.attention_dropout = 0.0 if hasattr(provider, "hidden_dropout"): @@ -275,8 +282,26 @@ def _build_optimizer_config(case_config: OracleCaseConfig): """Builds Megatron optimizer settings for deterministic harness runs.""" from megatron.core.optimizer import OptimizerConfig + if case_config.precision == "fp32": + return OptimizerConfig( + bf16=False, + fp16=False, + params_dtype=torch.float32, + main_grads_dtype=torch.float32, + main_params_dtype=torch.float32, + exp_avg_dtype=torch.float32, + exp_avg_sq_dtype=torch.float32, + lr=case_config.learning_rate, + adam_beta1=0.9, + adam_beta2=0.99, + clip_grad=0.1, + weight_decay=0.0, + adam_eps=1e-13, + ) + return OptimizerConfig( bf16=True, + fp16=False, lr=case_config.learning_rate, adam_beta1=0.9, adam_beta2=0.99, @@ -286,6 +311,14 @@ def _build_optimizer_config(case_config: OracleCaseConfig): ) +def _configure_cuda_precision(case_config: OracleCaseConfig) -> None: + if case_config.precision != "fp32": + return + torch.backends.cuda.matmul.allow_tf32 = False + torch.backends.cudnn.allow_tf32 = False + torch.set_float32_matmul_precision("highest") + + def _assert_runtime_configuration( model_chunks: list[Any], case_config: OracleCaseConfig, @@ -470,6 
+503,72 @@ def _mutated_forward(self: Any, x: Any): module.forward = original_forward +@contextmanager +def _patch_lora_for_fp32( + model_chunks: list[Any], + optimizer: Any, +): + """ + torch grouped_gemm is bf16 only, so we have a simple custom fp32 path + to make the numbers match closely + """ + from art.megatron.lora import LoRA + + del model_chunks + del optimizer + original_forward = LoRA.forward + + def _reference_forward( + self: Any, + x: torch.Tensor, + tokens_per_expert: list[int] | torch.Tensor | None = None, + ) -> torch.Tensor: + work_dtype = ( + torch.float32 + if torch.is_floating_point(x) and x.dtype != torch.float32 + else x.dtype + ) + work_x = x.to(dtype=work_dtype) + work_a = self.A_T.to(dtype=work_dtype) + work_b = self.B_T.to(dtype=work_dtype) + + if tokens_per_expert is None or self.num_local_experts == 1: + return (((work_x @ work_a) @ work_b) * self.scale).to(dtype=x.dtype) + + counts = ( + tokens_per_expert.tolist() + if isinstance(tokens_per_expert, torch.Tensor) + else list(tokens_per_expert) + ) + out = work_x.new_zeros((work_x.shape[0], work_b.shape[-1])) + + cursor = 0 + for expert_index, count in enumerate(counts): + count_int = int(count) + if count_int <= 0: + continue + next_cursor = cursor + count_int + x_chunk = work_x[cursor:next_cursor] + out[cursor:next_cursor] = (x_chunk @ work_a[expert_index]) @ work_b[ + expert_index + ] + cursor = next_cursor + + if cursor != int(work_x.shape[0]): + raise RuntimeError( + "Expert LoRA reference path did not consume all grouped rows: " + f"consumed={cursor}, rows={int(work_x.shape[0])}" + ) + + return (out * self.scale).to(dtype=x.dtype) + + LoRA.forward = _reference_forward + try: + yield + finally: + LoRA.forward = original_forward + + @contextmanager def _mutation_hook( megatron_train_module: Any, @@ -480,11 +579,14 @@ def _mutation_hook( loss_scale: float = 1.0, ): """Applies optional sensitivity mutation hooks around training steps.""" - original_finalize = 
megatron_train_module._finalize_grads + original_finalize = megatron_train_module.finalize_model_grads_extended original_optimizer_step = megatron_train_module._optimizer_step original_loss_fn = megatron_train_module.loss_fn - original_token_normalization_scale = ( - megatron_train_module._global_token_normalization_scale + original_local_token_count_tensor = ( + megatron_train_module._local_trainable_token_count_tensor + ) + original_build_micro_sample_indices = ( + megatron_train_module.build_micro_sample_indices ) known_mutations = {None, *SUPPORTED_SENSITIVITY_MUTATIONS} @@ -492,46 +594,55 @@ def _mutation_hook( raise ValueError(f"Unsupported mutation: {mutation}") if mutation == "skip_finalize": - megatron_train_module._finalize_grads = lambda _model: None + megatron_train_module.finalize_model_grads_extended = ( + lambda _model, **_kwargs: (None) + ) if mutation == "dp_local_token_normalization": - def _wrong_local_token_normalization_scale( + def _wrong_local_trainable_token_count_tensor( micro_inputs: list[Any], device: torch.device, - ) -> float: - del device + ) -> torch.Tensor: local_token_total = sum( megatron_train_module._count_trainable_tokens(micro) for micro in micro_inputs ) - if local_token_total <= 0.0: - return 0.0 - # Intentionally wrong normalization: use only local token total. 
dp_world_size = int( megatron_train_module.ps.get_data_parallel_world_size( with_context_parallel=True ) ) - return float(dp_world_size) / float(local_token_total) + wrong_local_token_total = local_token_total / max(dp_world_size, 1) + return torch.tensor( + [wrong_local_token_total], + device=device, + dtype=torch.float32, + ) - megatron_train_module._global_token_normalization_scale = ( - _wrong_local_token_normalization_scale + megatron_train_module._local_trainable_token_count_tensor = ( + _wrong_local_trainable_token_count_tensor ) if mutation == "dp_grad_accumulation_seqs": - def _wrong_resolve_local_grad_accumulation_sequences( + def _wrong_build_micro_sample_indices( + *, + step_index: int, + num_sequences: int, global_grad_accumulation_sequences: int, - ) -> int: - return megatron_train_module.resolve_local_grad_accumulation_sequences( - global_grad_accumulation_sequences=( - topology.dp * global_grad_accumulation_sequences + ) -> list[int | None]: + base_global_sample_index = step_index * global_grad_accumulation_sequences + return [ + (global_sample_index if global_sample_index < num_sequences else None) + for global_sample_index in range( + base_global_sample_index, + base_global_sample_index + global_grad_accumulation_sequences, ) - ) + ] - megatron_train_module.resolve_local_grad_accumulation_sequences = ( - _wrong_resolve_local_grad_accumulation_sequences + megatron_train_module.build_micro_sample_indices = ( + _wrong_build_micro_sample_indices ) if pre_optimizer_step_hook is not None: @@ -554,8 +665,8 @@ def _scaled_loss_fn(*args: Any, **kwargs: Any): loss = original_loss_fn(*args, **kwargs) return loss.model_copy( update={ - "mean_policy_loss": loss.mean_policy_loss * effective_loss_scale, - "mean_kl": loss.mean_kl * effective_loss_scale, + "policy_loss": loss.policy_loss * effective_loss_scale, + "kl": loss.kl * effective_loss_scale, "policy_loss_sum": loss.policy_loss_sum * effective_loss_scale, } ) @@ -572,11 +683,14 @@ def 
_scaled_loss_fn(*args: Any, **kwargs: Any): try: yield finally: - megatron_train_module._finalize_grads = original_finalize + megatron_train_module.finalize_model_grads_extended = original_finalize megatron_train_module._optimizer_step = original_optimizer_step megatron_train_module.loss_fn = original_loss_fn - megatron_train_module._global_token_normalization_scale = ( - original_token_normalization_scale + megatron_train_module._local_trainable_token_count_tensor = ( + original_local_token_count_tensor + ) + megatron_train_module.build_micro_sample_indices = ( + original_build_micro_sample_indices ) @@ -593,9 +707,13 @@ def _worker_run(request: WorkerRunRequest) -> None: torch.cuda.set_device(local_rank) torch.distributed.init_process_group(backend="nccl") _set_deterministic_seed(request.case_config.seed) + _configure_cuda_precision(request.case_config) runtime = megatron_train.build_training_runtime( model_identifier=request.case_config.base_model, + provider_torch_dtype=( + torch.float32 if request.case_config.precision == "fp32" else torch.bfloat16 + ), provider_configure=lambda provider: _configure_provider( provider, request.topology, request.case_config ), @@ -660,27 +778,40 @@ def _worker_run(request: WorkerRunRequest) -> None: experimental_config: dev.TrainConfig = {} step_traces: list[StepTrace] = [] captured_grads: dict[str, Any] | None = None - forward_trace_capture = ForwardTraceCapture(model_chunks, enabled=True) + routing_replay_controller = runtime.moe_routing_replay_controller + micro_start_callback = ( + routing_replay_controller.begin_micro + if routing_replay_controller is not None + else None + ) + forward_trace_capture = ForwardTraceCapture( + model_chunks, + enabled=True, + micro_start_callback=micro_start_callback, + ) def _capture_lora_grads() -> None: nonlocal captured_grads captured_grads = _collect_lora_grads(model_chunks) - with _mutation_hook( - megatron_train, - model_chunks, - request.mutation, - request.topology, - 
pre_optimizer_step_hook=_capture_lora_grads, - loss_scale=request.case_config.loss_scale, + with ( + _mutation_hook( + megatron_train, + model_chunks, + request.mutation, + request.topology, + pre_optimizer_step_hook=_capture_lora_grads, + loss_scale=request.case_config.loss_scale, + ), + _patch_lora_for_fp32(model_chunks, optimizer), ): for step_index in range(request.case_config.num_steps): - forward_trace_capture.set_step(step_index) micro_sample_indices = megatron_train.build_micro_sample_indices( step_index=step_index, num_sequences=request.packed_tensors.num_sequences, global_grad_accumulation_sequences=global_grad_accumulation_sequences, ) + forward_trace_capture.set_step(step_index, micro_sample_indices) micro_inputs = megatron_train.select_micro_inputs( packed_tensors, micro_sample_indices, zero_template ) @@ -698,12 +829,12 @@ def _capture_lora_grads() -> None: sample_index=micro_sample_indices, moe_routing_replay_controller=runtime.moe_routing_replay_controller, ) + ordered_micro_outputs = forward_trace_capture.ordered_step_outputs() forward_trace_capture.save_current_step(traces_dir) torch.distributed.barrier() current_lora_state = _collect_lora_state(model_chunks) if torch.distributed.get_rank() == 0: - # save artifacts (outputs, grads, lora deltas, current lora) grads = _require_not_none(captured_grads, "captured_grads") initial_state = _require_not_none( initial_lora_state, "initial_lora_state" @@ -727,16 +858,20 @@ def _capture_lora_grads() -> None: Path("traces") / f"deltas_step_{step_index:03d}.safetensors" ) lora_rel = Path(f"lora_step_{step_index:03d}.safetensors") + ordered_outputs = _require_not_none( + ordered_micro_outputs, "ordered_micro_outputs" + ) + if not ordered_outputs: + raise RuntimeError("Expected at least one captured micro output") torch.save( - step_result.new_logprobs.detach().cpu().float(), + torch.stack(ordered_outputs, dim=0), topology_dir / output_rel, ) save_file(grads, str(topology_dir / grads_rel)) 
save_file(saved_deltas, str(topology_dir / deltas_rel)) save_file(saved_current_state, str(topology_dir / lora_rel)) - # build and append the step trace step_traces.append( StepTrace( step_index=step_index, From 9cde0d43f6741fb24fc174cdd58682f9c1af058c Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Tue, 17 Mar 2026 20:46:16 +0000 Subject: [PATCH 19/28] Clean up type errors in Megatron correctness changes --- src/art/megatron/finalize_grads.py | 22 +++++++++----- src/art/megatron/lora.py | 20 ++++++++++--- src/art/megatron/provider.py | 7 +++-- src/art/megatron/routing_replay.py | 13 ++++++-- src/art/megatron/train.py | 24 ++++++++------- src/art/unsloth/train.py | 7 ++--- tests/integration/megatron_forward_trace.py | 26 ++++++++-------- tests/integration/megatron_oracle_worker.py | 33 +++++++++++---------- 8 files changed, 92 insertions(+), 60 deletions(-) diff --git a/src/art/megatron/finalize_grads.py b/src/art/megatron/finalize_grads.py index 6fce32c3..2a770fea 100644 --- a/src/art/megatron/finalize_grads.py +++ b/src/art/megatron/finalize_grads.py @@ -4,6 +4,7 @@ from megatron.core import parallel_state as ps from megatron.core.distributed.finalize_model_grads import finalize_model_grads +from megatron.core.transformer.module import MegatronModule import torch from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors @@ -20,7 +21,7 @@ def _iter_named_trainable_parameters( - model: list[torch.nn.Module], + model: list[MegatronModule], ) -> Iterable[tuple[str, torch.nn.Parameter]]: seen: set[int] = set() for chunk_index, model_chunk in enumerate(model): @@ -36,7 +37,7 @@ def _iter_named_trainable_parameters( def _resolve_domain_group( domain: GradSyncDomain, -) -> torch.distributed.ProcessGroup | None: +) -> Any | None: if domain == TP_DEFAULT_GRAD_SYNC_DOMAIN: group = ps.get_tensor_model_parallel_group(check_initialized=False) if group is None or group.size() <= 1: @@ -53,14 +54,14 @@ def _resolve_domain_group( def _resolve_reduce_op(op: 
GradSyncOp) -> Any: if op == GRAD_SYNC_OP_SUM: - return torch.distributed.ReduceOp.SUM + return torch.distributed.ReduceOp.SUM # ty: ignore[possibly-missing-attribute] if op == GRAD_SYNC_OP_AVG: - return torch.distributed.ReduceOp.AVG + return torch.distributed.ReduceOp.AVG # ty: ignore[possibly-missing-attribute] raise RuntimeError(f"Unknown grad sync op: {op}") def finalize_model_grads_extended( - model: list[torch.nn.Module], + model: list[MegatronModule], num_tokens: torch.Tensor | None = None, ) -> None: """Run Megatron finalize, then apply extra LoRA grad-sync reductions. @@ -71,7 +72,10 @@ def finalize_model_grads_extended( """ # All-reduce all model grads across DP replicas, layernorm grads for sequence parallelism, # embedding grads across first and last pipeline stages (if not tied) - finalize_model_grads(model, num_tokens=num_tokens) + finalize_model_grads( + cast(list[torch.nn.Module], model), + num_tokens=num_tokens, + ) buckets: dict[ tuple[GradSyncDomain, GradSyncOp, torch.dtype, torch.device], @@ -119,7 +123,11 @@ def finalize_model_grads_extended( if torch.is_floating_point(coalesced) and coalesced.dtype != torch.float32 else coalesced ) - torch.distributed.all_reduce(reduced, op=_resolve_reduce_op(op), group=group) + torch.distributed.all_reduce( # ty: ignore[possibly-missing-attribute] + reduced, + op=_resolve_reduce_op(op), + group=group, + ) if reduced is not coalesced: reduced = reduced.to(dtype=coalesced.dtype) for grad, synced in zip(grads, _unflatten_dense_tensors(reduced, grads)): diff --git a/src/art/megatron/lora.py b/src/art/megatron/lora.py index fd62a249..56aa3f86 100644 --- a/src/art/megatron/lora.py +++ b/src/art/megatron/lora.py @@ -45,7 +45,12 @@ class LoRAParallelSpec(BaseModel): def _distributed_initialized() -> bool: - return torch.distributed.is_available() and torch.distributed.is_initialized() + is_initialized = getattr(torch.distributed, "is_initialized", None) + return ( + torch.distributed.is_available() + and 
callable(is_initialized) + and bool(is_initialized()) + ) def _get_shard_world_size(domain: ShardDomain) -> int: @@ -70,7 +75,7 @@ def _get_shard_rank(domain: ShardDomain) -> int: return group.rank() -def _get_shard_group(domain: ShardDomain) -> torch.distributed.ProcessGroup | None: +def _get_shard_group(domain: ShardDomain) -> Any | None: if not _distributed_initialized(): return None if domain == "tp": @@ -193,8 +198,14 @@ def _broadcast_if_replicated(self, param: torch.nn.Parameter) -> None: raise RuntimeError( f"{self.adapter_model_prefix}: missing process group for replicated parameter domain={domain}" ) - src = torch.distributed.get_global_rank(group, 0) - torch.distributed.broadcast(param.data, src=src, group=group) + src = torch.distributed.get_global_rank( # ty: ignore[possibly-missing-attribute] + group, 0 + ) + torch.distributed.broadcast( # ty: ignore[possibly-missing-attribute] + param.data, + src=src, + group=group, + ) def reset_lora_parameters(self) -> None: """Initialize LoRA weights (A=Kaiming, B=zeros) like PEFT defaults.""" @@ -595,6 +606,7 @@ def _build_fc1_lora( alpha: float, num_local_experts: int, ) -> LoRA: + assert linear_fc1 is not None assert isinstance(linear_fc1.weight0, torch.Tensor) a_parallel_spec = LoRAParallelSpec( shard_domain="expert_tp", diff --git a/src/art/megatron/provider.py b/src/art/megatron/provider.py index acd2eda1..1b016628 100644 --- a/src/art/megatron/provider.py +++ b/src/art/megatron/provider.py @@ -1,7 +1,8 @@ import copy from functools import partial import inspect -from typing import Callable +from pathlib import Path +from typing import Callable, cast from megatron.bridge import AutoBridge from megatron.bridge.models.gpt_provider import GPTModelProvider @@ -70,9 +71,11 @@ def get_provider( "Only Qwen3 MoE models are supported" ) if torch_dtype != torch.bfloat16: + model_name_or_path = bridge.hf_pretrained.model_name_or_path + assert model_name_or_path is not None bridge.hf_pretrained._state_dict_accessor = 
StateDict( _CastingStateSource( - SafeTensorsStateSource(bridge.hf_pretrained.model_name_or_path), + SafeTensorsStateSource(cast(str | Path, model_name_or_path)), dtype=torch_dtype, ) ) diff --git a/src/art/megatron/routing_replay.py b/src/art/megatron/routing_replay.py index 104fe185..86f1c4df 100644 --- a/src/art/megatron/routing_replay.py +++ b/src/art/megatron/routing_replay.py @@ -833,13 +833,16 @@ def patched_dispatch_postprocess( tokens_per_expert=tokens_per_expert, ) self._art_replay_expert_input_inverse_permutation = inverse_order_cpu + active_step_routes = controller._active_step_routes + if active_step_routes is None: + raise RuntimeError( + "MoE replay dispatcher preprocess called before set_step" + ) trace_row_uids, trace_uid_span = _canonical_trace_row_uids( canonical_expert_token_uids, tokens_per_expert=tokens_per_expert, local_expert_indices=getattr(self, "local_expert_indices", None), - sample_uid_span=int( - controller._active_step_routes.global_token_uids.numel() - ), + sample_uid_span=int(active_step_routes.global_token_uids.numel()), num_experts=int(getattr(self, "num_experts", 1)), ) _attach_trace_row_uids( @@ -1257,6 +1260,10 @@ def get_route_for_router( context_parallel_size: int, ) -> tuple[torch.Tensor, torch.Tensor]: step_routes = self._active_step_routes + if step_routes is None: + raise RuntimeError( + "Routing replay get_route_for_router called before set_step" + ) call_cursor = self._router_call_cursors.get(router_key, 0) call_sequence = self._router_call_sequences.get(router_key) if call_sequence is None: diff --git a/src/art/megatron/train.py b/src/art/megatron/train.py index a67b6eea..b3da7e24 100644 --- a/src/art/megatron/train.py +++ b/src/art/megatron/train.py @@ -206,12 +206,12 @@ def build_training_runtime( ), ) - if not torch.distributed.is_initialized(): + if not torch.distributed.is_initialized(): # ty: ignore[possibly-missing-attribute] raise RuntimeError( "torch.distributed must be initialized before building runtime" ) - 
rank = torch.distributed.get_rank() - world_size = torch.distributed.get_world_size() + rank = torch.distributed.get_rank() # ty: ignore[possibly-missing-attribute] + world_size = torch.distributed.get_world_size() # ty: ignore[possibly-missing-attribute] if rank == 0 and print_env: print("TORCHINDUCTOR_CACHE_DIR:", os.environ["TORCHINDUCTOR_CACHE_DIR"]) @@ -404,11 +404,15 @@ def _optimizer_step( def _reduce_loss( loss: torch.Tensor, - op: torch.distributed.ReduceOp.RedOpType = torch.distributed.ReduceOp.AVG, - group: torch.distributed.ProcessGroup | None = None, + op: Any = torch.distributed.ReduceOp.AVG, # ty: ignore[possibly-missing-attribute] + group: Any | None = None, ) -> torch.Tensor: reduced_loss = loss.detach().clone() - torch.distributed.all_reduce(reduced_loss, op=op, group=group) + torch.distributed.all_reduce( # ty: ignore[possibly-missing-attribute] + reduced_loss, + op=op, + group=group, + ) return reduced_loss @@ -495,7 +499,7 @@ def run_training_step( experimental_config, reduction="sum", ) - micro_loss = loss_info.policy_loss + config.beta * loss_info.kl + micro_loss = loss_info.policy_loss micro_loss.backward() probs_corr_sum += float(loss_info.probs_corr.item()) detached_micro_loss = micro_loss.detach() @@ -515,7 +519,7 @@ def run_training_step( global_num_tokens = max(num_tokens.item(), 1.0) reduced_loss = _reduce_loss( raw_loss_sum / global_num_tokens, - op=torch.distributed.ReduceOp.SUM, + op=torch.distributed.ReduceOp.SUM, # ty: ignore[possibly-missing-attribute] group=ps.get_data_parallel_group(with_context_parallel=True), ) @@ -537,7 +541,7 @@ def _run_service_loop(runtime: TrainingRuntime) -> None: offload_to_cpu(runtime.model, runtime.optimizer, runtime.rank, offload_state) while True: - torch.distributed.barrier() + torch.distributed.barrier() # ty: ignore[possibly-missing-attribute] jobs_dir = "/tmp/megatron_training_jobs" os.makedirs(jobs_dir, exist_ok=True) job_names = sorted( @@ -673,7 +677,7 @@ def _run_service_loop(runtime: 
TrainingRuntime) -> None: gc.collect() torch.cuda.empty_cache() - torch.distributed.barrier() + torch.distributed.barrier() # ty: ignore[possibly-missing-attribute] if runtime.rank == 0: os.remove(job_path) with open( diff --git a/src/art/unsloth/train.py b/src/art/unsloth/train.py index 2f93e316..e5d4b026 100644 --- a/src/art/unsloth/train.py +++ b/src/art/unsloth/train.py @@ -207,17 +207,14 @@ def compute_loss( ) trainer._metrics["train"]["loss/learning_rate"].append(config.learning_rate) - total_loss = loss.policy_loss + config.beta * loss.kl - trainer._metrics["train"]["loss/train"].append(total_loss.item()) + trainer._metrics["train"]["loss/train"].append(loss.policy_loss.item()) if loss.entropy is not None: trainer._metrics["train"]["loss/entropy"].append(loss.entropy.item()) - if config.beta > 0.0: - trainer._metrics["train"]["loss/kl_div"].append(loss.kl.item()) if loss.kl_policy_ref is not None: trainer._metrics["train"]["loss/kl_policy_ref"].append( loss.kl_policy_ref.item() ) - return total_loss + return loss.policy_loss return compute_loss diff --git a/tests/integration/megatron_forward_trace.py b/tests/integration/megatron_forward_trace.py index 5e36fc87..98f43fc6 100644 --- a/tests/integration/megatron_forward_trace.py +++ b/tests/integration/megatron_forward_trace.py @@ -47,9 +47,9 @@ def _rank_metadata() -> dict[str, int]: """Builds lightweight distributed metadata for one trace call.""" rank = 0 world_size = 1 - if torch.distributed.is_initialized(): - rank = _safe_int(torch.distributed.get_rank(), 0) - world_size = _safe_int(torch.distributed.get_world_size(), 1) + if torch.distributed.is_initialized(): # ty: ignore[possibly-missing-attribute] + rank = _safe_int(torch.distributed.get_rank(), 0) # ty: ignore[possibly-missing-attribute] + world_size = _safe_int(torch.distributed.get_world_size(), 1) # ty: ignore[possibly-missing-attribute] return { "global_rank": rank, "world_size": world_size, @@ -904,15 +904,15 @@ def _gather_rank_traces( 
local_trace: dict[str, list[dict[str, Any]]], ) -> list[dict[str, list[dict[str, Any]]]] | None: if ( - not torch.distributed.is_initialized() - or torch.distributed.get_world_size() == 1 + not torch.distributed.is_initialized() # ty: ignore[possibly-missing-attribute] + or torch.distributed.get_world_size() == 1 # ty: ignore[possibly-missing-attribute] ): return [local_trace] gathered: list[dict[str, list[dict[str, Any]]] | None] = [ None - ] * torch.distributed.get_world_size() - torch.distributed.all_gather_object(gathered, local_trace) - if torch.distributed.get_rank() != 0: + ] * torch.distributed.get_world_size() # ty: ignore[possibly-missing-attribute] + torch.distributed.all_gather_object(gathered, local_trace) # ty: ignore[possibly-missing-attribute] + if torch.distributed.get_rank() != 0: # ty: ignore[possibly-missing-attribute] return None return cast(list[dict[str, list[dict[str, Any]]]], gathered) @@ -934,15 +934,15 @@ def _gather_rank_outputs( local_outputs: list[tuple[int | None, int, int | None, torch.Tensor]], ) -> list[list[tuple[int | None, int, int | None, torch.Tensor]]] | None: if ( - not torch.distributed.is_initialized() - or torch.distributed.get_world_size() == 1 + not torch.distributed.is_initialized() # ty: ignore[possibly-missing-attribute] + or torch.distributed.get_world_size() == 1 # ty: ignore[possibly-missing-attribute] ): return [local_outputs] gathered: list[ list[tuple[int | None, int, int | None, torch.Tensor]] | None - ] = [None] * torch.distributed.get_world_size() - torch.distributed.all_gather_object(gathered, local_outputs) - if torch.distributed.get_rank() != 0: + ] = [None] * torch.distributed.get_world_size() # ty: ignore[possibly-missing-attribute] + torch.distributed.all_gather_object(gathered, local_outputs) # ty: ignore[possibly-missing-attribute] + if torch.distributed.get_rank() != 0: # ty: ignore[possibly-missing-attribute] return None return cast( list[list[tuple[int | None, int, int | None, torch.Tensor]]], 
diff --git a/tests/integration/megatron_oracle_worker.py b/tests/integration/megatron_oracle_worker.py index 33f3c08a..f84179b3 100644 --- a/tests/integration/megatron_oracle_worker.py +++ b/tests/integration/megatron_oracle_worker.py @@ -114,10 +114,12 @@ def _gather_full_state( """Gathers local state dicts to rank 0 and merges them.""" import torch - rank = torch.distributed.get_rank() - world_size = torch.distributed.get_world_size() + rank = torch.distributed.get_rank() # ty: ignore[possibly-missing-attribute] + world_size = torch.distributed.get_world_size() # ty: ignore[possibly-missing-attribute] gathered = [None for _ in range(world_size)] if rank == 0 else None - torch.distributed.gather_object(local_state, gathered, dst=0) + torch.distributed.gather_object( # ty: ignore[possibly-missing-attribute] + local_state, gathered, dst=0 + ) if rank != 0: return None assert gathered is not None @@ -562,7 +564,7 @@ def _reference_forward( return (out * self.scale).to(dtype=x.dtype) - LoRA.forward = _reference_forward + LoRA.forward = _reference_forward # ty: ignore[invalid-assignment] try: yield finally: @@ -705,7 +707,7 @@ def _worker_run(request: WorkerRunRequest) -> None: local_rank = int(os.environ["LOCAL_RANK"]) torch.cuda.set_device(local_rank) - torch.distributed.init_process_group(backend="nccl") + torch.distributed.init_process_group(backend="nccl") # ty: ignore[possibly-missing-attribute] _set_deterministic_seed(request.case_config.seed) _configure_cuda_precision(request.case_config) @@ -738,7 +740,7 @@ def _worker_run(request: WorkerRunRequest) -> None: shared_init_path = Path(request.shared_init_adapter_path) if not shared_init_path.exists(): initial_state = _collect_lora_state(model_chunks) - if torch.distributed.get_rank() == 0: + if torch.distributed.get_rank() == 0: # ty: ignore[possibly-missing-attribute] shared_init_path.parent.mkdir(parents=True, exist_ok=True) deterministic_init = _build_deterministic_shared_init( _require_not_none(initial_state, 
"initial_state"), @@ -748,17 +750,17 @@ def _worker_run(request: WorkerRunRequest) -> None: deterministic_init, str(shared_init_path), ) - torch.distributed.barrier() + torch.distributed.barrier() # ty: ignore[possibly-missing-attribute] # load the shared initial lora into the model and validate we can collect it from the model adapter_model = load_file(str(shared_init_path)) megatron_train.load_adapter_into_model(model_chunks, adapter_model, optimizer) loaded_state = _collect_lora_state(model_chunks) - if torch.distributed.get_rank() == 0: + if torch.distributed.get_rank() == 0: # ty: ignore[possibly-missing-attribute] _validate_loaded_state_matches_adapter( _require_not_none(loaded_state, "loaded_state"), adapter_model ) - torch.distributed.barrier() + torch.distributed.barrier() # ty: ignore[possibly-missing-attribute] # load the inputs packed_tensors = packed_tensors_from_dir( @@ -771,7 +773,6 @@ def _worker_run(request: WorkerRunRequest) -> None: train_config = types.TrainConfig( learning_rate=request.case_config.learning_rate, - beta=request.case_config.beta, kl_penalty_coef=0.0, grad_accumulation_sequences=global_grad_accumulation_sequences, ) @@ -831,10 +832,10 @@ def _capture_lora_grads() -> None: ) ordered_micro_outputs = forward_trace_capture.ordered_step_outputs() forward_trace_capture.save_current_step(traces_dir) - torch.distributed.barrier() + torch.distributed.barrier() # ty: ignore[possibly-missing-attribute] current_lora_state = _collect_lora_state(model_chunks) - if torch.distributed.get_rank() == 0: + if torch.distributed.get_rank() == 0: # ty: ignore[possibly-missing-attribute] grads = _require_not_none(captured_grads, "captured_grads") initial_state = _require_not_none( initial_lora_state, "initial_lora_state" @@ -886,11 +887,11 @@ def _capture_lora_grads() -> None: lora_file=str(lora_rel), ) ) - torch.distributed.barrier() + torch.distributed.barrier() # ty: ignore[possibly-missing-attribute] forward_trace_capture.close() - if 
torch.distributed.get_rank() == 0: + if torch.distributed.get_rank() == 0: # ty: ignore[possibly-missing-attribute] # build and save the moe routing replay bundle if request.capture_moe_routing_bundle_path is not None: replay_bundle = build_bundle_from_forward_trace_dir( @@ -918,8 +919,8 @@ def _capture_lora_grads() -> None: steps=step_traces, ) _write_json(topology_dir / "manifest.json", manifest.model_dump(mode="json")) - torch.distributed.barrier() - torch.distributed.destroy_process_group() + torch.distributed.barrier() # ty: ignore[possibly-missing-attribute] + torch.distributed.destroy_process_group() # ty: ignore[possibly-missing-attribute] def run_worker_cli(run_request_path: Path) -> None: From b2494ea5843dd34d81afc92382c3e0c22d978f56 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Fri, 20 Mar 2026 09:13:19 +0000 Subject: [PATCH 20/28] Testing harness was working, but real training surfaced a few errors, mostly fixed. --- src/art/megatron/offload.py | 24 +++++++++++++++++++-- src/art/megatron/service.py | 24 ++++++++++++++++----- src/art/megatron/train.py | 42 ++++++++++++++++++++++++------------- 3 files changed, 69 insertions(+), 21 deletions(-) diff --git a/src/art/megatron/offload.py b/src/art/megatron/offload.py index 37e11be2..9e36377b 100644 --- a/src/art/megatron/offload.py +++ b/src/art/megatron/offload.py @@ -1,3 +1,4 @@ +from collections.abc import Iterator from dataclasses import dataclass, field import gc from typing import Any, Sequence @@ -11,6 +12,25 @@ class OffloadState: is_offloaded: bool = False +def _iter_megatron_optimizers(optimizer: Any) -> Iterator[Any]: + chained_optimizers = getattr(optimizer, "chained_optimizers", None) + if chained_optimizers is None: + yield optimizer + return + for child_optimizer in chained_optimizers: + yield from _iter_megatron_optimizers(child_optimizer) + + +def iter_optimizer_state_items(optimizer: Any) -> Iterator[tuple[Any, dict[str, Any]]]: + for megatron_optimizer in 
_iter_megatron_optimizers(optimizer): + yield from megatron_optimizer.state.items() + + +def clear_optimizer_state(optimizer: Any) -> None: + for megatron_optimizer in _iter_megatron_optimizers(optimizer): + megatron_optimizer.state.clear() + + def offload_to_cpu( model: Sequence[torch.nn.Module], optimizer: Any, @@ -62,7 +82,7 @@ def offload_to_cpu( pinned_buffers[key].copy_(param.data, non_blocking=True) param.data = pinned_buffers[key] - for param_id, opt_state in optimizer.optimizer.state.items(): + for param_id, opt_state in iter_optimizer_state_items(optimizer): for k, v in opt_state.items(): if isinstance(v, torch.Tensor) and v.device.type == "cuda": key = f"opt_{id(param_id)}_{k}" @@ -125,7 +145,7 @@ def reload_to_gpu( gpu_tensor.copy_(param.data, non_blocking=True) param.data = gpu_tensor - for opt_state in optimizer.optimizer.state.values(): + for _param_id, opt_state in iter_optimizer_state_items(optimizer): for k, v in opt_state.items(): if isinstance(v, torch.Tensor) and v.device.type == "cpu": gpu_tensor = torch.empty(v.shape, dtype=v.dtype, device=device) diff --git a/src/art/megatron/service.py b/src/art/megatron/service.py index 42ec4f9a..816d038f 100644 --- a/src/art/megatron/service.py +++ b/src/art/megatron/service.py @@ -1,10 +1,11 @@ import asyncio -from dataclasses import asdict, dataclass +from dataclasses import dataclass import datetime from functools import cached_property import json import os from pathlib import Path +import shlex import shutil import subprocess from typing import Any, AsyncIterator @@ -26,6 +27,7 @@ from ..utils.get_model_step import get_step_from_dir from ..utils.output_dirs import get_step_checkpoint_dir from ..vllm import get_llm, openai_server_task, run_on_workers +from .routing_replay import MoeRoutingReplayBundle class MegatronTrainingJob(BaseModel): @@ -40,6 +42,11 @@ class MegatronTrainingJob(BaseModel): moe_routing_replay_strict: bool = True +MegatronTrainingJob.model_rebuild( + force=True, 
_types_namespace={"MoeRoutingReplayBundle": MoeRoutingReplayBundle} +) + + @dataclass class MegatronService: model_name: str @@ -66,6 +73,7 @@ def _get_optimizer_state_path(self) -> str: def _default_lora_adapter_config(self) -> LoraConfig: # Keep in sync with LoRA settings in megatron/train.py. return LoraConfig( + base_model_name_or_path=self.base_model, r=1, lora_alpha=32, target_modules=[ @@ -135,8 +143,7 @@ def _ensure_lora_adapter_config( if os.path.exists(source_config): shutil.copy(source_config, config_path) return - with open(config_path, "w") as f: - json.dump(asdict(self._default_lora_adapter_config()), f) + self._default_lora_adapter_config().save_pretrained(lora_path) async def _add_lora_aliases( self, llm: AsyncLLM, step: int, checkpoint_dir: str @@ -175,13 +182,18 @@ async def _ensure_megatron_running(self) -> None: subprocess.run(["pkill", "-9", "megatron-service"], check=False) train_script = Path(__file__).parent / "train.py" + project_root = Path(__file__).resolve().parents[3] num_gpus = torch.cuda.device_count() os.environ["MODEL_IDENTIFIER"] = self.base_model command = ( - f"{setup_cmd}uv run torchrun --nproc_per_node {num_gpus} {train_script}" + f"{setup_cmd}uv run --project {shlex.quote(str(project_root))} " + f"torchrun --nproc_per_node {num_gpus} {shlex.quote(str(train_script))}" + ) + self._megatron_process = await asyncio.create_subprocess_shell( + command, + cwd=str(project_root), ) - self._megatron_process = await asyncio.create_subprocess_shell(command) async def start_openai_server( self, config: dev.OpenAIServerConfig | None @@ -234,6 +246,8 @@ async def train( lora_path = get_last_checkpoint_dir(self.output_dir) if lora_path is None: lora_path = get_step_checkpoint_dir(self.output_dir, 0) + self._latest_step = 0 + self._ensure_identity_lora(lora_path) self._ensure_lora_adapter_config(lora_path) self._optimizer_state_path = self._get_optimizer_state_path() diff --git a/src/art/megatron/train.py b/src/art/megatron/train.py index 
b3da7e24..312f108a 100644 --- a/src/art/megatron/train.py +++ b/src/art/megatron/train.py @@ -37,7 +37,12 @@ def _set_cache_dir(env_var: str, default_path: str) -> None: from art.megatron.finalize_grads import finalize_model_grads_extended from art.megatron.flex_attention import create_shared_prefix_attention_state from art.megatron.lora import apply_lora_adapters -from art.megatron.offload import OffloadState, offload_to_cpu, reload_to_gpu +from art.megatron.offload import ( + OffloadState, + clear_optimizer_state, + offload_to_cpu, + reload_to_gpu, +) from art.megatron.provider import get_provider from art.megatron.routing_replay import ( MoeRoutingReplayBundle, @@ -62,6 +67,12 @@ class TrainingJob(BaseModel): moe_routing_replay_strict: bool = True +TrainingJob.model_rebuild( + force=True, + _types_namespace={"MoeRoutingReplayBundle": MoeRoutingReplayBundle}, +) + + class TrainingRuntime(BaseModel): model_config = ConfigDict(arbitrary_types_allowed=True) @@ -593,7 +604,7 @@ def _run_service_loop(runtime: TrainingRuntime) -> None: optimizer_shard_path, "- resetting optimizer for new run", ) - runtime.optimizer.optimizer.state.clear() + clear_optimizer_state(runtime.optimizer) runtime.optimizer.reload_model_params() print0( @@ -614,18 +625,21 @@ def _run_service_loop(runtime: TrainingRuntime) -> None: micro_inputs = select_micro_inputs( packed_tensors, micro_indices, zero_template ) - step_result = run_training_step( - model_chunks=runtime.model, - optimizer=runtime.optimizer, - learning_rate=config.learning_rate, - inputs=micro_inputs, - config=config, - experimental_config=experimental_config, - ref_logprobs=None, - step_index=step_index, - sample_index=micro_indices, - moe_routing_replay_controller=runtime.moe_routing_replay_controller, - ) + try: + step_result = run_training_step( + model_chunks=runtime.model, + optimizer=runtime.optimizer, + learning_rate=config.learning_rate, + inputs=micro_inputs, + config=config, + experimental_config=experimental_config, + 
ref_logprobs=None, + step_index=step_index, + sample_index=micro_indices, + moe_routing_replay_controller=runtime.moe_routing_replay_controller, + ) + except Exception: + raise print0( runtime.rank, "Correlation between old and new probabilities:", From a98fafce27d260281a90072ed37f24925c7be8ad Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Fri, 20 Mar 2026 09:13:23 +0000 Subject: [PATCH 21/28] Cut over Megatron LoRA to QuACK --- .python-version | 2 +- dev/bench_cute_grouped_lora.py | 539 +++++ pyproject.toml | 35 +- src/art/megatron/cute_grouped_lora_quack.py | 298 +++ src/art/megatron/lora.py | 7 +- tests/integration/test_lora_quack_cutover.py | 83 + uv.lock | 2043 +++--------------- 7 files changed, 1292 insertions(+), 1715 deletions(-) create mode 100644 dev/bench_cute_grouped_lora.py create mode 100644 src/art/megatron/cute_grouped_lora_quack.py create mode 100644 tests/integration/test_lora_quack_cutover.py diff --git a/.python-version b/.python-version index 2c073331..e4fba218 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.11 +3.12 diff --git a/dev/bench_cute_grouped_lora.py b/dev/bench_cute_grouped_lora.py new file mode 100644 index 00000000..77033276 --- /dev/null +++ b/dev/bench_cute_grouped_lora.py @@ -0,0 +1,539 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse +from datetime import datetime, timezone +import json +from pathlib import Path +from typing import Any, Callable + +from megatron.core.transformer.moe import grouped_gemm_util +from pydantic import BaseModel, ConfigDict, Field, field_validator +import torch + +from art.megatron.cute_grouped_lora_quack import quack_grouped_lora + +GroupedLoraFn = Callable[ + [torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor], + torch.Tensor, +] + + +def _parse_dtype(name: str) -> torch.dtype: + value = name.strip().lower() + if value in {"bf16", "bfloat16"}: + return torch.bfloat16 + if value in {"fp16", "float16"}: + return torch.float16 + raise 
ValueError(f"Unsupported dtype {name!r}") + + +def _as_cpu_counts(counts: torch.Tensor) -> torch.Tensor: + return counts.to(device="cpu", dtype=torch.int64).contiguous() + + +def _mean_ms(values: list[float]) -> float: + return float(sum(values) / len(values)) if values else 0.0 + + +class BenchmarkSpec(BaseModel): + model_config = ConfigDict(frozen=True) + + total_tokens: int = Field(gt=0) + hidden_size: int = Field(gt=0) + out_features: int = Field(gt=0) + num_experts: int = Field(gt=0) + rank: int = Field(gt=0) + dtype_name: str + warmup: int = Field(ge=0) + iters: int = Field(gt=0) + seed: int + input_scale: float = Field(gt=0.0) + weight_scale: float = Field(gt=0.0) + skew: float = Field(ge=0.0) + atol: float = Field(ge=0.0) + rtol: float = Field(ge=0.0) + + @field_validator("rank") + @classmethod + def _validate_rank(cls, value: int) -> int: + if value < 1 or value > 128 or (value & (value - 1)) != 0: + raise ValueError("rank must be a power of 2 in [1, 128]") + return value + + @property + def dtype(self) -> torch.dtype: + return _parse_dtype(self.dtype_name) + + +def _make_group_counts(spec: BenchmarkSpec) -> torch.Tensor: + generator = torch.Generator(device="cpu") + generator.manual_seed(spec.seed) + raw = torch.rand(spec.num_experts, generator=generator, dtype=torch.float32) + probs = raw.pow(1.0 + spec.skew) + probs = probs / probs.sum() + expert_ids = torch.multinomial( + probs, + spec.total_tokens, + replacement=True, + generator=generator, + ) + return torch.bincount(expert_ids, minlength=spec.num_experts).to(dtype=torch.int64) + + +def _build_problem( + spec: BenchmarkSpec, + *, + device: torch.device, +) -> dict[str, torch.Tensor]: + torch.manual_seed(spec.seed) + torch.cuda.manual_seed_all(spec.seed) + counts = _make_group_counts(spec) + loss_grad = torch.randn( + spec.total_tokens, + spec.out_features, + device=device, + dtype=spec.dtype, + ) + return { + "counts": counts, + "x": ( + torch.randn( + spec.total_tokens, + spec.hidden_size, + 
device=device, + dtype=spec.dtype, + ) + * spec.input_scale + ), + "a_t": ( + torch.randn( + spec.num_experts, + spec.hidden_size, + spec.rank, + device=device, + dtype=spec.dtype, + ) + * spec.weight_scale + ), + "b_t": ( + torch.randn( + spec.num_experts, + spec.rank, + spec.out_features, + device=device, + dtype=spec.dtype, + ) + * spec.weight_scale + ), + "loss_grad": loss_grad, + } + + +def eager_fused_grouped_lora( + x: torch.Tensor, + a_t: torch.Tensor, + b_t: torch.Tensor, + counts: torch.Tensor, +) -> torch.Tensor: + counts_list = [int(v) for v in counts.tolist()] + outputs: list[torch.Tensor] = [] + start = 0 + for expert_idx, token_count in enumerate(counts_list): + if token_count == 0: + continue + stop = start + token_count + outputs.append(x[start:stop] @ a_t[expert_idx] @ b_t[expert_idx]) + start = stop + if start != x.shape[0]: + raise RuntimeError( + f"Grouped split mismatch: consumed {start} tokens for shape {tuple(x.shape)}" + ) + if not outputs: + return x.new_empty((0, b_t.shape[-1])) + return torch.cat(outputs, dim=0) + + +def grouped_gemm_grouped_lora( + x: torch.Tensor, + a_t: torch.Tensor, + b_t: torch.Tensor, + counts: torch.Tensor, +) -> torch.Tensor: + counts_cpu = _as_cpu_counts(counts) + tmp = grouped_gemm_util.ops.gmm(x, a_t, counts_cpu, trans_b=False) # type: ignore[attr-defined] + return grouped_gemm_util.ops.gmm(tmp, b_t, counts_cpu, trans_b=False) # type: ignore[attr-defined] + + +def _backend_registry() -> dict[str, GroupedLoraFn]: + registry: dict[str, GroupedLoraFn] = { + "grouped_gemm": grouped_gemm_grouped_lora, + "quack_final": quack_grouped_lora, + } + return registry + + +def _default_output_json_path(spec: BenchmarkSpec) -> Path: + repo_root = Path(__file__).resolve().parents[1] + timestamp = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S") + filename = ( + f"rank_{spec.rank}_tokens_{spec.total_tokens}_hidden_{spec.hidden_size}" + f"_out_{spec.out_features}_{timestamp}.json" + ) + return repo_root / ".local" / 
"bench_cute_grouped_lora" / filename + + +def _clone_problem(problem: dict[str, torch.Tensor]) -> dict[str, torch.Tensor]: + return { + "counts": problem["counts"].clone(), + "loss_grad": problem["loss_grad"].clone(), + "x": problem["x"].detach().clone().requires_grad_(True), + "a_t": problem["a_t"].detach().clone().requires_grad_(True), + "b_t": problem["b_t"].detach().clone().requires_grad_(True), + } + + +def _run_backward( + fn: GroupedLoraFn, + problem: dict[str, torch.Tensor], +) -> dict[str, torch.Tensor]: + out = fn(problem["x"], problem["a_t"], problem["b_t"], problem["counts"]) + if not isinstance(out, torch.Tensor): + raise RuntimeError( + f"Backend returned {type(out).__name__} instead of torch.Tensor" + ) + if out.shape != problem["loss_grad"].shape: + raise RuntimeError( + "Output shape mismatch: " + f"expected {tuple(problem['loss_grad'].shape)}, got {tuple(out.shape)}" + ) + loss = (out.float() * problem["loss_grad"].float()).sum() / max( + 1, problem["loss_grad"].numel() + ) + loss.backward() + return { + "out": out.detach(), + "x_grad": problem["x"].grad.detach().clone(), + "a_grad": problem["a_t"].grad.detach().clone(), + "b_grad": problem["b_t"].grad.detach().clone(), + } + + +def _tensor_summary( + reference: torch.Tensor, + candidate: torch.Tensor, + *, + atol: float, + rtol: float, +) -> dict[str, Any]: + diff = (reference - candidate).abs() + ref_abs_max = float(reference.abs().max().item()) if reference.numel() else 0.0 + max_abs = float(diff.max().item()) if diff.numel() else 0.0 + max_rel = max_abs / max(ref_abs_max, 1e-12) + close = bool(torch.allclose(reference, candidate, atol=atol, rtol=rtol)) + return { + "close": close, + "max_abs_diff": max_abs, + "max_rel_to_ref_abs_max": max_rel, + } + + +def validate_backend( + *, + backend_name: str, + backend_fn: GroupedLoraFn, + spec: BenchmarkSpec, + problem: dict[str, torch.Tensor], +) -> dict[str, Any]: + reference_outputs = _run_backward(eager_fused_grouped_lora, _clone_problem(problem)) 
+ candidate_outputs = _run_backward(backend_fn, _clone_problem(problem)) + output_summary = _tensor_summary( + reference_outputs["out"], + candidate_outputs["out"], + atol=spec.atol, + rtol=spec.rtol, + ) + x_grad_summary = _tensor_summary( + reference_outputs["x_grad"], + candidate_outputs["x_grad"], + atol=spec.atol, + rtol=spec.rtol, + ) + a_grad_summary = _tensor_summary( + reference_outputs["a_grad"], + candidate_outputs["a_grad"], + atol=spec.atol, + rtol=spec.rtol, + ) + b_grad_summary = _tensor_summary( + reference_outputs["b_grad"], + candidate_outputs["b_grad"], + atol=spec.atol, + rtol=spec.rtol, + ) + passed = all( + summary["close"] + for summary in ( + output_summary, + x_grad_summary, + a_grad_summary, + b_grad_summary, + ) + ) + return { + "backend": backend_name, + "passed": passed, + "output": output_summary, + "x_grad": x_grad_summary, + "a_grad": a_grad_summary, + "b_grad": b_grad_summary, + } + + +def benchmark_backend( + *, + backend_name: str, + backend_fn: GroupedLoraFn, + spec: BenchmarkSpec, + device: torch.device, + problem: dict[str, torch.Tensor], +) -> dict[str, Any]: + validation_pre = validate_backend( + backend_name=backend_name, + backend_fn=backend_fn, + spec=spec, + problem=problem, + ) + if not validation_pre["passed"]: + raise RuntimeError( + f"{backend_name} failed pre-benchmark validation: " + f"{json.dumps(validation_pre, indent=2)}" + ) + + candidate = _clone_problem(problem) + for _ in range(spec.warmup): + candidate["x"].grad = None + candidate["a_t"].grad = None + candidate["b_t"].grad = None + _run_backward(backend_fn, candidate) + torch.cuda.synchronize(device) + + fwd_ms: list[float] = [] + bwd_ms: list[float] = [] + total_ms: list[float] = [] + peak_alloc_bytes = 0 + + for _ in range(spec.iters): + candidate = _clone_problem(problem) + torch.cuda.reset_peak_memory_stats(device) + start = torch.cuda.Event(enable_timing=True) + middle = torch.cuda.Event(enable_timing=True) + end = torch.cuda.Event(enable_timing=True) + 
+ start.record() + out = backend_fn( + candidate["x"], candidate["a_t"], candidate["b_t"], candidate["counts"] + ) + loss = (out.float() * candidate["loss_grad"].float()).sum() / max( + 1, candidate["loss_grad"].numel() + ) + middle.record() + loss.backward() + end.record() + torch.cuda.synchronize(device) + + fwd_ms.append(float(start.elapsed_time(middle))) + bwd_ms.append(float(middle.elapsed_time(end))) + total_ms.append(float(start.elapsed_time(end))) + peak_alloc_bytes = max( + peak_alloc_bytes, + int(torch.cuda.max_memory_allocated(device=device)), + ) + + validation_post = validate_backend( + backend_name=backend_name, + backend_fn=backend_fn, + spec=spec, + problem=problem, + ) + if not validation_post["passed"]: + raise RuntimeError( + f"{backend_name} failed post-benchmark validation: " + f"{json.dumps(validation_post, indent=2)}" + ) + + counts = problem["counts"] + nonzero_counts = counts[counts > 0] + return { + "backend": backend_name, + "timing_ms": { + "forward_mean": _mean_ms(fwd_ms), + "backward_mean": _mean_ms(bwd_ms), + "total_mean": _mean_ms(total_ms), + }, + "peak_alloc_gib": peak_alloc_bytes / (1024**3), + "counts_summary": { + "num_groups": int(counts.numel()), + "nonzero_groups": int(nonzero_counts.numel()), + "zero_groups": int((counts == 0).sum().item()), + "max_tokens_per_group": int(counts.max().item()), + "mean_tokens_per_nonzero_group": float(nonzero_counts.float().mean().item()) + if nonzero_counts.numel() + else 0.0, + }, + "validation_pre": validation_pre, + "validation_post": validation_post, + } + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser( + description=( + "Benchmark grouped LoRA kernels with mandatory forward+backward " + "validation before and after timing." + ) + ) + parser.add_argument( + "--backends", + nargs="+", + default=["grouped_gemm", "quack_final"], + help=("Built-in backends to benchmark. 
Built-ins: grouped_gemm, quack_final."), + ) + parser.add_argument( + "--total-tokens", + type=int, + default=32768, + help="Default is a Qwen3-30B-A3B-style batch size target for realistic sweeps.", + ) + parser.add_argument( + "--hidden-size", + type=int, + default=2048, + help="Qwen3-30B-A3B hidden size.", + ) + parser.add_argument( + "--out-features", + type=int, + default=768, + help="Qwen3-30B-A3B moe_intermediate_size for expert up/gate-style shapes.", + ) + parser.add_argument( + "--num-experts", + type=int, + default=128, + help="Qwen3-30B-A3B number of routed experts.", + ) + parser.add_argument("--rank", type=int, default=8) + parser.add_argument("--dtype", default="bf16") + parser.add_argument("--warmup", type=int, default=10) + parser.add_argument("--iters", type=int, default=20) + parser.add_argument("--seed", type=int, default=1234) + parser.add_argument("--input-scale", type=float, default=0.05) + parser.add_argument("--weight-scale", type=float, default=0.05) + parser.add_argument( + "--skew", + type=float, + default=2.0, + help="Higher values create a longer-tailed per-expert token distribution.", + ) + parser.add_argument("--atol", type=float, default=5e-2) + parser.add_argument("--rtol", type=float, default=5e-2) + parser.add_argument("--device", default="cuda:0") + parser.add_argument( + "--output-json", + type=Path, + default=None, + help=( + "Optional override for the JSON output path. If omitted, results are " + "written under .local/bench_cute_grouped_lora/." 
+ ), + ) + return parser.parse_args() + + +def main() -> None: + args = parse_args() + if not torch.cuda.is_available(): + raise SystemExit("CUDA is required for this benchmark.") + + spec = BenchmarkSpec( + total_tokens=args.total_tokens, + hidden_size=args.hidden_size, + out_features=args.out_features, + num_experts=args.num_experts, + rank=args.rank, + dtype_name=args.dtype, + warmup=args.warmup, + iters=args.iters, + seed=args.seed, + input_scale=args.input_scale, + weight_scale=args.weight_scale, + skew=args.skew, + atol=args.atol, + rtol=args.rtol, + ) + + device = torch.device(args.device) + torch.cuda.set_device(device) + problem = _build_problem(spec, device=device) + registry = _backend_registry() + + missing = [name for name in args.backends if name not in registry] + if missing: + raise SystemExit( + f"Unknown backends {missing}. Known backends: {sorted(registry)}" + ) + + results: dict[str, Any] = { + "config": { + "total_tokens": spec.total_tokens, + "hidden_size": spec.hidden_size, + "out_features": spec.out_features, + "num_experts": spec.num_experts, + "rank": spec.rank, + "dtype": spec.dtype_name, + "warmup": spec.warmup, + "iters": spec.iters, + "seed": spec.seed, + "input_scale": spec.input_scale, + "weight_scale": spec.weight_scale, + "skew": spec.skew, + "atol": spec.atol, + "rtol": spec.rtol, + "device": str(device), + }, + "results": {}, + } + + for backend_name in args.backends: + results["results"][backend_name] = benchmark_backend( + backend_name=backend_name, + backend_fn=registry[backend_name], + spec=spec, + device=device, + problem=problem, + ) + + baseline_name = "grouped_gemm" if "grouped_gemm" in results["results"] else None + baseline_total = ( + results["results"][baseline_name]["timing_ms"]["total_mean"] + if baseline_name is not None + else None + ) + if baseline_total is not None: + for backend_name, backend_result in results["results"].items(): + delta_ms = backend_result["timing_ms"]["total_mean"] - baseline_total + 
backend_result["delta_vs_grouped_gemm_ms"] = delta_ms + backend_result["delta_vs_grouped_gemm_pct"] = ( + 100.0 * delta_ms / baseline_total + ) + + output_json = args.output_json or _default_output_json_path(spec) + payload = json.dumps(results, indent=2, sort_keys=True) + print(payload) + output_json.parent.mkdir(parents=True, exist_ok=True) + output_json.write_text(payload + "\n") + + +if __name__ == "__main__": + main() diff --git a/pyproject.toml b/pyproject.toml index cca77d9c..c3e27fe6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "openpipe-art" version = "0.5.17" description = "The OpenPipe Agent Reinforcement Training (ART) library" readme = "README.md" -requires-python = ">=3.11" +requires-python = ">=3.12" dependencies = [ "openai>=2.14.0", "typer>=0.15.2", @@ -42,6 +42,7 @@ backend = [ ] megatron = [ "torch>=2.8.0", + "quack-kernels==0.2.5", "apex", "transformer-engine==2.11.0", "transformer-engine-cu12==2.11.0", @@ -123,13 +124,13 @@ markers = [ [tool.uv] required-version = ">=0.6.15" -# Override numpy to <2.0 for compatibility with megatron-core in the training -# environment. vLLM pulls opencv-python-headless>=4.13 which wants numpy>=2 on -# Python 3.9+, but megatron-core requires numpy<2. -override-dependencies = ["transformer-engine>=2.11.0", "numpy<2"] -# Keep apex build isolation enabled so uv can inject torch from -# `extra-build-dependencies` during lock/sync on non-GPU client machines. 
-no-build-isolation-package = ["transformer-engine", "transformer-engine-cu12", "transformer-engine-torch", "megatron-core", "megatron-bridge", "nv-grouped-gemm", "mamba-ssm", "causal-conv1d"] +override-dependencies = [ + "transformer-engine==2.11.0", + "numpy<2", + "torch==2.10.0", + "quack-kernels==0.2.5", +] +no-build-isolation-package = ["transformer-engine-torch", "megatron-core", "megatron-bridge", "nv-grouped-gemm", "mamba-ssm", "causal-conv1d"] [tool.uv.extra-build-dependencies] apex = ["torch>=2.8.0"] @@ -137,14 +138,28 @@ transformer-engine-torch = ["torch>=2.8.0"] [tool.uv.extra-build-variables] apex = { APEX_CPP_EXT = "1", APEX_CUDA_EXT = "1", APEX_FAST_LAYER_NORM = "1", APEX_PARALLEL_BUILD = "16", NVCC_APPEND_FLAGS = "--threads 4" } +transformer-engine-torch = { NVTE_NO_LOCAL_VERSION = "1" } [[tool.uv.dependency-metadata]] name = "apex" version = "0.1" requires-dist = ["packaging"] +[[tool.uv.dependency-metadata]] +name = "transformer-engine-torch" +version = "2.11.0" +requires-dist = [ + "einops", + "onnx", + "onnxscript", + "packaging", + "pydantic", + "torch", + "transformer-engine-cu12", +] + [tool.ty.environment] -python-version = "3.11" +python-version = "3.12" [tool.ty.rules] # Ignore unused-ignore-comment warnings because they vary depending on whether @@ -191,6 +206,7 @@ allowed-unresolved-imports = [ "seaborn.**", # megatron deps "megatron.**", + "quack.**", ] [dependency-groups] @@ -214,3 +230,4 @@ dev = [ [tool.uv.sources] panza = { git = "https://github.com/corbt/panza.git" } apex = { git = "https://github.com/NVIDIA/apex.git", branch = "25.09" } +transformer-engine-torch = { git = "https://github.com/NVIDIA/TransformerEngine.git", tag = "v2.11", subdirectory = "transformer_engine/pytorch" } diff --git a/src/art/megatron/cute_grouped_lora_quack.py b/src/art/megatron/cute_grouped_lora_quack.py new file mode 100644 index 00000000..70f7da89 --- /dev/null +++ b/src/art/megatron/cute_grouped_lora_quack.py @@ -0,0 +1,298 @@ +from __future__ 
import annotations + +from quack.gemm import gemm as quack_gemm +import torch + +_PADDED_LOW_RANK_TARGET = 8 +_PADDED_LOW_RANKS = frozenset({1, 2, 4}) +_SUPPORTED_RANKS = frozenset({1, 2, 4, 8, 16, 32, 64, 128}) + + +def _validate_supported_rank(rank: int) -> None: + if rank not in _SUPPORTED_RANKS: + raise ValueError( + f"Grouped LoRA QuACK backend only supports ranks {sorted(_SUPPORTED_RANKS)}, got {rank}" + ) + + +def _tokens_per_expert_to_tensor( + tokens_per_expert: list[int] | torch.Tensor, +) -> torch.Tensor: + if isinstance(tokens_per_expert, list): + return torch.tensor(tokens_per_expert, dtype=torch.int64, device="cpu") + if tokens_per_expert.ndim != 1: + raise ValueError( + f"tokens_per_expert must be 1D, got shape {tuple(tokens_per_expert.shape)}" + ) + return tokens_per_expert.detach().to(dtype=torch.int64).contiguous() + + +def _build_expert_offsets( + tokens_per_expert: torch.Tensor, + *, + device: torch.device, +) -> torch.Tensor: + offsets = torch.empty( + tokens_per_expert.numel() + 1, + dtype=torch.int64, + device=tokens_per_expert.device, + ) + offsets[0] = 0 + offsets[1:] = torch.cumsum(tokens_per_expert, dim=0) + return offsets.to(device=device, dtype=torch.int32) + + +def _validate_inputs( + x: torch.Tensor, + a_t: torch.Tensor, + b_t: torch.Tensor, + tokens_per_expert: list[int] | torch.Tensor, +) -> torch.Tensor: + counts = _tokens_per_expert_to_tensor(tokens_per_expert) + if x.ndim != 2: + raise ValueError(f"x must be 2D, got shape {tuple(x.shape)}") + if a_t.ndim != 3: + raise ValueError(f"a_t must be 3D, got shape {tuple(a_t.shape)}") + if b_t.ndim != 3: + raise ValueError(f"b_t must be 3D, got shape {tuple(b_t.shape)}") + rank = a_t.shape[-1] + _validate_supported_rank(rank) + if b_t.shape[-2] != rank: + raise ValueError(f"Expected b_t rank dim {rank}, got shape {tuple(b_t.shape)}") + if a_t.shape[0] != b_t.shape[0]: + raise ValueError( + "a_t and b_t must have the same number of experts, " + f"got {a_t.shape[0]} and {b_t.shape[0]}" + ) 
+ if a_t.shape[1] != x.shape[1]: + raise ValueError( + f"a_t input dim must match x.shape[1], got {a_t.shape[1]} and {x.shape[1]}" + ) + if counts.numel() != a_t.shape[0]: + raise ValueError( + "tokens_per_expert length must match number of experts, " + f"got {counts.numel()} and {a_t.shape[0]}" + ) + if x.device.type != "cuda" or a_t.device != x.device or b_t.device != x.device: + raise ValueError("x, a_t, and b_t must be CUDA tensors on the same device") + if x.dtype not in {torch.float16, torch.bfloat16}: + raise ValueError(f"Unsupported dtype {x.dtype}; expected fp16 or bf16") + if a_t.dtype != x.dtype or b_t.dtype != x.dtype: + raise ValueError( + f"Dtype mismatch: x={x.dtype}, a_t={a_t.dtype}, b_t={b_t.dtype}" + ) + return counts + + +def _effective_rank(rank: int) -> int: + if rank in _PADDED_LOW_RANKS: + return _PADDED_LOW_RANK_TARGET + return rank + + +def _pad_a_t(a_t: torch.Tensor, effective_rank: int) -> torch.Tensor: + pad_rank = effective_rank - a_t.shape[-1] + if pad_rank <= 0: + return a_t.contiguous() + pad = a_t.new_zeros(a_t.shape[0], a_t.shape[1], pad_rank) + return torch.cat((a_t, pad), dim=-1).contiguous() + + +def _pad_b_t(b_t: torch.Tensor, effective_rank: int) -> torch.Tensor: + pad_rank = effective_rank - b_t.shape[1] + if pad_rank <= 0: + return b_t.contiguous() + pad = b_t.new_zeros(b_t.shape[0], pad_rank, b_t.shape[2]) + return torch.cat((b_t, pad), dim=1).contiguous() + + +def _proj_tile_n(rank: int) -> int: + return 64 if rank <= 64 else 128 + + +def _matmul_tile_n(out_features: int) -> int: + return 128 if out_features >= 128 else 64 + + +def _grad_a_tile_m(rank: int) -> int: + return 128 + + +def _grad_b_tile_m(rank: int) -> int: + return 64 if rank <= 64 else 128 + + +def _varlen_quack_gemm( + a: torch.Tensor, + b: torch.Tensor, + *, + out_features: int, + expert_offsets: torch.Tensor, + tile_m: int, + tile_n: int, +) -> torch.Tensor: + out = torch.empty( + a.shape[0], + out_features, + device=a.device, + dtype=a.dtype, + ) + 
quack_gemm( + a, + b, + out, + None, + None, + tile_M=tile_m, + tile_N=tile_n, + cluster_M=1, + cluster_N=1, + persistent=True, + cu_seqlens_m=expert_offsets, + ) + return out + + +def _varlen_quack_gemm_k( + a: torch.Tensor, + b: torch.Tensor, + *, + batch_count: int, + out_shape_m: int, + out_shape_n: int, + expert_offsets: torch.Tensor, + tile_m: int, + tile_n: int, +) -> torch.Tensor: + out = torch.empty( + batch_count, + out_shape_m, + out_shape_n, + device=a.device, + dtype=a.dtype, + ) + quack_gemm( + a, + b, + out, + None, + None, + tile_M=tile_m, + tile_N=tile_n, + cluster_M=1, + cluster_N=1, + persistent=True, + cu_seqlens_k=expert_offsets, + ) + return out + + +class _QuackGroupedLoraFn(torch.autograd.Function): + @staticmethod + def forward( + ctx, + x: torch.Tensor, + a_t: torch.Tensor, + b_t: torch.Tensor, + counts: torch.Tensor, + ) -> torch.Tensor: + expert_offsets = _build_expert_offsets(counts, device=x.device) + actual_rank = a_t.shape[-1] + effective_rank = _effective_rank(actual_rank) + a_t_eff = _pad_a_t(a_t, effective_rank) + b_t_eff = _pad_b_t(b_t, effective_rank) + proj_weights = a_t_eff.permute(0, 2, 1).contiguous() + apply_weights = b_t_eff.permute(0, 2, 1).contiguous() + + tmp = _varlen_quack_gemm( + x.contiguous(), + proj_weights, + out_features=effective_rank, + expert_offsets=expert_offsets, + tile_m=64, + tile_n=_proj_tile_n(effective_rank), + ) + out = _varlen_quack_gemm( + tmp, + apply_weights, + out_features=b_t.shape[-1], + expert_offsets=expert_offsets, + tile_m=64, + tile_n=_matmul_tile_n(b_t.shape[-1]), + ) + + ctx.save_for_backward(x, a_t_eff, b_t_eff, tmp, expert_offsets) + ctx.actual_rank = actual_rank + ctx.effective_rank = effective_rank + return out + + @staticmethod + def backward(ctx, grad_out: torch.Tensor): + x, a_t_eff, b_t_eff, tmp, expert_offsets = ctx.saved_tensors + effective_rank = ctx.effective_rank + actual_rank = ctx.actual_rank + grad_out_c = grad_out.contiguous() + + grad_tmp = _varlen_quack_gemm( + 
grad_out_c, + b_t_eff.contiguous(), + out_features=effective_rank, + expert_offsets=expert_offsets, + tile_m=64, + tile_n=_proj_tile_n(effective_rank), + ) + grad_x = _varlen_quack_gemm( + grad_tmp, + a_t_eff.contiguous(), + out_features=x.shape[-1], + expert_offsets=expert_offsets, + tile_m=64, + tile_n=_matmul_tile_n(x.shape[-1]), + ) + grad_a_eff = _varlen_quack_gemm_k( + x.transpose(0, 1), + grad_tmp.transpose(0, 1), + batch_count=a_t_eff.shape[0], + out_shape_m=a_t_eff.shape[1], + out_shape_n=effective_rank, + expert_offsets=expert_offsets, + tile_m=_grad_a_tile_m(effective_rank), + tile_n=_proj_tile_n(effective_rank), + ) + grad_b_eff = _varlen_quack_gemm_k( + tmp.transpose(0, 1), + grad_out_c.transpose(0, 1), + batch_count=b_t_eff.shape[0], + out_shape_m=effective_rank, + out_shape_n=b_t_eff.shape[-1], + expert_offsets=expert_offsets, + tile_m=_grad_b_tile_m(effective_rank), + tile_n=_matmul_tile_n(b_t_eff.shape[-1]), + ) + return ( + grad_x, + grad_a_eff[:, :, :actual_rank].contiguous(), + grad_b_eff[:, :actual_rank, :].contiguous(), + None, + ) + + +def quack_grouped_lora( + x: torch.Tensor, + a_t: torch.Tensor, + b_t: torch.Tensor, + counts: list[int] | torch.Tensor, +) -> torch.Tensor: + """Run grouped LoRA with the QuACK varlen GEMM backend. + + Assumptions required by the caller: + - `counts` is ordered by local expert index and `sum(counts) == x.shape[0]`. + - `counts` length matches `a_t.shape[0] == b_t.shape[0]`. + - `x.shape[1] == a_t.shape[1]` and `a_t.shape[-1] == b_t.shape[-2]`. + - `x`, `a_t`, and `b_t` are CUDA tensors on the same device with fp16 or bf16 dtype. + + The value-based `sum(counts)` check is intentionally omitted to avoid a host-device + synchronization in the hot path. 
+ """ + counts_tensor = _validate_inputs(x, a_t, b_t, counts) + return _QuackGroupedLoraFn.apply(x, a_t, b_t, counts_tensor) diff --git a/src/art/megatron/lora.py b/src/art/megatron/lora.py index 56aa3f86..0842bee8 100644 --- a/src/art/megatron/lora.py +++ b/src/art/megatron/lora.py @@ -15,12 +15,13 @@ reduce_scatter_to_sequence_parallel_region, ) from megatron.core.transformer.attention import SelfAttention -from megatron.core.transformer.moe import grouped_gemm_util from megatron.core.transformer.moe.experts import TEGroupedMLP from megatron.core.transformer.transformer_layer import TransformerLayer from pydantic import BaseModel, ConfigDict import torch +from .cute_grouped_lora_quack import quack_grouped_lora + ShardDomain = Literal["tp", "expert_tp"] GradSyncDomain = Literal["tp_default", "expert_tp"] GradSyncOp = Literal["none", "sum", "avg"] @@ -366,9 +367,7 @@ def forward( # If no tokens routed locally, return zeros. if isinstance(bsz, torch.Tensor) and int(torch.count_nonzero(bsz)) == 0: return x.new_zeros((x.shape[0], self.B_T.shape[-1])) - tmp = grouped_gemm_util.ops.gmm(x, self.A_T, bsz, trans_b=False) # type: ignore[attr-defined] - out = grouped_gemm_util.ops.gmm(tmp, self.B_T, bsz, trans_b=False) # type: ignore[attr-defined] - return out * self.scale + return quack_grouped_lora(x, self.A_T, self.B_T, bsz) * self.scale return ((x @ self.A_T) @ self.B_T) * self.scale diff --git a/tests/integration/test_lora_quack_cutover.py b/tests/integration/test_lora_quack_cutover.py new file mode 100644 index 00000000..2e96b4b7 --- /dev/null +++ b/tests/integration/test_lora_quack_cutover.py @@ -0,0 +1,83 @@ +from __future__ import annotations + +import pytest +import torch + +pytest.importorskip("quack") + +from art.megatron.lora import LoRA + + +def _eager_grouped_lora( + x: torch.Tensor, + a_t: torch.Tensor, + b_t: torch.Tensor, + counts: torch.Tensor, + *, + scale: float, +) -> torch.Tensor: + outputs: list[torch.Tensor] = [] + start = 0 + for expert_idx, 
token_count in enumerate(counts.tolist()): + if token_count == 0: + continue + stop = start + int(token_count) + outputs.append(x[start:stop] @ a_t[expert_idx] @ b_t[expert_idx]) + start = stop + if start != x.shape[0]: + raise RuntimeError( + f"Grouped split mismatch: consumed {start} rows for shape {tuple(x.shape)}" + ) + return torch.cat(outputs, dim=0) * scale + + +@pytest.mark.parametrize("rank", [1, 4, 16]) +def test_lora_grouped_forward_cutover_matches_reference(rank: int) -> None: + if not torch.cuda.is_available(): + pytest.skip("CUDA is required for the LoRA QuACK cutover test.") + + device = torch.device("cuda:0") + torch.manual_seed(20260323 + rank) + + lora = LoRA( + adapter_model_prefix="test.{expert}", + in_features=64, + out_features=64, + rank=rank, + alpha=32, + dtype=torch.bfloat16, + device=device, + num_local_experts=4, + ) + with torch.no_grad(): + lora.A_T.copy_(torch.randn_like(lora.A_T) * 0.05) + lora.B_T.copy_(torch.randn_like(lora.B_T) * 0.05) + + counts = torch.tensor([32, 0, 16, 24], dtype=torch.int64) + total_tokens = int(counts.sum().item()) + x = torch.randn(total_tokens, 64, device=device, dtype=torch.bfloat16) * 0.05 + loss_grad = torch.randn(total_tokens, 64, device=device, dtype=torch.bfloat16) + + x_ref = x.detach().clone().requires_grad_(True) + a_ref = lora.A_T.detach().clone().requires_grad_(True) + b_ref = lora.B_T.detach().clone().requires_grad_(True) + ref_out = _eager_grouped_lora( + x_ref, + a_ref, + b_ref, + counts, + scale=lora.scale, + ) + ref_loss = (ref_out.float() * loss_grad.float()).sum() / max(1, loss_grad.numel()) + ref_loss.backward() + + x_test = x.detach().clone().requires_grad_(True) + lora.zero_grad(set_to_none=True) + got_out = lora(x_test, tokens_per_expert=counts) + got_loss = (got_out.float() * loss_grad.float()).sum() / max(1, loss_grad.numel()) + got_loss.backward() + + assert torch.allclose(ref_out, got_out.detach(), atol=5e-2, rtol=5e-2) + assert torch.allclose(x_ref.grad, x_test.grad, atol=5e-2, 
rtol=5e-2) + assert torch.allclose(a_ref.grad, lora.A_T.grad, atol=5e-2, rtol=5e-2) + assert torch.allclose(b_ref.grad, lora.B_T.grad, atol=5e-2, rtol=5e-2) diff --git a/uv.lock b/uv.lock index cae64f30..077c42ca 100644 --- a/uv.lock +++ b/uv.lock @@ -1,29 +1,27 @@ version = 1 revision = 3 -requires-python = ">=3.11" +requires-python = ">=3.12" resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'linux'", "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", + "python_full_version < '3.13' and sys_platform == 'linux'", "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.12.*' and sys_platform == 'emscripten'", "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'emscripten'", "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < '3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version < '3.13' and sys_platform == 'win32'", + "python_full_version < '3.13' and sys_platform == 'emscripten'", + "python_full_version < '3.13' 
and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", ] [manifest] overrides = [ { name = "numpy", specifier = "<2" }, - { name = "transformer-engine", specifier = ">=2.11.0" }, + { name = "quack-kernels", specifier = "==0.2.5" }, + { name = "torch", specifier = "==2.10.0" }, + { name = "transformer-engine", specifier = "==2.11.0" }, ] [[manifest.dependency-metadata]] @@ -31,6 +29,11 @@ name = "apex" version = "0.1" requires-dist = ["packaging"] +[[manifest.dependency-metadata]] +name = "transformer-engine-torch" +version = "2.11.0" +requires-dist = ["einops", "onnx", "onnxscript", "packaging", "pydantic", "torch", "transformer-engine-cu12"] + [[package]] name = "abnf" version = "2.2.0" @@ -63,32 +66,13 @@ dependencies = [ { name = "psutil" }, { name = "pyyaml" }, { name = "safetensors" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, ] sdist = { url = "https://files.pythonhosted.org/packages/97/33/47bbd507e3a851d33d19ce7b2141c5ea3689bfae91ba168044d7db24b0e9/accelerate-1.7.0.tar.gz", hash = "sha256:e8a2a5503d6237b9eee73cc8d36cf543f9c2d8dd2c6713450b322f5e6d53a610", size = 376026, upload-time = "2025-05-15T10:00:52.117Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/f8/bb/be8146c196ad6e4dec78385d91e92591f8a433576c4e04c342a636fcd811/accelerate-1.7.0-py3-none-any.whl", hash = "sha256:cf57165cca28769c6cf2650812371c81b18e05743dfa3c748524b1bb4f2b272f", size = 362095, upload-time = "2025-05-15T10:00:49.914Z" }, ] -[[package]] -name = "aiobotocore" -version = "2.26.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "aiohttp", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "aioitertools", marker = "python_full_version >= '3.14' 
and sys_platform == 'linux'" }, - { name = "botocore", version = "1.41.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "jmespath", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "multidict", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "python-dateutil", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "wrapt", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4d/f8/99fa90d9c25b78292899fd4946fce97b6353838b5ecc139ad8ba1436e70c/aiobotocore-2.26.0.tar.gz", hash = "sha256:50567feaf8dfe2b653570b4491f5bc8c6e7fb9622479d66442462c021db4fadc", size = 122026, upload-time = "2025-11-28T07:54:59.956Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/58/3bf0b7d474607dc7fd67dd1365c4e0f392c8177eaf4054e5ddee3ebd53b5/aiobotocore-2.26.0-py3-none-any.whl", hash = "sha256:a793db51c07930513b74ea7a95bd79aaa42f545bdb0f011779646eafa216abec", size = 87333, upload-time = "2025-11-28T07:54:58.457Z" }, -] - [[package]] name = "aiodns" version = "4.0.0" @@ -134,23 +118,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f1/4c/a164164834f03924d9a29dc3acd9e7ee58f95857e0b467f6d04298594ebb/aiohttp-3.13.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b", size = 746051, upload-time = "2026-01-03T17:29:43.287Z" }, - { url = 
"https://files.pythonhosted.org/packages/82/71/d5c31390d18d4f58115037c432b7e0348c60f6f53b727cad33172144a112/aiohttp-3.13.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64", size = 499234, upload-time = "2026-01-03T17:29:44.822Z" }, - { url = "https://files.pythonhosted.org/packages/0e/c9/741f8ac91e14b1d2e7100690425a5b2b919a87a5075406582991fb7de920/aiohttp-3.13.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea", size = 494979, upload-time = "2026-01-03T17:29:46.405Z" }, - { url = "https://files.pythonhosted.org/packages/75/b5/31d4d2e802dfd59f74ed47eba48869c1c21552c586d5e81a9d0d5c2ad640/aiohttp-3.13.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a", size = 1748297, upload-time = "2026-01-03T17:29:48.083Z" }, - { url = "https://files.pythonhosted.org/packages/1a/3e/eefad0ad42959f226bb79664826883f2687d602a9ae2941a18e0484a74d3/aiohttp-3.13.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540", size = 1707172, upload-time = "2026-01-03T17:29:49.648Z" }, - { url = "https://files.pythonhosted.org/packages/c5/3a/54a64299fac2891c346cdcf2aa6803f994a2e4beeaf2e5a09dcc54acc842/aiohttp-3.13.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b", size = 1805405, upload-time = "2026-01-03T17:29:51.244Z" }, - { url = "https://files.pythonhosted.org/packages/6c/70/ddc1b7169cf64075e864f64595a14b147a895a868394a48f6a8031979038/aiohttp-3.13.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3", size = 
1899449, upload-time = "2026-01-03T17:29:53.938Z" }, - { url = "https://files.pythonhosted.org/packages/a1/7e/6815aab7d3a56610891c76ef79095677b8b5be6646aaf00f69b221765021/aiohttp-3.13.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1", size = 1748444, upload-time = "2026-01-03T17:29:55.484Z" }, - { url = "https://files.pythonhosted.org/packages/6b/f2/073b145c4100da5511f457dc0f7558e99b2987cf72600d42b559db856fbc/aiohttp-3.13.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3", size = 1606038, upload-time = "2026-01-03T17:29:57.179Z" }, - { url = "https://files.pythonhosted.org/packages/0a/c1/778d011920cae03ae01424ec202c513dc69243cf2db303965615b81deeea/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440", size = 1724156, upload-time = "2026-01-03T17:29:58.914Z" }, - { url = "https://files.pythonhosted.org/packages/0e/cb/3419eabf4ec1e9ec6f242c32b689248365a1cf621891f6f0386632525494/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7", size = 1722340, upload-time = "2026-01-03T17:30:01.962Z" }, - { url = "https://files.pythonhosted.org/packages/7a/e5/76cf77bdbc435bf233c1f114edad39ed4177ccbfab7c329482b179cff4f4/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c", size = 1783041, upload-time = "2026-01-03T17:30:03.609Z" }, - { url = "https://files.pythonhosted.org/packages/9d/d4/dd1ca234c794fd29c057ce8c0566b8ef7fd6a51069de5f06fa84b9a1971c/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51", size = 1596024, upload-time = 
"2026-01-03T17:30:05.132Z" }, - { url = "https://files.pythonhosted.org/packages/55/58/4345b5f26661a6180afa686c473620c30a66afdf120ed3dd545bbc809e85/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4", size = 1804590, upload-time = "2026-01-03T17:30:07.135Z" }, - { url = "https://files.pythonhosted.org/packages/7b/06/05950619af6c2df7e0a431d889ba2813c9f0129cec76f663e547a5ad56f2/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29", size = 1740355, upload-time = "2026-01-03T17:30:09.083Z" }, - { url = "https://files.pythonhosted.org/packages/3e/80/958f16de79ba0422d7c1e284b2abd0c84bc03394fbe631d0a39ffa10e1eb/aiohttp-3.13.3-cp311-cp311-win32.whl", hash = "sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239", size = 433701, upload-time = "2026-01-03T17:30:10.869Z" }, - { url = "https://files.pythonhosted.org/packages/dc/f2/27cdf04c9851712d6c1b99df6821a6623c3c9e55956d4b1e318c337b5a48/aiohttp-3.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f", size = 457678, upload-time = "2026-01-03T17:30:12.719Z" }, { url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" }, { url = "https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" }, { url = 
"https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" }, @@ -241,15 +208,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1a/99/84ba7273339d0f3dfa57901b846489d2e5c2cd731470167757f1935fffbd/aiohttp_retry-2.9.1-py3-none-any.whl", hash = "sha256:66d2759d1921838256a05a3f80ad7e724936f083e35be5abb5e16eed6be6dc54", size = 9981, upload-time = "2024-11-06T10:44:52.917Z" }, ] -[[package]] -name = "aioitertools" -version = "0.13.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fd/3c/53c4a17a05fb9ea2313ee1777ff53f5e001aefd5cc85aa2f4c2d982e1e38/aioitertools-0.13.0.tar.gz", hash = "sha256:620bd241acc0bbb9ec819f1ab215866871b4bbd1f73836a55f799200ee86950c", size = 19322, upload-time = "2025-11-06T22:17:07.609Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/10/a1/510b0a7fadc6f43a6ce50152e69dbd86415240835868bb0bd9b5b88b1e06/aioitertools-0.13.0-py3-none-any.whl", hash = "sha256:0be0292b856f08dfac90e31f4739432f4cb6d7520ab9eb73e143f4f2fa5259be", size = 24182, upload-time = "2025-11-06T22:17:06.502Z" }, -] - [[package]] name = "aiosignal" version = "1.4.0" @@ -318,14 +276,14 @@ name = "anthropic" version = "0.82.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "anyio", marker = "sys_platform == 'linux'" }, - { name = "distro", marker = "sys_platform == 'linux'" }, - { name = "docstring-parser", marker = "sys_platform == 'linux'" }, - { name = "httpx", marker = "sys_platform == 'linux'" }, - { name = "jiter", marker = "sys_platform == 'linux'" }, - { name = "pydantic", marker = "sys_platform == 'linux'" }, - { name = "sniffio", marker = "sys_platform == 'linux'" }, - { name = "typing-extensions", marker = "sys_platform == 
'linux'" }, + { name = "anyio" }, + { name = "distro" }, + { name = "docstring-parser" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/6f/94/3766b5414d9e35687d518943a5b2ffb2696cd5c53248eec13fa1e8a5c73d/anthropic-0.82.0.tar.gz", hash = "sha256:e217340ba40cb9e24c88aacccc365334a6c3f46778855eca5000a6aa83d73dde", size = 533270, upload-time = "2026-02-18T20:25:16.844Z" } wheels = [ @@ -360,12 +318,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/e3/e9/a13952726228fa6282154ecf927092396bc759739e5e045019f6ab92f3ca/apache_tvm_ffi-0.1.8.post2.tar.gz", hash = "sha256:4513e38852894f290172ecfefcbc18d34e817fd29c16a0f1770e130c82b4067e", size = 2441111, upload-time = "2026-01-13T18:11:27.864Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/86/7db24692281d80204d07d77346ad4cb87f6183f1364ed94311993a47ed1a/apache_tvm_ffi-0.1.8.post2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:40f5fba3e06617f16888a0fdaf7ab4049841ff6e741644be822400438b771fe7", size = 1840013, upload-time = "2026-01-13T18:10:33.724Z" }, - { url = "https://files.pythonhosted.org/packages/cf/cc/fbaef883c6ba8e2c56ffcca997f2c076d1c14787799a62f39bd52c7126d5/apache_tvm_ffi-0.1.8.post2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9eb6d228fa22b6a5da140d761962f022a154746c91fe7608c49062deaf671f9f", size = 1995159, upload-time = "2026-01-13T18:10:35.727Z" }, - { url = "https://files.pythonhosted.org/packages/49/08/f1e984e3573d0cbd6d53f3f73a12691fba153afc529fbd506d78e739b330/apache_tvm_ffi-0.1.8.post2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:581c0acf845859be0cc26ac79f3663a83393b662c97c7125ebb78f0228b69d96", size = 2068543, upload-time = "2026-01-13T18:10:39.12Z" }, - { url = 
"https://files.pythonhosted.org/packages/35/1f/5336d430a133cf66ca9dac8ae9b6e25d8b99275a6687656421a1deee9f1b/apache_tvm_ffi-0.1.8.post2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:beadc7bb480ae02d02e2108543f6f4b4170d77e361ab3ccb43697d174ec185b0", size = 1939018, upload-time = "2026-01-13T18:10:40.621Z" }, - { url = "https://files.pythonhosted.org/packages/5f/67/969c66a27a128cf738d0c068e0d4451d691d8197929c797cbe8e59c6cfc9/apache_tvm_ffi-0.1.8.post2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e593d191c7ca0726ebcd3b024a4bc8140694fdfce2e7b02493f38ad5c4c9ecf7", size = 2053068, upload-time = "2026-01-13T18:10:43.241Z" }, - { url = "https://files.pythonhosted.org/packages/d4/f1/84881a799d227fdc4a61fbf0cb8d5ceb6a72ad788fa9070e5853ed9759b6/apache_tvm_ffi-0.1.8.post2-cp311-cp311-win_amd64.whl", hash = "sha256:1c685f19d0f26d9356c7c77a1cb652a3632ec9ee6cd21aa1d8cfb968743ec1fd", size = 1809557, upload-time = "2026-01-13T18:10:44.743Z" }, { url = "https://files.pythonhosted.org/packages/12/8b/a39d6c6eb1a87f6003e2717695cc6d44cc65ccd57dae5a0af944c0d25751/apache_tvm_ffi-0.1.8.post2-cp312-abi3-macosx_11_0_arm64.whl", hash = "sha256:c13ec7fc8f255767998b301ace0cd1e7d17ba76b48ffeb97ca9eb22a3314e250", size = 1811882, upload-time = "2026-01-13T18:10:46.317Z" }, { url = "https://files.pythonhosted.org/packages/8e/3a/7b1c9edcaeaebb945038144896cf17eb828a40b6ace0371823e133132664/apache_tvm_ffi-0.1.8.post2-cp312-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c78b4caf17304a1f47881bccdb2f9ac24d98b3b7fbe761a6dd4fd0585934d96", size = 1967259, upload-time = "2026-01-13T18:10:47.851Z" }, { url = "https://files.pythonhosted.org/packages/6c/b6/463602f57dda2e1c69165c044c07061cd59404593f313a427a3ad9c02cf3/apache_tvm_ffi-0.1.8.post2-cp312-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4a48da3fa8f47130f3502134f01e97044388c5217e7b91be4b0acec4feab81a0", size = 2044821, upload-time = 
"2026-01-13T18:10:49.396Z" }, @@ -421,14 +373,6 @@ version = "0.31.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/fe/cc/d18065ce2380d80b1bcce927c24a2642efd38918e33fd724bc4bca904877/asyncpg-0.31.0.tar.gz", hash = "sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735", size = 993667, upload-time = "2025-11-24T23:27:00.812Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/17/cc02bc49bc350623d050fa139e34ea512cd6e020562f2a7312a7bcae4bc9/asyncpg-0.31.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eee690960e8ab85063ba93af2ce128c0f52fd655fdff9fdb1a28df01329f031d", size = 643159, upload-time = "2025-11-24T23:25:36.443Z" }, - { url = "https://files.pythonhosted.org/packages/a4/62/4ded7d400a7b651adf06f49ea8f73100cca07c6df012119594d1e3447aa6/asyncpg-0.31.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2657204552b75f8288de08ca60faf4a99a65deef3a71d1467454123205a88fab", size = 638157, upload-time = "2025-11-24T23:25:37.89Z" }, - { url = "https://files.pythonhosted.org/packages/d6/5b/4179538a9a72166a0bf60ad783b1ef16efb7960e4d7b9afe9f77a5551680/asyncpg-0.31.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a429e842a3a4b4ea240ea52d7fe3f82d5149853249306f7ff166cb9948faa46c", size = 2918051, upload-time = "2025-11-24T23:25:39.461Z" }, - { url = "https://files.pythonhosted.org/packages/e6/35/c27719ae0536c5b6e61e4701391ffe435ef59539e9360959240d6e47c8c8/asyncpg-0.31.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0807be46c32c963ae40d329b3a686356e417f674c976c07fa49f1b30303f109", size = 2972640, upload-time = "2025-11-24T23:25:41.512Z" }, - { url = "https://files.pythonhosted.org/packages/43/f4/01ebb9207f29e645a64699b9ce0eefeff8e7a33494e1d29bb53736f7766b/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:e5d5098f63beeae93512ee513d4c0c53dc12e9aa2b7a1af5a81cddf93fe4e4da", size = 2851050, upload-time = "2025-11-24T23:25:43.153Z" }, - { url = "https://files.pythonhosted.org/packages/3e/f4/03ff1426acc87be0f4e8d40fa2bff5c3952bef0080062af9efc2212e3be8/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37fc6c00a814e18eef51833545d1891cac9aa69140598bb076b4cd29b3e010b9", size = 2962574, upload-time = "2025-11-24T23:25:44.942Z" }, - { url = "https://files.pythonhosted.org/packages/c7/39/cc788dfca3d4060f9d93e67be396ceec458dfc429e26139059e58c2c244d/asyncpg-0.31.0-cp311-cp311-win32.whl", hash = "sha256:5a4af56edf82a701aece93190cc4e094d2df7d33f6e915c222fb09efbb5afc24", size = 521076, upload-time = "2025-11-24T23:25:46.486Z" }, - { url = "https://files.pythonhosted.org/packages/28/fc/735af5384c029eb7f1ca60ccb8fa95521dbdaeef788edf4cecfc604c3cab/asyncpg-0.31.0-cp311-cp311-win_amd64.whl", hash = "sha256:480c4befbdf079c14c9ca43c8c5e1fe8b6296c96f1f927158d4f1e750aacc047", size = 584980, upload-time = "2025-11-24T23:25:47.938Z" }, { url = "https://files.pythonhosted.org/packages/2a/a6/59d0a146e61d20e18db7396583242e32e0f120693b67a8de43f1557033e2/asyncpg-0.31.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b44c31e1efc1c15188ef183f287c728e2046abb1d26af4d20858215d50d91fad", size = 662042, upload-time = "2025-11-24T23:25:49.578Z" }, { url = "https://files.pythonhosted.org/packages/36/01/ffaa189dcb63a2471720615e60185c3f6327716fdc0fc04334436fbb7c65/asyncpg-0.31.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0c89ccf741c067614c9b5fc7f1fc6f3b61ab05ae4aaa966e6fd6b93097c7d20d", size = 638504, upload-time = "2025-11-24T23:25:51.501Z" }, { url = "https://files.pythonhosted.org/packages/9f/62/3f699ba45d8bd24c5d65392190d19656d74ff0185f42e19d0bbd973bb371/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:12b3b2e39dc5470abd5e98c8d3373e4b1d1234d9fbdedf538798b2c13c64460a", size = 3426241, upload-time = "2025-11-24T23:25:53.278Z" }, @@ -478,13 +422,6 
@@ version = "15.1.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/e9/c3/83e6e73d1592bc54436eae0bc61704ae0cff0c3cfbde7b58af9ed67ebb49/av-15.1.0.tar.gz", hash = "sha256:39cda2dc810e11c1938f8cb5759c41d6b630550236b3365790e67a313660ec85", size = 3774192, upload-time = "2025-08-30T04:41:56.076Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/47/58/4e44cf6939be7aba96a4abce024e1be11ba7539ecac74d09369b8c03aa05/av-15.1.0-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b785948762a8d45fc58fc24a20251496829ace1817e9a7a508a348d6de2182c3", size = 21767323, upload-time = "2025-08-30T04:39:37.989Z" }, - { url = "https://files.pythonhosted.org/packages/9b/f6/a946544cdb49f6d892d2761b1d61a8bc6ce912fe57ba06769bdc640c0a7f/av-15.1.0-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:9c7131494a3a318612b4ee4db98fe5bc50eb705f6b6536127c7ab776c524fd8b", size = 26946268, upload-time = "2025-08-30T04:39:40.601Z" }, - { url = "https://files.pythonhosted.org/packages/70/7c/b33513c0af73d0033af59a98f035b521c5b93445a6af7e9efbf41a6e8383/av-15.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2b9623ae848625c59213b610c8665817924f913580c7c5c91e0dc18936deb00d", size = 38062118, upload-time = "2025-08-30T04:39:43.928Z" }, - { url = "https://files.pythonhosted.org/packages/5e/95/31b7fb34f9fea7c7389240364194f4f56ad2d460095038cc720f50a90bb3/av-15.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c8ef597087db560514617143532b1fafc4825ebb2dda9a22418f548b113a0cc7", size = 39571086, upload-time = "2025-08-30T04:39:47.109Z" }, - { url = "https://files.pythonhosted.org/packages/e7/b0/7b0b45474a4e90c35c11d0032947d8b3c7386872957ce29c6f12add69a74/av-15.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:08eac47a90ebae1e2bd5935f400dd515166019bab4ff5b03c4625fa6ac3a0a5e", size = 40112634, upload-time = "2025-08-30T04:39:50.981Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/04/038b94bc9a1ee10a451c867d4a2fc91e845f83bfc2dae9df25893abcb57f/av-15.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d3f66ff200ea166e606cb3c5cb1bd2fc714effbec2e262a5d67ce60450c8234a", size = 40878695, upload-time = "2025-08-30T04:39:54.493Z" }, - { url = "https://files.pythonhosted.org/packages/1d/3d/9f8f96c0deeaaf648485a3dbd1699b2f0580f2ce8a36cb616c0138ba7615/av-15.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:57b99544d91121b8bea570e4ddf61700f679a6b677c1f37966bc1a22e1d4cd5c", size = 31335683, upload-time = "2025-08-30T04:39:57.861Z" }, { url = "https://files.pythonhosted.org/packages/d1/58/de78b276d20db6ffcd4371283df771721a833ba525a3d57e753d00a9fe79/av-15.1.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:40c5df37f4c354ab8190c6fd68dab7881d112f527906f64ca73da4c252a58cee", size = 21760991, upload-time = "2025-08-30T04:40:00.801Z" }, { url = "https://files.pythonhosted.org/packages/56/cc/45f85775304ae60b66976360d82ba5b152ad3fd91f9267d5020a51e9a828/av-15.1.0-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:af455ce65ada3d361f80c90c810d9bced4db5655ab9aa513024d6c71c5c476d5", size = 26953097, upload-time = "2025-08-30T04:40:03.998Z" }, { url = "https://files.pythonhosted.org/packages/f3/f8/2d781e5e71d02fc829487e775ccb1185e72f95340d05f2e84eb57a11e093/av-15.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:86226d2474c80c3393fa07a9c366106029ae500716098b72b3ec3f67205524c3", size = 38319710, upload-time = "2025-08-30T04:40:07.701Z" }, @@ -515,54 +452,17 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b1/c9/15bb4fd7a1f39d70db35af2b9c20a0ae19e4220eb58a8b8446e903b98d72/av-15.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9a20c5eba3ec49c2f4b281797021923fc68a86aeb66c5cda4fd0252fa8004951", size = 31487337, upload-time = "2025-08-30T04:41:30.591Z" }, ] -[[package]] -name = "awscli" -version = "1.43.5" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - 
"python_full_version >= '3.14' and sys_platform == 'linux'", -] -dependencies = [ - { name = "botocore", version = "1.41.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "colorama", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "docutils", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "pyyaml", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "rsa", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "s3transfer", version = "0.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2d/15/f0b0b62dbf139b1906f99439cd292ebe8df397a3e4e35ede61cb06e1abed/awscli-1.43.5.tar.gz", hash = "sha256:4ff153bf2f9097eeb794d4150522df204ea982b6da08eb82f7fd09c138ac46cf", size = 1878320, upload-time = "2025-11-26T20:27:43.631Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/a1/1e7e0f29b7d3d327b5ebcf967f2efe697cb635f7514828c9f6dc794ed14b/awscli-1.43.5-py3-none-any.whl", hash = "sha256:8707de8e4c4e42a0fded867c1369027a5e6149c128815d487aedab34de71152c", size = 4631862, upload-time = "2025-11-26T20:27:41.651Z" }, -] - [[package]] name = "awscli" version = "1.44.42" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == 
'3.12.*' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < '3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] dependencies = [ - { name = "botocore", version = "1.42.52", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, - { name = "colorama", marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, - { name = "docutils", marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, - { name = "pyyaml", marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, - { name = "rsa", marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, - { name = "s3transfer", version = "0.16.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, + { name = "botocore" }, + { name = "colorama" }, + { name = "docutils" }, + { name = "pyyaml" }, + { name = "rsa" }, + { name = "s3transfer" }, ] sdist = { url = "https://files.pythonhosted.org/packages/e2/2f/5511aad462c50ffd8c7358d8015a012d04ead139f804cdc6dc17e39b2aae/awscli-1.44.42.tar.gz", hash = "sha256:f3da6cecd9d5dbe7e89fe8d22342e320f6034c92bd5296f8f86cc98fb534f455", size = 1883829, upload-time = "2026-02-18T21:54:54.426Z" } wheels = [ @@ -616,38 +516,12 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148, upload-time = "2022-10-05T19:19:30.546Z" }, ] -[[package]] -name = "backports-tarfile" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/86/72/cd9b395f25e290e633655a100af28cb253e4393396264a98bd5f5951d50f/backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991", size = 86406, upload-time = "2024-05-28T17:01:54.731Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b9/fa/123043af240e49752f1c4bd24da5053b6bd00cad78c2be53c0d1e8b975bc/backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34", size = 30181, upload-time = "2024-05-28T17:01:53.112Z" }, -] - [[package]] name = "backports-zstd" version = "1.3.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f4/b1/36a5182ce1d8ef9ef32bff69037bd28b389bbdb66338f8069e61da7028cb/backports_zstd-1.3.0.tar.gz", hash = "sha256:e8b2d68e2812f5c9970cabc5e21da8b409b5ed04e79b4585dbffa33e9b45ebe2", size = 997138, upload-time = "2025-12-29T17:28:06.143Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/28/ed31a0e35feb4538a996348362051b52912d50f00d25c2d388eccef9242c/backports_zstd-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:249f90b39d3741c48620021a968b35f268ca70e35f555abeea9ff95a451f35f9", size = 435660, upload-time = "2025-12-29T17:25:55.207Z" }, - { url = "https://files.pythonhosted.org/packages/00/0d/3db362169d80442adda9dd563c4f0bb10091c8c1c9a158037f4ecd53988e/backports_zstd-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b0e71e83e46154a9d3ced6d4de9a2fea8207ee1e4832aeecf364dc125eda305c", size = 
362056, upload-time = "2025-12-29T17:25:56.729Z" }, - { url = "https://files.pythonhosted.org/packages/bd/00/b67ba053a7d6f6dbe2f8a704b7d3a5e01b1d2e2e8edbc9b634f2702ef73c/backports_zstd-1.3.0-cp311-cp311-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:cbc6193acd21f96760c94dd71bf32b161223e8503f5277acb0a5ab54e5598957", size = 505957, upload-time = "2025-12-29T17:25:57.941Z" }, - { url = "https://files.pythonhosted.org/packages/6f/3e/2667c0ddb53ddf28667e330bf9fe92e8e17705a481c9b698e283120565f7/backports_zstd-1.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1df583adc0ae84a8d13d7139f42eade6d90182b1dd3e0d28f7df3c564b9fd55d", size = 475569, upload-time = "2025-12-29T17:25:59.075Z" }, - { url = "https://files.pythonhosted.org/packages/eb/86/4052473217bd954ccdffda5f7264a0e99e7c4ecf70c0f729845c6a45fc5a/backports_zstd-1.3.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d833fc23aa3cc2e05aeffc7cfadd87b796654ad3a7fb214555cda3f1db2d4dc2", size = 581196, upload-time = "2025-12-29T17:26:00.508Z" }, - { url = "https://files.pythonhosted.org/packages/e5/bd/064f6fdb61db3d2c473159ebc844243e650dc032de0f8208443a00127925/backports_zstd-1.3.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:142178fe981061f1d2a57c5348f2cd31a3b6397a35593e7a17dbda817b793a7f", size = 640888, upload-time = "2025-12-29T17:26:02.134Z" }, - { url = "https://files.pythonhosted.org/packages/d8/09/0822403f40932a165a4f1df289d41653683019e4fd7a86b63ed20e9b6177/backports_zstd-1.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5eed0a09a163f3a8125a857cb031be87ed052e4a47bc75085ed7fca786e9bb5b", size = 491100, upload-time = "2025-12-29T17:26:03.418Z" }, - { url = 
"https://files.pythonhosted.org/packages/a6/a3/f5ac28d74039b7e182a780809dc66b9dbfc893186f5d5444340bba135389/backports_zstd-1.3.0-cp311-cp311-manylinux_2_34_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:60aa483fef5843749e993dde01229e5eedebca8c283023d27d6bf6800d1d4ce3", size = 565071, upload-time = "2025-12-29T17:26:05.022Z" }, - { url = "https://files.pythonhosted.org/packages/e1/ac/50209aeb92257a642ee987afa1e61d5b6731ab6bf0bff70905856e5aede6/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ea0886c1b619773544546e243ed73f6d6c2b1ae3c00c904ccc9903a352d731e1", size = 481519, upload-time = "2025-12-29T17:26:06.255Z" }, - { url = "https://files.pythonhosted.org/packages/08/1f/b06f64199fb4b2e9437cedbf96d0155ca08aeec35fe81d41065acd44762e/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5e137657c830a5ce99be40a1d713eb1d246bae488ada28ff0666ac4387aebdd5", size = 509465, upload-time = "2025-12-29T17:26:07.602Z" }, - { url = "https://files.pythonhosted.org/packages/f4/37/2c365196e61c8fffbbc930ffd69f1ada7aa1c7210857b3e565031c787ac6/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94048c8089755e482e4b34608029cf1142523a625873c272be2b1c9253871a72", size = 585552, upload-time = "2025-12-29T17:26:08.911Z" }, - { url = "https://files.pythonhosted.org/packages/93/8d/c2c4f448bb6b6c9df17410eaedce415e8db0eb25b60d09a3d22a98294d09/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:d339c1ec40485e97e600eb9a285fb13169dbf44c5094b945788a62f38b96e533", size = 562893, upload-time = "2025-12-29T17:26:10.566Z" }, - { url = "https://files.pythonhosted.org/packages/74/e8/2110d4d39115130f7514cbbcec673a885f4052bb68d15e41bc96a7558856/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8aeee9210c54cf8bf83f4d263a6d0d6e7a0298aeb5a14a0a95e90487c5c3157c", size = 631462, upload-time = "2025-12-29T17:26:11.99Z" }, - { url = 
"https://files.pythonhosted.org/packages/b9/a8/d64b59ae0714fdace14e43873f794eff93613e35e3e85eead33a4f44cd80/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba7114a3099e5ea05cbb46568bd0e08bca2ca11e12c6a7b563a24b86b2b4a67f", size = 495125, upload-time = "2025-12-29T17:26:13.218Z" }, - { url = "https://files.pythonhosted.org/packages/ef/d8/bcff0a091fcf27172c57ae463e49d8dec6dc31e01d7e7bf1ae3aad9c3566/backports_zstd-1.3.0-cp311-cp311-win32.whl", hash = "sha256:08dfdfb85da5915383bfae680b6ac10ab5769ab22e690f9a854320720011ae8e", size = 288664, upload-time = "2025-12-29T17:26:14.791Z" }, - { url = "https://files.pythonhosted.org/packages/28/1a/379061e2abf8c3150ad51c1baab9ac723e01cf7538860a6a74c48f8b73ee/backports_zstd-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8aac2e7cdcc8f310c16f98a0062b48d0a081dbb82862794f4f4f5bdafde30a4", size = 313633, upload-time = "2025-12-29T17:26:16.31Z" }, - { url = "https://files.pythonhosted.org/packages/35/e7/eca40858883029fc716660106069b23253e2ec5fd34e86b4101c8cfe864b/backports_zstd-1.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:440ef1be06e82dc0d69dbb57177f2ce98bbd2151013ee7e551e2f2b54caa6120", size = 288814, upload-time = "2025-12-29T17:26:17.571Z" }, { url = "https://files.pythonhosted.org/packages/72/d4/356da49d3053f4bc50e71a8535631b57bc9ca4e8c6d2442e073e0ab41c44/backports_zstd-1.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f4a292e357f3046d18766ce06d990ccbab97411708d3acb934e63529c2ea7786", size = 435972, upload-time = "2025-12-29T17:26:18.752Z" }, { url = "https://files.pythonhosted.org/packages/30/8f/dbe389e60c7e47af488520f31a4aa14028d66da5bf3c60d3044b571eb906/backports_zstd-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fb4c386f38323698991b38edcc9c091d46d4713f5df02a3b5c80a28b40e289ea", size = 362124, upload-time = "2025-12-29T17:26:19.995Z" }, { url = 
"https://files.pythonhosted.org/packages/55/4b/173beafc99e99e7276ce008ef060b704471e75124c826bc5e2092815da37/backports_zstd-1.3.0-cp312-cp312-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:f52523d2bdada29e653261abdc9cfcecd9e5500d305708b7e37caddb24909d4e", size = 506378, upload-time = "2025-12-29T17:26:21.855Z" }, @@ -699,12 +573,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/32/04/cfab76878f360f124dbb533779e1e4603c801a0f5ada72ae5c742b7c4d7d/backports_zstd-1.3.0-cp313-cp313t-win32.whl", hash = "sha256:7d3f0f2499d2049ec53d2674c605a4b3052c217cc7ee49c05258046411685adc", size = 289389, upload-time = "2025-12-29T17:27:22.287Z" }, { url = "https://files.pythonhosted.org/packages/cb/ff/dbcfb6c9c922ab6d98f3d321e7d0c7b34ecfa26f3ca71d930fe1ef639737/backports_zstd-1.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:eb2f8fab0b1ea05148394cb34a9e543a43477178765f2d6e7c84ed332e34935e", size = 314776, upload-time = "2025-12-29T17:27:23.458Z" }, { url = "https://files.pythonhosted.org/packages/01/4b/82e4baae3117806639fe1c693b1f2f7e6133a7cefd1fa2e38018c8edcd68/backports_zstd-1.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c66ad9eb5bfbe28c2387b7fc58ddcdecfb336d6e4e60bcba1694a906c1f21a6c", size = 289315, upload-time = "2025-12-29T17:27:24.601Z" }, - { url = "https://files.pythonhosted.org/packages/9a/d9/8c9c246e5ea79a4f45d551088b11b61f2dc7efcdc5dbe6df3be84a506e0c/backports_zstd-1.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:968167d29f012cee7b112ad031a8925e484e97e99288e55e4d62962c3a1013e3", size = 409666, upload-time = "2025-12-29T17:27:57.37Z" }, - { url = "https://files.pythonhosted.org/packages/a4/4f/a55b33c314ca8c9074e99daab54d04c5d212070ae7dbc435329baf1b139e/backports_zstd-1.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8f6fc7d62b71083b574193dd8fb3a60e6bb34880cc0132aad242943af301f7a", size = 339199, upload-time = "2025-12-29T17:27:58.542Z" }, - { url = 
"https://files.pythonhosted.org/packages/9d/13/ce31bd048b1c88d0f65d7af60b6cf89cfbed826c7c978f0ebca9a8a71cfc/backports_zstd-1.3.0-pp311-pypy311_pp73-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:e0f2eca6aac280fdb77991ad3362487ee91a7fb064ad40043fb5a0bf5a376943", size = 420332, upload-time = "2025-12-29T17:28:00.332Z" }, - { url = "https://files.pythonhosted.org/packages/cf/80/c0cdbc533d0037b57248588403a3afb050b2a83b8c38aa608e31b3a4d600/backports_zstd-1.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:676eb5e177d4ef528cf3baaeea4fffe05f664e4dd985d3ac06960ef4619c81a9", size = 393879, upload-time = "2025-12-29T17:28:01.57Z" }, - { url = "https://files.pythonhosted.org/packages/0f/38/c97428867cac058ed196ccaeddfdf82ecd43b8a65965f2950a6e7547e77a/backports_zstd-1.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:199eb9bd8aca6a9d489c41a682fad22c587dffe57b613d0fe6d492d0d38ce7c5", size = 413842, upload-time = "2025-12-29T17:28:03.113Z" }, - { url = "https://files.pythonhosted.org/packages/8d/ec/6247be6536668fe1c7dfae3eaa9c94b00b956b716957c0fc986ba78c3cc4/backports_zstd-1.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2524bd6777a828d5e7ccd7bd1a57f9e7007ae654fc2bd1bc1a207f6428674e4a", size = 299684, upload-time = "2025-12-29T17:28:04.856Z" }, ] [[package]] @@ -732,19 +600,6 @@ version = "3.8.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/95/06/92fdc84448d324ab8434b78e65caf4fb4c6c90b4f8ad9bdd4c8021bfaf1e/bitarray-3.8.0.tar.gz", hash = "sha256:3eae38daffd77c9621ae80c16932eea3fb3a4af141fb7cc724d4ad93eff9210d", size = 151991, upload-time = "2025-11-02T21:41:15.117Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/7d/63558f1d0eb09217a3d30c1c847890879973e224a728fcff9391fab999b8/bitarray-3.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:25b9cff6c9856bc396232e2f609ea0c5ec1a8a24c500cee4cca96ba8a3cd50b6", size = 148502, upload-time = "2025-11-02T21:39:09.993Z" }, - { url = "https://files.pythonhosted.org/packages/5e/7b/f957ad211cb0172965b5f0881b67b99e2b6d41512af0a1001f44a44ddf4a/bitarray-3.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d9984017314da772f5f7460add7a0301a4ffc06c72c2998bb16c300a6253607", size = 145484, upload-time = "2025-11-02T21:39:10.904Z" }, - { url = "https://files.pythonhosted.org/packages/9f/dc/897973734f14f91467a3a795a4624752238053ecffaec7c8bbda1e363fda/bitarray-3.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbbbfbb7d039b20d289ce56b1beb46138d65769d04af50c199c6ac4cb6054d52", size = 330909, upload-time = "2025-11-02T21:39:12.276Z" }, - { url = "https://files.pythonhosted.org/packages/67/be/24b4b792426d92de289e73e09682915d567c2e69d47e8857586cbdc865d0/bitarray-3.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1f723e260c35e1c7c57a09d3a6ebe681bd56c83e1208ae3ce1869b7c0d10d4f", size = 358469, upload-time = "2025-11-02T21:39:13.766Z" }, - { url = "https://files.pythonhosted.org/packages/3e/0e/2eda69a7a59a6998df8fb57cc9d1e0e62888c599fb5237b0a8b479a01afb/bitarray-3.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cbd1660fb48827381ce3a621a4fdc237959e1cd4e98b098952a8f624a0726425", size = 369131, upload-time = "2025-11-02T21:39:15.041Z" }, - { url = "https://files.pythonhosted.org/packages/f7/7b/8a372d6635a6b2622477b2f96a569b2cd0318a62bc95a4a2144c7942c987/bitarray-3.8.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:df6d7bf3e15b7e6e202a16ff4948a51759354016026deb04ab9b5acbbe35e096", size = 337089, upload-time = "2025-11-02T21:39:16.124Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/f0/8eca934dbe5dee47a0e5ef44eeb72e85acacc8097c27cd164337bc4ec5d3/bitarray-3.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d5c931ec1c03111718cabf85f6012bb2815fa0ce578175567fa8d6f2cc15d3b4", size = 328504, upload-time = "2025-11-02T21:39:17.321Z" }, - { url = "https://files.pythonhosted.org/packages/88/dd/928b8e23a9950f8a8bfc42bc1e7de41f4e27f57de01a716308be5f683c2b/bitarray-3.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:41b53711f89008ba2de62e4c2d2260a8b357072fd4f18e1351b28955db2719dc", size = 356461, upload-time = "2025-11-02T21:39:18.396Z" }, - { url = "https://files.pythonhosted.org/packages/a9/93/4fb58417aff47fa2fe1874a39c9346b589a1d78c93a9cb24cccede5dc737/bitarray-3.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4f298daaaea58d45e245a132d6d2bdfb6f856da50dc03d75ebb761439fb626cf", size = 353008, upload-time = "2025-11-02T21:39:19.828Z" }, - { url = "https://files.pythonhosted.org/packages/da/54/aa04e4a7b45aa5913f08ee377d43319b0979925e3c0407882eb29df3be66/bitarray-3.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:30989a2451b693c3f9359d91098a744992b5431a0be4858f1fdf0ec76b457125", size = 334048, upload-time = "2025-11-02T21:39:20.924Z" }, - { url = "https://files.pythonhosted.org/packages/da/52/e851f41076df014c05d6ac1ce34fbf7db5fa31241da3e2f09bb2be9e283d/bitarray-3.8.0-cp311-cp311-win32.whl", hash = "sha256:e5aed4754895942ae15ffa48c52d181e1c1463236fda68d2dba29c03aa61786b", size = 142907, upload-time = "2025-11-02T21:39:22.312Z" }, - { url = "https://files.pythonhosted.org/packages/28/01/db0006148b1dd13b4ac2686df8fa57d12f5887df313a506e939af0cb0997/bitarray-3.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:22c540ed20167d3dbb1e2d868ca935180247d620c40eace90efa774504a40e3b", size = 149670, upload-time = "2025-11-02T21:39:23.341Z" }, - { url = 
"https://files.pythonhosted.org/packages/7b/ea/b7d55ee269b1426f758a535c9ec2a07c056f20f403fa981685c3c8b4798c/bitarray-3.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:84b52b2cf77bb7f703d16c4007b021078dbbe6cf8ffb57abe81a7bacfc175ef2", size = 146709, upload-time = "2025-11-02T21:39:24.343Z" }, { url = "https://files.pythonhosted.org/packages/82/a0/0c41d893eda756315491adfdbf9bc928aee3d377a7f97a8834d453aa5de1/bitarray-3.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2fcbe9b3a5996b417e030aa33a562e7e20dfc86271e53d7e841fc5df16268b8", size = 148575, upload-time = "2025-11-02T21:39:25.718Z" }, { url = "https://files.pythonhosted.org/packages/0e/30/12ab2f4a4429bd844b419c37877caba93d676d18be71354fbbeb21d9f4cc/bitarray-3.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cd761d158f67e288fd0ebe00c3b158095ce80a4bc7c32b60c7121224003ba70d", size = 145454, upload-time = "2025-11-02T21:39:26.695Z" }, { url = "https://files.pythonhosted.org/packages/26/58/314b3e3f219533464e120f0c51ac5123e7b1c1b91f725a4073fb70c5a858/bitarray-3.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c394a3f055b49f92626f83c1a0b6d6cd2c628f1ccd72481c3e3c6aa4695f3b20", size = 332949, upload-time = "2025-11-02T21:39:27.801Z" }, @@ -806,8 +661,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, { name = "packaging" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/d8/7d/f1fe0992334b18cd8494f89aeec1dcc674635584fcd9f115784fea3a1d05/bitsandbytes-0.49.2-py3-none-macosx_14_0_arm64.whl", hash = "sha256:87be5975edeac5396d699ecbc39dfc47cf2c026daaf2d5852a94368611a6823f", size = 131940, upload-time = "2026-02-16T21:26:04.572Z" 
}, @@ -842,11 +696,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/13/88/560b11e521c522440af991d46848a2bde64b5f7202ec14e1f46f9509d328/black-26.1.0.tar.gz", hash = "sha256:d294ac3340eef9c9eb5d29288e96dc719ff269a88e27b396340459dd85da4c58", size = 658785, upload-time = "2026-01-18T04:50:11.993Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/83/f05f22ff13756e1a8ce7891db517dbc06200796a16326258268f4658a745/black-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3cee1487a9e4c640dc7467aaa543d6c0097c391dc8ac74eb313f2fbf9d7a7cb5", size = 1831956, upload-time = "2026-01-18T04:59:21.38Z" }, - { url = "https://files.pythonhosted.org/packages/7d/f2/b2c570550e39bedc157715e43927360312d6dd677eed2cc149a802577491/black-26.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d62d14ca31c92adf561ebb2e5f2741bf8dea28aef6deb400d49cca011d186c68", size = 1672499, upload-time = "2026-01-18T04:59:23.257Z" }, - { url = "https://files.pythonhosted.org/packages/7a/d7/990d6a94dc9e169f61374b1c3d4f4dd3037e93c2cc12b6f3b12bc663aa7b/black-26.1.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb1dafbbaa3b1ee8b4550a84425aac8874e5f390200f5502cf3aee4a2acb2f14", size = 1735431, upload-time = "2026-01-18T04:59:24.729Z" }, - { url = "https://files.pythonhosted.org/packages/36/1c/cbd7bae7dd3cb315dfe6eeca802bb56662cc92b89af272e014d98c1f2286/black-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:101540cb2a77c680f4f80e628ae98bd2bd8812fb9d72ade4f8995c5ff019e82c", size = 1400468, upload-time = "2026-01-18T04:59:27.381Z" }, - { url = "https://files.pythonhosted.org/packages/59/b1/9fe6132bb2d0d1f7094613320b56297a108ae19ecf3041d9678aec381b37/black-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:6f3977a16e347f1b115662be07daa93137259c711e526402aa444d7a88fdc9d4", size = 1207332, upload-time = "2026-01-18T04:59:28.711Z" }, { url = 
"https://files.pythonhosted.org/packages/f5/13/710298938a61f0f54cdb4d1c0baeb672c01ff0358712eddaf29f76d32a0b/black-26.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6eeca41e70b5f5c84f2f913af857cf2ce17410847e1d54642e658e078da6544f", size = 1878189, upload-time = "2026-01-18T04:59:30.682Z" }, { url = "https://files.pythonhosted.org/packages/79/a6/5179beaa57e5dbd2ec9f1c64016214057b4265647c62125aa6aeffb05392/black-26.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dd39eef053e58e60204f2cdf059e2442e2eb08f15989eefe259870f89614c8b6", size = 1700178, upload-time = "2026-01-18T04:59:32.387Z" }, { url = "https://files.pythonhosted.org/packages/8c/04/c96f79d7b93e8f09d9298b333ca0d31cd9b2ee6c46c274fd0f531de9dc61/black-26.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9459ad0d6cd483eacad4c6566b0f8e42af5e8b583cee917d90ffaa3778420a0a", size = 1777029, upload-time = "2026-01-18T04:59:33.767Z" }, @@ -869,19 +718,8 @@ wheels = [ name = "blake3" version = "1.0.8" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.12' and sys_platform == 'linux'" }, -] sdist = { url = "https://files.pythonhosted.org/packages/75/aa/abcd75e9600987a0bc6cfe9b6b2ff3f0e2cb08c170addc6e76035b5c4cb3/blake3-1.0.8.tar.gz", hash = "sha256:513cc7f0f5a7c035812604c2c852a0c1468311345573de647e310aca4ab165ba", size = 117308, upload-time = "2025-10-14T06:47:48.83Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/0a/515209b0c282c360e249b89cd85350d97cfd55fadbb4df736c67b77b27a1/blake3-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fcfe81b3ae3fb5d2e88be0d3259603ff95f0d5ed69f655c28fdaef31e49a470", size = 371092, upload-time = "2025-10-14T06:45:34.062Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/33/9d342a2bf5817f006bbe947335e5d387327541ea47590854947befd01251/blake3-1.0.8-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:58ce8d45a5bb5326482de72ea1969a378634236186a970fef63058a5b7b8b435", size = 374859, upload-time = "2025-10-14T06:45:35.262Z" }, - { url = "https://files.pythonhosted.org/packages/5b/fc/ea4bef850a7ec9fbb383503fd3c56056dd9fa44e10c3bc61050ab7b2bac0/blake3-1.0.8-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83605dbf43f581d8b7175b7f3bfe5388bad5a7c6ac175c9c11d669da31133f4b", size = 448585, upload-time = "2025-10-14T06:45:36.542Z" }, - { url = "https://files.pythonhosted.org/packages/a5/67/167a65a4c431715407d07b1b8b1367698a3ad88e7260edb85f0c5293f08a/blake3-1.0.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b5573b052777142b2cecc453d022c3f21aa4aba75011258410bb98f41c1a727", size = 507519, upload-time = "2025-10-14T06:45:37.814Z" }, - { url = "https://files.pythonhosted.org/packages/32/e2/0886e192d634b264c613b0fbf380745b39992b424a0effc00ef08783644e/blake3-1.0.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe1b02ab49bfd969ef50b9f17482a2011c77536654af21807ba5c2674e0bb2a0", size = 393645, upload-time = "2025-10-14T06:45:39.146Z" }, - { url = "https://files.pythonhosted.org/packages/fc/3b/7fb2fe615448caaa5f6632b2c7551117b38ccac747a3a5769181e9751641/blake3-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7780666dc6be809b49442d6d5ce06fdbe33024a87560b58471103ec17644682", size = 387640, upload-time = "2025-10-14T06:45:40.546Z" }, - { url = "https://files.pythonhosted.org/packages/bc/8c/2bfc942c6c97cb3d20f341859343bb86ee20af723fedfc886373e606079b/blake3-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af394b50c6aa0b1b957a99453d1ee440ef67cd2d1b5669c731647dc723de8a3a", size = 550316, upload-time = "2025-10-14T06:45:42.003Z" }, - { url = 
"https://files.pythonhosted.org/packages/7e/75/0252be37620699b79dbaa799c9b402d63142a131d16731df4ef09d135dd7/blake3-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c63ece266a43014cf29e772a82857cd8e90315ae3ed53e3c5204851596edd5f2", size = 554463, upload-time = "2025-10-14T06:45:43.22Z" }, { url = "https://files.pythonhosted.org/packages/ee/7d/85a4c0782f613de23d114a7a78fcce270f75b193b3ff3493a0de24ba104a/blake3-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:269f255b110840e52b6ce9db02217e39660ebad3e34ddd5bca8b8d378a77e4e1", size = 371296, upload-time = "2025-10-14T06:45:49.674Z" }, { url = "https://files.pythonhosted.org/packages/e3/20/488475254976ed93fab57c67aa80d3b40df77f7d9db6528c9274bff53e08/blake3-1.0.8-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66ca28a673025c40db3eba21a9cac52f559f83637efa675b3f6bd8683f0415f3", size = 374516, upload-time = "2025-10-14T06:45:51.23Z" }, { url = "https://files.pythonhosted.org/packages/7b/21/2a1c47fedb77fb396512677ec6d46caf42ac6e9a897db77edd0a2a46f7bb/blake3-1.0.8-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb04966537777af56c1f399b35525aa70a1225816e121ff95071c33c0f7abca", size = 447911, upload-time = "2025-10-14T06:45:52.637Z" }, @@ -933,96 +771,28 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, ] -[[package]] -name = "boto3" -version = "1.41.5" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", -] -dependencies = [ - { name = "botocore", version = "1.41.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = 
"jmespath", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "s3transfer", version = "0.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/5b/81/450cd4143864959264a3d80f9246175a20de8c1e50ec889c710eaa28cdd9/boto3-1.41.5.tar.gz", hash = "sha256:bc7806bee681dfdff2fe2b74967b107a56274f1e66ebe4d20dc8eee1ea408d17", size = 111594, upload-time = "2025-11-26T20:27:47.021Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/56/f47a80254ed4991cce9a2f6d8ae8aafbc8df1c3270e966b2927289e5a12f/boto3-1.41.5-py3-none-any.whl", hash = "sha256:bb278111bfb4c33dca8342bda49c9db7685e43debbfa00cc2a5eb854dd54b745", size = 139344, upload-time = "2025-11-26T20:27:45.571Z" }, -] - [[package]] name = "boto3" version = "1.42.52" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.12.*' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform == 
'win32'", - "python_full_version < '3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] dependencies = [ - { name = "botocore", version = "1.42.52", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, - { name = "jmespath", marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, - { name = "s3transfer", version = "0.16.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, ] sdist = { url = "https://files.pythonhosted.org/packages/41/ed/8eacb8ec7bf264079608be5f9a2a57e31e7fed7a791bb3b15500ca9274a5/boto3-1.42.52.tar.gz", hash = "sha256:ff4a4afb832f63a1358e11fe6eb321da0f4767979c6721dd32fb02e6eabcebf5", size = 112811, upload-time = "2026-02-18T21:54:57.804Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/2d/2a/de34ad6c43c56fe6dd5824bff2cd7fdef5edd9de0617cbd217040318ba97/boto3-1.42.52-py3-none-any.whl", hash = "sha256:7b3e0c4bfd8815a3df64fbe98fc9f87dfb12bd7a783cf63dfc2f166c66798c9d", size = 140556, upload-time = "2026-02-18T21:54:56.609Z" }, ] -[[package]] -name = "botocore" -version = "1.41.5" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", -] -dependencies = [ - { name = "jmespath", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "python-dateutil", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "urllib3", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/90/22/7fe08c726a2e3b11a0aef8bf177e83891c9cb2dc1809d35c9ed91a9e60e6/botocore-1.41.5.tar.gz", hash = 
"sha256:0367622b811597d183bfcaab4a350f0d3ede712031ce792ef183cabdee80d3bf", size = 14668152, upload-time = "2025-11-26T20:27:38.026Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/4e/21cd0b8f365449f1576f93de1ec8718ed18a7a3bc086dfbdeb79437bba7a/botocore-1.41.5-py3-none-any.whl", hash = "sha256:3fef7fcda30c82c27202d232cfdbd6782cb27f20f8e7e21b20606483e66ee73a", size = 14337008, upload-time = "2025-11-26T20:27:35.208Z" }, -] - [[package]] name = "botocore" version = "1.42.52" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.12.*' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < '3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] dependencies = [ - { name = "jmespath", marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, - { name = "python-dateutil", marker = "python_full_version < '3.14' or sys_platform != 
'linux'" }, - { name = "urllib3", marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, ] sdist = { url = "https://files.pythonhosted.org/packages/c2/37/7044e09d416ff746d23c7456e8c30ddade1154ecd08814b17ab7e2c20fb0/botocore-1.42.52.tar.gz", hash = "sha256:3bdef10aee4cee13ff019b6a1423a2ce3ca17352328d9918157a1829e5cc9be1", size = 14917923, upload-time = "2026-02-18T21:54:48.06Z" } wheels = [ @@ -1053,16 +823,6 @@ version = "1.2.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f7/16/c92ca344d646e71a43b8bb353f0a6490d7f6e06210f8554c8f874e454285/brotli-1.2.0.tar.gz", hash = "sha256:e310f77e41941c13340a95976fe66a8a95b01e783d430eeaf7a2f87e0a57dd0a", size = 7388632, upload-time = "2025-11-05T18:39:42.86Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/ef/f285668811a9e1ddb47a18cb0b437d5fc2760d537a2fe8a57875ad6f8448/brotli-1.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:15b33fe93cedc4caaff8a0bd1eb7e3dab1c61bb22a0bf5bdfdfd97cd7da79744", size = 863110, upload-time = "2025-11-05T18:38:12.978Z" }, - { url = "https://files.pythonhosted.org/packages/50/62/a3b77593587010c789a9d6eaa527c79e0848b7b860402cc64bc0bc28a86c/brotli-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:898be2be399c221d2671d29eed26b6b2713a02c2119168ed914e7d00ceadb56f", size = 445438, upload-time = "2025-11-05T18:38:14.208Z" }, - { url = "https://files.pythonhosted.org/packages/cd/e1/7fadd47f40ce5549dc44493877db40292277db373da5053aff181656e16e/brotli-1.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:350c8348f0e76fff0a0fd6c26755d2653863279d086d3aa2c290a6a7251135dd", size = 1534420, upload-time = "2025-11-05T18:38:15.111Z" }, - { url = 
"https://files.pythonhosted.org/packages/12/8b/1ed2f64054a5a008a4ccd2f271dbba7a5fb1a3067a99f5ceadedd4c1d5a7/brotli-1.2.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1ad3fda65ae0d93fec742a128d72e145c9c7a99ee2fcd667785d99eb25a7fe", size = 1632619, upload-time = "2025-11-05T18:38:16.094Z" }, - { url = "https://files.pythonhosted.org/packages/89/5a/7071a621eb2d052d64efd5da2ef55ecdac7c3b0c6e4f9d519e9c66d987ef/brotli-1.2.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:40d918bce2b427a0c4ba189df7a006ac0c7277c180aee4617d99e9ccaaf59e6a", size = 1426014, upload-time = "2025-11-05T18:38:17.177Z" }, - { url = "https://files.pythonhosted.org/packages/26/6d/0971a8ea435af5156acaaccec1a505f981c9c80227633851f2810abd252a/brotli-1.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2a7f1d03727130fc875448b65b127a9ec5d06d19d0148e7554384229706f9d1b", size = 1489661, upload-time = "2025-11-05T18:38:18.41Z" }, - { url = "https://files.pythonhosted.org/packages/f3/75/c1baca8b4ec6c96a03ef8230fab2a785e35297632f402ebb1e78a1e39116/brotli-1.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9c79f57faa25d97900bfb119480806d783fba83cd09ee0b33c17623935b05fa3", size = 1599150, upload-time = "2025-11-05T18:38:19.792Z" }, - { url = "https://files.pythonhosted.org/packages/0d/1a/23fcfee1c324fd48a63d7ebf4bac3a4115bdb1b00e600f80f727d850b1ae/brotli-1.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:844a8ceb8483fefafc412f85c14f2aae2fb69567bf2a0de53cdb88b73e7c43ae", size = 1493505, upload-time = "2025-11-05T18:38:20.913Z" }, - { url = "https://files.pythonhosted.org/packages/36/e5/12904bbd36afeef53d45a84881a4810ae8810ad7e328a971ebbfd760a0b3/brotli-1.2.0-cp311-cp311-win32.whl", hash = "sha256:aa47441fa3026543513139cb8926a92a8e305ee9c71a6209ef7a97d91640ea03", size = 334451, upload-time = "2025-11-05T18:38:21.94Z" }, - { url = 
"https://files.pythonhosted.org/packages/02/8b/ecb5761b989629a4758c394b9301607a5880de61ee2ee5fe104b87149ebc/brotli-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:022426c9e99fd65d9475dce5c195526f04bb8be8907607e27e747893f6ee3e24", size = 369035, upload-time = "2025-11-05T18:38:22.941Z" }, { url = "https://files.pythonhosted.org/packages/11/ee/b0a11ab2315c69bb9b45a2aaed022499c9c24a205c3a49c3513b541a7967/brotli-1.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:35d382625778834a7f3061b15423919aa03e4f5da34ac8e02c074e4b75ab4f84", size = 861543, upload-time = "2025-11-05T18:38:24.183Z" }, { url = "https://files.pythonhosted.org/packages/e1/2f/29c1459513cd35828e25531ebfcbf3e92a5e49f560b1777a9af7203eb46e/brotli-1.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a61c06b334bd99bc5ae84f1eeb36bfe01400264b3c352f968c6e30a10f9d08b", size = 444288, upload-time = "2025-11-05T18:38:25.139Z" }, { url = "https://files.pythonhosted.org/packages/3d/6f/feba03130d5fceadfa3a1bb102cb14650798c848b1df2a808356f939bb16/brotli-1.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:acec55bb7c90f1dfc476126f9711a8e81c9af7fb617409a9ee2953115343f08d", size = 1528071, upload-time = "2025-11-05T18:38:26.081Z" }, @@ -1114,10 +874,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e4/9c/d51486bf366fc7d6735f0e46b5b96ca58dc005b250263525a1eea3cd5d21/brotlicffi-1.2.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:33cfb408d0cff64cd50bef268c0fed397c46fbb53944aa37264148614a62e990", size = 1536547, upload-time = "2025-11-21T18:17:45.729Z" }, { url = "https://files.pythonhosted.org/packages/1b/37/293a9a0a7caf17e6e657668bebb92dfe730305999fe8c0e2703b8888789c/brotlicffi-1.2.0.0-cp38-abi3-win32.whl", hash = "sha256:23e5c912fdc6fd37143203820230374d24babd078fc054e18070a647118158f6", size = 343085, upload-time = "2025-11-21T18:17:48.887Z" }, { url = 
"https://files.pythonhosted.org/packages/07/6b/6e92009df3b8b7272f85a0992b306b61c34b7ea1c4776643746e61c380ac/brotlicffi-1.2.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:f139a7cdfe4ae7859513067b736eb44d19fae1186f9e99370092f6915216451b", size = 378586, upload-time = "2025-11-21T18:17:50.531Z" }, - { url = "https://files.pythonhosted.org/packages/a4/ec/52488a0563f1663e2ccc75834b470650f4b8bcdea3132aef3bf67219c661/brotlicffi-1.2.0.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fa102a60e50ddbd08de86a63431a722ea216d9bc903b000bf544149cc9b823dc", size = 402002, upload-time = "2025-11-21T18:17:51.76Z" }, - { url = "https://files.pythonhosted.org/packages/e4/63/d4aea4835fd97da1401d798d9b8ba77227974de565faea402f520b37b10f/brotlicffi-1.2.0.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d3c4332fc808a94e8c1035950a10d04b681b03ab585ce897ae2a360d479037c", size = 406447, upload-time = "2025-11-21T18:17:53.614Z" }, - { url = "https://files.pythonhosted.org/packages/62/4e/5554ecb2615ff035ef8678d4e419549a0f7a28b3f096b272174d656749fb/brotlicffi-1.2.0.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb4eb5830026b79a93bf503ad32b2c5257315e9ffc49e76b2715cffd07c8e3db", size = 402521, upload-time = "2025-11-21T18:17:54.875Z" }, - { url = "https://files.pythonhosted.org/packages/b5/d3/b07f8f125ac52bbee5dc00ef0d526f820f67321bf4184f915f17f50a4657/brotlicffi-1.2.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3832c66e00d6d82087f20a972b2fc03e21cd99ef22705225a6f8f418a9158ecc", size = 374730, upload-time = "2025-11-21T18:17:56.334Z" }, ] [[package]] @@ -1148,8 +904,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ninja" }, { name = "packaging" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry 
= "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, ] sdist = { url = "https://files.pythonhosted.org/packages/db/df/63a384c49743b9fc8fec4c05dbd0b515e1c1c2b07e4559acc4fc37c69223/causal_conv1d-1.6.0.tar.gz", hash = "sha256:4eae3220d08e1e88238f3a0a88783147cbdf47f612cc610add75127c7a37ca3e", size = 29356, upload-time = "2026-01-12T17:33:32.794Z" } @@ -1159,10 +914,6 @@ version = "5.8.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d9/8e/8b4fdde28e42ffcd741a37f4ffa9fb59cd4fe01625b544dfcfd9ccb54f01/cbor2-5.8.0.tar.gz", hash = "sha256:b19c35fcae9688ac01ef75bad5db27300c2537eb4ee00ed07e05d8456a0d4931", size = 107825, upload-time = "2025-12-30T18:44:22.455Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/58/17/f664201080b2a7d0f57c16c8e9e5922013b92f202e294863ec7e75b7ff7f/cbor2-5.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fddee9103a17d7bed5753f0c7fc6663faa506eb953e50d8287804eccf7b048e6", size = 268316, upload-time = "2025-12-30T18:43:37.161Z" }, - { url = "https://files.pythonhosted.org/packages/d0/e1/072745b4ff01afe9df2cd627f8fc51a1acedb5d3d1253765625d2929db91/cbor2-5.8.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8d2ea26fad620aba5e88d7541be8b10c5034a55db9a23809b7cb49f36803f05b", size = 258874, upload-time = "2025-12-30T18:43:38.878Z" }, - { url = "https://files.pythonhosted.org/packages/a7/10/61c262b886d22b62c56e8aac6d10fa06d0953c997879ab882a31a624952b/cbor2-5.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:de68b4b310b072b082d317adc4c5e6910173a6d9455412e6183d72c778d1f54c", size = 261971, upload-time = "2025-12-30T18:43:40.401Z" }, - { url = "https://files.pythonhosted.org/packages/7e/42/b7862f5e64364b10ad120ea53e87ec7e891fb268cb99c572348e647cf7e9/cbor2-5.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:418d2cf0e03e90160fa1474c05a40fe228bbb4a92d1628bdbbd13a48527cb34d", size = 254151, upload-time = "2025-12-30T18:43:41.938Z" }, { url = "https://files.pythonhosted.org/packages/38/81/0d0cf0796fe8081492a61c45278f03def21a929535a492dd97c8438f5dbe/cbor2-5.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:518c118a5e00001854adb51f3164e647aa99b6a9877d2a733a28cb5c0a4d6857", size = 286242, upload-time = "2025-12-30T18:43:47.026Z" }, { url = "https://files.pythonhosted.org/packages/7b/a9/fdab6c10190cfb8d639e01f2b168f2406fc847a2a6bc00e7de78c3381d0a/cbor2-5.8.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cff2a1999e49cd51c23d1b6786a012127fd8f722c5946e82bd7ab3eb307443f3", size = 285412, upload-time = "2025-12-30T18:43:48.563Z" }, { url = "https://files.pythonhosted.org/packages/31/59/746a8e630996217a3afd523f583fcf7e3d16640d63f9a03f0f4e4f74b5b1/cbor2-5.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c4492160212374973cdc14e46f0565f2462721ef922b40f7ea11e7d613dfb2a", size = 278041, upload-time = "2025-12-30T18:43:49.92Z" }, @@ -1196,19 +947,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, - { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, - { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, - { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, - { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, - { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, - { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, - { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, - { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, - { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, - { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, - { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, - { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, { url = 
"https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, @@ -1272,22 +1010,6 @@ version = "3.4.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, - { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, - { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, - { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, - { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, - { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, - { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, - { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, - { url = 
"https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, - { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" }, - { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, - { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, - { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, - { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, - { url = 
"https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, - { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, @@ -1392,10 +1114,10 @@ name = "compressed-tensors" version = "0.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "loguru", marker = "sys_platform == 'linux'" }, - { name = "pydantic", marker = "sys_platform == 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = 
"https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "transformers", marker = "sys_platform == 'linux'" }, + { name = "loguru" }, + { name = "pydantic" }, + { name = "torch" }, + { name = "transformers" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fc/65/88dd1c58fb9d0ded51b5c86471b937a1525f91fad2211a6f051dc1ea822d/compressed_tensors-0.13.0.tar.gz", hash = "sha256:23893824d3498ea3f1a829f14a8fa85f9a5e76a34c711a038b8d7c619ca9a67c", size = 200995, upload-time = "2025-12-16T16:03:55.397Z" } wheels = [ @@ -1411,17 +1133,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/2e/c4390a31919d8a78b90e8ecf87cd4b4c4f05a5b48d05ec17db8e5404c6f4/contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1", size = 288773, upload-time = "2025-07-26T12:01:02.277Z" }, - { url = "https://files.pythonhosted.org/packages/0d/44/c4b0b6095fef4dc9c420e041799591e3b63e9619e3044f7f4f6c21c0ab24/contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381", size = 270149, upload-time = "2025-07-26T12:01:04.072Z" }, - { url = "https://files.pythonhosted.org/packages/30/2e/dd4ced42fefac8470661d7cb7e264808425e6c5d56d175291e93890cce09/contourpy-1.3.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:929ddf8c4c7f348e4c0a5a3a714b5c8542ffaa8c22954862a46ca1813b667ee7", size = 329222, upload-time = "2025-07-26T12:01:05.688Z" }, - { url = 
"https://files.pythonhosted.org/packages/f2/74/cc6ec2548e3d276c71389ea4802a774b7aa3558223b7bade3f25787fafc2/contourpy-1.3.3-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9e999574eddae35f1312c2b4b717b7885d4edd6cb46700e04f7f02db454e67c1", size = 377234, upload-time = "2025-07-26T12:01:07.054Z" }, - { url = "https://files.pythonhosted.org/packages/03/b3/64ef723029f917410f75c09da54254c5f9ea90ef89b143ccadb09df14c15/contourpy-1.3.3-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf67e0e3f482cb69779dd3061b534eb35ac9b17f163d851e2a547d56dba0a3a", size = 380555, upload-time = "2025-07-26T12:01:08.801Z" }, - { url = "https://files.pythonhosted.org/packages/5f/4b/6157f24ca425b89fe2eb7e7be642375711ab671135be21e6faa100f7448c/contourpy-1.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51e79c1f7470158e838808d4a996fa9bac72c498e93d8ebe5119bc1e6becb0db", size = 355238, upload-time = "2025-07-26T12:01:10.319Z" }, - { url = "https://files.pythonhosted.org/packages/98/56/f914f0dd678480708a04cfd2206e7c382533249bc5001eb9f58aa693e200/contourpy-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:598c3aaece21c503615fd59c92a3598b428b2f01bfb4b8ca9c4edeecc2438620", size = 1326218, upload-time = "2025-07-26T12:01:12.659Z" }, - { url = "https://files.pythonhosted.org/packages/fb/d7/4a972334a0c971acd5172389671113ae82aa7527073980c38d5868ff1161/contourpy-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:322ab1c99b008dad206d406bb61d014cf0174df491ae9d9d0fac6a6fda4f977f", size = 1392867, upload-time = "2025-07-26T12:01:15.533Z" }, - { url = "https://files.pythonhosted.org/packages/75/3e/f2cc6cd56dc8cff46b1a56232eabc6feea52720083ea71ab15523daab796/contourpy-1.3.3-cp311-cp311-win32.whl", hash = "sha256:fd907ae12cd483cd83e414b12941c632a969171bf90fc937d0c9f268a31cafff", size = 183677, upload-time = "2025-07-26T12:01:17.088Z" }, - { url = 
"https://files.pythonhosted.org/packages/98/4b/9bd370b004b5c9d8045c6c33cf65bae018b27aca550a3f657cdc99acdbd8/contourpy-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:3519428f6be58431c56581f1694ba8e50626f2dd550af225f82fb5f5814d2a42", size = 225234, upload-time = "2025-07-26T12:01:18.256Z" }, - { url = "https://files.pythonhosted.org/packages/d9/b6/71771e02c2e004450c12b1120a5f488cad2e4d5b590b1af8bad060360fe4/contourpy-1.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:15ff10bfada4bf92ec8b31c62bf7c1834c244019b4a33095a68000d7075df470", size = 193123, upload-time = "2025-07-26T12:01:19.848Z" }, { url = "https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419, upload-time = "2025-07-26T12:01:21.16Z" }, { url = "https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979, upload-time = "2025-07-26T12:01:22.448Z" }, { url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653, upload-time = "2025-07-26T12:01:24.155Z" }, @@ -1477,11 +1188,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/93/8a/68a4ec5c55a2971213d29a9374913f7e9f18581945a7a31d1a39b5d2dfe5/contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae", size = 202428, upload-time = "2025-07-26T12:02:48.691Z" }, { url = 
"https://files.pythonhosted.org/packages/fa/96/fd9f641ffedc4fa3ace923af73b9d07e869496c9cc7a459103e6e978992f/contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc", size = 250331, upload-time = "2025-07-26T12:02:50.137Z" }, { url = "https://files.pythonhosted.org/packages/ae/8c/469afb6465b853afff216f9528ffda78a915ff880ed58813ba4faf4ba0b6/contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b", size = 203831, upload-time = "2025-07-26T12:02:51.449Z" }, - { url = "https://files.pythonhosted.org/packages/a5/29/8dcfe16f0107943fa92388c23f6e05cff0ba58058c4c95b00280d4c75a14/contourpy-1.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd5dfcaeb10f7b7f9dc8941717c6c2ade08f587be2226222c12b25f0483ed497", size = 278809, upload-time = "2025-07-26T12:02:52.74Z" }, - { url = "https://files.pythonhosted.org/packages/85/a9/8b37ef4f7dafeb335daee3c8254645ef5725be4d9c6aa70b50ec46ef2f7e/contourpy-1.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0c1fc238306b35f246d61a1d416a627348b5cf0648648a031e14bb8705fcdfe8", size = 261593, upload-time = "2025-07-26T12:02:54.037Z" }, - { url = "https://files.pythonhosted.org/packages/0a/59/ebfb8c677c75605cc27f7122c90313fd2f375ff3c8d19a1694bda74aaa63/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f9aad7de812d6541d29d2bbf8feb22ff7e1c299523db288004e3157ff4674e", size = 302202, upload-time = "2025-07-26T12:02:55.947Z" }, - { url = "https://files.pythonhosted.org/packages/3c/37/21972a15834d90bfbfb009b9d004779bd5a07a0ec0234e5ba8f64d5736f4/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed3657edf08512fc3fe81b510e35c2012fbd3081d2e26160f27ca28affec989", size = 329207, upload-time = "2025-07-26T12:02:57.468Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/58/bd257695f39d05594ca4ad60df5bcb7e32247f9951fd09a9b8edb82d1daa/contourpy-1.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3d1a3799d62d45c18bafd41c5fa05120b96a28079f2393af559b843d1a966a77", size = 225315, upload-time = "2025-07-26T12:02:58.801Z" }, ] [[package]] @@ -1490,21 +1196,6 @@ version = "7.13.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/ad/b59e5b451cf7172b8d1043dc0fa718f23aab379bc1521ee13d4bd9bfa960/coverage-7.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d490ba50c3f35dd7c17953c68f3270e7ccd1c6642e2d2afe2d8e720b98f5a053", size = 219278, upload-time = "2026-02-09T12:56:31.673Z" }, - { url = "https://files.pythonhosted.org/packages/f1/17/0cb7ca3de72e5f4ef2ec2fa0089beafbcaaaead1844e8b8a63d35173d77d/coverage-7.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19bc3c88078789f8ef36acb014d7241961dbf883fd2533d18cb1e7a5b4e28b11", size = 219783, upload-time = "2026-02-09T12:56:33.104Z" }, - { url = "https://files.pythonhosted.org/packages/ab/63/325d8e5b11e0eaf6d0f6a44fad444ae58820929a9b0de943fa377fe73e85/coverage-7.13.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3998e5a32e62fdf410c0dbd3115df86297995d6e3429af80b8798aad894ca7aa", size = 250200, upload-time = "2026-02-09T12:56:34.474Z" }, - { url = "https://files.pythonhosted.org/packages/76/53/c16972708cbb79f2942922571a687c52bd109a7bd51175aeb7558dff2236/coverage-7.13.4-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8e264226ec98e01a8e1054314af91ee6cde0eacac4f465cc93b03dbe0bce2fd7", size = 252114, upload-time = 
"2026-02-09T12:56:35.749Z" }, - { url = "https://files.pythonhosted.org/packages/eb/c2/7ab36d8b8cc412bec9ea2d07c83c48930eb4ba649634ba00cb7e4e0f9017/coverage-7.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3aa4e7b9e416774b21797365b358a6e827ffadaaca81b69ee02946852449f00", size = 254220, upload-time = "2026-02-09T12:56:37.796Z" }, - { url = "https://files.pythonhosted.org/packages/d6/4d/cf52c9a3322c89a0e6febdfbc83bb45c0ed3c64ad14081b9503adee702e7/coverage-7.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:71ca20079dd8f27fcf808817e281e90220475cd75115162218d0e27549f95fef", size = 256164, upload-time = "2026-02-09T12:56:39.016Z" }, - { url = "https://files.pythonhosted.org/packages/78/e9/eb1dd17bd6de8289df3580e967e78294f352a5df8a57ff4671ee5fc3dcd0/coverage-7.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e2f25215f1a359ab17320b47bcdaca3e6e6356652e8256f2441e4ef972052903", size = 250325, upload-time = "2026-02-09T12:56:40.668Z" }, - { url = "https://files.pythonhosted.org/packages/71/07/8c1542aa873728f72267c07278c5cc0ec91356daf974df21335ccdb46368/coverage-7.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d65b2d373032411e86960604dc4edac91fdfb5dca539461cf2cbe78327d1e64f", size = 251913, upload-time = "2026-02-09T12:56:41.97Z" }, - { url = "https://files.pythonhosted.org/packages/74/d7/c62e2c5e4483a748e27868e4c32ad3daa9bdddbba58e1bc7a15e252baa74/coverage-7.13.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94eb63f9b363180aff17de3e7c8760c3ba94664ea2695c52f10111244d16a299", size = 249974, upload-time = "2026-02-09T12:56:43.323Z" }, - { url = "https://files.pythonhosted.org/packages/98/9f/4c5c015a6e98ced54efd0f5cf8d31b88e5504ecb6857585fc0161bb1e600/coverage-7.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e856bf6616714c3a9fbc270ab54103f4e685ba236fa98c054e8f87f266c93505", size = 253741, upload-time 
= "2026-02-09T12:56:45.155Z" }, - { url = "https://files.pythonhosted.org/packages/bd/59/0f4eef89b9f0fcd9633b5d350016f54126ab49426a70ff4c4e87446cabdc/coverage-7.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:65dfcbe305c3dfe658492df2d85259e0d79ead4177f9ae724b6fb245198f55d6", size = 249695, upload-time = "2026-02-09T12:56:46.636Z" }, - { url = "https://files.pythonhosted.org/packages/b5/2c/b7476f938deb07166f3eb281a385c262675d688ff4659ad56c6c6b8e2e70/coverage-7.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b507778ae8a4c915436ed5c2e05b4a6cecfa70f734e19c22a005152a11c7b6a9", size = 250599, upload-time = "2026-02-09T12:56:48.13Z" }, - { url = "https://files.pythonhosted.org/packages/b8/34/c3420709d9846ee3785b9f2831b4d94f276f38884032dca1457fa83f7476/coverage-7.13.4-cp311-cp311-win32.whl", hash = "sha256:784fc3cf8be001197b652d51d3fd259b1e2262888693a4636e18879f613a62a9", size = 221780, upload-time = "2026-02-09T12:56:50.479Z" }, - { url = "https://files.pythonhosted.org/packages/61/08/3d9c8613079d2b11c185b865de9a4c1a68850cfda2b357fae365cf609f29/coverage-7.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:2421d591f8ca05b308cf0092807308b2facbefe54af7c02ac22548b88b95c98f", size = 222715, upload-time = "2026-02-09T12:56:51.815Z" }, - { url = "https://files.pythonhosted.org/packages/18/1a/54c3c80b2f056164cc0a6cdcb040733760c7c4be9d780fe655f356f433e4/coverage-7.13.4-cp311-cp311-win_arm64.whl", hash = "sha256:79e73a76b854d9c6088fe5d8b2ebe745f8681c55f7397c3c0a016192d681045f", size = 221385, upload-time = "2026-02-09T12:56:53.194Z" }, { url = "https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" }, { url = 
"https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" }, { url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" }, @@ -1616,57 +1307,25 @@ wheels = [ name = "cuda-bindings" version = "12.9.4" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", - "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform == 'linux'", -] dependencies = [ - { name = "cuda-pathfinder", marker = "sys_platform == 'linux'" }, + { name = "cuda-pathfinder" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/2b/ebcbb60aa6dba830474cd360c42e10282f7a343c0a1f58d24fbd3b7c2d77/cuda_bindings-12.9.4-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a6a429dc6c13148ff1e27c44f40a3dd23203823e637b87fd0854205195988306", size = 11840604, upload-time = "2025-10-21T14:51:34.565Z" }, - { url = "https://files.pythonhosted.org/packages/45/e7/b47792cc2d01c7e1d37c32402182524774dadd2d26339bd224e0e913832e/cuda_bindings-12.9.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c912a3d9e6b6651853eed8eed96d6800d69c08e94052c292fec3f282c5a817c9", size = 12210593, upload-time = "2025-10-21T14:51:36.574Z" }, { url = 
"https://files.pythonhosted.org/packages/0c/c2/65bfd79292b8ff18be4dd7f7442cea37bcbc1a228c1886f1dea515c45b67/cuda_bindings-12.9.4-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:694ba35023846625ef471257e6b5a4bc8af690f961d197d77d34b1d1db393f56", size = 11760260, upload-time = "2025-10-21T14:51:40.79Z" }, { url = "https://files.pythonhosted.org/packages/a9/c1/dabe88f52c3e3760d861401bb994df08f672ec893b8f7592dc91626adcf3/cuda_bindings-12.9.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fda147a344e8eaeca0c6ff113d2851ffca8f7dfc0a6c932374ee5c47caa649c8", size = 12151019, upload-time = "2025-10-21T14:51:43.167Z" }, + { url = "https://files.pythonhosted.org/packages/df/6b/9c1b1a6c01392bfdd758e9486f52a1a72bc8f49e98f9355774ef98b5fb4e/cuda_bindings-12.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:696ca75d249ddf287d01b9a698b8e2d8a05046495a9c051ca15659dc52d17615", size = 11586961, upload-time = "2025-10-21T14:51:45.394Z" }, { url = "https://files.pythonhosted.org/packages/05/8b/b4b2d1c7775fa403b64333e720cfcfccef8dcb9cdeb99947061ca5a77628/cuda_bindings-12.9.4-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cf8bfaedc238f3b115d957d1fd6562b7e8435ba57f6d0e2f87d0e7149ccb2da5", size = 11570071, upload-time = "2025-10-21T14:51:47.472Z" }, { url = "https://files.pythonhosted.org/packages/63/56/e465c31dc9111be3441a9ba7df1941fe98f4aa6e71e8788a3fb4534ce24d/cuda_bindings-12.9.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:32bdc5a76906be4c61eb98f546a6786c5773a881f3b166486449b5d141e4a39f", size = 11906628, upload-time = "2025-10-21T14:51:49.905Z" }, + { url = "https://files.pythonhosted.org/packages/05/d0/d0e4e2e047d8e899f023fa15ad5e9894ce951253f4c894f1cd68490fdb14/cuda_bindings-12.9.4-cp313-cp313-win_amd64.whl", hash = "sha256:a2e82c8985948f953c2be51df45c3fe11c812a928fca525154fb9503190b3e64", size = 11556719, upload-time = "2025-10-21T14:51:52.248Z" }, { url = 
"https://files.pythonhosted.org/packages/ec/07/6aff13bc1e977e35aaa6b22f52b172e2890c608c6db22438cf7ed2bf43a6/cuda_bindings-12.9.4-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3adf4958dcf68ae7801a59b73fb00a8b37f8d0595060d66ceae111b1002de38d", size = 11566797, upload-time = "2025-10-21T14:51:54.581Z" }, { url = "https://files.pythonhosted.org/packages/a3/84/1e6be415e37478070aeeee5884c2022713c1ecc735e6d82d744de0252eee/cuda_bindings-12.9.4-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56e0043c457a99ac473ddc926fe0dc4046694d99caef633e92601ab52cbe17eb", size = 11925991, upload-time = "2025-10-21T14:51:56.535Z" }, + { url = "https://files.pythonhosted.org/packages/4d/3c/972edfddb4ae8a9fccd3c3766ed47453b6f805b6026b32f10209dd4b8ad4/cuda_bindings-12.9.4-cp313-cp313t-win_amd64.whl", hash = "sha256:b32d8b685f0e66f5658bcf4601ef034e89fc2843582886f0a58784a4302da06c", size = 11894363, upload-time = "2025-10-21T14:51:58.633Z" }, { url = "https://files.pythonhosted.org/packages/1e/b5/96a6696e20c4ffd2b327f54c7d0fde2259bdb998d045c25d5dedbbe30290/cuda_bindings-12.9.4-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f53a7f453d4b2643d8663d036bafe29b5ba89eb904c133180f295df6dc151e5", size = 11624530, upload-time = "2025-10-21T14:52:01.539Z" }, { url = "https://files.pythonhosted.org/packages/d1/af/6dfd8f2ed90b1d4719bc053ff8940e494640fe4212dc3dd72f383e4992da/cuda_bindings-12.9.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8b72ee72a9cc1b531db31eebaaee5c69a8ec3500e32c6933f2d3b15297b53686", size = 11922703, upload-time = "2025-10-21T14:52:03.585Z" }, + { url = "https://files.pythonhosted.org/packages/e6/87/652796522cc1a7af559460e1ce59b642e05c1468b9c08522a9a096b4cf04/cuda_bindings-12.9.4-cp314-cp314-win_amd64.whl", hash = "sha256:53a10c71fdbdb743e0268d07964e5a996dd00b4e43831cbfce9804515d97d575", size = 11517716, upload-time = "2025-10-21T14:52:06.013Z" }, { url = 
"https://files.pythonhosted.org/packages/39/73/d2fc40c043bac699c3880bf88d3cebe9d88410cd043795382826c93a89f0/cuda_bindings-12.9.4-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:20f2699d61d724de3eb3f3369d57e2b245f93085cab44fd37c3bea036cea1a6f", size = 11565056, upload-time = "2025-10-21T14:52:08.338Z" }, { url = "https://files.pythonhosted.org/packages/6c/19/90ac264acc00f6df8a49378eedec9fd2db3061bf9263bf9f39fd3d8377c3/cuda_bindings-12.9.4-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d80bffc357df9988dca279734bc9674c3934a654cab10cadeed27ce17d8635ee", size = 11924658, upload-time = "2025-10-21T14:52:10.411Z" }, -] - -[[package]] -name = "cuda-bindings" -version = "13.1.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.12.*' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < '3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] -dependencies = [ - { name = "cuda-pathfinder", marker = "sys_platform != 'linux'" }, -] -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/64/1c/e7ea27d4cb7d07331c88e3bbed3cacc947d2237471801086c7447b3e195d/cuda_bindings-13.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:ec33b84f4bd65a86a734427f2b9cb8f221bedab2c4cfb681488cabc82f1d64ab", size = 15210672, upload-time = "2025-12-09T22:05:53.369Z" }, - { url = "https://files.pythonhosted.org/packages/60/1f/ecc4701ade3e85f091c625a920574527b9daf7fb354189fbfbc5516af6cd/cuda_bindings-13.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:ccde9c95c0e953b31fe7731bb08da9d0a34b1770498df9a3c156fdfdbe3951ad", size = 15250028, upload-time = "2025-12-09T22:06:00.346Z" }, - { url = "https://files.pythonhosted.org/packages/2a/56/433093bec0121f031edb582ea3a72f71031e8fbebecaaf329809344da4c7/cuda_bindings-13.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:9e4f348cd7a779657d51e6f71aac3965fb1738f40ff3bbe75265a3242fd6f29f", size = 15216463, upload-time = "2025-12-09T22:06:07.296Z" }, - { url = "https://files.pythonhosted.org/packages/f6/33/7739cc5e9a3373df8e7dea9060528bee5f70cf6e28b9c14f765502816c71/cuda_bindings-13.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:f2e079182014dbc162562b46467815272c14c7afe5b988978fa968728b0ac726", size = 15373212, upload-time = "2025-12-09T22:06:13.989Z" }, - { url = "https://files.pythonhosted.org/packages/e3/5c/38b92080c5b6c4ddb09f0be2536123f81c7e9e1a89e4573f20cb00347ee3/cuda_bindings-13.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:8205eee6b8b458a2110c0384923ace206855d0f1b436fc1b145fcbaa1653b501", size = 16044390, upload-time = "2025-12-09T22:06:20.945Z" }, + { url = "https://files.pythonhosted.org/packages/ab/52/a30f46e822bfa6b4a659d1e8de8c4a4adf908ea075dac568b55362541bd8/cuda_bindings-12.9.4-cp314-cp314t-win_amd64.whl", hash = "sha256:53e11991a92ff6f26a0c8a98554cd5d6721c308a6b7bfb08bebac9201e039e43", size = 12055608, upload-time = "2025-10-21T14:52:12.335Z" }, ] [[package]] @@ -1681,45 +1340,13 @@ wheels = [ name = "cuda-python" version = "12.9.4" source = { registry = "https://pypi.org/simple" } 
-resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", - "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform == 'linux'", -] dependencies = [ - { name = "cuda-bindings", version = "12.9.4", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "cuda-bindings" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/af/f3/6b032a554019cfb3447e671798c1bd3e79b5f1af20d10253f56cea269ef2/cuda_python-12.9.4-py3-none-any.whl", hash = "sha256:d2cacea882a69863f1e7d27ee71d75f0684f4c76910aff839067e4f89c902279", size = 7594, upload-time = "2025-10-21T14:55:12.846Z" }, ] -[[package]] -name = "cuda-python" -version = "13.1.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.12.*' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < '3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] -dependencies = [ - { name = "cuda-bindings", version = 
"13.1.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "cuda-pathfinder", marker = "sys_platform != 'linux'" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/cd/08/b5e3b9822662d72d540d830531e3ab6a7cabbda3dd56175696aabccfeb76/cuda_python-13.1.1-py3-none-any.whl", hash = "sha256:944cc4fe6482673d28dd545797a28840945a1668739328fa2ad1e9be4f7050d9", size = 8038, upload-time = "2025-12-09T22:13:10.719Z" }, -] - [[package]] name = "cudo-compute" version = "0.3.6" @@ -1741,12 +1368,10 @@ name = "cupy-cuda12x" version = "14.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cuda-pathfinder", marker = "sys_platform == 'linux'" }, - { name = "numpy", marker = "sys_platform == 'linux'" }, + { name = "cuda-pathfinder" }, + { name = "numpy" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/a1/15c59a9abe6720be3a7c3f59bf1ff7ba1ceb5f44869b5cb80b3e60cea170/cupy_cuda12x-14.0.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:e0c8a6b9430313cd5576c473254bbfee1eff4503b00b097ac34e45f4b45af05b", size = 146240038, upload-time = "2026-02-17T00:51:34.109Z" }, - { url = "https://files.pythonhosted.org/packages/63/12/a59be7517c2c8f05a31cbf4a84597b8d686050acf99e77b1f95fcc0da327/cupy_cuda12x-14.0.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:60b4a0e88171ec811f282c8b0ddf2ea51aa335e5739a638f2dc7438a81410833", size = 135081905, upload-time = "2026-02-17T00:51:39.969Z" }, { url = "https://files.pythonhosted.org/packages/37/f6/c561e31d37655d4b9c4e53d43314ed5a7ad715f6c12a329a256380b59c11/cupy_cuda12x-14.0.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:680b2d8be84dc6984ceae41000a0a5a272adef6c9e1650ee43afeb937c319233", size = 145577288, upload-time = "2026-02-17T00:51:49.173Z" }, { url = "https://files.pythonhosted.org/packages/58/c9/7d0a990e01e1082c857c7c869127e8f70f5be07d7600ca318b77844dfbfd/cupy_cuda12x-14.0.0-cp312-cp312-manylinux2014_x86_64.whl", 
hash = "sha256:321953e346995fc5fb970b7eec05be7463b33628fd0673a54f7f206270786c68", size = 134612571, upload-time = "2026-02-17T00:51:54.023Z" }, { url = "https://files.pythonhosted.org/packages/d3/f5/e882da76d77cbed5ba75a9c4a9deedfe3dcd17128407fea17599b87c949d/cupy_cuda12x-14.0.0-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:5a23a07a51391e4d46dc02b0cf05aa7eb1cb405e3fa8abcec65d8b07259c0ce7", size = 145123903, upload-time = "2026-02-17T00:52:02.841Z" }, @@ -1760,9 +1385,8 @@ name = "cut-cross-entropy" version = "25.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "triton", version = "3.6.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, + { name = "triton", marker = "sys_platform == 'linux'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/7e/97/45ff09cfcda7b200389204daa0125168e6544fba257adbbcdf728501d4f9/cut_cross_entropy-25.1.1.tar.gz", hash = "sha256:5fe5924509248b1aea5c890f8887c6a7759f7c8b1ebc0490e42c247c4f7c1e34", size = 22972, upload-time = "2025-01-07T12:21:53.896Z" } wheels = [ @@ -1815,10 +1439,6 @@ version = "1.8.20" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/e0/b7/cd8080344452e4874aae67c40d8940e2b4d47b01601a8fd9f44786c757c7/debugpy-1.8.20.tar.gz", hash = "sha256:55bc8701714969f1ab89a6d5f2f3d40c36f91b2cbe2f65d98bf8196f6a6a2c33", size = 1645207, upload-time = "2026-01-29T23:03:28.199Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/56/c3baf5cbe4dd77427fd9aef99fcdade259ad128feeb8a786c246adb838e5/debugpy-1.8.20-cp311-cp311-macosx_15_0_universal2.whl", hash = 
"sha256:eada6042ad88fa1571b74bd5402ee8b86eded7a8f7b827849761700aff171f1b", size = 2208318, upload-time = "2026-01-29T23:03:36.481Z" }, - { url = "https://files.pythonhosted.org/packages/9a/7d/4fa79a57a8e69fe0d9763e98d1110320f9ecd7f1f362572e3aafd7417c9d/debugpy-1.8.20-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:7de0b7dfeedc504421032afba845ae2a7bcc32ddfb07dae2c3ca5442f821c344", size = 3171493, upload-time = "2026-01-29T23:03:37.775Z" }, - { url = "https://files.pythonhosted.org/packages/7d/f2/1e8f8affe51e12a26f3a8a8a4277d6e60aa89d0a66512f63b1e799d424a4/debugpy-1.8.20-cp311-cp311-win32.whl", hash = "sha256:773e839380cf459caf73cc533ea45ec2737a5cc184cf1b3b796cd4fd98504fec", size = 5209240, upload-time = "2026-01-29T23:03:39.109Z" }, - { url = "https://files.pythonhosted.org/packages/d5/92/1cb532e88560cbee973396254b21bece8c5d7c2ece958a67afa08c9f10dc/debugpy-1.8.20-cp311-cp311-win_amd64.whl", hash = "sha256:1f7650546e0eded1902d0f6af28f787fa1f1dbdbc97ddabaf1cd963a405930cb", size = 5233481, upload-time = "2026-01-29T23:03:40.659Z" }, { url = "https://files.pythonhosted.org/packages/14/57/7f34f4736bfb6e00f2e4c96351b07805d83c9a7b33d28580ae01374430f7/debugpy-1.8.20-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:4ae3135e2089905a916909ef31922b2d733d756f66d87345b3e5e52b7a55f13d", size = 2550686, upload-time = "2026-01-29T23:03:42.023Z" }, { url = "https://files.pythonhosted.org/packages/ab/78/b193a3975ca34458f6f0e24aaf5c3e3da72f5401f6054c0dfd004b41726f/debugpy-1.8.20-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:88f47850a4284b88bd2bfee1f26132147d5d504e4e86c22485dfa44b97e19b4b", size = 4310588, upload-time = "2026-01-29T23:03:43.314Z" }, { url = "https://files.pythonhosted.org/packages/c1/55/f14deb95eaf4f30f07ef4b90a8590fc05d9e04df85ee379712f6fb6736d7/debugpy-1.8.20-cp312-cp312-win32.whl", hash = "sha256:4057ac68f892064e5f98209ab582abfee3b543fb55d2e87610ddc133a954d390", size = 5331372, upload-time = "2026-01-29T23:03:45.526Z" }, @@ -1869,8 +1489,8 @@ 
name = "depyf" version = "0.20.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "astor", marker = "sys_platform == 'linux'" }, - { name = "dill", marker = "sys_platform == 'linux'" }, + { name = "astor" }, + { name = "dill" }, ] sdist = { url = "https://files.pythonhosted.org/packages/88/35/83fb0178212279aa0af031031905804c6de5618435d229f41ed21bb9ad2c/depyf-0.20.0.tar.gz", hash = "sha256:fb7683bd72c44f67b56029df2c47721e9a02ffa4d7b19095f1c54c4ebf797a98", size = 6168761, upload-time = "2025-10-13T12:33:38.589Z" } wheels = [ @@ -1966,13 +1586,6 @@ version = "1.4.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/36/9d/ab66a06e416d71b7bdcb9904cdf8d4db3379ef632bb8e9495646702d9718/duckdb-1.4.4.tar.gz", hash = "sha256:8bba52fd2acb67668a4615ee17ee51814124223de836d9e2fdcbc4c9021b3d3c", size = 18419763, upload-time = "2026-01-26T11:50:37.68Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/68/19233412033a2bc5a144a3f531f64e3548d4487251e3f16b56c31411a06f/duckdb-1.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5ba684f498d4e924c7e8f30dd157da8da34c8479746c5011b6c0e037e9c60ad2", size = 28883816, upload-time = "2026-01-26T11:49:01.009Z" }, - { url = "https://files.pythonhosted.org/packages/b3/3e/cec70e546c298ab76d80b990109e111068d82cca67942c42328eaa7d6fdb/duckdb-1.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5536eb952a8aa6ae56469362e344d4e6403cc945a80bc8c5c2ebdd85d85eb64b", size = 15339662, upload-time = "2026-01-26T11:49:04.058Z" }, - { url = "https://files.pythonhosted.org/packages/d3/f0/cf4241a040ec4f571859a738007ec773b642fbc27df4cbcf34b0c32ea559/duckdb-1.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:47dd4162da6a2be59a0aef640eb08d6360df1cf83c317dcc127836daaf3b7f7c", size = 13670044, upload-time = "2026-01-26T11:49:06.627Z" }, - { url = 
"https://files.pythonhosted.org/packages/11/64/de2bb4ec1e35ec9ebf6090a95b930fc56934a0ad6f34a24c5972a14a77ef/duckdb-1.4.4-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6cb357cfa3403910e79e2eb46c8e445bb1ee2fd62e9e9588c6b999df4256abc1", size = 18409951, upload-time = "2026-01-26T11:49:09.808Z" }, - { url = "https://files.pythonhosted.org/packages/79/a2/ac0f5ee16df890d141304bcd48733516b7202c0de34cd3555634d6eb4551/duckdb-1.4.4-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c25d5b0febda02b7944e94fdae95aecf952797afc8cb920f677b46a7c251955", size = 20411739, upload-time = "2026-01-26T11:49:12.652Z" }, - { url = "https://files.pythonhosted.org/packages/37/a2/9a3402edeedaecf72de05fe9ff7f0303d701b8dfc136aea4a4be1a5f7eee/duckdb-1.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:6703dd1bb650025b3771552333d305d62ddd7ff182de121483d4e042ea6e2e00", size = 12256972, upload-time = "2026-01-26T11:49:15.468Z" }, - { url = "https://files.pythonhosted.org/packages/f6/e6/052ea6dcdf35b259fd182eff3efd8d75a071de4010c9807556098df137b9/duckdb-1.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:bf138201f56e5d6fc276a25138341b3523e2f84733613fc43f02c54465619a95", size = 13006696, upload-time = "2026-01-26T11:49:18.054Z" }, { url = "https://files.pythonhosted.org/packages/58/33/beadaa69f8458afe466126f2c5ee48c4759cc9d5d784f8703d44e0b52c3c/duckdb-1.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ddcfd9c6ff234da603a1edd5fd8ae6107f4d042f74951b65f91bc5e2643856b3", size = 28896535, upload-time = "2026-01-26T11:49:21.232Z" }, { url = "https://files.pythonhosted.org/packages/76/66/82413f386df10467affc87f65bac095b7c88dbd9c767584164d5f4dc4cb8/duckdb-1.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6792ca647216bd5c4ff16396e4591cfa9b4a72e5ad7cdd312cec6d67e8431a7c", size = 15349716, upload-time = "2026-01-26T11:49:23.989Z" }, { url = 
"https://files.pythonhosted.org/packages/5d/8c/c13d396fd4e9bf970916dc5b4fea410c1b10fe531069aea65f1dcf849a71/duckdb-1.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1f8d55843cc940e36261689054f7dfb6ce35b1f5b0953b0d355b6adb654b0d52", size = 13672403, upload-time = "2026-01-26T11:49:26.741Z" }, @@ -2086,14 +1699,14 @@ all = [ { name = "uvicorn", extra = ["standard"] }, ] standard = [ - { name = "email-validator", marker = "sys_platform == 'linux'" }, - { name = "fastapi-cli", extra = ["standard"], marker = "sys_platform == 'linux'" }, - { name = "httpx", marker = "sys_platform == 'linux'" }, - { name = "jinja2", marker = "sys_platform == 'linux'" }, - { name = "pydantic-extra-types", marker = "sys_platform == 'linux'" }, - { name = "pydantic-settings", marker = "sys_platform == 'linux'" }, - { name = "python-multipart", marker = "sys_platform == 'linux'" }, - { name = "uvicorn", extra = ["standard"], marker = "sys_platform == 'linux'" }, + { name = "email-validator" }, + { name = "fastapi-cli", extra = ["standard"] }, + { name = "httpx" }, + { name = "jinja2" }, + { name = "pydantic-extra-types" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "uvicorn", extra = ["standard"] }, ] [[package]] @@ -2141,21 +1754,6 @@ version = "0.8.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/69/e7/f89d54fb04104114dd0552836dc2b47914f416cc0e200b409dd04a33de5e/fastar-0.8.0.tar.gz", hash = "sha256:f4d4d68dbf1c4c2808f0e730fac5843493fc849f70fe3ad3af60dfbaf68b9a12", size = 68524, upload-time = "2025-11-26T02:36:00.72Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cd/15/1c764530b81b266f6d27d78d49b6bef22a73b3300cd83a280bfd244908c5/fastar-0.8.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cd9c0d3ebf7a0a6f642f771cf41b79f7c98d40a3072a8abe1174fbd9bd615bd3", size = 708427, upload-time = "2025-11-26T02:34:36.502Z" }, - { url = 
"https://files.pythonhosted.org/packages/41/fc/75d42c008516543219e4293e4d8ac55da57a5c63147484f10468bd1bc24e/fastar-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2875a077340fe4f8099bd3ed8fa90d9595e1ac3cd62ae19ab690d5bf550eeb35", size = 631740, upload-time = "2025-11-26T02:34:20.718Z" }, - { url = "https://files.pythonhosted.org/packages/50/8d/9632984f7824ed2210157dcebd8e9821ef6d4f2b28510d0516db6625ff9b/fastar-0.8.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a999263d9f87184bf2801833b2ecf105e03c0dd91cac78685673b70da564fd64", size = 871628, upload-time = "2025-11-26T02:33:49.279Z" }, - { url = "https://files.pythonhosted.org/packages/05/97/3eb6ea71b7544d45cd29cacb764ca23cde8ce0aed1a6a02251caa4c0a818/fastar-0.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c41111da56430f638cbfc498ebdcc7d30f63416e904b27b7695c29bd4889cb8", size = 765005, upload-time = "2025-11-26T02:32:45.833Z" }, - { url = "https://files.pythonhosted.org/packages/d6/45/3eb0ee945a0b5d5f9df7e7c25c037ce7fa441cd0b4d44f76d286e2f4396a/fastar-0.8.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3719541a12bb09ab1eae91d2c987a9b2b7d7149c52e7109ba6e15b74aabc49b1", size = 765587, upload-time = "2025-11-26T02:33:01.174Z" }, - { url = "https://files.pythonhosted.org/packages/51/bb/7defd6ec0d9570b1987d8ebde52d07d97f3f26e10b592fb3e12738eba39a/fastar-0.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a9b0fff8079b18acdface7ef1b7f522fd9a589f65ca4a1a0dd7c92a0886c2a2", size = 931150, upload-time = "2025-11-26T02:33:17.374Z" }, - { url = "https://files.pythonhosted.org/packages/28/54/62e51e684dab347c61878afbf09e177029c1a91eb1e39ef244e6b3ef9efa/fastar-0.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac073576c1931959191cb20df38bab21dd152f66c940aa3ca8b22e39f753b2f3", size = 821354, upload-time = "2025-11-26T02:33:32.083Z" }, - { url = 
"https://files.pythonhosted.org/packages/53/a8/12708ea4d21e3cf9f485b2a67d44ce84d949a6eddcc9aa5b3d324585ab43/fastar-0.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:003b59a7c3e405b6a7bff8fab17d31e0ccbc7f06730a8f8ca1694eeea75f3c76", size = 821626, upload-time = "2025-11-26T02:34:05.685Z" }, - { url = "https://files.pythonhosted.org/packages/e7/c4/1b4d3347c7a759853f963410bf6baf42fe014d587c50c39c8e145f4bf1a0/fastar-0.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a7b96748425efd9fc155cd920d65088a1b0d754421962418ea73413d02ff515a", size = 986187, upload-time = "2025-11-26T02:34:52.047Z" }, - { url = "https://files.pythonhosted.org/packages/dc/59/2dbe0dc2570764475e60030403738faa261a9d3bff16b08629c378ab939a/fastar-0.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:90957a30e64418b02df5b4d525bea50403d98a4b1f29143ce5914ddfa7e54ee4", size = 1041536, upload-time = "2025-11-26T02:35:08.926Z" }, - { url = "https://files.pythonhosted.org/packages/d9/0f/639b295669c7ca6fbc2b4be2a7832aaeac1a5e06923f15a8a6d6daecbc7d/fastar-0.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f6e784a8015623fbb7ccca1af372fd82cb511b408ddd2348dc929fc6e415df73", size = 1047149, upload-time = "2025-11-26T02:35:26.597Z" }, - { url = "https://files.pythonhosted.org/packages/cb/e7/23e3a19e06d261d1894f98eca9458f98c090c505a0c712dafc0ff1fc2965/fastar-0.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a03eaf287bbc93064688a1220580ce261e7557c8898f687f4d0b281c85b28d3c", size = 994992, upload-time = "2025-11-26T02:35:44.009Z" }, - { url = "https://files.pythonhosted.org/packages/f2/7a/3ea4726bae3ac9358d02107ae48f3e10ee186dbed554af79e00b7b498c44/fastar-0.8.0-cp311-cp311-win32.whl", hash = "sha256:661a47ed90762f419406c47e802f46af63a08254ba96abd1c8191e4ce967b665", size = 456449, upload-time = "2025-11-26T02:36:25.291Z" }, - { url = 
"https://files.pythonhosted.org/packages/cb/3c/0142bee993c431ee91cf5535e6e4b079ad491f620c215fcd79b7e5ffeb2b/fastar-0.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:b48abd6056fef7bc3d414aafb453c5b07fdf06d2df5a2841d650288a3aa1e9d3", size = 490863, upload-time = "2025-11-26T02:36:11.114Z" }, - { url = "https://files.pythonhosted.org/packages/3b/18/d119944f6bdbf6e722e204e36db86390ea45684a1bf6be6e3aa42abd471f/fastar-0.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:50c18788b3c6ffb85e176dcb8548bb8e54616a0519dcdbbfba66f6bbc4316933", size = 462230, upload-time = "2025-11-26T02:36:01.917Z" }, { url = "https://files.pythonhosted.org/packages/58/f1/5b2ff898abac7f1a418284aad285e3a4f68d189c572ab2db0f6c9079dd16/fastar-0.8.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f10d2adfe40f47ff228f4efaa32d409d732ded98580e03ed37c9535b5fc923d", size = 706369, upload-time = "2025-11-26T02:34:37.783Z" }, { url = "https://files.pythonhosted.org/packages/23/60/8046a386dca39154f80c927cbbeeb4b1c1267a3271bffe61552eb9995757/fastar-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b930da9d598e3bc69513d131f397e6d6be4643926ef3de5d33d1e826631eb036", size = 629097, upload-time = "2025-11-26T02:34:21.888Z" }, { url = "https://files.pythonhosted.org/packages/22/7e/1ae005addc789924a9268da2394d3bb5c6f96836f7e37b7e3d23c2362675/fastar-0.8.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9d210da2de733ca801de83e931012349d209f38b92d9630ccaa94bd445bdc9b8", size = 868938, upload-time = "2025-11-26T02:33:51.119Z" }, @@ -2216,18 +1814,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c9/e2/dfa19a4b260b8ab3581b7484dcb80c09b25324f4daa6b6ae1c7640d1607a/fastar-0.8.0-cp314-cp314t-win32.whl", hash = "sha256:187f61dc739afe45ac8e47ed7fd1adc45d52eac110cf27d579155720507d6fbe", size = 455767, upload-time = "2025-11-26T02:36:34.758Z" }, { url = 
"https://files.pythonhosted.org/packages/51/47/df65c72afc1297797b255f90c4778b5d6f1f0f80282a134d5ab610310ed9/fastar-0.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:40e9d763cf8bf85ce2fa256e010aa795c0fe3d3bd1326d5c3084e6ce7857127e", size = 489971, upload-time = "2025-11-26T02:36:22.081Z" }, { url = "https://files.pythonhosted.org/packages/85/11/0aa8455af26f0ae89e42be67f3a874255ee5d7f0f026fc86e8d56f76b428/fastar-0.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:e59673307b6a08210987059a2bdea2614fe26e3335d0e5d1a3d95f49a05b1418", size = 460467, upload-time = "2025-11-26T02:36:07.978Z" }, - { url = "https://files.pythonhosted.org/packages/98/6e/6c46aa7f8c8734e7f96ee5141acd3877667ce66f34eea10703aa7571d191/fastar-0.8.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:998e3fa4b555b63eb134e6758437ed739ad1652fdd2a61dfe1dacbfddc35fe66", size = 710662, upload-time = "2025-11-26T02:34:47.593Z" }, - { url = "https://files.pythonhosted.org/packages/70/27/fd622442f2fbd4ff5459677987481ef1c60e077cb4e63a2ed4d8dce6f869/fastar-0.8.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:5f83e60d845091f3a12bc37f412774264d161576eaf810ed8b43567eb934b7e5", size = 634049, upload-time = "2025-11-26T02:34:32.365Z" }, - { url = "https://files.pythonhosted.org/packages/8f/ee/aa4d08aea25b5419a7277132e738ab1cd775f26aebddce11413b07e2fdff/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:299672e1c74d8b73c61684fac9159cfc063d35f4b165996a88facb0e26862cb5", size = 872055, upload-time = "2025-11-26T02:34:01.377Z" }, - { url = "https://files.pythonhosted.org/packages/92/9a/2bf2f77aade575e67997e0c759fd55cb1c66b7a5b437b1cd0e97d8b241bc/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3d3a27066b84d015deab5faee78565509bb33b137896443e4144cb1be1a5f90", size = 766787, upload-time = "2025-11-26T02:32:57.161Z" }, - { url = 
"https://files.pythonhosted.org/packages/0b/90/23a3f6c252f11b10c70f854bce09abc61f71b5a0e6a4b0eac2bcb9a2c583/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef0bcf4385bbdd3c1acecce2d9ea7dab7cc9b8ee0581bbccb7ab11908a7ce288", size = 766861, upload-time = "2025-11-26T02:33:12.824Z" }, - { url = "https://files.pythonhosted.org/packages/76/bb/beeb9078380acd4484db5c957d066171695d9340e3526398eb230127b0c2/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f10ef62b6eda6cb6fd9ba8e1fe08a07d7b2bdcc8eaa00eb91566143b92ed7eee", size = 932667, upload-time = "2025-11-26T02:33:28.405Z" }, - { url = "https://files.pythonhosted.org/packages/f4/6d/b034cc637bd0ee638d5a85d08e941b0b8ffd44cf391fb751ba98233734f7/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c4f6c82a8ee98c17aa48585ee73b51c89c1b010e5c951af83e07c3436180e3fc", size = 822712, upload-time = "2025-11-26T02:33:44.27Z" }, - { url = "https://files.pythonhosted.org/packages/e2/2b/7d183c63f59227c4689792042d6647f2586a5e7273b55e81745063088d81/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6129067fcb86276635b5857010f4e9b9c7d5d15dd571bb03c6c1ed73c40fd92", size = 822659, upload-time = "2025-11-26T02:34:16.815Z" }, - { url = "https://files.pythonhosted.org/packages/3e/f9/716e0cd9de2427fdf766bc68176f76226cd01fffef3a56c5046fa863f5f0/fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4cc9e77019e489f1ddac446b6a5b9dfb5c3d9abd142652c22a1d9415dbcc0e47", size = 987412, upload-time = "2025-11-26T02:35:04.259Z" }, - { url = "https://files.pythonhosted.org/packages/a4/b9/9a8c3fd59958c1c8027bc075af11722cdc62c4968bb277e841d131232289/fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:382bfe82c026086487cb17fee12f4c1e2b4e67ce230f2e04487d3e7ddfd69031", size = 1042911, upload-time = "2025-11-26T02:35:21.857Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/2f/c3f30963b47022134b8a231c12845f4d7cfba520f59bbc1a82468aea77c7/fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:908d2b9a1ff3d549cc304b32f95706a536da8f0bcb0bc0f9e4c1cce39b80e218", size = 1047464, upload-time = "2025-11-26T02:35:39.376Z" }, - { url = "https://files.pythonhosted.org/packages/9e/8a/218ab6d9a2bab3b07718e6cd8405529600edc1e9c266320e8524c8f63251/fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1aa7dbde2d2d73eb5b6203d0f74875cb66350f0f1b4325b4839fc8fbbf5d074e", size = 997309, upload-time = "2025-11-26T02:35:57.722Z" }, ] [[package]] @@ -2245,17 +1831,6 @@ version = "0.14.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/c3/7d/d9daedf0f2ebcacd20d599928f8913e9d2aea1d56d2d355a93bfa2b611d7/fastuuid-0.14.0.tar.gz", hash = "sha256:178947fc2f995b38497a74172adee64fdeb8b7ec18f2a5934d037641ba265d26", size = 18232, upload-time = "2025-10-19T22:19:22.402Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/f3/12481bda4e5b6d3e698fbf525df4443cc7dce746f246b86b6fcb2fba1844/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:73946cb950c8caf65127d4e9a325e2b6be0442a224fd51ba3b6ac44e1912ce34", size = 516386, upload-time = "2025-10-19T22:42:40.176Z" }, - { url = "https://files.pythonhosted.org/packages/59/19/2fc58a1446e4d72b655648eb0879b04e88ed6fa70d474efcf550f640f6ec/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:12ac85024637586a5b69645e7ed986f7535106ed3013640a393a03e461740cb7", size = 264569, upload-time = "2025-10-19T22:25:50.977Z" }, - { url = "https://files.pythonhosted.org/packages/78/29/3c74756e5b02c40cfcc8b1d8b5bac4edbd532b55917a6bcc9113550e99d1/fastuuid-0.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:05a8dde1f395e0c9b4be515b7a521403d1e8349443e7641761af07c7ad1624b1", size = 254366, upload-time = 
"2025-10-19T22:29:49.166Z" }, - { url = "https://files.pythonhosted.org/packages/52/96/d761da3fccfa84f0f353ce6e3eb8b7f76b3aa21fd25e1b00a19f9c80a063/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09378a05020e3e4883dfdab438926f31fea15fd17604908f3d39cbeb22a0b4dc", size = 278978, upload-time = "2025-10-19T22:35:41.306Z" }, - { url = "https://files.pythonhosted.org/packages/fc/c2/f84c90167cc7765cb82b3ff7808057608b21c14a38531845d933a4637307/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbb0c4b15d66b435d2538f3827f05e44e2baafcc003dd7d8472dc67807ab8fd8", size = 279692, upload-time = "2025-10-19T22:25:36.997Z" }, - { url = "https://files.pythonhosted.org/packages/af/7b/4bacd03897b88c12348e7bd77943bac32ccf80ff98100598fcff74f75f2e/fastuuid-0.14.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd5a7f648d4365b41dbf0e38fe8da4884e57bed4e77c83598e076ac0c93995e7", size = 303384, upload-time = "2025-10-19T22:29:46.578Z" }, - { url = "https://files.pythonhosted.org/packages/c0/a2/584f2c29641df8bd810d00c1f21d408c12e9ad0c0dafdb8b7b29e5ddf787/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c0a94245afae4d7af8c43b3159d5e3934c53f47140be0be624b96acd672ceb73", size = 460921, upload-time = "2025-10-19T22:36:42.006Z" }, - { url = "https://files.pythonhosted.org/packages/24/68/c6b77443bb7764c760e211002c8638c0c7cce11cb584927e723215ba1398/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b29e23c97e77c3a9514d70ce343571e469098ac7f5a269320a0f0b3e193ab36", size = 480575, upload-time = "2025-10-19T22:28:18.975Z" }, - { url = "https://files.pythonhosted.org/packages/5a/87/93f553111b33f9bb83145be12868c3c475bf8ea87c107063d01377cc0e8e/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1e690d48f923c253f28151b3a6b4e335f2b06bf669c68a02665bc150b7839e94", size = 452317, upload-time = "2025-10-19T22:25:32.75Z" }, - { url = 
"https://files.pythonhosted.org/packages/9e/8c/a04d486ca55b5abb7eaa65b39df8d891b7b1635b22db2163734dc273579a/fastuuid-0.14.0-cp311-cp311-win32.whl", hash = "sha256:a6f46790d59ab38c6aa0e35c681c0484b50dc0acf9e2679c005d61e019313c24", size = 154804, upload-time = "2025-10-19T22:24:15.615Z" }, - { url = "https://files.pythonhosted.org/packages/9c/b2/2d40bf00820de94b9280366a122cbaa60090c8cf59e89ac3938cf5d75895/fastuuid-0.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:e150eab56c95dc9e3fefc234a0eedb342fac433dacc273cd4d150a5b0871e1fa", size = 156099, upload-time = "2025-10-19T22:24:31.646Z" }, { url = "https://files.pythonhosted.org/packages/02/a2/e78fcc5df65467f0d207661b7ef86c5b7ac62eea337c0c0fcedbeee6fb13/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77e94728324b63660ebf8adb27055e92d2e4611645bf12ed9d88d30486471d0a", size = 510164, upload-time = "2025-10-19T22:31:45.635Z" }, { url = "https://files.pythonhosted.org/packages/2b/b3/c846f933f22f581f558ee63f81f29fa924acd971ce903dab1a9b6701816e/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:caa1f14d2102cb8d353096bc6ef6c13b2c81f347e6ab9d6fbd48b9dea41c153d", size = 261837, upload-time = "2025-10-19T22:38:38.53Z" }, { url = "https://files.pythonhosted.org/packages/54/ea/682551030f8c4fa9a769d9825570ad28c0c71e30cf34020b85c1f7ee7382/fastuuid-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d23ef06f9e67163be38cece704170486715b177f6baae338110983f99a72c070", size = 251370, upload-time = "2025-10-19T22:40:26.07Z" }, @@ -2321,68 +1896,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/18/79/1b8fa1bb3568781e84c9200f951c735f3f157429f44be0495da55894d620/filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25", size = 19970, upload-time = "2022-11-02T17:34:01.425Z" }, ] -[[package]] -name = "flashinfer-python" -version = "0.6.1" -source = { registry = 
"https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.12.*' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < '3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] -dependencies = [ - { name = "apache-tvm-ffi", marker = "sys_platform != 'linux'" }, - { name = "click", marker = "sys_platform != 'linux'" }, - { name = "einops", marker = "sys_platform != 'linux'" }, - { name = "ninja", marker = "sys_platform != 'linux'" }, - { name = "numpy", marker = "sys_platform != 'linux'" }, - { name = "nvidia-cudnn-frontend", marker = "sys_platform != 'linux'" }, - { name = "nvidia-cutlass-dsl", marker = "sys_platform != 'linux'" }, - { name = "nvidia-ml-py", marker = "sys_platform != 'linux'" }, - { name = "packaging", marker = "sys_platform != 'linux'" }, - { name = "requests", marker = "sys_platform != 'linux'" }, - { name = "tabulate", marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "tqdm", marker = "sys_platform != 'linux'" }, -] -sdist = { url 
= "https://files.pythonhosted.org/packages/68/81/5a84e14df7358d2c2903b18c6f2779bd4b4a6739076d01a847d4c18fb102/flashinfer_python-0.6.1.tar.gz", hash = "sha256:8dc2fc5dc187fc70151d5f39ef560fde8a38117a4f6cf40dce0ddb09cbd4f0bf", size = 5141191, upload-time = "2026-01-14T05:40:27.825Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/d5/bca632bb5781689415186421bbee2ad39ae8a39b0996d579c76901e5c66f/flashinfer_python-0.6.1-py3-none-any.whl", hash = "sha256:610dd4ac15e7a0874b79e7577d027cb35133e8dc31dc3137c2f2d6497fe46f18", size = 7580432, upload-time = "2026-01-14T05:40:25.636Z" }, -] - [[package]] name = "flashinfer-python" version = "0.6.4" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", - "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform == 'linux'", -] dependencies = [ - { name = "apache-tvm-ffi", marker = "sys_platform == 'linux'" }, - { name = "click", marker = "sys_platform == 'linux'" }, - { name = "einops", marker = "sys_platform == 'linux'" }, - { name = "ninja", marker = "sys_platform == 'linux'" }, - { name = "numpy", marker = "sys_platform == 'linux'" }, - { name = "nvidia-cudnn-frontend", marker = "sys_platform == 'linux'" }, - { name = "nvidia-cutlass-dsl", marker = "sys_platform == 'linux'" }, - { name = "nvidia-ml-py", marker = "sys_platform == 'linux'" }, - { name = "packaging", marker = "sys_platform == 'linux'" }, - { name = "requests", marker = "sys_platform == 'linux'" }, - { name = "tabulate", marker = "sys_platform == 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "tqdm", marker = "sys_platform == 'linux'" }, + { name = "apache-tvm-ffi" }, + { name = "click" }, + { name = "einops" }, + { name = "ninja" }, + { name = 
"numpy" }, + { name = "nvidia-cudnn-frontend" }, + { name = "nvidia-cutlass-dsl" }, + { name = "nvidia-ml-py" }, + { name = "packaging" }, + { name = "requests" }, + { name = "tabulate" }, + { name = "torch" }, + { name = "tqdm" }, ] sdist = { url = "https://files.pythonhosted.org/packages/77/45/15645d2a4ee81d08206f3e132a77323e48312f510462415d7cd1122eba43/flashinfer_python-0.6.4.tar.gz", hash = "sha256:e6ab798bd1030e5ff7a3bc6952f36386c406928f60b79cf964a6db7aa7ccde75", size = 5337134, upload-time = "2026-02-19T07:33:36.647Z" } wheels = [ @@ -2427,14 +1958,6 @@ version = "4.61.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ec/ca/cf17b88a8df95691275a3d77dc0a5ad9907f328ae53acbe6795da1b2f5ed/fonttools-4.61.1.tar.gz", hash = "sha256:6675329885c44657f826ef01d9e4fb33b9158e9d93c537d84ad8399539bc6f69", size = 3565756, upload-time = "2025-12-12T17:31:24.246Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/12/bf9f4eaa2fad039356cc627587e30ed008c03f1cebd3034376b5ee8d1d44/fonttools-4.61.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c6604b735bb12fef8e0efd5578c9fb5d3d8532d5001ea13a19cddf295673ee09", size = 2852213, upload-time = "2025-12-12T17:29:46.675Z" }, - { url = "https://files.pythonhosted.org/packages/ac/49/4138d1acb6261499bedde1c07f8c2605d1d8f9d77a151e5507fd3ef084b6/fonttools-4.61.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5ce02f38a754f207f2f06557523cd39a06438ba3aafc0639c477ac409fc64e37", size = 2401689, upload-time = "2025-12-12T17:29:48.769Z" }, - { url = "https://files.pythonhosted.org/packages/e5/fe/e6ce0fe20a40e03aef906af60aa87668696f9e4802fa283627d0b5ed777f/fonttools-4.61.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77efb033d8d7ff233385f30c62c7c79271c8885d5c9657d967ede124671bbdfb", size = 5058809, upload-time = "2025-12-12T17:29:51.701Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/61/1ca198af22f7dd22c17ab86e9024ed3c06299cfdb08170640e9996d501a0/fonttools-4.61.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:75c1a6dfac6abd407634420c93864a1e274ebc1c7531346d9254c0d8f6ca00f9", size = 5036039, upload-time = "2025-12-12T17:29:53.659Z" }, - { url = "https://files.pythonhosted.org/packages/99/cc/fa1801e408586b5fce4da9f5455af8d770f4fc57391cd5da7256bb364d38/fonttools-4.61.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0de30bfe7745c0d1ffa2b0b7048fb7123ad0d71107e10ee090fa0b16b9452e87", size = 5034714, upload-time = "2025-12-12T17:29:55.592Z" }, - { url = "https://files.pythonhosted.org/packages/bf/aa/b7aeafe65adb1b0a925f8f25725e09f078c635bc22754f3fecb7456955b0/fonttools-4.61.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:58b0ee0ab5b1fc9921eccfe11d1435added19d6494dde14e323f25ad2bc30c56", size = 5158648, upload-time = "2025-12-12T17:29:57.861Z" }, - { url = "https://files.pythonhosted.org/packages/99/f9/08ea7a38663328881384c6e7777bbefc46fd7d282adfd87a7d2b84ec9d50/fonttools-4.61.1-cp311-cp311-win32.whl", hash = "sha256:f79b168428351d11e10c5aeb61a74e1851ec221081299f4cf56036a95431c43a", size = 2280681, upload-time = "2025-12-12T17:29:59.943Z" }, - { url = "https://files.pythonhosted.org/packages/07/ad/37dd1ae5fa6e01612a1fbb954f0927681f282925a86e86198ccd7b15d515/fonttools-4.61.1-cp311-cp311-win_amd64.whl", hash = "sha256:fe2efccb324948a11dd09d22136fe2ac8a97d6c1347cf0b58a911dcd529f66b7", size = 2331951, upload-time = "2025-12-12T17:30:02.254Z" }, { url = "https://files.pythonhosted.org/packages/6f/16/7decaa24a1bd3a70c607b2e29f0adc6159f36a7e40eaba59846414765fd4/fonttools-4.61.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f3cb4a569029b9f291f88aafc927dd53683757e640081ca8c412781ea144565e", size = 2851593, upload-time = "2025-12-12T17:30:04.225Z" }, { url = 
"https://files.pythonhosted.org/packages/94/98/3c4cb97c64713a8cf499b3245c3bf9a2b8fd16a3e375feff2aed78f96259/fonttools-4.61.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41a7170d042e8c0024703ed13b71893519a1a6d6e18e933e3ec7507a2c26a4b2", size = 2400231, upload-time = "2025-12-12T17:30:06.47Z" }, { url = "https://files.pythonhosted.org/packages/b7/37/82dbef0f6342eb01f54bca073ac1498433d6ce71e50c3c3282b655733b31/fonttools-4.61.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10d88e55330e092940584774ee5e8a6971b01fc2f4d3466a1d6c158230880796", size = 4954103, upload-time = "2025-12-12T17:30:08.432Z" }, @@ -2476,22 +1999,6 @@ version = "1.8.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, - { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, - { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = 
"2025-10-06T05:35:48.38Z" }, - { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, - { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, - { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, - { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, - { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", 
hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, - { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, - { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, - { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, - { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, - { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, - { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" 
}, - { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, @@ -2594,9 +2101,9 @@ name = "gguf" version = "0.17.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", marker = "sys_platform == 'linux'" }, - { name = "pyyaml", marker = "sys_platform == 'linux'" }, - { name = "tqdm", marker = "sys_platform == 'linux'" }, + { name = "numpy" }, + { name = "pyyaml" }, + { name = "tqdm" }, ] sdist = { url = "https://files.pythonhosted.org/packages/08/08/7de1ca4b71e7bf33b547f82bb22505e221b5fa42f67d635e200e0ad22ad6/gguf-0.17.1.tar.gz", hash = "sha256:36ad71aad900a3e75fc94ebe96ea6029f03a4e44be7627ef7ad3d03e8c7bcb53", size = 89338, upload-time = "2025-06-19T14:00:33.705Z" } wheels = [ @@ -2635,8 +2142,7 @@ dependencies = [ { name = "google-auth" }, { name = "googleapis-common-protos" 
}, { name = "proto-plus" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, { name = "requests" }, ] sdist = { url = "https://files.pythonhosted.org/packages/22/98/586ec94553b569080caef635f98a3723db36a38eac0e3d7eb3ea9d2e4b9a/google_api_core-2.30.0.tar.gz", hash = "sha256:02edfa9fab31e17fc0befb5f161b3bf93c9096d99aed584625f38065c511ad9b", size = 176959, upload-time = "2026-02-18T20:28:11.926Z" } @@ -2723,11 +2229,6 @@ version = "1.8.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/03/41/4b9c02f99e4c5fb477122cd5437403b552873f014616ac1d19ac8221a58d/google_crc32c-1.8.0.tar.gz", hash = "sha256:a428e25fb7691024de47fecfbff7ff957214da51eddded0da0ae0e0f03a2cf79", size = 14192, upload-time = "2025-12-16T00:35:25.142Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/ef/21ccfaab3d5078d41efe8612e0ed0bfc9ce22475de074162a91a25f7980d/google_crc32c-1.8.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:014a7e68d623e9a4222d663931febc3033c5c7c9730785727de2a81f87d5bab8", size = 31298, upload-time = "2025-12-16T00:20:32.241Z" }, - { url = "https://files.pythonhosted.org/packages/c5/b8/f8413d3f4b676136e965e764ceedec904fe38ae8de0cdc52a12d8eb1096e/google_crc32c-1.8.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:86cfc00fe45a0ac7359e5214a1704e51a99e757d0272554874f419f79838c5f7", size = 30872, upload-time = "2025-12-16T00:33:58.785Z" }, - { url = "https://files.pythonhosted.org/packages/f6/fd/33aa4ec62b290477181c55bb1c9302c9698c58c0ce9a6ab4874abc8b0d60/google_crc32c-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:19b40d637a54cb71e0829179f6cb41835f0fbd9e8eb60552152a8b52c36cbe15", size = 33243, 
upload-time = "2025-12-16T00:40:21.46Z" }, - { url = "https://files.pythonhosted.org/packages/71/03/4820b3bd99c9653d1a5210cb32f9ba4da9681619b4d35b6a052432df4773/google_crc32c-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:17446feb05abddc187e5441a45971b8394ea4c1b6efd88ab0af393fd9e0a156a", size = 33608, upload-time = "2025-12-16T00:40:22.204Z" }, - { url = "https://files.pythonhosted.org/packages/7c/43/acf61476a11437bf9733fb2f70599b1ced11ec7ed9ea760fdd9a77d0c619/google_crc32c-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:71734788a88f551fbd6a97be9668a0020698e07b2bf5b3aa26a36c10cdfb27b2", size = 34439, upload-time = "2025-12-16T00:35:20.458Z" }, { url = "https://files.pythonhosted.org/packages/e9/5f/7307325b1198b59324c0fa9807cafb551afb65e831699f2ce211ad5c8240/google_crc32c-1.8.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:4b8286b659c1335172e39563ab0a768b8015e88e08329fa5321f774275fc3113", size = 31300, upload-time = "2025-12-16T00:21:56.723Z" }, { url = "https://files.pythonhosted.org/packages/21/8e/58c0d5d86e2220e6a37befe7e6a94dd2f6006044b1a33edf1ff6d9f7e319/google_crc32c-1.8.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:2a3dc3318507de089c5384cc74d54318401410f82aa65b2d9cdde9d297aca7cb", size = 30867, upload-time = "2025-12-16T00:38:31.302Z" }, { url = "https://files.pythonhosted.org/packages/ce/a9/a780cc66f86335a6019f557a8aaca8fbb970728f0efd2430d15ff1beae0e/google_crc32c-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14f87e04d613dfa218d6135e81b78272c3b904e2a7053b841481b38a7d901411", size = 33364, upload-time = "2025-12-16T00:40:22.96Z" }, @@ -2743,8 +2244,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/56/15/c25671c7aad70f8179d858c55a6ae8404902abe0cdcf32a29d581792b491/google_crc32c-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:b0d1a7afc6e8e4635564ba8aa5c0548e3173e41b6384d7711a9123165f582de2", size = 33381, upload-time = "2025-12-16T00:40:26.268Z" }, { url = "https://files.pythonhosted.org/packages/42/fa/f50f51260d7b0ef5d4898af122d8a7ec5a84e2984f676f746445f783705f/google_crc32c-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b3f68782f3cbd1bce027e48768293072813469af6a61a86f6bb4977a4380f21", size = 33734, upload-time = "2025-12-16T00:40:27.028Z" }, { url = "https://files.pythonhosted.org/packages/08/a5/7b059810934a09fb3ccb657e0843813c1fee1183d3bc2c8041800374aa2c/google_crc32c-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:d511b3153e7011a27ab6ee6bb3a5404a55b994dc1a7322c0b87b29606d9790e2", size = 34878, upload-time = "2025-12-16T00:35:23.142Z" }, - { url = "https://files.pythonhosted.org/packages/52/c5/c171e4d8c44fec1422d801a6d2e5d7ddabd733eeda505c79730ee9607f07/google_crc32c-1.8.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:87fa445064e7db928226b2e6f0d5304ab4cd0339e664a4e9a25029f384d9bb93", size = 28615, upload-time = "2025-12-16T00:40:29.298Z" }, - { url = "https://files.pythonhosted.org/packages/9c/97/7d75fe37a7a6ed171a2cf17117177e7aab7e6e0d115858741b41e9dd4254/google_crc32c-1.8.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f639065ea2042d5c034bf258a9f085eaa7af0cd250667c0635a3118e8f92c69c", size = 28800, upload-time = "2025-12-16T00:40:30.322Z" }, ] [[package]] @@ -2764,8 +2263,7 @@ name = "googleapis-common-protos" version = "1.72.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/e5/7b/adfd75544c415c487b33061fe7ae526165241c1ea133f9a9125a56b39fd8/googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5", size = 147433, upload-time = "2025-11-06T18:29:24.087Z" } wheels = [ @@ -2816,15 +2314,6 @@ version = "3.3.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/8a/99/1cd3411c56a410994669062bd73dd58270c00cc074cac15f385a1fd91f8a/greenlet-3.3.1.tar.gz", hash = "sha256:41848f3230b58c08bb43dee542e74a2a2e34d3c59dc3076cec9151aeeedcae98", size = 184690, upload-time = "2026-01-23T15:31:02.076Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/e8/2e1462c8fdbe0f210feb5ac7ad2d9029af8be3bf45bd9fa39765f821642f/greenlet-3.3.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:5fd23b9bc6d37b563211c6abbb1b3cab27db385a4449af5c32e932f93017080c", size = 274974, upload-time = "2026-01-23T15:31:02.891Z" }, - { url = "https://files.pythonhosted.org/packages/7e/a8/530a401419a6b302af59f67aaf0b9ba1015855ea7e56c036b5928793c5bd/greenlet-3.3.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09f51496a0bfbaa9d74d36a52d2580d1ef5ed4fdfcff0a73730abfbbbe1403dd", size = 577175, upload-time = "2026-01-23T16:00:56.213Z" }, - { url = "https://files.pythonhosted.org/packages/8e/89/7e812bb9c05e1aaef9b597ac1d0962b9021d2c6269354966451e885c4e6b/greenlet-3.3.1-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb0feb07fe6e6a74615ee62a880007d976cf739b6669cce95daa7373d4fc69c5", size = 590401, upload-time = "2026-01-23T16:05:26.365Z" }, - { url = "https://files.pythonhosted.org/packages/70/ae/e2d5f0e59b94a2269b68a629173263fa40b63da32f5c231307c349315871/greenlet-3.3.1-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:67ea3fc73c8cd92f42467a72b75e8f05ed51a0e9b1d15398c913416f2dafd49f", size = 601161, upload-time = 
"2026-01-23T16:15:53.456Z" }, - { url = "https://files.pythonhosted.org/packages/5c/ae/8d472e1f5ac5efe55c563f3eabb38c98a44b832602e12910750a7c025802/greenlet-3.3.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:39eda9ba259cc9801da05351eaa8576e9aa83eb9411e8f0c299e05d712a210f2", size = 590272, upload-time = "2026-01-23T15:32:49.411Z" }, - { url = "https://files.pythonhosted.org/packages/a8/51/0fde34bebfcadc833550717eade64e35ec8738e6b097d5d248274a01258b/greenlet-3.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e2e7e882f83149f0a71ac822ebf156d902e7a5d22c9045e3e0d1daf59cee2cc9", size = 1550729, upload-time = "2026-01-23T16:04:20.867Z" }, - { url = "https://files.pythonhosted.org/packages/16/c9/2fb47bee83b25b119d5a35d580807bb8b92480a54b68fef009a02945629f/greenlet-3.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80aa4d79eb5564f2e0a6144fcc744b5a37c56c4a92d60920720e99210d88db0f", size = 1615552, upload-time = "2026-01-23T15:33:45.743Z" }, - { url = "https://files.pythonhosted.org/packages/1f/54/dcf9f737b96606f82f8dd05becfb8d238db0633dd7397d542a296fe9cad3/greenlet-3.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:32e4ca9777c5addcbf42ff3915d99030d8e00173a56f80001fb3875998fe410b", size = 226462, upload-time = "2026-01-23T15:36:50.422Z" }, - { url = "https://files.pythonhosted.org/packages/91/37/61e1015cf944ddd2337447d8e97fb423ac9bc21f9963fb5f206b53d65649/greenlet-3.3.1-cp311-cp311-win_arm64.whl", hash = "sha256:da19609432f353fed186cc1b85e9440db93d489f198b4bdf42ae19cc9d9ac9b4", size = 225715, upload-time = "2026-01-23T15:33:17.298Z" }, { url = "https://files.pythonhosted.org/packages/f9/c8/9d76a66421d1ae24340dfae7e79c313957f6e3195c144d2c73333b5bfe34/greenlet-3.3.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:7e806ca53acf6d15a888405880766ec84721aa4181261cd11a457dfe9a7a4975", size = 276443, upload-time = "2026-01-23T15:30:10.066Z" }, { url = 
"https://files.pythonhosted.org/packages/81/99/401ff34bb3c032d1f10477d199724f5e5f6fbfb59816ad1455c79c1eb8e7/greenlet-3.3.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d842c94b9155f1c9b3058036c24ffb8ff78b428414a19792b2380be9cecf4f36", size = 597359, upload-time = "2026-01-23T16:00:57.394Z" }, { url = "https://files.pythonhosted.org/packages/2b/bc/4dcc0871ed557792d304f50be0f7487a14e017952ec689effe2180a6ff35/greenlet-3.3.1-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:20fedaadd422fa02695f82093f9a98bad3dab5fcda793c658b945fcde2ab27ba", size = 607805, upload-time = "2026-01-23T16:05:28.068Z" }, @@ -2871,16 +2360,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/06/8a/3d098f35c143a89520e568e6539cc098fcd294495910e359889ce8741c84/grpcio-1.78.0.tar.gz", hash = "sha256:7382b95189546f375c174f53a5fa873cef91c4b8005faa05cc5b3beea9c4f1c5", size = 12852416, upload-time = "2026-02-06T09:57:18.093Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/c7/d0b780a29b0837bf4ca9580904dfb275c1fc321ded7897d620af7047ec57/grpcio-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2777b783f6c13b92bd7b716667452c329eefd646bfb3f2e9dabea2e05dbd34f6", size = 5951525, upload-time = "2026-02-06T09:55:01.989Z" }, - { url = "https://files.pythonhosted.org/packages/c5/b1/96920bf2ee61df85a9503cb6f733fe711c0ff321a5a697d791b075673281/grpcio-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:9dca934f24c732750389ce49d638069c3892ad065df86cb465b3fa3012b70c9e", size = 11830418, upload-time = "2026-02-06T09:55:04.462Z" }, - { url = "https://files.pythonhosted.org/packages/83/0c/7c1528f098aeb75a97de2bae18c530f56959fb7ad6c882db45d9884d6edc/grpcio-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:459ab414b35f4496138d0ecd735fed26f1318af5e52cb1efbc82a09f0d5aa911", size = 6524477, upload-time = "2026-02-06T09:55:07.111Z" }, - { url = 
"https://files.pythonhosted.org/packages/8d/52/e7c1f3688f949058e19a011c4e0dec973da3d0ae5e033909677f967ae1f4/grpcio-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:082653eecbdf290e6e3e2c276ab2c54b9e7c299e07f4221872380312d8cf395e", size = 7198266, upload-time = "2026-02-06T09:55:10.016Z" }, - { url = "https://files.pythonhosted.org/packages/e5/61/8ac32517c1e856677282c34f2e7812d6c328fa02b8f4067ab80e77fdc9c9/grpcio-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85f93781028ec63f383f6bc90db785a016319c561cc11151fbb7b34e0d012303", size = 6730552, upload-time = "2026-02-06T09:55:12.207Z" }, - { url = "https://files.pythonhosted.org/packages/bd/98/b8ee0158199250220734f620b12e4a345955ac7329cfd908d0bf0fda77f0/grpcio-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f12857d24d98441af6a1d5c87442d624411db486f7ba12550b07788f74b67b04", size = 7304296, upload-time = "2026-02-06T09:55:15.044Z" }, - { url = "https://files.pythonhosted.org/packages/bd/0f/7b72762e0d8840b58032a56fdbd02b78fc645b9fa993d71abf04edbc54f4/grpcio-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5397fff416b79e4b284959642a4e95ac4b0f1ece82c9993658e0e477d40551ec", size = 8288298, upload-time = "2026-02-06T09:55:17.276Z" }, - { url = "https://files.pythonhosted.org/packages/24/ae/ae4ce56bc5bb5caa3a486d60f5f6083ac3469228faa734362487176c15c5/grpcio-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbe6e89c7ffb48518384068321621b2a69cab509f58e40e4399fdd378fa6d074", size = 7730953, upload-time = "2026-02-06T09:55:19.545Z" }, - { url = "https://files.pythonhosted.org/packages/b5/6e/8052e3a28eb6a820c372b2eb4b5e32d195c661e137d3eca94d534a4cfd8a/grpcio-1.78.0-cp311-cp311-win32.whl", hash = "sha256:6092beabe1966a3229f599d7088b38dfc8ffa1608b5b5cdda31e591e6500f856", size = 4076503, upload-time = "2026-02-06T09:55:21.521Z" }, - { url = 
"https://files.pythonhosted.org/packages/08/62/f22c98c5265dfad327251fa2f840b591b1df5f5e15d88b19c18c86965b27/grpcio-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:1afa62af6e23f88629f2b29ec9e52ec7c65a7176c1e0a83292b93c76ca882558", size = 4799767, upload-time = "2026-02-06T09:55:24.107Z" }, { url = "https://files.pythonhosted.org/packages/4e/f4/7384ed0178203d6074446b3c4f46c90a22ddf7ae0b3aee521627f54cfc2a/grpcio-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f9ab915a267fc47c7e88c387a3a28325b58c898e23d4995f765728f4e3dedb97", size = 5913985, upload-time = "2026-02-06T09:55:26.832Z" }, { url = "https://files.pythonhosted.org/packages/81/ed/be1caa25f06594463f685b3790b320f18aea49b33166f4141bfdc2bfb236/grpcio-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3f8904a8165ab21e07e58bf3e30a73f4dffc7a1e0dbc32d51c61b5360d26f43e", size = 11811853, upload-time = "2026-02-06T09:55:29.224Z" }, { url = "https://files.pythonhosted.org/packages/24/a7/f06d151afc4e64b7e3cc3e872d331d011c279aaab02831e40a81c691fb65/grpcio-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:859b13906ce098c0b493af92142ad051bf64c7870fa58a123911c88606714996", size = 6475766, upload-time = "2026-02-06T09:55:31.825Z" }, @@ -2918,8 +2397,8 @@ name = "grpcio-reflection" version = "1.71.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "grpcio", marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "grpcio" }, + { name = "protobuf" }, ] sdist = { url = "https://files.pythonhosted.org/packages/41/14/4e5f8e902fa9461abae292773b921a578f68333c7c3e731bcff7514f78cd/grpcio_reflection-1.71.2.tar.gz", hash = "sha256:bedfac3d2095d6c066b16b66bfce85b4be3e92dc9f3b7121e6f019d24a9c09c0", size = 18798, upload-time = "2025-06-28T04:24:06.019Z" } wheels = [ @@ -3092,13 +2571,6 @@ version = "0.7.1" source = { registry = 
"https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521, upload-time = "2025-10-10T03:54:31.002Z" }, - { url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375, upload-time = "2025-10-10T03:54:31.941Z" }, - { url = "https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621, upload-time = "2025-10-10T03:54:33.176Z" }, - { url = "https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954, upload-time = "2025-10-10T03:54:34.226Z" }, - { url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175, upload-time = "2025-10-10T03:54:35.942Z" }, - { url = 
"https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310, upload-time = "2025-10-10T03:54:37.1Z" }, - { url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875, upload-time = "2025-10-10T03:54:38.421Z" }, { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" }, { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" }, { url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" }, @@ -3220,17 +2692,6 @@ version = "3.4.0.post0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/2d/30/7ab4b9e88e7946f6beef419f74edcc541df3ea562c7882257b4eaa82417d/ijson-3.4.0.post0.tar.gz", hash = "sha256:9aa02dc70bb245670a6ca7fba737b992aeeb4895360980622f7e568dbf23e41e", size = 67216, upload-time = "2025-10-10T05:29:25.62Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a7/ac/3d57249d4acba66a33eaef794edb5b2a2222ca449ae08800f8abe9286645/ijson-3.4.0.post0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b473112e72c0c506da425da3278367b6680f340ecc093084693a1e819d28435", size = 88278, upload-time = "2025-10-10T05:27:55.403Z" }, - { url = "https://files.pythonhosted.org/packages/12/fb/2d068d23d1a665f500282ceb6f2473952a95fc7107d739fd629b4ab41959/ijson-3.4.0.post0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:043f9b7cf9cc744263a78175e769947733710d2412d25180df44b1086b23ebd5", size = 59898, upload-time = "2025-10-10T05:27:56.361Z" }, - { url = "https://files.pythonhosted.org/packages/26/3d/8b14589dfb0e5dbb7bcf9063e53d3617c041cf315ff3dfa60945382237ce/ijson-3.4.0.post0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b55e49045f4c8031f3673f56662fd828dc9e8d65bd3b03a9420dda0d370e64ba", size = 59945, upload-time = "2025-10-10T05:27:57.581Z" }, - { url = "https://files.pythonhosted.org/packages/77/57/086a75094397d4b7584698a540a279689e12905271af78cdfc903bf9eaf8/ijson-3.4.0.post0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:11f13b73194ea2a5a8b4a2863f25b0b4624311f10db3a75747b510c4958179b0", size = 131318, upload-time = "2025-10-10T05:27:58.453Z" }, - { url = "https://files.pythonhosted.org/packages/df/35/7f61e9ce4a9ff1306ec581eb851f8a660439126d92ee595c6dc8084aac97/ijson-3.4.0.post0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:659acb2843433e080c271ecedf7d19c71adde1ee5274fc7faa2fec0a793f9f1c", size = 137990, upload-time = "2025-10-10T05:27:59.328Z" }, - { url = "https://files.pythonhosted.org/packages/59/bf/590bbc3c3566adce5e2f43ba5894520cbaf19a3e7f38c1250926ba67eee4/ijson-3.4.0.post0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:deda4cfcaafa72ca3fa845350045b1d0fef9364ec9f413241bb46988afbe6ee6", size = 134416, upload-time = 
"2025-10-10T05:28:00.317Z" }, - { url = "https://files.pythonhosted.org/packages/24/c1/fb719049851979df71f3e039d6f1a565d349c9cb1b29c0f8775d9db141b4/ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47352563e8c594360bacee2e0753e97025f0861234722d02faace62b1b6d2b2a", size = 138034, upload-time = "2025-10-10T05:28:01.627Z" }, - { url = "https://files.pythonhosted.org/packages/10/ce/ccda891f572876aaf2c43f0b2079e31d5b476c3ae53196187eab1a788eff/ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5a48b9486242d1295abe7fd0fbb6308867da5ca3f69b55c77922a93c2b6847aa", size = 132510, upload-time = "2025-10-10T05:28:03.141Z" }, - { url = "https://files.pythonhosted.org/packages/11/b5/ca8e64ab7cf5252f358e467be767630f085b5bbcd3c04333a3a5f36c3dd3/ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9c0886234d1fae15cf4581a430bdba03d79251c1ab3b07e30aa31b13ef28d01c", size = 134907, upload-time = "2025-10-10T05:28:04.438Z" }, - { url = "https://files.pythonhosted.org/packages/93/14/63a4d5dc548690f29f0c2fc9cabd5ecbb37532547439c05f5b3b9ce73021/ijson-3.4.0.post0-cp311-cp311-win32.whl", hash = "sha256:fecae19b5187d92900c73debb3a979b0b3290a53f85df1f8f3c5ba7d1e9fb9cb", size = 52006, upload-time = "2025-10-10T05:28:05.424Z" }, - { url = "https://files.pythonhosted.org/packages/fa/bf/932740899e572a97f9be0c6cd64ebda557eae7701ac216fc284aba21786d/ijson-3.4.0.post0-cp311-cp311-win_amd64.whl", hash = "sha256:b39dbf87071f23a23c8077eea2ae7cfeeca9ff9ffec722dfc8b5f352e4dd729c", size = 54410, upload-time = "2025-10-10T05:28:06.264Z" }, { url = "https://files.pythonhosted.org/packages/7d/fe/3b6af0025288e769dbfa30485dae1b3bd3f33f00390f3ee532cbb1c33e9b/ijson-3.4.0.post0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b607a500fca26101be47d2baf7cddb457b819ab60a75ce51ed1092a40da8b2f9", size = 87847, upload-time = "2025-10-10T05:28:07.229Z" }, { url = 
"https://files.pythonhosted.org/packages/6e/a5/95ee2ca82f3b1a57892452f6e5087607d56c620beb8ce625475194568698/ijson-3.4.0.post0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4827d9874a6a81625412c59f7ca979a84d01f7f6bfb3c6d4dc4c46d0382b14e0", size = 59815, upload-time = "2025-10-10T05:28:08.448Z" }, { url = "https://files.pythonhosted.org/packages/51/8d/5a704ab3c17c55c21c86423458db8610626ca99cc9086a74dfeb7ee9054c/ijson-3.4.0.post0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4d4afec780881edb2a0d2dd40b1cdbe246e630022d5192f266172a0307986a7", size = 59648, upload-time = "2025-10-10T05:28:09.307Z" }, @@ -3286,12 +2747,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/15/f3/6419d1d5795a16591233d3aa3747b084e82c0c1d7184bdad9be638174560/ijson-3.4.0.post0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b982a3597b0439ce9c8f4cfc929d86c6ed43907908be1e8463a34dc35fe5b258", size = 204825, upload-time = "2025-10-10T05:29:04.242Z" }, { url = "https://files.pythonhosted.org/packages/1f/8d/a520e6902129c55fa94428ea0a22e8547540d5e7ca30f18b39594a5feea2/ijson-3.4.0.post0-cp314-cp314t-win32.whl", hash = "sha256:4e39bfdc36b0b460ef15a06550a6a385c64c81f7ac205ccff39bd45147918912", size = 55559, upload-time = "2025-10-10T05:29:05.681Z" }, { url = "https://files.pythonhosted.org/packages/20/67/0ac6dd0045957ba1270b7b1860864f7d8cea4062e70b1083134c587e5768/ijson-3.4.0.post0-cp314-cp314t-win_amd64.whl", hash = "sha256:17e45262a5ddef39894013fb1548ee7094e444c8389eb1a97f86708b19bea03e", size = 58238, upload-time = "2025-10-10T05:29:06.656Z" }, - { url = "https://files.pythonhosted.org/packages/43/66/27cfcea16e85b95e33814eae2052dab187206b8820cdd90aa39d32ffb441/ijson-3.4.0.post0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:add9242f886eae844a7410b84aee2bbb8bdc83c624f227cb1fdb2d0476a96cb1", size = 57029, upload-time = "2025-10-10T05:29:19.733Z" }, - { url = 
"https://files.pythonhosted.org/packages/b8/1b/df3f1561c6629241fb2f8bd7ea1da14e3c2dd16fe9d7cbc97120870ed09c/ijson-3.4.0.post0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:69718ed41710dfcaa7564b0af42abc05875d4f7aaa24627c808867ef32634bc7", size = 56523, upload-time = "2025-10-10T05:29:20.641Z" }, - { url = "https://files.pythonhosted.org/packages/39/0a/6c6a3221ddecf62b696fde0e864415237e05b9a36ab6685a606b8fb3b5a2/ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:636b6eca96c6c43c04629c6b37fad0181662eaacf9877c71c698485637f752f9", size = 70546, upload-time = "2025-10-10T05:29:21.526Z" }, - { url = "https://files.pythonhosted.org/packages/42/cb/edf69755e86a3a9f8b418efd60239cb308af46c7c8e12f869423f51c9851/ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb5e73028f6e63d27b3d286069fe350ed80a4ccc493b022b590fea4bb086710d", size = 70532, upload-time = "2025-10-10T05:29:22.718Z" }, - { url = "https://files.pythonhosted.org/packages/96/7e/c8730ea39b8712622cd5a1bdff676098208400e37bb92052ba52f93e2aa1/ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:461acf4320219459dabe5ed90a45cb86c9ba8cc6d6db9dad0d9427d42f57794c", size = 67927, upload-time = "2025-10-10T05:29:23.596Z" }, - { url = "https://files.pythonhosted.org/packages/ec/f2/53b6e9bdd2a91202066764eaa74b572ba4dede0fe47a5a26f4de34b7541a/ijson-3.4.0.post0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a0fedf09c0f6ffa2a99e7e7fd9c5f3caf74e655c1ee015a0797383e99382ebc3", size = 54657, upload-time = "2025-10-10T05:29:24.482Z" }, ] [[package]] @@ -3397,7 +2852,6 @@ dependencies = [ { name = "pygments" }, { name = "stack-data" }, { name = "traitlets" }, - { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/a6/60/2111715ea11f39b1535bed6024b7dec7918b71e5e5d30855a5b503056b50/ipython-9.10.0.tar.gz", hash = "sha256:cd9e656be97618a0676d058134cd44e6dc7012c0e5cb36a9ce96a8c904adaf77", size = 4426526, upload-time = "2026-02-02T10:00:33.594Z" } wheels = [ @@ -3466,9 +2920,6 @@ wheels = [ name = "jaraco-context" version = "6.1.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "backports-tarfile", marker = "python_full_version < '3.12'" }, -] sdist = { url = "https://files.pythonhosted.org/packages/cb/9c/a788f5bb29c61e456b8ee52ce76dbdd32fd72cd73dd67bc95f42c7a8d13c/jaraco_context-6.1.0.tar.gz", hash = "sha256:129a341b0a85a7db7879e22acd66902fda67882db771754574338898b2d5d86f", size = 15850, upload-time = "2026-01-13T02:53:53.847Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/8d/48/aa685dbf1024c7bd82bede569e3a85f82c32fd3d79ba5fea578f0159571a/jaraco_context-6.1.0-py3-none-any.whl", hash = "sha256:a43b5ed85815223d0d3cfdb6d7ca0d2bc8946f28f30b6f3216bda070f68badda", size = 7065, upload-time = "2026-01-13T02:53:53.031Z" }, @@ -3525,19 +2976,6 @@ version = "0.13.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/0d/5e/4ec91646aee381d01cdb9974e30882c9cd3b8c5d1079d6b5ff4af522439a/jiter-0.13.0.tar.gz", hash = "sha256:f2839f9c2c7e2dffc1bc5929a510e14ce0a946be9365fd1219e7ef342dae14f4", size = 164847, upload-time = "2026-02-02T12:37:56.441Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/71/29/499f8c9eaa8a16751b1c0e45e6f5f1761d180da873d417996cc7bddc8eef/jiter-0.13.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ea026e70a9a28ebbdddcbcf0f1323128a8db66898a06eaad3a4e62d2f554d096", size = 311157, upload-time = "2026-02-02T12:35:37.758Z" }, - { url = "https://files.pythonhosted.org/packages/50/f6/566364c777d2ab450b92100bea11333c64c38d32caf8dc378b48e5b20c46/jiter-0.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:66aa3e663840152d18cc8ff1e4faad3dd181373491b9cfdc6004b92198d67911", size = 319729, upload-time = "2026-02-02T12:35:39.246Z" }, - { url = "https://files.pythonhosted.org/packages/73/dd/560f13ec5e4f116d8ad2658781646cca91b617ae3b8758d4a5076b278f70/jiter-0.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3524798e70655ff19aec58c7d05adb1f074fecff62da857ea9be2b908b6d701", size = 354766, upload-time = "2026-02-02T12:35:40.662Z" }, - { url = "https://files.pythonhosted.org/packages/7c/0d/061faffcfe94608cbc28a0d42a77a74222bdf5055ccdbe5fd2292b94f510/jiter-0.13.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec7e287d7fbd02cb6e22f9a00dd9c9cd504c40a61f2c61e7e1f9690a82726b4c", size = 362587, upload-time = "2026-02-02T12:35:42.025Z" }, - { url = "https://files.pythonhosted.org/packages/92/c9/c66a7864982fd38a9773ec6e932e0398d1262677b8c60faecd02ffb67bf3/jiter-0.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47455245307e4debf2ce6c6e65a717550a0244231240dcf3b8f7d64e4c2f22f4", size = 487537, upload-time = "2026-02-02T12:35:43.459Z" }, - { url = "https://files.pythonhosted.org/packages/6c/86/84eb4352cd3668f16d1a88929b5888a3fe0418ea8c1dfc2ad4e7bf6e069a/jiter-0.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ee9da221dca6e0429c2704c1b3655fe7b025204a71d4d9b73390c759d776d165", size = 373717, upload-time = "2026-02-02T12:35:44.928Z" }, - { url = "https://files.pythonhosted.org/packages/6e/09/9fe4c159358176f82d4390407a03f506a8659ed13ca3ac93a843402acecf/jiter-0.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24ab43126d5e05f3d53a36a8e11eb2f23304c6c1117844aaaf9a0aa5e40b5018", size = 362683, upload-time = "2026-02-02T12:35:46.636Z" }, - { url = "https://files.pythonhosted.org/packages/c9/5e/85f3ab9caca0c1d0897937d378b4a515cae9e119730563572361ea0c48ae/jiter-0.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:9da38b4fedde4fb528c740c2564628fbab737166a0e73d6d46cb4bb5463ff411", size = 392345, upload-time = "2026-02-02T12:35:48.088Z" }, - { url = "https://files.pythonhosted.org/packages/12/4c/05b8629ad546191939e6f0c2f17e29f542a398f4a52fb987bc70b6d1eb8b/jiter-0.13.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b34c519e17658ed88d5047999a93547f8889f3c1824120c26ad6be5f27b6cf5", size = 517775, upload-time = "2026-02-02T12:35:49.482Z" }, - { url = "https://files.pythonhosted.org/packages/4d/88/367ea2eb6bc582c7052e4baf5ddf57ebe5ab924a88e0e09830dfb585c02d/jiter-0.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2a6394e6af690d462310a86b53c47ad75ac8c21dc79f120714ea449979cb1d3", size = 551325, upload-time = "2026-02-02T12:35:51.104Z" }, - { url = "https://files.pythonhosted.org/packages/f3/12/fa377ffb94a2f28c41afaed093e0d70cfe512035d5ecb0cad0ae4792d35e/jiter-0.13.0-cp311-cp311-win32.whl", hash = "sha256:0f0c065695f616a27c920a56ad0d4fc46415ef8b806bf8fc1cacf25002bd24e1", size = 204709, upload-time = "2026-02-02T12:35:52.467Z" }, - { url = "https://files.pythonhosted.org/packages/cb/16/8e8203ce92f844dfcd3d9d6a5a7322c77077248dbb12da52d23193a839cd/jiter-0.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0733312953b909688ae3c2d58d043aa040f9f1a6a75693defed7bc2cc4bf2654", size = 204560, upload-time = "2026-02-02T12:35:53.925Z" }, - { url = "https://files.pythonhosted.org/packages/44/26/97cc40663deb17b9e13c3a5cf29251788c271b18ee4d262c8f94798b8336/jiter-0.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:5d9b34ad56761b3bf0fbe8f7e55468704107608512350962d3317ffd7a4382d5", size = 189608, upload-time = "2026-02-02T12:35:55.304Z" }, { url = "https://files.pythonhosted.org/packages/2e/30/7687e4f87086829955013ca12a9233523349767f69653ebc27036313def9/jiter-0.13.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0a2bd69fc1d902e89925fc34d1da51b2128019423d7b339a45d9e99c894e0663", size = 307958, upload-time = "2026-02-02T12:35:57.165Z" }, { url = 
"https://files.pythonhosted.org/packages/c3/27/e57f9a783246ed95481e6749cc5002a8a767a73177a83c63ea71f0528b90/jiter-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f917a04240ef31898182f76a332f508f2cc4b57d2b4d7ad2dbfebbfe167eb505", size = 318597, upload-time = "2026-02-02T12:35:58.591Z" }, { url = "https://files.pythonhosted.org/packages/cf/52/e5719a60ac5d4d7c5995461a94ad5ef962a37c8bf5b088390e6fad59b2ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e2b199f446d3e82246b4fd9236d7cb502dc2222b18698ba0d986d2fecc6152", size = 348821, upload-time = "2026-02-02T12:36:00.093Z" }, @@ -3594,10 +3032,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/47/66/eea81dfff765ed66c68fd2ed8c96245109e13c896c2a5015c7839c92367e/jiter-0.13.0-cp314-cp314t-win32.whl", hash = "sha256:24dc96eca9f84da4131cdf87a95e6ce36765c3b156fc9ae33280873b1c32d5f6", size = 201196, upload-time = "2026-02-02T12:37:19.101Z" }, { url = "https://files.pythonhosted.org/packages/ff/32/4ac9c7a76402f8f00d00842a7f6b83b284d0cf7c1e9d4227bc95aa6d17fa/jiter-0.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0a8d76c7524087272c8ae913f5d9d608bd839154b62c4322ef65723d2e5bb0b8", size = 204215, upload-time = "2026-02-02T12:37:20.495Z" }, { url = "https://files.pythonhosted.org/packages/f9/8e/7def204fea9f9be8b3c21a6f2dd6c020cf56c7d5ff753e0e23ed7f9ea57e/jiter-0.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2c26cf47e2cad140fa23b6d58d435a7c0161f5c514284802f25e87fddfe11024", size = 187152, upload-time = "2026-02-02T12:37:22.124Z" }, - { url = "https://files.pythonhosted.org/packages/79/b3/3c29819a27178d0e461a8571fb63c6ae38be6dc36b78b3ec2876bbd6a910/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b1cbfa133241d0e6bdab48dcdc2604e8ba81512f6bbd68ec3e8e1357dd3c316c", size = 307016, upload-time = "2026-02-02T12:37:42.755Z" }, - { url = 
"https://files.pythonhosted.org/packages/eb/ae/60993e4b07b1ac5ebe46da7aa99fdbb802eb986c38d26e3883ac0125c4e0/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:db367d8be9fad6e8ebbac4a7578b7af562e506211036cba2c06c3b998603c3d2", size = 305024, upload-time = "2026-02-02T12:37:44.774Z" }, - { url = "https://files.pythonhosted.org/packages/77/fa/2227e590e9cf98803db2811f172b2d6460a21539ab73006f251c66f44b14/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45f6f8efb2f3b0603092401dc2df79fa89ccbc027aaba4174d2d4133ed661434", size = 339337, upload-time = "2026-02-02T12:37:46.668Z" }, - { url = "https://files.pythonhosted.org/packages/2d/92/015173281f7eb96c0ef580c997da8ef50870d4f7f4c9e03c845a1d62ae04/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:597245258e6ad085d064780abfb23a284d418d3e61c57362d9449c6c7317ee2d", size = 346395, upload-time = "2026-02-02T12:37:48.09Z" }, { url = "https://files.pythonhosted.org/packages/80/60/e50fa45dd7e2eae049f0ce964663849e897300433921198aef94b6ffa23a/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:3d744a6061afba08dd7ae375dcde870cffb14429b7477e10f67e9e6d68772a0a", size = 305169, upload-time = "2026-02-02T12:37:50.376Z" }, { url = "https://files.pythonhosted.org/packages/d2/73/a009f41c5eed71c49bec53036c4b33555afcdee70682a18c6f66e396c039/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:ff732bd0a0e778f43d5009840f20b935e79087b4dc65bd36f1cd0f9b04b8ff7f", size = 303808, upload-time = "2026-02-02T12:37:52.092Z" }, { url = "https://files.pythonhosted.org/packages/c4/10/528b439290763bff3d939268085d03382471b442f212dca4ff5f12802d43/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab44b178f7981fcaea7e0a5df20e773c663d06ffda0198f1a524e91b2fde7e59", size = 337384, 
upload-time = "2026-02-02T12:37:53.582Z" }, @@ -3714,8 +3148,6 @@ version = "1.22.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/3a/2c/84076b352107ce12d56f28c313f1aca1be332d953dd96aec7b84976e6d53/kaldi-native-fbank-1.22.3.tar.gz", hash = "sha256:387bf87225c6b83c93ae652eeaef1b4d531994b6e398e7a77189de340674f9af", size = 71013, upload-time = "2025-10-09T02:31:21.487Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/53/720ffbe8b30de203570f397866334eb4c6364c9214699010f2086de911ff/kaldi_native_fbank-1.22.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48e5dd8e897bf4509be2c6eeb4bbab728eaaef1f214ae0510c96219c4253d17", size = 299054, upload-time = "2025-10-09T02:28:42.011Z" }, - { url = "https://files.pythonhosted.org/packages/52/3f/beb161e4fdf6710938ccf18418c147d87ba8f102903d6c6e4eda25588e22/kaldi_native_fbank-1.22.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce84c65779c9eed6ec02699797a4ba1859451977537a993be3ea8167a210ec3e", size = 321921, upload-time = "2025-10-09T02:31:21.646Z" }, { url = "https://files.pythonhosted.org/packages/43/28/6f4fd8953c0b3f30de4526fd024095032abcdc25b6736c77a891687c604e/kaldi_native_fbank-1.22.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f5a44b4a83cf9bf13d3f77858928068b06d3ec2238c27ff2e39393fbf7749c9f", size = 298887, upload-time = "2025-10-09T02:30:53.739Z" }, { url = "https://files.pythonhosted.org/packages/84/90/01ef7331c52b1eaf9916f3f7a535155aac2e9e2ddad12a141613d92758c7/kaldi_native_fbank-1.22.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f16e74372fe9e20abb4183f98a8e2288d5ee4c48d04d94b6160311170e007661", size = 322002, upload-time = "2025-10-09T02:30:13.04Z" }, { url = 
"https://files.pythonhosted.org/packages/9a/72/adb11d27c545aca1db442da744ee430a6aae377a33574bfd2ec159dcf673/kaldi_native_fbank-1.22.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f74b85948328ab4b4c88522f98a59f83dd5295443b08483e945c7de2c35e5dcc", size = 299276, upload-time = "2025-10-09T02:30:38.1Z" }, @@ -3729,7 +3161,6 @@ name = "keyring" version = "25.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.12'" }, { name = "jaraco-classes" }, { name = "jaraco-context" }, { name = "jaraco-functools" }, @@ -3748,19 +3179,6 @@ version = "1.4.9" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564, upload-time = "2025-08-10T21:27:49.279Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/ab/c80b0d5a9d8a1a65f4f815f2afff9798b12c3b9f31f1d304dd233dd920e2/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eb14a5da6dc7642b0f3a18f13654847cd8b7a2550e2645a5bda677862b03ba16", size = 124167, upload-time = "2025-08-10T21:25:53.403Z" }, - { url = "https://files.pythonhosted.org/packages/a0/c0/27fe1a68a39cf62472a300e2879ffc13c0538546c359b86f149cc19f6ac3/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39a219e1c81ae3b103643d2aedb90f1ef22650deb266ff12a19e7773f3e5f089", size = 66579, upload-time = "2025-08-10T21:25:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/31/a2/a12a503ac1fd4943c50f9822678e8015a790a13b5490354c68afb8489814/kiwisolver-1.4.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2405a7d98604b87f3fc28b1716783534b1b4b8510d8142adca34ee0bc3c87543", size = 65309, upload-time = "2025-08-10T21:25:55.76Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/e1/e533435c0be77c3f64040d68d7a657771194a63c279f55573188161e81ca/kiwisolver-1.4.9-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dc1ae486f9abcef254b5618dfb4113dd49f94c68e3e027d03cf0143f3f772b61", size = 1435596, upload-time = "2025-08-10T21:25:56.861Z" }, - { url = "https://files.pythonhosted.org/packages/67/1e/51b73c7347f9aabdc7215aa79e8b15299097dc2f8e67dee2b095faca9cb0/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a1f570ce4d62d718dce3f179ee78dac3b545ac16c0c04bb363b7607a949c0d1", size = 1246548, upload-time = "2025-08-10T21:25:58.246Z" }, - { url = "https://files.pythonhosted.org/packages/21/aa/72a1c5d1e430294f2d32adb9542719cfb441b5da368d09d268c7757af46c/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb27e7b78d716c591e88e0a09a2139c6577865d7f2e152488c2cc6257f460872", size = 1263618, upload-time = "2025-08-10T21:25:59.857Z" }, - { url = "https://files.pythonhosted.org/packages/a3/af/db1509a9e79dbf4c260ce0cfa3903ea8945f6240e9e59d1e4deb731b1a40/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:15163165efc2f627eb9687ea5f3a28137217d217ac4024893d753f46bce9de26", size = 1317437, upload-time = "2025-08-10T21:26:01.105Z" }, - { url = "https://files.pythonhosted.org/packages/e0/f2/3ea5ee5d52abacdd12013a94130436e19969fa183faa1e7c7fbc89e9a42f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bdee92c56a71d2b24c33a7d4c2856bd6419d017e08caa7802d2963870e315028", size = 2195742, upload-time = "2025-08-10T21:26:02.675Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9b/1efdd3013c2d9a2566aa6a337e9923a00590c516add9a1e89a768a3eb2fc/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:412f287c55a6f54b0650bd9b6dce5aceddb95864a1a90c87af16979d37c89771", size = 2290810, upload-time = "2025-08-10T21:26:04.009Z" }, - { url = 
"https://files.pythonhosted.org/packages/fb/e5/cfdc36109ae4e67361f9bc5b41323648cb24a01b9ade18784657e022e65f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2c93f00dcba2eea70af2be5f11a830a742fe6b579a1d4e00f47760ef13be247a", size = 2461579, upload-time = "2025-08-10T21:26:05.317Z" }, - { url = "https://files.pythonhosted.org/packages/62/86/b589e5e86c7610842213994cdea5add00960076bef4ae290c5fa68589cac/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f117e1a089d9411663a3207ba874f31be9ac8eaa5b533787024dc07aeb74f464", size = 2268071, upload-time = "2025-08-10T21:26:06.686Z" }, - { url = "https://files.pythonhosted.org/packages/3b/c6/f8df8509fd1eee6c622febe54384a96cfaf4d43bf2ccec7a0cc17e4715c9/kiwisolver-1.4.9-cp311-cp311-win_amd64.whl", hash = "sha256:be6a04e6c79819c9a8c2373317d19a96048e5a3f90bec587787e86a1153883c2", size = 73840, upload-time = "2025-08-10T21:26:07.94Z" }, - { url = "https://files.pythonhosted.org/packages/e2/2d/16e0581daafd147bc11ac53f032a2b45eabac897f42a338d0a13c1e5c436/kiwisolver-1.4.9-cp311-cp311-win_arm64.whl", hash = "sha256:0ae37737256ba2de764ddc12aed4956460277f00c4996d51a197e72f62f5eec7", size = 65159, upload-time = "2025-08-10T21:26:09.048Z" }, { url = "https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686, upload-time = "2025-08-10T21:26:10.034Z" }, { url = "https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460, upload-time = "2025-08-10T21:26:11.083Z" }, { url = 
"https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952, upload-time = "2025-08-10T21:26:12.058Z" }, @@ -3825,11 +3243,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/99/dd/841e9a66c4715477ea0abc78da039832fbb09dac5c35c58dc4c41a407b8a/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369", size = 2391835, upload-time = "2025-08-10T21:27:34.23Z" }, { url = "https://files.pythonhosted.org/packages/0c/28/4b2e5c47a0da96896fdfdb006340ade064afa1e63675d01ea5ac222b6d52/kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891", size = 79988, upload-time = "2025-08-10T21:27:35.587Z" }, { url = "https://files.pythonhosted.org/packages/80/be/3578e8afd18c88cdf9cb4cffde75a96d2be38c5a903f1ed0ceec061bd09e/kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32", size = 70260, upload-time = "2025-08-10T21:27:36.606Z" }, - { url = "https://files.pythonhosted.org/packages/a3/0f/36d89194b5a32c054ce93e586d4049b6c2c22887b0eb229c61c68afd3078/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:720e05574713db64c356e86732c0f3c5252818d05f9df320f0ad8380641acea5", size = 60104, upload-time = "2025-08-10T21:27:43.287Z" }, - { url = "https://files.pythonhosted.org/packages/52/ba/4ed75f59e4658fd21fe7dde1fee0ac397c678ec3befba3fe6482d987af87/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:17680d737d5335b552994a2008fab4c851bcd7de33094a82067ef3a576ff02fa", size = 58592, upload-time = "2025-08-10T21:27:44.314Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/01/a8ea7c5ea32a9b45ceeaee051a04c8ed4320f5add3c51bfa20879b765b70/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85b5352f94e490c028926ea567fc569c52ec79ce131dadb968d3853e809518c2", size = 80281, upload-time = "2025-08-10T21:27:45.369Z" }, - { url = "https://files.pythonhosted.org/packages/da/e3/dbd2ecdce306f1d07a1aaf324817ee993aab7aee9db47ceac757deabafbe/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:464415881e4801295659462c49461a24fb107c140de781d55518c4b80cb6790f", size = 78009, upload-time = "2025-08-10T21:27:46.376Z" }, - { url = "https://files.pythonhosted.org/packages/da/e9/0d4add7873a73e462aeb45c036a2dead2562b825aa46ba326727b3f31016/kiwisolver-1.4.9-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:fb940820c63a9590d31d88b815e7a3aa5915cad3ce735ab45f0c730b39547de1", size = 73929, upload-time = "2025-08-10T21:27:48.236Z" }, ] [[package]] @@ -4009,8 +3422,6 @@ version = "0.44.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/89/6a/95a3d3610d5c75293d5dbbb2a76480d5d4eeba641557b69fe90af6c5b84e/llvmlite-0.44.0.tar.gz", hash = "sha256:07667d66a5d150abed9157ab6c0b9393c9356f229784a4385c02f99e94fc94d4", size = 171880, upload-time = "2025-01-20T11:14:41.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/99/fe/d030f1849ebb1f394bb3f7adad5e729b634fb100515594aca25c354ffc62/llvmlite-0.44.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5d22c3bfc842668168a786af4205ec8e3ad29fb1bc03fd11fd48460d0df64c1", size = 42361858, upload-time = "2025-01-20T11:13:07.623Z" }, - { url = "https://files.pythonhosted.org/packages/d7/7a/ce6174664b9077fc673d172e4c888cb0b128e707e306bc33fff8c2035f0d/llvmlite-0.44.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:f01a394e9c9b7b1d4e63c327b096d10f6f0ed149ef53d38a09b3749dcf8c9610", size = 41184200, upload-time = "2025-01-20T11:13:20.058Z" }, { url = "https://files.pythonhosted.org/packages/cb/da/8341fd3056419441286c8e26bf436923021005ece0bff5f41906476ae514/llvmlite-0.44.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0143a5ef336da14deaa8ec26c5449ad5b6a2b564df82fcef4be040b9cacfea9", size = 42361901, upload-time = "2025-01-20T11:13:46.711Z" }, { url = "https://files.pythonhosted.org/packages/53/ad/d79349dc07b8a395a99153d7ce8b01d6fcdc9f8231355a5df55ded649b61/llvmlite-0.44.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d752f89e31b66db6f8da06df8b39f9b91e78c5feea1bf9e8c1fba1d1c24c065d", size = 41184247, upload-time = "2025-01-20T11:13:56.159Z" }, { url = "https://files.pythonhosted.org/packages/d2/1b/656f5a357de7135a3777bd735cc7c9b8f23b4d37465505bd0eaf4be9befe/llvmlite-0.44.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46224058b13c96af1365290bdfebe9a6264ae62fb79b2b55693deed11657a8bf", size = 42361904, upload-time = "2025-01-20T11:14:22.949Z" }, @@ -4022,10 +3433,10 @@ name = "lm-format-enforcer" version = "0.11.3" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "interegular", marker = "sys_platform == 'linux'" }, - { name = "packaging", marker = "sys_platform == 'linux'" }, - { name = "pydantic", marker = "sys_platform == 'linux'" }, - { name = "pyyaml", marker = "sys_platform == 'linux'" }, + { name = "interegular" }, + { name = "packaging" }, + { name = "pydantic" }, + { name = "pyyaml" }, ] sdist = { url = "https://files.pythonhosted.org/packages/84/d5/41cd417ba7dfdbbcfe46cebf81fb3dfd7c591b89897560ad05bb410a465d/lm_format_enforcer-0.11.3.tar.gz", hash = "sha256:e68081c108719cce284a9bcc889709b26ffb085a1945b5eba3a12cfa96d528da", size = 40258, upload-time = "2025-08-24T19:37:47.527Z" } wheels = [ @@ -4036,6 +3447,9 @@ wheels = [ name = "loguru" 
version = "0.7.3" source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" }, @@ -4062,11 +3476,9 @@ dependencies = [ { name = "ninja" }, { name = "packaging" }, { name = "setuptools" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, { name = "transformers" }, - { name = "triton", version = "3.5.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "triton", version = "3.6.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "triton" }, ] sdist = { url = "https://files.pythonhosted.org/packages/54/69/a87f06d9dba78c041adb81f2228e978aab179477c64f1a210c0fe0d63e8d/mamba_ssm-2.3.0.tar.gz", hash = "sha256:8294e12125f76021e4e190f4137e84a84935920eeda5d0037a6917524456b303", size = 121116, upload-time = "2026-01-12T17:07:22.152Z" } @@ -4097,17 +3509,6 @@ version = "3.0.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = 
"sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, - { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, - { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, - { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, - { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, - { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", 
hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, - { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, - { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, - { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, - { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, - { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, { url = 
"https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, @@ -4182,13 +3583,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/8a/76/d3c6e3a13fe484ebe7718d14e269c9569c4eb0020a968a327acb3b9a8fe6/matplotlib-3.10.8.tar.gz", hash = "sha256:2299372c19d56bcd35cf05a2738308758d32b9eaed2371898d8f5bd33f084aa3", size = 34806269, upload-time = "2025-12-10T22:56:51.155Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/86/de7e3a1cdcfc941483af70609edc06b83e7c8a0e0dc9ac325200a3f4d220/matplotlib-3.10.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6be43b667360fef5c754dda5d25a32e6307a03c204f3c0fc5468b78fa87b4160", size = 8251215, upload-time = "2025-12-10T22:55:16.175Z" }, - { url = "https://files.pythonhosted.org/packages/fd/14/baad3222f424b19ce6ad243c71de1ad9ec6b2e4eb1e458a48fdc6d120401/matplotlib-3.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2b336e2d91a3d7006864e0990c83b216fcdca64b5a6484912902cef87313d78", size = 8139625, upload-time = "2025-12-10T22:55:17.712Z" }, - { url = "https://files.pythonhosted.org/packages/8f/a0/7024215e95d456de5883e6732e708d8187d9753a21d32f8ddb3befc0c445/matplotlib-3.10.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:efb30e3baaea72ce5928e32bab719ab4770099079d66726a62b11b1ef7273be4", size = 8712614, upload-time = "2025-12-10T22:55:20.8Z" }, - { url = 
"https://files.pythonhosted.org/packages/5a/f4/b8347351da9a5b3f41e26cf547252d861f685c6867d179a7c9d60ad50189/matplotlib-3.10.8-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d56a1efd5bfd61486c8bc968fa18734464556f0fb8e51690f4ac25d85cbbbbc2", size = 9540997, upload-time = "2025-12-10T22:55:23.258Z" }, - { url = "https://files.pythonhosted.org/packages/9e/c0/c7b914e297efe0bc36917bf216b2acb91044b91e930e878ae12981e461e5/matplotlib-3.10.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:238b7ce5717600615c895050239ec955d91f321c209dd110db988500558e70d6", size = 9596825, upload-time = "2025-12-10T22:55:25.217Z" }, - { url = "https://files.pythonhosted.org/packages/6f/d3/a4bbc01c237ab710a1f22b4da72f4ff6d77eb4c7735ea9811a94ae239067/matplotlib-3.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:18821ace09c763ec93aef5eeff087ee493a24051936d7b9ebcad9662f66501f9", size = 8135090, upload-time = "2025-12-10T22:55:27.162Z" }, - { url = "https://files.pythonhosted.org/packages/89/dd/a0b6588f102beab33ca6f5218b31725216577b2a24172f327eaf6417d5c9/matplotlib-3.10.8-cp311-cp311-win_arm64.whl", hash = "sha256:bab485bcf8b1c7d2060b4fcb6fc368a9e6f4cd754c9c2fea281f4be21df394a2", size = 8012377, upload-time = "2025-12-10T22:55:29.185Z" }, { url = "https://files.pythonhosted.org/packages/9e/67/f997cdcbb514012eb0d10cd2b4b332667997fb5ebe26b8d41d04962fa0e6/matplotlib-3.10.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:64fcc24778ca0404ce0cb7b6b77ae1f4c7231cdd60e6778f999ee05cbd581b9a", size = 8260453, upload-time = "2025-12-10T22:55:30.709Z" }, { url = "https://files.pythonhosted.org/packages/7e/65/07d5f5c7f7c994f12c768708bd2e17a4f01a2b0f44a1c9eccad872433e2e/matplotlib-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9a5ca4ac220a0cdd1ba6bcba3608547117d30468fefce49bb26f55c1a3d5c58", size = 8148321, upload-time = "2025-12-10T22:55:33.265Z" }, { url = 
"https://files.pythonhosted.org/packages/3e/f3/c5195b1ae57ef85339fd7285dfb603b22c8b4e79114bae5f4f0fcf688677/matplotlib-3.10.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ab4aabc72de4ff77b3ec33a6d78a68227bf1123465887f9905ba79184a1cc04", size = 8716944, upload-time = "2025-12-10T22:55:34.922Z" }, @@ -4224,9 +3618,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4d/4b/e7beb6bbd49f6bae727a12b270a2654d13c397576d25bd6786e47033300f/matplotlib-3.10.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:595ba4d8fe983b88f0eec8c26a241e16d6376fe1979086232f481f8f3f67494c", size = 9614011, upload-time = "2025-12-10T22:56:33.85Z" }, { url = "https://files.pythonhosted.org/packages/7c/e6/76f2813d31f032e65f6f797e3f2f6e4aab95b65015924b1c51370395c28a/matplotlib-3.10.8-cp314-cp314t-win_amd64.whl", hash = "sha256:25d380fe8b1dc32cf8f0b1b448470a77afb195438bafdf1d858bfb876f3edf7b", size = 8362801, upload-time = "2025-12-10T22:56:36.107Z" }, { url = "https://files.pythonhosted.org/packages/5d/49/d651878698a0b67f23aa28e17f45a6d6dd3d3f933fa29087fa4ce5947b5a/matplotlib-3.10.8-cp314-cp314t-win_arm64.whl", hash = "sha256:113bb52413ea508ce954a02c10ffd0d565f9c3bc7f2eddc27dfe1731e71c7b5f", size = 8192560, upload-time = "2025-12-10T22:56:38.008Z" }, - { url = "https://files.pythonhosted.org/packages/04/30/3afaa31c757f34b7725ab9d2ba8b48b5e89c2019c003e7d0ead143aabc5a/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6da7c2ce169267d0d066adcf63758f0604aa6c3eebf67458930f9d9b79ad1db1", size = 8249198, upload-time = "2025-12-10T22:56:45.584Z" }, - { url = "https://files.pythonhosted.org/packages/48/2f/6334aec331f57485a642a7c8be03cb286f29111ae71c46c38b363230063c/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9153c3292705be9f9c64498a8872118540c3f4123d1a1c840172edf262c8be4a", size = 8136817, upload-time = "2025-12-10T22:56:47.339Z" }, - { url = 
"https://files.pythonhosted.org/packages/73/e4/6d6f14b2a759c622f191b2d67e9075a3f56aaccb3be4bb9bb6890030d0a0/matplotlib-3.10.8-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ae029229a57cd1e8fe542485f27e7ca7b23aa9e8944ddb4985d0bc444f1eca2", size = 8713867, upload-time = "2025-12-10T22:56:48.954Z" }, ] [[package]] @@ -4246,19 +3637,19 @@ name = "mcp" version = "1.26.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "anyio", marker = "sys_platform == 'linux'" }, - { name = "httpx", marker = "sys_platform == 'linux'" }, - { name = "httpx-sse", marker = "sys_platform == 'linux'" }, - { name = "jsonschema", marker = "sys_platform == 'linux'" }, - { name = "pydantic", marker = "sys_platform == 'linux'" }, - { name = "pydantic-settings", marker = "sys_platform == 'linux'" }, - { name = "pyjwt", extra = ["crypto"], marker = "sys_platform == 'linux'" }, - { name = "python-multipart", marker = "sys_platform == 'linux'" }, - { name = "sse-starlette", marker = "sys_platform == 'linux'" }, - { name = "starlette", marker = "sys_platform == 'linux'" }, - { name = "typing-extensions", marker = "sys_platform == 'linux'" }, - { name = "typing-inspection", marker = "sys_platform == 'linux'" }, - { name = "uvicorn", marker = "sys_platform == 'linux'" }, + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "pyjwt", extra = ["crypto"] }, + { name = "python-multipart" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/fc/6d/62e76bbb8144d6ed86e202b5edd8a4cb631e7c8130f3f4893c3f90262b10/mcp-1.26.0.tar.gz", hash = "sha256:db6e2ef491eecc1a0d93711a76f28dec2e05999f93afd48795da1c1137142c66", size = 608005, upload-time = 
"2026-01-24T19:40:32.468Z" } wheels = [ @@ -4304,13 +3695,10 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, { name = "packaging" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0a/e5/6698af70623ac6a4d6403f34520550ac4d9743913206f79c96c3497b87fc/megatron_core-0.15.2.tar.gz", hash = "sha256:8397898a0ade70c3ba96fc21335a7e022587bc139b91bf625ef76cd0e66d7b4f", size = 878187, upload-time = "2026-01-08T15:38:47.965Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/65/d994d36aeecbd236e24d8860c3347399c714c913c733c2717824ff10d1c8/megatron_core-0.15.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:015375a4632432ba8ff63a12f6ed89df3f525290b7192f3cde70d79c93ea8ecc", size = 2218355, upload-time = "2026-01-08T15:38:37.74Z" }, - { url = "https://files.pythonhosted.org/packages/3f/28/a6119ce2211268e2c9c1bbf31cfb560c4e03932bed3bcb4f03bdde329b54/megatron_core-0.15.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5220f1a9c19695c2b0c614f83e4575b2032d12ea7276f1e38bde4a2d90ee6ed3", size = 2250269, upload-time = "2026-01-08T15:38:40.207Z" }, { url = "https://files.pythonhosted.org/packages/32/61/96ae649e1f22e978db34006161b84e58b3dbc80587da096981f045ffa3ec/megatron_core-0.15.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:744fb4b788791342c53927df584d35d8cfb14330d3bdde95fc1555f6406bc50a", size = 2238920, upload-time = "2026-01-08T15:38:41.475Z" }, { url = "https://files.pythonhosted.org/packages/10/f0/42adab418f2850bcb56e7d9c03398cb85df98ff1ac996b225958207a5775/megatron_core-0.15.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:2402411a4ec077b05fcc703d948d6527d29f11200e9ec0b9c505b5576e24f7f2", size = 2263262, upload-time = "2026-01-08T15:38:43.577Z" }, { url = "https://files.pythonhosted.org/packages/b9/94/8235ad4525e3941cf02b40febb81ae3c75c1212628ae7a12c095a82e75ca/megatron_core-0.15.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:35c6b5fc4b3e8d332c33143d1a85793451f2845cdb766b8b62fefffef6d3a327", size = 2238145, upload-time = "2026-01-08T15:38:44.94Z" }, @@ -4322,8 +3710,7 @@ dev = [ { name = "av" }, { name = "causal-conv1d" }, { name = "einops" }, - { name = "flashinfer-python", version = "0.6.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "flashinfer-python", version = "0.6.4", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "flashinfer-python" }, { name = "mamba-ssm" }, { name = "megatron-energon", extra = ["av-decode"] }, { name = "multi-storage-client" }, @@ -4358,10 +3745,8 @@ dependencies = [ { name = "numpy" }, { name = "pillow" }, { name = "pyyaml" }, - { name = "s3fs", version = "0.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, - { name = "s3fs", version = "2025.9.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "s3fs" }, + { name = "torch" }, { name = "tqdm" }, { name = "webdataset" }, ] @@ -4385,14 +3770,14 @@ name = "mistral-common" version = "1.9.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "jsonschema", marker = "sys_platform == 'linux'" }, - { name = "numpy", marker = 
"sys_platform == 'linux'" }, - { name = "pillow", marker = "sys_platform == 'linux'" }, - { name = "pydantic", marker = "sys_platform == 'linux'" }, - { name = "pydantic-extra-types", extra = ["pycountry"], marker = "sys_platform == 'linux'" }, - { name = "requests", marker = "sys_platform == 'linux'" }, - { name = "tiktoken", marker = "sys_platform == 'linux'" }, - { name = "typing-extensions", marker = "sys_platform == 'linux'" }, + { name = "jsonschema" }, + { name = "numpy" }, + { name = "pillow" }, + { name = "pydantic" }, + { name = "pydantic-extra-types", extra = ["pycountry"] }, + { name = "requests" }, + { name = "tiktoken" }, + { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/db/ce/685b8127a326478e05501cb4c9ca23d1cd9f37e16c465a1e832c75aea709/mistral_common-1.9.1.tar.gz", hash = "sha256:550583d70a395c3586cfb748ffab53bd1d7c3409507f0efc0118bff30ffb26e9", size = 6338922, upload-time = "2026-02-12T10:53:41.639Z" } wheels = [ @@ -4401,7 +3786,7 @@ wheels = [ [package.optional-dependencies] image = [ - { name = "opencv-python-headless", marker = "sys_platform == 'linux'" }, + { name = "opencv-python-headless" }, ] [[package]] @@ -4413,11 +3798,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/0e/4a/c27b42ed9b1c7d13d9ba8b6905dece787d6259152f2309338aed29b2447b/ml_dtypes-0.5.4.tar.gz", hash = "sha256:8ab06a50fb9bf9666dd0fe5dfb4676fa2b0ac0f31ecff72a6c3af8e22c063453", size = 692314, upload-time = "2025-11-17T22:32:31.031Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/5e/712092cfe7e5eb667b8ad9ca7c54442f21ed7ca8979745f1000e24cf8737/ml_dtypes-0.5.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6c7ecb74c4bd71db68a6bea1edf8da8c34f3d9fe218f038814fd1d310ac76c90", size = 679734, upload-time = "2025-11-17T22:31:39.223Z" }, - { url = 
"https://files.pythonhosted.org/packages/4f/cf/912146dfd4b5c0eea956836c01dcd2fce6c9c844b2691f5152aca196ce4f/ml_dtypes-0.5.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc11d7e8c44a65115d05e2ab9989d1e045125d7be8e05a071a48bc76eb6d6040", size = 5056165, upload-time = "2025-11-17T22:31:41.071Z" }, - { url = "https://files.pythonhosted.org/packages/a9/80/19189ea605017473660e43762dc853d2797984b3c7bf30ce656099add30c/ml_dtypes-0.5.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:19b9a53598f21e453ea2fbda8aa783c20faff8e1eeb0d7ab899309a0053f1483", size = 5034975, upload-time = "2025-11-17T22:31:42.758Z" }, - { url = "https://files.pythonhosted.org/packages/b4/24/70bd59276883fdd91600ca20040b41efd4902a923283c4d6edcb1de128d2/ml_dtypes-0.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:7c23c54a00ae43edf48d44066a7ec31e05fdc2eee0be2b8b50dd1903a1db94bb", size = 210742, upload-time = "2025-11-17T22:31:44.068Z" }, - { url = "https://files.pythonhosted.org/packages/a0/c9/64230ef14e40aa3f1cb254ef623bf812735e6bec7772848d19131111ac0d/ml_dtypes-0.5.4-cp311-cp311-win_arm64.whl", hash = "sha256:557a31a390b7e9439056644cb80ed0735a6e3e3bb09d67fd5687e4b04238d1de", size = 160709, upload-time = "2025-11-17T22:31:46.557Z" }, { url = "https://files.pythonhosted.org/packages/a8/b8/3c70881695e056f8a32f8b941126cf78775d9a4d7feba8abcb52cb7b04f2/ml_dtypes-0.5.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a174837a64f5b16cab6f368171a1a03a27936b31699d167684073ff1c4237dac", size = 676927, upload-time = "2025-11-17T22:31:48.182Z" }, { url = "https://files.pythonhosted.org/packages/54/0f/428ef6881782e5ebb7eca459689448c0394fa0a80bea3aa9262cba5445ea/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7f7c643e8b1320fd958bf098aa7ecf70623a42ec5154e3be3be673f4c34d900", size = 5028464, upload-time = "2025-11-17T22:31:50.135Z" }, { url = 
"https://files.pythonhosted.org/packages/3a/cb/28ce52eb94390dda42599c98ea0204d74799e4d8047a0eb559b6fd648056/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ad459e99793fa6e13bd5b7e6792c8f9190b4e5a1b45c63aba14a4d0a7f1d5ff", size = 5009002, upload-time = "2025-11-17T22:31:52.001Z" }, @@ -4450,13 +3830,13 @@ name = "model-hosting-container-standards" version = "0.1.13" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "fastapi", marker = "sys_platform == 'linux'" }, - { name = "httpx", marker = "sys_platform == 'linux'" }, - { name = "jmespath", marker = "sys_platform == 'linux'" }, - { name = "pydantic", marker = "sys_platform == 'linux'" }, - { name = "setuptools", marker = "sys_platform == 'linux'" }, - { name = "starlette", marker = "sys_platform == 'linux'" }, - { name = "supervisor", marker = "sys_platform == 'linux'" }, + { name = "fastapi" }, + { name = "httpx" }, + { name = "jmespath" }, + { name = "pydantic" }, + { name = "setuptools" }, + { name = "starlette" }, + { name = "supervisor" }, ] sdist = { url = "https://files.pythonhosted.org/packages/d7/b7/a6a31b4dfd30d14b1019dc358f09c9d88ca38e555ba7c976e7d3e6b593fe/model_hosting_container_standards-0.1.13.tar.gz", hash = "sha256:27a1333410dde2719286a300a2803e24fdde407baa91894eb845c0f268aa194d", size = 79116, upload-time = "2026-01-09T21:45:20.683Z" } wheels = [ @@ -4513,10 +3893,6 @@ version = "1.1.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/71/46/b817349db6886d79e57a966346cf0902a426375aadc1e8e7a86a75e22f19/msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296", size = 416962, upload-time = "2025-10-08T09:14:51.997Z" }, - { url = "https://files.pythonhosted.org/packages/da/e0/6cc2e852837cd6086fe7d8406af4294e66827a60a4cf60b86575a4a65ca8/msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef", size = 426183, upload-time = "2025-10-08T09:14:53.477Z" }, - { url = "https://files.pythonhosted.org/packages/25/98/6a19f030b3d2ea906696cedd1eb251708e50a5891d0978b012cb6107234c/msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c", size = 411454, upload-time = "2025-10-08T09:14:54.648Z" }, - { url = "https://files.pythonhosted.org/packages/b7/cd/9098fcb6adb32187a70b7ecaabf6339da50553351558f37600e53a4a2a23/msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e", size = 422341, upload-time = "2025-10-08T09:14:56.328Z" }, { url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" }, { url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, 
upload-time = "2025-10-08T09:15:06.837Z" }, { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" }, @@ -4541,14 +3917,6 @@ version = "0.20.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ea/9c/bfbd12955a49180cbd234c5d29ec6f74fe641698f0cd9df154a854fc8a15/msgspec-0.20.0.tar.gz", hash = "sha256:692349e588fde322875f8d3025ac01689fead5901e7fb18d6870a44519d62a29", size = 317862, upload-time = "2025-11-24T03:56:28.934Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/03/59/fdcb3af72f750a8de2bcf39d62ada70b5eb17b06d7f63860e0a679cb656b/msgspec-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:09e0efbf1ac641fedb1d5496c59507c2f0dc62a052189ee62c763e0aae217520", size = 193345, upload-time = "2025-11-24T03:55:20.613Z" }, - { url = "https://files.pythonhosted.org/packages/5a/15/3c225610da9f02505d37d69a77f4a2e7daae2a125f99d638df211ba84e59/msgspec-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23ee3787142e48f5ee746b2909ce1b76e2949fbe0f97f9f6e70879f06c218b54", size = 186867, upload-time = "2025-11-24T03:55:22.4Z" }, - { url = "https://files.pythonhosted.org/packages/81/36/13ab0c547e283bf172f45491edfdea0e2cecb26ae61e3a7b1ae6058b326d/msgspec-0.20.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:81f4ac6f0363407ac0465eff5c7d4d18f26870e00674f8fcb336d898a1e36854", size = 215351, upload-time = "2025-11-24T03:55:23.958Z" }, - { url = "https://files.pythonhosted.org/packages/6b/96/5c095b940de3aa6b43a71ec76275ac3537b21bd45c7499b5a17a429110fa/msgspec-0.20.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:bb4d873f24ae18cd1334f4e37a178ed46c9d186437733351267e0a269bdf7e53", size = 219896, upload-time = "2025-11-24T03:55:25.356Z" }, - { url = "https://files.pythonhosted.org/packages/98/7a/81a7b5f01af300761087b114dafa20fb97aed7184d33aab64d48874eb187/msgspec-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b92b8334427b8393b520c24ff53b70f326f79acf5f74adb94fd361bcff8a1d4e", size = 220389, upload-time = "2025-11-24T03:55:26.99Z" }, - { url = "https://files.pythonhosted.org/packages/70/c0/3d0cce27db9a9912421273d49eab79ce01ecd2fed1a2f1b74af9b445f33c/msgspec-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:562c44b047c05cc0384e006fae7a5e715740215c799429e0d7e3e5adf324285a", size = 223348, upload-time = "2025-11-24T03:55:28.311Z" }, - { url = "https://files.pythonhosted.org/packages/89/5e/406b7d578926b68790e390d83a1165a9bfc2d95612a1a9c1c4d5c72ea815/msgspec-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:d1dcc93a3ce3d3195985bfff18a48274d0b5ffbc96fa1c5b89da6f0d9af81b29", size = 188713, upload-time = "2025-11-24T03:55:29.553Z" }, - { url = "https://files.pythonhosted.org/packages/47/87/14fe2316624ceedf76a9e94d714d194cbcb699720b210ff189f89ca4efd7/msgspec-0.20.0-cp311-cp311-win_arm64.whl", hash = "sha256:aa387aa330d2e4bd69995f66ea8fdc87099ddeedf6fdb232993c6a67711e7520", size = 174229, upload-time = "2025-11-24T03:55:31.107Z" }, { url = "https://files.pythonhosted.org/packages/d9/6f/1e25eee957e58e3afb2a44b94fa95e06cebc4c236193ed0de3012fff1e19/msgspec-0.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2aba22e2e302e9231e85edc24f27ba1f524d43c223ef5765bd8624c7df9ec0a5", size = 196391, upload-time = "2025-11-24T03:55:32.677Z" }, { url = "https://files.pythonhosted.org/packages/7f/ee/af51d090ada641d4b264992a486435ba3ef5b5634bc27e6eb002f71cef7d/msgspec-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:716284f898ab2547fedd72a93bb940375de9fbfe77538f05779632dc34afdfde", size = 188644, upload-time = "2025-11-24T03:55:33.934Z" }, { url = 
"https://files.pythonhosted.org/packages/49/d6/9709ee093b7742362c2934bfb1bbe791a1e09bed3ea5d8a18ce552fbfd73/msgspec-0.20.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:558ed73315efa51b1538fa8f1d3b22c8c5ff6d9a2a62eff87d25829b94fc5054", size = 218852, upload-time = "2025-11-24T03:55:35.575Z" }, @@ -4619,9 +3987,6 @@ dependencies = [ { name = "xattr" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/90/1e69cb6d71418b38a9409b0e2564efe1e7c12e18e63e478591ae0317dbcc/multi_storage_client-0.42.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3db30610d6bb15a5c211af9d7b11c8a1a13265893c1a625d5aaadacdb61a9a8e", size = 8805275, upload-time = "2026-02-06T20:58:10.943Z" }, - { url = "https://files.pythonhosted.org/packages/de/dd/a55dc9e60113f98af10075c3e33b97007bfbfd2e6f8bc6a1b2b1b43857c8/multi_storage_client-0.42.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8c2d491475eec5e80ad706eca7005d9bd17d30b29166e891c18695b42336493", size = 5155309, upload-time = "2026-02-06T20:56:22.528Z" }, - { url = "https://files.pythonhosted.org/packages/b9/b6/648a1d6b4482634fbb0d5bc0ba156b42fafd4f364227f9203bc4ac70dbac/multi_storage_client-0.42.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a87e05e0e09b8fbd6804bb1ac85a28213d4371e91d06d9c35ad12b247f28ec", size = 5422770, upload-time = "2026-02-06T21:01:41.97Z" }, { url = "https://files.pythonhosted.org/packages/d4/5a/6af92f30d09c97a314594029c115da0c44d5fa14e772983d88ad8023d355/multi_storage_client-0.42.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5c71c128b9f81cfbd59f1e2c2acfb2559658dfecde904496b7845901f0161430", size = 8798046, upload-time = "2026-02-06T21:02:32.674Z" }, { url = "https://files.pythonhosted.org/packages/c1/b2/e686bcbe754bfede1773153d928422b2c4b25453faf0e228cf9cadfa73e0/multi_storage_client-0.42.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:afe72fcb3f44ddc23488ab65bbab8575181fe15f63d297074703a36f4d8f7cc9", size = 5155767, upload-time = "2026-02-06T21:01:02.151Z" }, { url = "https://files.pythonhosted.org/packages/05/44/2b7e0ec6fa68f208cb919c38df346cca37c910906f564a43f74731eb6cdb/multi_storage_client-0.42.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30410d59d1f93758640a15779af6379a961bfa0f9607809a2b869e8b750efac7", size = 5421800, upload-time = "2026-02-06T21:03:04.852Z" }, @@ -4636,24 +4001,6 @@ version = "6.7.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010, upload-time = "2026-01-26T02:46:45.979Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/f1/a90635c4f88fb913fbf4ce660b83b7445b7a02615bda034b2f8eb38fd597/multidict-6.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ff981b266af91d7b4b3793ca3382e53229088d193a85dfad6f5f4c27fc73e5d", size = 76626, upload-time = "2026-01-26T02:43:26.485Z" }, - { url = "https://files.pythonhosted.org/packages/a6/9b/267e64eaf6fc637a15b35f5de31a566634a2740f97d8d094a69d34f524a4/multidict-6.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:844c5bca0b5444adb44a623fb0a1310c2f4cd41f402126bb269cd44c9b3f3e1e", size = 44706, upload-time = "2026-01-26T02:43:27.607Z" }, - { url = "https://files.pythonhosted.org/packages/dd/a4/d45caf2b97b035c57267791ecfaafbd59c68212004b3842830954bb4b02e/multidict-6.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2a0a924d4c2e9afcd7ec64f9de35fcd96915149b2216e1cb2c10a56df483855", size = 44356, upload-time = "2026-01-26T02:43:28.661Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/d2/0a36c8473f0cbaeadd5db6c8b72d15bbceeec275807772bfcd059bef487d/multidict-6.7.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8be1802715a8e892c784c0197c2ace276ea52702a0ede98b6310c8f255a5afb3", size = 244355, upload-time = "2026-01-26T02:43:31.165Z" }, - { url = "https://files.pythonhosted.org/packages/5d/16/8c65be997fd7dd311b7d39c7b6e71a0cb449bad093761481eccbbe4b42a2/multidict-6.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2e2d2ed645ea29f31c4c7ea1552fcfd7cb7ba656e1eafd4134a6620c9f5fdd9e", size = 246433, upload-time = "2026-01-26T02:43:32.581Z" }, - { url = "https://files.pythonhosted.org/packages/01/fb/4dbd7e848d2799c6a026ec88ad39cf2b8416aa167fcc903baa55ecaa045c/multidict-6.7.1-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:95922cee9a778659e91db6497596435777bd25ed116701a4c034f8e46544955a", size = 225376, upload-time = "2026-01-26T02:43:34.417Z" }, - { url = "https://files.pythonhosted.org/packages/b6/8a/4a3a6341eac3830f6053062f8fbc9a9e54407c80755b3f05bc427295c2d0/multidict-6.7.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6b83cabdc375ffaaa15edd97eb7c0c672ad788e2687004990074d7d6c9b140c8", size = 257365, upload-time = "2026-01-26T02:43:35.741Z" }, - { url = "https://files.pythonhosted.org/packages/f7/a2/dd575a69c1aa206e12d27d0770cdf9b92434b48a9ef0cd0d1afdecaa93c4/multidict-6.7.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:38fb49540705369bab8484db0689d86c0a33a0a9f2c1b197f506b71b4b6c19b0", size = 254747, upload-time = "2026-01-26T02:43:36.976Z" }, - { url = "https://files.pythonhosted.org/packages/5a/56/21b27c560c13822ed93133f08aa6372c53a8e067f11fbed37b4adcdac922/multidict-6.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:439cbebd499f92e9aa6793016a8acaa161dfa749ae86d20960189f5398a19144", size = 246293, upload-time = "2026-01-26T02:43:38.258Z" }, - { url = "https://files.pythonhosted.org/packages/5a/a4/23466059dc3854763423d0ad6c0f3683a379d97673b1b89ec33826e46728/multidict-6.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6d3bc717b6fe763b8be3f2bee2701d3c8eb1b2a8ae9f60910f1b2860c82b6c49", size = 242962, upload-time = "2026-01-26T02:43:40.034Z" }, - { url = "https://files.pythonhosted.org/packages/1f/67/51dd754a3524d685958001e8fa20a0f5f90a6a856e0a9dcabff69be3dbb7/multidict-6.7.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:619e5a1ac57986dbfec9f0b301d865dddf763696435e2962f6d9cf2fdff2bb71", size = 237360, upload-time = "2026-01-26T02:43:41.752Z" }, - { url = "https://files.pythonhosted.org/packages/64/3f/036dfc8c174934d4b55d86ff4f978e558b0e585cef70cfc1ad01adc6bf18/multidict-6.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0b38ebffd9be37c1170d33bc0f36f4f262e0a09bc1aac1c34c7aa51a7293f0b3", size = 245940, upload-time = "2026-01-26T02:43:43.042Z" }, - { url = "https://files.pythonhosted.org/packages/3d/20/6214d3c105928ebc353a1c644a6ef1408bc5794fcb4f170bb524a3c16311/multidict-6.7.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:10ae39c9cfe6adedcdb764f5e8411d4a92b055e35573a2eaa88d3323289ef93c", size = 253502, upload-time = "2026-01-26T02:43:44.371Z" }, - { url = "https://files.pythonhosted.org/packages/b1/e2/c653bc4ae1be70a0f836b82172d643fcf1dade042ba2676ab08ec08bff0f/multidict-6.7.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:25167cc263257660290fba06b9318d2026e3c910be240a146e1f66dd114af2b0", size = 247065, upload-time = "2026-01-26T02:43:45.745Z" }, - { url = "https://files.pythonhosted.org/packages/c8/11/a854b4154cd3bd8b1fd375e8a8ca9d73be37610c361543d56f764109509b/multidict-6.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:128441d052254f42989ef98b7b6a6ecb1e6f708aa962c7984235316db59f50fa", size = 241870, upload-time = 
"2026-01-26T02:43:47.054Z" }, - { url = "https://files.pythonhosted.org/packages/13/bf/9676c0392309b5fdae322333d22a829715b570edb9baa8016a517b55b558/multidict-6.7.1-cp311-cp311-win32.whl", hash = "sha256:d62b7f64ffde3b99d06b707a280db04fb3855b55f5a06df387236051d0668f4a", size = 41302, upload-time = "2026-01-26T02:43:48.753Z" }, - { url = "https://files.pythonhosted.org/packages/c9/68/f16a3a8ba6f7b6dc92a1f19669c0810bd2c43fc5a02da13b1cbf8e253845/multidict-6.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:bdbf9f3b332abd0cdb306e7c2113818ab1e922dc84b8f8fd06ec89ed2a19ab8b", size = 45981, upload-time = "2026-01-26T02:43:49.921Z" }, - { url = "https://files.pythonhosted.org/packages/ac/ad/9dd5305253fa00cd3c7555dbef69d5bf4133debc53b87ab8d6a44d411665/multidict-6.7.1-cp311-cp311-win_arm64.whl", hash = "sha256:b8c990b037d2fff2f4e33d3f21b9b531c5745b33a49a7d6dbe7a177266af44f6", size = 43159, upload-time = "2026-01-26T02:43:51.635Z" }, { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893, upload-time = "2026-01-26T02:43:52.754Z" }, { url = "https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456, upload-time = "2026-01-26T02:43:53.893Z" }, { url = "https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872, upload-time = "2026-01-26T02:43:55.041Z" }, @@ -4883,13 +4230,11 @@ name = "numba" version = "0.61.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = 
"llvmlite", marker = "sys_platform == 'linux'" }, - { name = "numpy", marker = "sys_platform == 'linux'" }, + { name = "llvmlite" }, + { name = "numpy" }, ] sdist = { url = "https://files.pythonhosted.org/packages/1c/a0/e21f57604304aa03ebb8e098429222722ad99176a4f979d34af1d1ee80da/numba-0.61.2.tar.gz", hash = "sha256:8750ee147940a6637b80ecf7f95062185ad8726c8c28a2295b8ec1160a196f7d", size = 2820615, upload-time = "2025-04-09T02:58:07.659Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/97/c8/8740616c8436c86c1b9a62e72cb891177d2c34c2d24ddcde4c390371bf4c/numba-0.61.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3945615cd73c2c7eba2a85ccc9c1730c21cd3958bfcf5a44302abae0fb07bb60", size = 3829227, upload-time = "2025-04-09T02:57:46.63Z" }, - { url = "https://files.pythonhosted.org/packages/fc/06/66e99ae06507c31d15ff3ecd1f108f2f59e18b6e08662cd5f8a5853fbd18/numba-0.61.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbfdf4eca202cebade0b7d43896978e146f39398909a42941c9303f82f403a18", size = 3523422, upload-time = "2025-04-09T02:57:48.222Z" }, { url = "https://files.pythonhosted.org/packages/9a/2d/e518df036feab381c23a624dac47f8445ac55686ec7f11083655eb707da3/numba-0.61.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b1bb509d01f23d70325d3a5a0e237cbc9544dd50e50588bc581ba860c213546", size = 3885928, upload-time = "2025-04-09T02:57:55.206Z" }, { url = "https://files.pythonhosted.org/packages/10/0f/23cced68ead67b75d77cfcca3df4991d1855c897ee0ff3fe25a56ed82108/numba-0.61.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48a53a3de8f8793526cbe330f2a39fe9a6638efcbf11bd63f3d2f9757ae345cd", size = 3577115, upload-time = "2025-04-09T02:57:56.818Z" }, { url = "https://files.pythonhosted.org/packages/0d/e0/5ea04e7ad2c39288c0f0f9e8d47638ad70f28e275d092733b5817cf243c9/numba-0.61.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:bdbca73ad81fa196bd53dc12e3aaf1564ae036e0c125f237c7644fe64a4928ab", size = 3893918, upload-time = "2025-04-09T02:58:02.933Z" }, @@ -4902,14 +4247,6 @@ version = "1.26.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554, upload-time = "2024-02-05T23:51:50.149Z" }, - { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127, upload-time = "2024-02-05T23:52:15.314Z" }, - { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994, upload-time = "2024-02-05T23:52:47.569Z" }, - { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005, upload-time = "2024-02-05T23:53:15.637Z" }, - { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297, upload-time = "2024-02-05T23:53:42.16Z" }, - { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567, upload-time = "2024-02-05T23:54:11.696Z" }, - { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812, upload-time = "2024-02-05T23:54:26.453Z" }, - { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913, upload-time = "2024-02-05T23:54:53.933Z" }, { url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901, upload-time = "2024-02-05T23:55:32.801Z" }, { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868, upload-time = "2024-02-05T23:55:56.28Z" }, { url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109, upload-time = "2024-02-05T23:56:20.368Z" }, 
@@ -4927,8 +4264,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "absl-py" }, { name = "numpy" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, ] sdist = { url = "https://files.pythonhosted.org/packages/02/ad/046a097b63a96c1ba1d85f0031dbe7fcbdb33e6c445dfbaba2ffaefdd497/nv_grouped_gemm-1.1.4.post8.tar.gz", hash = "sha256:ab321693f0292cfd8a26dc7b6f14decd9eb00e209494de7218e4fad36191275d", size = 20821209, upload-time = "2025-12-17T02:22:38.432Z" } @@ -4969,7 +4305,7 @@ name = "nvidia-cudnn-cu12" version = "9.10.2.21" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu12", marker = "sys_platform == 'linux'" }, + { name = "nvidia-cublas-cu12" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/ba/51/e123d997aa098c61d029f76663dedbfb9bc8dcf8c60cbd6adbe42f76d049/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:949452be657fa16687d0930933f032835951ef0892b37d2d53824d1a84dc97a8", size = 706758467, upload-time = "2025-06-06T21:54:08.597Z" }, @@ -4980,9 +4316,6 @@ name = "nvidia-cudnn-frontend" version = "1.18.0" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/9a/83d3d080118de4a7810fa019349edec634b8b37b9cafaacd05719de62dd6/nvidia_cudnn_frontend-1.18.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f6d4d0b88d617b233a503c84980b54d840b60b2734497d1a7a071ec5293daec2", size = 2023709, upload-time = "2026-01-27T23:32:10.912Z" }, - { url = "https://files.pythonhosted.org/packages/13/c7/c3624b3ed77b102618f26295e816b27f1c3ebb1143730237a9f51d403c3f/nvidia_cudnn_frontend-1.18.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:382ea063b92cbfd5b442cb75ff8422932d78276aecf139e46713ed1ad3d07af4", size = 2155568, upload-time = "2026-01-27T23:07:13.277Z" }, - { url = "https://files.pythonhosted.org/packages/52/dd/8613dfd029d076b86a8a87efe3f4bb4ab73cec15fa8fc27e665098f4d167/nvidia_cudnn_frontend-1.18.0-cp311-cp311-win_amd64.whl", hash = "sha256:baa509effc4d299d3f04e549d4188f88bca8a8b527f483cbd2f66bc18f13a8b1", size = 1591244, upload-time = "2026-01-27T23:08:44.691Z" }, { url = "https://files.pythonhosted.org/packages/e3/b4/604e230378680ee117849a4e1045baca092f93161a829291a84d5acce70c/nvidia_cudnn_frontend-1.18.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:310b417f2848a83d1437203fcaeea320a74fb7f28af20bf42bf5afc9c01f1c12", size = 2027408, upload-time = "2026-01-27T23:32:46.576Z" }, { url = "https://files.pythonhosted.org/packages/c6/52/08f98262e77b1cbcc834cc1a5db494d0661ea1dbdea58c2e2d51a57fdaca/nvidia_cudnn_frontend-1.18.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c023539ca6de99234cf5102c3ec0d6af817f5396fc93028a22ba5b834a35b8a", size = 2159245, upload-time = "2026-01-27T23:07:32.664Z" }, { url = "https://files.pythonhosted.org/packages/aa/1f/751a5a8cfdc95fb4dc556192d37369ae488c30c473fe9a3ec720b23d07ea/nvidia_cudnn_frontend-1.18.0-cp312-cp312-win_amd64.whl", hash = "sha256:e13f7dd46cdb4762dde87f181f06d1c5e15e9478bbdd547bfa74d9b11f415aae", size = 1591041, upload-time = "2026-01-27T23:09:04.118Z" }, @@ -4999,7 +4332,7 @@ name = "nvidia-cufft-cu12" version = "11.3.3.83" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux'" }, + { name = "nvidia-nvjitlink-cu12" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/1f/13/ee4e00f30e676b66ae65b4f08cb5bcbb8392c03f54f2d5413ea99a5d1c80/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:4d2dd21ec0b88cf61b62e6b43564355e5222e4a3fb394cac0db101f2dd0d4f74", size = 193118695, upload-time = "2025-03-07T01:45:27.821Z" }, @@ -5026,9 +4359,9 @@ name = "nvidia-cusolver-cu12" version = "11.7.3.90" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu12", marker = "sys_platform == 'linux'" }, - { name = "nvidia-cusparse-cu12", marker = "sys_platform == 'linux'" }, - { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux'" }, + { name = "nvidia-cublas-cu12" }, + { name = "nvidia-cusparse-cu12" }, + { name = "nvidia-nvjitlink-cu12" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/85/48/9a13d2975803e8cf2777d5ed57b87a0b6ca2cc795f9a4f59796a910bfb80/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:4376c11ad263152bd50ea295c05370360776f8c3427b30991df774f9fb26c450", size = 267506905, upload-time = "2025-03-07T01:47:16.273Z" }, @@ -5039,7 +4372,7 @@ name = "nvidia-cusparse-cu12" version = "12.5.8.93" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux'" }, + { name = "nvidia-nvjitlink-cu12" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/c2/f5/e1854cb2f2bcd4280c44736c93550cc300ff4b8c95ebe370d0aa7d2b473d/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ec05d76bbbd8b61b06a80e1eaf8cf4959c3d4ce8e711b65ebd0443bb0ebb13b", size = 288216466, upload-time = "2025-03-07T01:48:13.779Z" }, @@ -5069,14 +4402,11 @@ name = "nvidia-cutlass-dsl-libs-base" version = "4.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cuda-python", version = "12.9.4", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "cuda-python", version = "13.1.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = 
"cuda-python" }, { name = "numpy" }, { name = "typing-extensions" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/08/1b1481b382f0bfddb91fe19c425dae7ffcb0dacb19a60d4fa490f19cabdf/nvidia_cutlass_dsl_libs_base-4.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:18249a0c13a7b7fe08fbf600ce38a871538067cfe7b20ef2bc131a5902a67377", size = 75457259, upload-time = "2026-02-14T03:44:48.408Z" }, - { url = "https://files.pythonhosted.org/packages/1a/2f/4d525af7805a7cf04f25efd9900d9acca1d6a8973f436b6058dfec5b545f/nvidia_cutlass_dsl_libs_base-4.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c09ee076f2b61ba26523686f550a2c642a35ec178861a5e0a38f2979ad515604", size = 74345003, upload-time = "2026-02-14T03:46:37.751Z" }, { url = "https://files.pythonhosted.org/packages/33/34/63a1dce4d65cd6fd29b9d50286abbfcdd965c3ca2156cf423eda2ab1fc5d/nvidia_cutlass_dsl_libs_base-4.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9cde72efb065d9bea29a92ca85835eaedec20bf89af22798d2d2a551ccd51731", size = 75458501, upload-time = "2026-02-14T03:45:15.866Z" }, { url = "https://files.pythonhosted.org/packages/cf/ae/5bbd3c9d7909d64a7f139b480c70ff3220554f64775e941c95438265ef1f/nvidia_cutlass_dsl_libs_base-4.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e31a2fcc9854417242ee072c9b8fd1257d5ee422166dfd85eb3f8784fee34dd8", size = 74345995, upload-time = "2026-02-14T03:45:42.9Z" }, { url = "https://files.pythonhosted.org/packages/48/5c/c76ec134e0fbd4ee2f31b32e1fbcb727e7f6323d136a3fc7a8ea3aa3e75d/nvidia_cutlass_dsl_libs_base-4.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ad63fe382b36f69f2a9b51d35e95cbcb240565d06a990e5a19a8eacae49c8b94", size = 75456473, upload-time = "2026-02-14T03:43:51.005Z" }, @@ -5107,8 +4437,7 @@ dependencies = [ { name = "rich" }, { name = "safetensors" }, { name = "scipy" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = 
"torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, { name = "tqdm" }, ] wheels = [ @@ -5158,12 +4487,9 @@ dependencies = [ { name = "psutil" }, { name = "pynvml" }, { name = "pyyaml" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/46/77/8cda264b262e2868a4e6ebcddaea112200b1e34b8d5a35a2fe3b4978d137/nvidia_resiliency_ext-0.4.1-cp311-cp311-manylinux_2_31_aarch64.whl", hash = "sha256:d8ca454a8b8abef72e0ff0e33914686c263414e8891471c02a9f6af9d2d6b925", size = 443649, upload-time = "2025-07-17T03:49:16.183Z" }, - { url = "https://files.pythonhosted.org/packages/3a/53/029cc7493b5833cb8dfa201f15a1e422e2e1cc6308d34c5b0a90028a73fd/nvidia_resiliency_ext-0.4.1-cp311-cp311-manylinux_2_31_x86_64.whl", hash = "sha256:dde6034f29350ac6326cdd861ceec641bdd93be0eddbf034739f4cd9452a4dd9", size = 449189, upload-time = "2025-07-17T03:52:15.24Z" }, { url = "https://files.pythonhosted.org/packages/70/05/38d491962273c7905708762279f440520eb79f3c00b67a023497215ad023/nvidia_resiliency_ext-0.4.1-cp312-cp312-manylinux_2_31_aarch64.whl", hash = "sha256:b3bd5f01535574b16d0f38bca6e39afe3806c4a2896eee1b321cd944e00025a7", size = 444570, upload-time = "2025-07-17T03:50:58.877Z" }, { url = "https://files.pythonhosted.org/packages/18/8b/4cb8aa2bbdf3705d3034c3f3dacdadb03b3b7dd3dc7f5200e64663fb477f/nvidia_resiliency_ext-0.4.1-cp312-cp312-manylinux_2_31_x86_64.whl", hash = "sha256:ca9f8de465af345952bedbea53c90c0e2323d88cfd830ded0e806fad91845c0e", size = 450280, upload-time = "2025-07-17T03:49:55.327Z" }, ] @@ -5174,9 +4500,6 @@ version = "0.2.14" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/0e/03/b8a4391523a92163167fd0fee6769c223e8612043cb07aebc1173ca83fc9/nvtx-0.2.14.tar.gz", hash = "sha256:12945242a31bde70b1f15cae867f8706bdff290e2f808a11738e03ebefdf847f", size = 119864, upload-time = "2025-12-01T18:06:16.674Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/a6/4d473abd7c07a6d1060c0f708e21ddf46a960258532ffc897681db5c0f46/nvtx-0.2.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:227f6406d2fe1a4b890be17eb1f4c1f5bd4df8f7032dd1cb8c7651d379f35541", size = 732764, upload-time = "2025-11-27T17:26:21.853Z" }, - { url = "https://files.pythonhosted.org/packages/94/06/3ab72e5a463af1b95934638cb8377e99f58e5ef21a47cbf69b92267d6602/nvtx-0.2.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0664aa75b24e2ad0abdd0fa52c49e9c8a120652f2194289c85dc2d93cbc6017f", size = 724555, upload-time = "2025-11-27T17:22:36.402Z" }, - { url = "https://files.pythonhosted.org/packages/18/1d/64f6078a5ab4134af91ba294035ee1ebb3512edaaa9d60d8f0f023178620/nvtx-0.2.14-cp311-cp311-win_amd64.whl", hash = "sha256:10f5971661d61c1a90cd36c3069240452c904ecec4b3a08d0d6fdba1e5398165", size = 119660, upload-time = "2025-11-27T17:32:30.406Z" }, { url = "https://files.pythonhosted.org/packages/8a/de/2cc15bb805b1b18317b60837b853ed023757730d0db82de291635fc88bc3/nvtx-0.2.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ece46f555e725db879df06549980744f89db5923a77e6f7a5aecda75292421a", size = 727708, upload-time = "2025-11-27T17:25:20.836Z" }, { url = "https://files.pythonhosted.org/packages/81/94/b37d634fef8677ce525b5bfd2886737ea2c064bc3576fc84423973ff5b97/nvtx-0.2.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17efe5d903996bceb0c8a12cae80fa9b66bee7ee895923bd9d8ec2a5af1aabd8", size = 737691, upload-time = "2025-11-27T17:21:27.87Z" }, { url = 
"https://files.pythonhosted.org/packages/ad/c1/f633aa32003050ff83626a19402f03c83990a15b4df658a7bf1b590ee83e/nvtx-0.2.14-cp312-cp312-win_amd64.whl", hash = "sha256:f40db4746714d525d3020c702a0df866c2335efd6a27c41e869e577402a53a4b", size = 119193, upload-time = "2025-11-27T17:31:42.943Z" }, @@ -5223,18 +4546,11 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ml-dtypes" }, { name = "numpy" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/3b/8a/335c03a8683a88a32f9a6bb98899ea6df241a41df64b37b9696772414794/onnx-1.20.1.tar.gz", hash = "sha256:ded16de1df563d51fbc1ad885f2a426f814039d8b5f4feb77febe09c0295ad67", size = 12048980, upload-time = "2026-01-10T01:40:03.043Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/38/1a0e74d586c08833404100f5c052f92732fb5be417c0b2d7cb0838443bfe/onnx-1.20.1-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:53426e1b458641e7a537e9f176330012ff59d90206cac1c1a9d03cdd73ed3095", size = 17904965, upload-time = "2026-01-10T01:39:13.532Z" }, - { url = "https://files.pythonhosted.org/packages/96/25/64b076e9684d17335f80b15b3bf502f7a8e1a89f08a6b208d4f2861b3011/onnx-1.20.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ca7281f8c576adf396c338cf43fff26faee8d4d2e2577b8e73738f37ceccf945", size = 17415179, upload-time = "2026-01-10T01:39:16.516Z" }, - { url = "https://files.pythonhosted.org/packages/ac/d5/6743b409421ced20ad5af1b3a7b4c4e568689ffaca86db431692fca409a6/onnx-1.20.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2297f428c51c7fc6d8fad0cf34384284dfeff3f86799f8e83ef905451348ade0", size = 17513672, upload-time 
= "2026-01-10T01:39:19.35Z" }, - { url = "https://files.pythonhosted.org/packages/9a/6b/dae82e6fdb2043302f29adca37522312ea2be55b75907b59be06fbdffe87/onnx-1.20.1-cp311-cp311-win32.whl", hash = "sha256:63d9cbcab8c96841eadeb7c930e07bfab4dde8081eb76fb68e0dfb222706b81e", size = 16239336, upload-time = "2026-01-10T01:39:22.506Z" }, - { url = "https://files.pythonhosted.org/packages/8e/17/a0d7863390c1f2067d7c02dcc1477034965c32aaa1407bfcf775305ffee4/onnx-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:d78cde72d7ca8356a2d99c5dc0dbf67264254828cae2c5780184486c0cd7b3bf", size = 16392120, upload-time = "2026-01-10T01:39:25.106Z" }, - { url = "https://files.pythonhosted.org/packages/aa/72/9b879a46eb7a3322223791f36bf9c25d95da9ed93779eabb75a560f22e5b/onnx-1.20.1-cp311-cp311-win_arm64.whl", hash = "sha256:0104bb2d4394c179bcea3df7599a45a2932b80f4633840896fcf0d7d8daecea2", size = 16346923, upload-time = "2026-01-10T01:39:27.782Z" }, { url = "https://files.pythonhosted.org/packages/7c/4c/4b17e82f91ab9aa07ff595771e935ca73547b035030dc5f5a76e63fbfea9/onnx-1.20.1-cp312-abi3-macosx_12_0_universal2.whl", hash = "sha256:1d923bb4f0ce1b24c6859222a7e6b2f123e7bfe7623683662805f2e7b9e95af2", size = 17903547, upload-time = "2026-01-10T01:39:31.015Z" }, { url = "https://files.pythonhosted.org/packages/64/5e/1bfa100a9cb3f2d3d5f2f05f52f7e60323b0e20bb0abace1ae64dbc88f25/onnx-1.20.1-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ddc0b7d8b5a94627dc86c533d5e415af94cbfd103019a582669dad1f56d30281", size = 17412021, upload-time = "2026-01-10T01:39:33.885Z" }, { url = "https://files.pythonhosted.org/packages/fb/71/d3fec0dcf9a7a99e7368112d9c765154e81da70fcba1e3121131a45c245b/onnx-1.20.1-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9336b6b8e6efcf5c490a845f6afd7e041c89a56199aeda384ed7d58fb953b080", size = 17510450, upload-time = "2026-01-10T01:39:36.589Z" }, @@ -5304,7 +4620,7 @@ name = "openai-harmony" version = "0.0.8" source = { registry = 
"https://pypi.org/simple" } dependencies = [ - { name = "pydantic", marker = "sys_platform == 'linux'" }, + { name = "pydantic" }, ] sdist = { url = "https://files.pythonhosted.org/packages/3e/92/2d038d096f29179c7c9571b431f9e739f87a487121901725e23fe338dd9d/openai_harmony-0.0.8.tar.gz", hash = "sha256:6e43f98e6c242fa2de6f8ea12eab24af63fa2ed3e89c06341fb9d92632c5cbdf", size = 284777, upload-time = "2025-11-05T19:07:06.727Z" } wheels = [ @@ -5324,7 +4640,7 @@ name = "opencv-python-headless" version = "4.13.0.92" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", marker = "sys_platform == 'linux'" }, + { name = "numpy" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/21/76/9417a6aef9def70e467a5bf560579f816148a4c658b7d525581b356eda9e/opencv_python_headless-4.13.0.92-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5c8cfc8e87ed452b5cecb9419473ee5560a989859fe1d10d1ce11ae87b09a2cb", size = 33703709, upload-time = "2026-02-05T10:24:46.469Z" }, @@ -5351,8 +4667,7 @@ dependencies = [ [package.optional-dependencies] backend = [ { name = "accelerate" }, - { name = "awscli", version = "1.43.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "awscli", version = "1.44.42", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, + { name = "awscli" }, { name = "bitsandbytes" }, { name = "duckdb" }, { name = "gql" }, @@ -5363,8 +4678,7 @@ backend = [ { name = "pyarrow" }, { name = "pytest" }, { name = "setuptools" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, { name = "torchao" }, { name = "transformers" }, { name = "trl" }, 
@@ -5384,8 +4698,8 @@ megatron = [ { name = "megatron-core" }, { name = "ml-dtypes", marker = "python_full_version < '3.13'" }, { name = "nvidia-ml-py" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "quack-kernels" }, + { name = "torch" }, { name = "transformer-engine" }, { name = "transformer-engine-cu12" }, { name = "transformer-engine-torch" }, @@ -5403,8 +4717,7 @@ tinker = [ { name = "pyarrow" }, { name = "pydantic" }, { name = "tinker" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, { name = "transformers" }, { name = "uvicorn" }, ] @@ -5460,6 +4773,7 @@ requires-dist = [ { name = "pyarrow", marker = "extra == 'tinker'", specifier = ">=15.0.0" }, { name = "pydantic", marker = "extra == 'tinker'", specifier = ">=2.12.5" }, { name = "pytest", marker = "extra == 'backend'", specifier = ">=8.4.1" }, + { name = "quack-kernels", marker = "extra == 'megatron'", specifier = "==0.2.5" }, { name = "seaborn", marker = "extra == 'plotting'", specifier = ">=0.13.2" }, { name = "setproctitle", specifier = ">=1.3.6" }, { name = "setuptools", marker = "extra == 'backend'", specifier = ">=78.1.0" }, @@ -5471,7 +4785,7 @@ requires-dist = [ { name = "torchao", marker = "extra == 'backend'", specifier = "==0.15.0" }, { name = "transformer-engine", marker = "extra == 'megatron'", specifier = "==2.11.0" }, { name = "transformer-engine-cu12", marker = "extra == 'megatron'", specifier = "==2.11.0" }, - { name = "transformer-engine-torch", marker = "extra == 'megatron'", specifier = "==2.11.0" }, + { name = "transformer-engine-torch", 
marker = "extra == 'megatron'", git = "https://github.com/NVIDIA/TransformerEngine.git?subdirectory=transformer_engine%2Fpytorch&tag=v2.11" }, { name = "transformers", marker = "extra == 'backend'", specifier = ">=4.55.2,<=4.57.3" }, { name = "transformers", marker = "extra == 'tinker'", specifier = ">=4.55.2,<=4.57.3" }, { name = "trl", marker = "extra == 'backend'", specifier = "==0.20.0" }, @@ -5521,8 +4835,8 @@ name = "opentelemetry-exporter-otlp" version = "1.33.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "opentelemetry-exporter-otlp-proto-grpc", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-exporter-otlp-proto-http", marker = "sys_platform == 'linux'" }, + { name = "opentelemetry-exporter-otlp-proto-grpc" }, + { name = "opentelemetry-exporter-otlp-proto-http" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b1/3f/c8ad4f1c3aaadcea2b0f1b4d7970e7b7898c145699769a789f3435143f69/opentelemetry_exporter_otlp-1.33.1.tar.gz", hash = "sha256:4d050311ea9486e3994575aa237e32932aad58330a31fba24fdba5c0d531cf04", size = 6189, upload-time = "2025-05-16T18:52:43.176Z" } wheels = [ @@ -5534,7 +4848,7 @@ name = "opentelemetry-exporter-otlp-proto-common" version = "1.33.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "opentelemetry-proto", marker = "sys_platform == 'linux'" }, + { name = "opentelemetry-proto" }, ] sdist = { url = "https://files.pythonhosted.org/packages/7a/18/a1ec9dcb6713a48b4bdd10f1c1e4d5d2489d3912b80d2bcc059a9a842836/opentelemetry_exporter_otlp_proto_common-1.33.1.tar.gz", hash = "sha256:c57b3fa2d0595a21c4ed586f74f948d259d9949b58258f11edb398f246bec131", size = 20828, upload-time = "2025-05-16T18:52:43.795Z" } wheels = [ @@ -5546,13 +4860,13 @@ name = "opentelemetry-exporter-otlp-proto-grpc" version = "1.33.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "deprecated", marker = "sys_platform == 'linux'" }, - { name = 
"googleapis-common-protos", marker = "sys_platform == 'linux'" }, - { name = "grpcio", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-api", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-exporter-otlp-proto-common", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-proto", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-sdk", marker = "sys_platform == 'linux'" }, + { name = "deprecated" }, + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, ] sdist = { url = "https://files.pythonhosted.org/packages/d8/5f/75ef5a2a917bd0e6e7b83d3fb04c99236ee958f6352ba3019ea9109ae1a6/opentelemetry_exporter_otlp_proto_grpc-1.33.1.tar.gz", hash = "sha256:345696af8dc19785fac268c8063f3dc3d5e274c774b308c634f39d9c21955728", size = 22556, upload-time = "2025-05-16T18:52:44.76Z" } wheels = [ @@ -5564,13 +4878,13 @@ name = "opentelemetry-exporter-otlp-proto-http" version = "1.33.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "deprecated", marker = "sys_platform == 'linux'" }, - { name = "googleapis-common-protos", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-api", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-exporter-otlp-proto-common", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-proto", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-sdk", marker = "sys_platform == 'linux'" }, - { name = "requests", marker = "sys_platform == 'linux'" }, + { name = "deprecated" }, + { name = "googleapis-common-protos" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "requests" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/60/48/e4314ac0ed2ad043c07693d08c9c4bf5633857f5b72f2fefc64fd2b114f6/opentelemetry_exporter_otlp_proto_http-1.33.1.tar.gz", hash = "sha256:46622d964a441acb46f463ebdc26929d9dec9efb2e54ef06acdc7305e8593c38", size = 15353, upload-time = "2025-05-16T18:52:45.522Z" } wheels = [ @@ -5582,7 +4896,7 @@ name = "opentelemetry-proto" version = "1.33.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "protobuf" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f6/dc/791f3d60a1ad8235930de23eea735ae1084be1c6f96fdadf38710662a7e5/opentelemetry_proto-1.33.1.tar.gz", hash = "sha256:9627b0a5c90753bf3920c398908307063e4458b287bb890e5c1d6fa11ad50b68", size = 34363, upload-time = "2025-05-16T18:52:52.141Z" } wheels = [ @@ -5594,9 +4908,9 @@ name = "opentelemetry-sdk" version = "1.33.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "opentelemetry-api", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-semantic-conventions", marker = "sys_platform == 'linux'" }, - { name = "typing-extensions", marker = "sys_platform == 'linux'" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/67/12/909b98a7d9b110cce4b28d49b2e311797cffdce180371f35eba13a72dd00/opentelemetry_sdk-1.33.1.tar.gz", hash = "sha256:85b9fcf7c3d23506fbc9692fd210b8b025a1920535feec50bd54ce203d57a531", size = 161885, upload-time = "2025-05-16T18:52:52.832Z" } wheels = [ @@ -5608,8 +4922,8 @@ name = "opentelemetry-semantic-conventions" version = "0.54b1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "deprecated", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-api", marker = "sys_platform == 'linux'" }, + { 
name = "deprecated" }, + { name = "opentelemetry-api" }, ] sdist = { url = "https://files.pythonhosted.org/packages/5b/2c/d7990fc1ffc82889d466e7cd680788ace44a26789809924813b164344393/opentelemetry_semantic_conventions-0.54b1.tar.gz", hash = "sha256:d1cecedae15d19bdaafca1e56b29a66aa286f50b5d08f036a145c7f3e9ef9cee", size = 118642, upload-time = "2025-05-16T18:52:53.962Z" } wheels = [ @@ -5631,21 +4945,6 @@ version = "3.11.7" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/53/45/b268004f745ede84e5798b48ee12b05129d19235d0e15267aa57dcdb400b/orjson-3.11.7.tar.gz", hash = "sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49", size = 6144992, upload-time = "2026-02-02T15:38:49.29Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/37/02/da6cb01fc6087048d7f61522c327edf4250f1683a58a839fdcc435746dd5/orjson-3.11.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c", size = 228664, upload-time = "2026-02-02T15:37:25.542Z" }, - { url = "https://files.pythonhosted.org/packages/c1/c2/5885e7a5881dba9a9af51bc564e8967225a642b3e03d089289a35054e749/orjson-3.11.7-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b", size = 125344, upload-time = "2026-02-02T15:37:26.92Z" }, - { url = "https://files.pythonhosted.org/packages/a4/1d/4e7688de0a92d1caf600dfd5fb70b4c5bfff51dfa61ac555072ef2d0d32a/orjson-3.11.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e", size = 128404, upload-time = "2026-02-02T15:37:28.108Z" }, - { url = "https://files.pythonhosted.org/packages/2f/b2/ec04b74ae03a125db7bd69cffd014b227b7f341e3261bf75b5eb88a1aa92/orjson-3.11.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5", size = 123677, upload-time = "2026-02-02T15:37:30.287Z" }, - { url = "https://files.pythonhosted.org/packages/4c/69/f95bdf960605f08f827f6e3291fe243d8aa9c5c9ff017a8d7232209184c3/orjson-3.11.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62", size = 128950, upload-time = "2026-02-02T15:37:31.595Z" }, - { url = "https://files.pythonhosted.org/packages/a4/1b/de59c57bae1d148ef298852abd31909ac3089cff370dfd4cd84cc99cbc42/orjson-3.11.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910", size = 141756, upload-time = "2026-02-02T15:37:32.985Z" }, - { url = "https://files.pythonhosted.org/packages/ee/9e/9decc59f4499f695f65c650f6cfa6cd4c37a3fbe8fa235a0a3614cb54386/orjson-3.11.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b", size = 130812, upload-time = "2026-02-02T15:37:34.204Z" }, - { url = "https://files.pythonhosted.org/packages/28/e6/59f932bcabd1eac44e334fe8e3281a92eacfcb450586e1f4bde0423728d8/orjson-3.11.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960", size = 133444, upload-time = "2026-02-02T15:37:35.446Z" }, - { url = "https://files.pythonhosted.org/packages/f1/36/b0f05c0eaa7ca30bc965e37e6a2956b0d67adb87a9872942d3568da846ae/orjson-3.11.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8", size = 138609, upload-time = "2026-02-02T15:37:36.657Z" }, - { url = "https://files.pythonhosted.org/packages/b8/03/58ec7d302b8d86944c60c7b4b82975d5161fcce4c9bc8c6cb1d6741b6115/orjson-3.11.7-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504", size = 408918, upload-time = "2026-02-02T15:37:38.076Z" }, - { url = "https://files.pythonhosted.org/packages/06/3a/868d65ef9a8b99be723bd510de491349618abd9f62c826cf206d962db295/orjson-3.11.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e", size = 143998, upload-time = "2026-02-02T15:37:39.706Z" }, - { url = "https://files.pythonhosted.org/packages/5b/c7/1e18e1c83afe3349f4f6dc9e14910f0ae5f82eac756d1412ea4018938535/orjson-3.11.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561", size = 134802, upload-time = "2026-02-02T15:37:41.002Z" }, - { url = "https://files.pythonhosted.org/packages/d4/0b/ccb7ee1a65b37e8eeb8b267dc953561d72370e85185e459616d4345bab34/orjson-3.11.7-cp311-cp311-win32.whl", hash = "sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d", size = 127828, upload-time = "2026-02-02T15:37:42.241Z" }, - { url = "https://files.pythonhosted.org/packages/af/9e/55c776dffda3f381e0f07d010a4f5f3902bf48eaba1bb7684d301acd4924/orjson-3.11.7-cp311-cp311-win_amd64.whl", hash = "sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471", size = 124941, upload-time = "2026-02-02T15:37:43.444Z" }, - { url = "https://files.pythonhosted.org/packages/aa/8e/424a620fa7d263b880162505fb107ef5e0afaa765b5b06a88312ac291560/orjson-3.11.7-cp311-cp311-win_arm64.whl", hash = "sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d", size = 126245, upload-time = "2026-02-02T15:37:45.18Z" }, { url = "https://files.pythonhosted.org/packages/80/bf/76f4f1665f6983385938f0e2a5d7efa12a58171b8456c252f3bae8a4cf75/orjson-3.11.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f", size = 228545, upload-time = 
"2026-02-02T15:37:46.376Z" }, { url = "https://files.pythonhosted.org/packages/79/53/6c72c002cb13b5a978a068add59b25a8bdf2800ac1c9c8ecdb26d6d97064/orjson-3.11.7-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b", size = 125224, upload-time = "2026-02-02T15:37:47.697Z" }, { url = "https://files.pythonhosted.org/packages/2c/83/10e48852865e5dd151bdfe652c06f7da484578ed02c5fca938e3632cb0b8/orjson-3.11.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a", size = 128154, upload-time = "2026-02-02T15:37:48.954Z" }, @@ -5699,15 +4998,6 @@ version = "1.12.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/12/0c/f1761e21486942ab9bb6feaebc610fa074f7c5e496e6962dea5873348077/ormsgpack-1.12.2.tar.gz", hash = "sha256:944a2233640273bee67521795a73cf1e959538e0dfb7ac635505010455e53b33", size = 39031, upload-time = "2026-01-18T20:55:28.023Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4b/08/8b68f24b18e69d92238aa8f258218e6dfeacf4381d9d07ab8df303f524a9/ormsgpack-1.12.2-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:bd5f4bf04c37888e864f08e740c5a573c4017f6fd6e99fa944c5c935fabf2dd9", size = 378266, upload-time = "2026-01-18T20:55:59.876Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/29fc13044ecb7c153523ae0a1972269fcd613650d1fa1a9cec1044c6b666/ormsgpack-1.12.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34d5b28b3570e9fed9a5a76528fc7230c3c76333bc214798958e58e9b79cc18a", size = 203035, upload-time = "2026-01-18T20:55:30.59Z" }, - { url = "https://files.pythonhosted.org/packages/ad/c2/00169fb25dd8f9213f5e8a549dfb73e4d592009ebc85fbbcd3e1dcac575b/ormsgpack-1.12.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:3708693412c28f3538fb5a65da93787b6bbab3484f6bc6e935bfb77a62400ae5", size = 210539, upload-time = "2026-01-18T20:55:48.569Z" }, - { url = "https://files.pythonhosted.org/packages/1b/33/543627f323ff3c73091f51d6a20db28a1a33531af30873ea90c5ac95a9b5/ormsgpack-1.12.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43013a3f3e2e902e1d05e72c0f1aeb5bedbb8e09240b51e26792a3c89267e181", size = 212401, upload-time = "2026-01-18T20:56:10.101Z" }, - { url = "https://files.pythonhosted.org/packages/e8/5d/f70e2c3da414f46186659d24745483757bcc9adccb481a6eb93e2b729301/ormsgpack-1.12.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7c8b1667a72cbba74f0ae7ecf3105a5e01304620ed14528b2cb4320679d2869b", size = 387082, upload-time = "2026-01-18T20:56:12.047Z" }, - { url = "https://files.pythonhosted.org/packages/c0/d6/06e8dc920c7903e051f30934d874d4afccc9bb1c09dcaf0bc03a7de4b343/ormsgpack-1.12.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:df6961442140193e517303d0b5d7bc2e20e69a879c2d774316125350c4a76b92", size = 482346, upload-time = "2026-01-18T20:56:05.152Z" }, - { url = "https://files.pythonhosted.org/packages/66/c4/f337ac0905eed9c393ef990c54565cd33644918e0a8031fe48c098c71dbf/ormsgpack-1.12.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c6a4c34ddef109647c769d69be65fa1de7a6022b02ad45546a69b3216573eb4a", size = 425181, upload-time = "2026-01-18T20:55:37.83Z" }, - { url = "https://files.pythonhosted.org/packages/78/29/6d5758fabef3babdf4bbbc453738cc7de9cd3334e4c38dd5737e27b85653/ormsgpack-1.12.2-cp311-cp311-win_amd64.whl", hash = "sha256:73670ed0375ecc303858e3613f407628dd1fca18fe6ac57b7b7ce66cc7bb006c", size = 117182, upload-time = "2026-01-18T20:55:31.472Z" }, - { url = "https://files.pythonhosted.org/packages/c4/57/17a15549233c37e7fd054c48fe9207492e06b026dbd872b826a0b5f833b6/ormsgpack-1.12.2-cp311-cp311-win_arm64.whl", hash = "sha256:c2be829954434e33601ae5da328cccce3266b098927ca7a30246a0baec2ce7bd", size = 111464, upload-time = 
"2026-01-18T20:55:38.811Z" }, { url = "https://files.pythonhosted.org/packages/4c/36/16c4b1921c308a92cef3bf6663226ae283395aa0ff6e154f925c32e91ff5/ormsgpack-1.12.2-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7a29d09b64b9694b588ff2f80e9826bdceb3a2b91523c5beae1fab27d5c940e7", size = 378618, upload-time = "2026-01-18T20:55:50.835Z" }, { url = "https://files.pythonhosted.org/packages/c0/68/468de634079615abf66ed13bb5c34ff71da237213f29294363beeeca5306/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b39e629fd2e1c5b2f46f99778450b59454d1f901bc507963168985e79f09c5d", size = 203186, upload-time = "2026-01-18T20:56:11.163Z" }, { url = "https://files.pythonhosted.org/packages/73/a9/d756e01961442688b7939bacd87ce13bfad7d26ce24f910f6028178b2cc8/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:958dcb270d30a7cb633a45ee62b9444433fa571a752d2ca484efdac07480876e", size = 210738, upload-time = "2026-01-18T20:56:09.181Z" }, @@ -5747,8 +5037,6 @@ version = "0.2.11" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/1a/d3/e04e9145f8f806723dec9b9e5227ad695a3efcd3ced7794cf7c22b15df5e/outlines_core-0.2.11.tar.gz", hash = "sha256:dfce56f717ff5083e54cbcfdb66cad243365437fccbb5509adaa7e31e030f1d8", size = 197263, upload-time = "2025-05-19T10:12:51.719Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4c/db/32c6e1170f139420e948fdd18a09a6175244bc0760dcf4dc2470e18411b9/outlines_core-0.2.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:132605b8dd1e3d1369da6a851992dd357f6376068292f6bd47caa7a28b794d19", size = 2289078, upload-time = "2025-05-19T10:12:12.118Z" }, - { url = "https://files.pythonhosted.org/packages/25/c3/b6e6f4e08fa84d2424f82705a6dc47fee33cb91989010fa678736957dcf6/outlines_core-0.2.11-cp311-cp311-manylinux_2_28_aarch64.whl", hash = 
"sha256:b31d5fc83b78aad282dd667b8d6e684614481fe08a7609ce0ce45dee64cd2991", size = 2115075, upload-time = "2025-05-19T10:12:13.761Z" }, { url = "https://files.pythonhosted.org/packages/92/c7/a65d1fddf49830ebc41422294eacde35286d9f68994a8aa905cb14f5aade/outlines_core-0.2.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86df9740368866295077346440d911df4972da2b3f1f54b8125e6f329e8a8891", size = 2287677, upload-time = "2025-05-19T10:12:24.24Z" }, { url = "https://files.pythonhosted.org/packages/23/79/8795aed8be9b77dd69d78e7cfbfcf28c179e6b08da6e56bbbf48a09fe55f/outlines_core-0.2.11-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:96ce4dd78f106799be4a0a5795cefd1352806162973756a4b6fce4bb6eddd7e4", size = 2113000, upload-time = "2025-05-19T10:12:25.446Z" }, { url = "https://files.pythonhosted.org/packages/87/96/7dcdc5198844145ab35528f9f93a58c3d47b87e54d0f79357c631d7b7a9a/outlines_core-0.2.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:daef6eaaf8c3403455ab5cbf265cb5c6838df571eb7c4b23cddac19cfc701726", size = 2287320, upload-time = "2025-05-19T10:12:35.515Z" }, @@ -5775,14 +5063,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/2e/0c/b28ed414f080ee0ad153f848586d61d1878f91689950f037f976ce15f6c8/pandas-3.0.1.tar.gz", hash = "sha256:4186a699674af418f655dbd420ed87f50d56b4cd6603784279d9eef6627823c8", size = 4641901, upload-time = "2026-02-17T22:20:16.434Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/07/c7087e003ceee9b9a82539b40414ec557aa795b584a1a346e89180853d79/pandas-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de09668c1bf3b925c07e5762291602f0d789eca1b3a781f99c1c78f6cac0e7ea", size = 10323380, upload-time = "2026-02-17T22:18:16.133Z" }, - { url = "https://files.pythonhosted.org/packages/c1/27/90683c7122febeefe84a56f2cde86a9f05f68d53885cebcc473298dfc33e/pandas-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:24ba315ba3d6e5806063ac6eb717504e499ce30bd8c236d8693a5fd3f084c796", size = 9923455, upload-time = "2026-02-17T22:18:19.13Z" }, - { url = "https://files.pythonhosted.org/packages/0e/f1/ed17d927f9950643bc7631aa4c99ff0cc83a37864470bc419345b656a41f/pandas-3.0.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:406ce835c55bac912f2a0dcfaf27c06d73c6b04a5dde45f1fd3169ce31337389", size = 10753464, upload-time = "2026-02-17T22:18:21.134Z" }, - { url = "https://files.pythonhosted.org/packages/2e/7c/870c7e7daec2a6c7ff2ac9e33b23317230d4e4e954b35112759ea4a924a7/pandas-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:830994d7e1f31dd7e790045235605ab61cff6c94defc774547e8b7fdfbff3dc7", size = 11255234, upload-time = "2026-02-17T22:18:24.175Z" }, - { url = "https://files.pythonhosted.org/packages/5c/39/3653fe59af68606282b989c23d1a543ceba6e8099cbcc5f1d506a7bae2aa/pandas-3.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a64ce8b0f2de1d2efd2ae40b0abe7f8ae6b29fbfb3812098ed5a6f8e235ad9bf", size = 11767299, upload-time = "2026-02-17T22:18:26.824Z" }, - { url = "https://files.pythonhosted.org/packages/9b/31/1daf3c0c94a849c7a8dab8a69697b36d313b229918002ba3e409265c7888/pandas-3.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9832c2c69da24b602c32e0c7b1b508a03949c18ba08d4d9f1c1033426685b447", size = 12333292, upload-time = "2026-02-17T22:18:28.996Z" }, - { url = "https://files.pythonhosted.org/packages/1f/67/af63f83cd6ca603a00fe8530c10a60f0879265b8be00b5930e8e78c5b30b/pandas-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:84f0904a69e7365f79a0c77d3cdfccbfb05bf87847e3a51a41e1426b0edb9c79", size = 9892176, upload-time = "2026-02-17T22:18:31.79Z" }, - { url = "https://files.pythonhosted.org/packages/79/ab/9c776b14ac4b7b4140788eca18468ea39894bc7340a408f1d1e379856a6b/pandas-3.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:4a68773d5a778afb31d12e34f7dd4612ab90de8c6fb1d8ffe5d4a03b955082a1", size = 9151328, 
upload-time = "2026-02-17T22:18:35.721Z" }, { url = "https://files.pythonhosted.org/packages/37/51/b467209c08dae2c624873d7491ea47d2b47336e5403309d433ea79c38571/pandas-3.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:476f84f8c20c9f5bc47252b66b4bb25e1a9fc2fa98cead96744d8116cb85771d", size = 10344357, upload-time = "2026-02-17T22:18:38.262Z" }, { url = "https://files.pythonhosted.org/packages/7c/f1/e2567ffc8951ab371db2e40b2fe068e36b81d8cf3260f06ae508700e5504/pandas-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ab749dfba921edf641d4036c4c21c0b3ea70fea478165cb98a998fb2a261955", size = 9884543, upload-time = "2026-02-17T22:18:41.476Z" }, { url = "https://files.pythonhosted.org/packages/d7/39/327802e0b6d693182403c144edacbc27eb82907b57062f23ef5a4c4a5ea7/pandas-3.0.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8e36891080b87823aff3640c78649b91b8ff6eea3c0d70aeabd72ea43ab069b", size = 10396030, upload-time = "2026-02-17T22:18:43.822Z" }, @@ -5900,8 +5180,7 @@ dependencies = [ { name = "psutil" }, { name = "pyyaml" }, { name = "safetensors" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, { name = "tqdm" }, { name = "transformers" }, ] @@ -5920,16 +5199,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/cb/72/9a51afa0a822b09e286c4cb827ed7b00bc818dac7bd11a5f161e493a217d/pendulum-3.2.0.tar.gz", hash = "sha256:e80feda2d10fa3ff8b1526715f7d33dcb7e08494b3088f2c8a3ac92d4a4331ce", size = 86912, upload-time = "2026-01-30T11:22:24.093Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c4/27/a4be6ec12161b503dd036f8d7cc57f8626170ae31bb298038be9af0001ce/pendulum-3.2.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:5d775cc608c909ad415c8e789c84a9f120bb6a794c4215b2d8d910893cf0ec6a", size = 337923, upload-time = "2026-01-30T11:20:51.61Z" }, - { url = "https://files.pythonhosted.org/packages/59/e1/2a214e18355ec2a6ce3f683a97eecdb6050866ff3a6cf165d411450aeb1b/pendulum-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8de794a7f665aebc8c1ba4dd4b05ab8fe1a36ce9c0498366adf1d1edd79b2686", size = 327379, upload-time = "2026-01-30T11:20:53.085Z" }, - { url = "https://files.pythonhosted.org/packages/9d/01/7392e58ebc1d9e70b987dc8bb0c89710b47ac8125067efe7aa4c420b616f/pendulum-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bac7df7696e1c942e17c0556b3a7bcdd1d7aa5b24faee7620cb071e754a0622", size = 340115, upload-time = "2026-01-30T11:20:54.635Z" }, - { url = "https://files.pythonhosted.org/packages/ef/33/80de84c5ca1a3e4f7f3b75090c9b61b6dbb6d095e302ee592cebbaf0bbfb/pendulum-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db0f6a8a04475d9cba26ce701e7d66d266fd97227f2f5f499270eba04be1c7e9", size = 373969, upload-time = "2026-01-30T11:20:56.209Z" }, - { url = "https://files.pythonhosted.org/packages/75/e4/f7b4c1818927ab394a2a0a9b7011f360a0a75839a22678833c5bc0a84183/pendulum-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c352c63c1ff05f2198409b28498d7158547a8be23e1fbd4aa2cf5402fb239b55", size = 379058, upload-time = "2026-01-30T11:20:57.618Z" }, - { url = "https://files.pythonhosted.org/packages/36/94/9947cf710620afcc68751683f2f8de88d902505e7c13c0349d7e9d362f97/pendulum-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de8c1ad1d1aa7d4ceae341528bab35a0f8c88a5aa63f2f5d84e16b517d1b32c2", size = 348403, upload-time = "2026-01-30T11:20:59.56Z" }, - { url = "https://files.pythonhosted.org/packages/6f/12/0e6ba0bb00fa57907af2a3fca8643bded5dba1e87072d50673776a0d6ed2/pendulum-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:1ba955511c12fec2252038b0c866c25c0c30b720bf74d3023710f121e42b1498", size = 517457, upload-time = "2026-01-30T11:21:01.602Z" }, - { url = "https://files.pythonhosted.org/packages/c6/fe/dae5fbfe67bd41d943def0ad8f1e7f6988aa8e527255e433cd7c494f9ad5/pendulum-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4115bf364a2ec6d5ddc476751ceaa4164a04f2c15589f0d29aa210ddb784b15d", size = 561103, upload-time = "2026-01-30T11:21:03.924Z" }, - { url = "https://files.pythonhosted.org/packages/ce/a0/8f646160b98abfc19152505af19bd643a4279ec2bdbe0959f16b7025fc6b/pendulum-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:4151a903356413fdd9549de0997b708fb95a214ed97803ffb479ffd834088378", size = 260595, upload-time = "2026-01-30T11:21:05.495Z" }, - { url = "https://files.pythonhosted.org/packages/79/01/feead7af9ded7a13f2d798fb6573e70f469113eafcd8cc8f59671584ca3e/pendulum-3.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:acfdee9ddc56053cb7c8c075afbfde0857322d09e56a56195b9cd127fae87e4c", size = 255382, upload-time = "2026-01-30T11:21:06.847Z" }, { url = "https://files.pythonhosted.org/packages/41/56/dd0ea9f97d25a0763cda09e2217563b45714786118d8c68b0b745395d6eb/pendulum-3.2.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:bf0b489def51202a39a2a665dcc4162d5e46934a740fe4c4fe3068979610156c", size = 337830, upload-time = "2026-01-30T11:21:08.298Z" }, { url = "https://files.pythonhosted.org/packages/cf/98/83d62899bf7226fc12396de4bc1fb2b5da27e451c7c60790043aaf8b4731/pendulum-3.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:937a529aa302efa18dcf25e53834964a87ffb2df8f80e3669ab7757a6126beaf", size = 327574, upload-time = "2026-01-30T11:21:09.715Z" }, { url = "https://files.pythonhosted.org/packages/76/fa/ff2aa992b23f0543c709b1a3f3f9ed760ec71fd02c8bb01f93bf008b52e4/pendulum-3.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85c7689defc65c4dc29bf257f7cca55d210fabb455de9476e1748d2ab2ae80d7", size = 339891, upload-time = 
"2026-01-30T11:21:11.089Z" }, @@ -5960,13 +5229,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c9/37/b4f2b5f1200351c4869b8b46ad5c21019e3dbe0417f5867ae969fad7b5fe/pendulum-3.2.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:a50d8cf42f06d3d8c3f8bb2a7ac47fa93b5145e69de6a7209be6a47afdd9cf76", size = 561926, upload-time = "2026-01-30T11:21:51.698Z" }, { url = "https://files.pythonhosted.org/packages/a0/9e/567376582da58f5fe8e4f579db2bcfbf243cf619a5825bdf1023ad1436b3/pendulum-3.2.0-cp314-cp314-win_amd64.whl", hash = "sha256:e5bbb92b155cd5018b3cf70ee49ed3b9c94398caaaa7ed97fe41e5bb5a968418", size = 258817, upload-time = "2026-01-30T11:21:53.074Z" }, { url = "https://files.pythonhosted.org/packages/95/67/dfffd7eb50d67fa821cd4d92cf71575ead6162930202bc40dfcedf78c38c/pendulum-3.2.0-cp314-cp314-win_arm64.whl", hash = "sha256:d53134418e04335c3029a32e9341cccc9b085a28744fb5ee4e6a8f5039363b1a", size = 253292, upload-time = "2026-01-30T11:21:54.484Z" }, - { url = "https://files.pythonhosted.org/packages/c9/0d/d5ac8468a1b40f09a62d6e91654088de432367907579dd161c0fb1bdf222/pendulum-3.2.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9585594d32faa71efa5a78f576f1ee4f79e9c5340d7c6f0cd6c5dfe725effaaa", size = 338760, upload-time = "2026-01-30T11:22:12.225Z" }, - { url = "https://files.pythonhosted.org/packages/a0/e5/7fa8c8be6caac8e0be78fbe7668df571f44820ed779cb3736fab645fcba8/pendulum-3.2.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:26401e2de77c437e8f3b6160c08c6c5d45518d906f8f9b48fd7cb5aa0f4e2aff", size = 328333, upload-time = "2026-01-30T11:22:13.811Z" }, - { url = "https://files.pythonhosted.org/packages/ad/78/73a1031b7d1bf7986e8e655cea3f018164b3470aecfea25a4074e77dda73/pendulum-3.2.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:637e65af042f383a2764a886aa28ccc6f853bf7a142df18e41c720542934c13b", size = 340841, upload-time = "2026-01-30T11:22:15.278Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/40/4e36e9074e92b0164c088b9ada3c02bfea386d83e24fa98b30fe9b6e61a8/pendulum-3.2.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6e46c28f4d067233c4a4c42748f4ffa641d9289c09e0e81488beb6d4b3fab51", size = 348959, upload-time = "2026-01-30T11:22:16.718Z" }, - { url = "https://files.pythonhosted.org/packages/24/99/8bf7fcb91b526e1efe17d047faa845709b88800fff915ff848ff26054293/pendulum-3.2.0-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:71d46bcc86269f97bfd8c5f1475d55e717696a0a010b1871023605ca94624031", size = 518102, upload-time = "2026-01-30T11:22:18.2Z" }, - { url = "https://files.pythonhosted.org/packages/b8/b0/a36c468d2d0dec62ddea7c5e4177e93abb12f48ac90f09f24d0581c5189f/pendulum-3.2.0-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5cd956d4176afc7bfe8a91bf3f771b46ff8d326f6c5bf778eb5010eb742ebba6", size = 561884, upload-time = "2026-01-30T11:22:19.671Z" }, - { url = "https://files.pythonhosted.org/packages/c5/4d/dad105261898907bf806cabca53d3878529a9fa2c0d5d7f95f2035246fc2/pendulum-3.2.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:39ef129d7b90aab49708645867abdd207b714ba7bff12dae549975b0aca09716", size = 261236, upload-time = "2026-01-30T11:22:21.059Z" }, { url = "https://files.pythonhosted.org/packages/02/fb/d65db067a67df7252f18b0cb7420dda84078b9e8bfb375215469c14a50be/pendulum-3.2.0-py3-none-any.whl", hash = "sha256:f3a9c18a89b4d9ef39c5fa6a78722aaff8d5be2597c129a3b16b9f40a561acf3", size = 114111, upload-time = "2026-01-30T11:22:22.361Z" }, ] @@ -5997,17 +5259,6 @@ version = "12.1.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/1f/42/5c74462b4fd957fcd7b13b04fb3205ff8349236ea74c7c375766d6c82288/pillow-12.1.1.tar.gz", hash = "sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4", size = 46980264, upload-time = "2026-02-11T04:23:07.146Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2b/46/5da1ec4a5171ee7bf1a0efa064aba70ba3d6e0788ce3f5acd1375d23c8c0/pillow-12.1.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:e879bb6cd5c73848ef3b2b48b8af9ff08c5b71ecda8048b7dd22d8a33f60be32", size = 5304084, upload-time = "2026-02-11T04:20:27.501Z" }, - { url = "https://files.pythonhosted.org/packages/78/93/a29e9bc02d1cf557a834da780ceccd54e02421627200696fcf805ebdc3fb/pillow-12.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:365b10bb9417dd4498c0e3b128018c4a624dc11c7b97d8cc54effe3b096f4c38", size = 4657866, upload-time = "2026-02-11T04:20:29.827Z" }, - { url = "https://files.pythonhosted.org/packages/13/84/583a4558d492a179d31e4aae32eadce94b9acf49c0337c4ce0b70e0a01f2/pillow-12.1.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d4ce8e329c93845720cd2014659ca67eac35f6433fd3050393d85f3ecef0dad5", size = 6232148, upload-time = "2026-02-11T04:20:31.329Z" }, - { url = "https://files.pythonhosted.org/packages/d5/e2/53c43334bbbb2d3b938978532fbda8e62bb6e0b23a26ce8592f36bcc4987/pillow-12.1.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc354a04072b765eccf2204f588a7a532c9511e8b9c7f900e1b64e3e33487090", size = 8038007, upload-time = "2026-02-11T04:20:34.225Z" }, - { url = "https://files.pythonhosted.org/packages/b8/a6/3d0e79c8a9d58150dd98e199d7c1c56861027f3829a3a60b3c2784190180/pillow-12.1.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7e7976bf1910a8116b523b9f9f58bf410f3e8aa330cd9a2bb2953f9266ab49af", size = 6345418, upload-time = "2026-02-11T04:20:35.858Z" }, - { url = "https://files.pythonhosted.org/packages/a2/c8/46dfeac5825e600579157eea177be43e2f7ff4a99da9d0d0a49533509ac5/pillow-12.1.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:597bd9c8419bc7c6af5604e55847789b69123bbe25d65cc6ad3012b4f3c98d8b", size = 7034590, upload-time = "2026-02-11T04:20:37.91Z" }, - { url = 
"https://files.pythonhosted.org/packages/af/bf/e6f65d3db8a8bbfeaf9e13cc0417813f6319863a73de934f14b2229ada18/pillow-12.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2c1fc0f2ca5f96a3c8407e41cca26a16e46b21060fe6d5b099d2cb01412222f5", size = 6458655, upload-time = "2026-02-11T04:20:39.496Z" }, - { url = "https://files.pythonhosted.org/packages/f9/c2/66091f3f34a25894ca129362e510b956ef26f8fb67a0e6417bc5744e56f1/pillow-12.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:578510d88c6229d735855e1f278aa305270438d36a05031dfaae5067cc8eb04d", size = 7159286, upload-time = "2026-02-11T04:20:41.139Z" }, - { url = "https://files.pythonhosted.org/packages/7b/5a/24bc8eb526a22f957d0cec6243146744966d40857e3d8deb68f7902ca6c1/pillow-12.1.1-cp311-cp311-win32.whl", hash = "sha256:7311c0a0dcadb89b36b7025dfd8326ecfa36964e29913074d47382706e516a7c", size = 6328663, upload-time = "2026-02-11T04:20:43.184Z" }, - { url = "https://files.pythonhosted.org/packages/31/03/bef822e4f2d8f9d7448c133d0a18185d3cce3e70472774fffefe8b0ed562/pillow-12.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:fbfa2a7c10cc2623f412753cddf391c7f971c52ca40a3f65dc5039b2939e8563", size = 7031448, upload-time = "2026-02-11T04:20:44.696Z" }, - { url = "https://files.pythonhosted.org/packages/49/70/f76296f53610bd17b2e7d31728b8b7825e3ac3b5b3688b51f52eab7c0818/pillow-12.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:b81b5e3511211631b3f672a595e3221252c90af017e399056d0faabb9538aa80", size = 2453651, upload-time = "2026-02-11T04:20:46.243Z" }, { url = "https://files.pythonhosted.org/packages/07/d3/8df65da0d4df36b094351dce696f2989bec731d4f10e743b1c5f4da4d3bf/pillow-12.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab323b787d6e18b3d91a72fc99b1a2c28651e4358749842b8f8dfacd28ef2052", size = 5262803, upload-time = "2026-02-11T04:20:47.653Z" }, { url = "https://files.pythonhosted.org/packages/d6/71/5026395b290ff404b836e636f51d7297e6c83beceaa87c592718747e670f/pillow-12.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash 
= "sha256:adebb5bee0f0af4909c30db0d890c773d1a92ffe83da908e2e9e720f8edf3984", size = 4657601, upload-time = "2026-02-11T04:20:49.328Z" }, { url = "https://files.pythonhosted.org/packages/b1/2e/1001613d941c67442f745aff0f7cc66dd8df9a9c084eb497e6a543ee6f7e/pillow-12.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb66b7cc26f50977108790e2456b7921e773f23db5630261102233eb355a3b79", size = 6234995, upload-time = "2026-02-11T04:20:51.032Z" }, @@ -6069,13 +5320,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/01/4a/9202e8d11714c1fc5951f2e1ef362f2d7fbc595e1f6717971d5dd750e969/pillow-12.1.1-cp314-cp314t-win32.whl", hash = "sha256:d2912fd8114fc5545aa3a4b5576512f64c55a03f3ebcca4c10194d593d43ea36", size = 6438736, upload-time = "2026-02-11T04:22:46.347Z" }, { url = "https://files.pythonhosted.org/packages/f3/ca/cbce2327eb9885476b3957b2e82eb12c866a8b16ad77392864ad601022ce/pillow-12.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:4ceb838d4bd9dab43e06c363cab2eebf63846d6a4aeaea283bbdfd8f1a8ed58b", size = 7182894, upload-time = "2026-02-11T04:22:48.114Z" }, { url = "https://files.pythonhosted.org/packages/ec/d2/de599c95ba0a973b94410477f8bf0b6f0b5e67360eb89bcb1ad365258beb/pillow-12.1.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7b03048319bfc6170e93bd60728a1af51d3dd7704935feb228c4d4faab35d334", size = 2546446, upload-time = "2026-02-11T04:22:50.342Z" }, - { url = "https://files.pythonhosted.org/packages/56/11/5d43209aa4cb58e0cc80127956ff1796a68b928e6324bbf06ef4db34367b/pillow-12.1.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:600fd103672b925fe62ed08e0d874ea34d692474df6f4bf7ebe148b30f89f39f", size = 5228606, upload-time = "2026-02-11T04:22:52.106Z" }, - { url = "https://files.pythonhosted.org/packages/5f/d5/3b005b4e4fda6698b371fa6c21b097d4707585d7db99e98d9b0b87ac612a/pillow-12.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:665e1b916b043cef294bc54d47bf02d87e13f769bc4bc5fa225a24b3a6c5aca9", size = 4622321, 
upload-time = "2026-02-11T04:22:53.827Z" }, - { url = "https://files.pythonhosted.org/packages/df/36/ed3ea2d594356fd8037e5a01f6156c74bc8d92dbb0fa60746cc96cabb6e8/pillow-12.1.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:495c302af3aad1ca67420ddd5c7bd480c8867ad173528767d906428057a11f0e", size = 5247579, upload-time = "2026-02-11T04:22:56.094Z" }, - { url = "https://files.pythonhosted.org/packages/54/9a/9cc3e029683cf6d20ae5085da0dafc63148e3252c2f13328e553aaa13cfb/pillow-12.1.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8fd420ef0c52c88b5a035a0886f367748c72147b2b8f384c9d12656678dfdfa9", size = 6989094, upload-time = "2026-02-11T04:22:58.288Z" }, - { url = "https://files.pythonhosted.org/packages/00/98/fc53ab36da80b88df0967896b6c4b4cd948a0dc5aa40a754266aa3ae48b3/pillow-12.1.1-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f975aa7ef9684ce7e2c18a3aa8f8e2106ce1e46b94ab713d156b2898811651d3", size = 5313850, upload-time = "2026-02-11T04:23:00.554Z" }, - { url = "https://files.pythonhosted.org/packages/30/02/00fa585abfd9fe9d73e5f6e554dc36cc2b842898cbfc46d70353dae227f8/pillow-12.1.1-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8089c852a56c2966cf18835db62d9b34fef7ba74c726ad943928d494fa7f4735", size = 5963343, upload-time = "2026-02-11T04:23:02.934Z" }, - { url = "https://files.pythonhosted.org/packages/f2/26/c56ce33ca856e358d27fda9676c055395abddb82c35ac0f593877ed4562e/pillow-12.1.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:cb9bb857b2d057c6dfc72ac5f3b44836924ba15721882ef103cecb40d002d80e", size = 7029880, upload-time = "2026-02-11T04:23:04.783Z" }, ] [[package]] @@ -6208,8 +5452,8 @@ name = "prometheus-fastapi-instrumentator" version = "7.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "prometheus-client", marker = "sys_platform == 'linux'" }, - { name = "starlette", marker = 
"sys_platform == 'linux'" }, + { name = "prometheus-client" }, + { name = "starlette" }, ] sdist = { url = "https://files.pythonhosted.org/packages/69/6d/24d53033cf93826aa7857699a4450c1c67e5b9c710e925b1ed2b320c04df/prometheus_fastapi_instrumentator-7.1.0.tar.gz", hash = "sha256:be7cd61eeea4e5912aeccb4261c6631b3f227d8924542d79eaf5af3f439cbe5e", size = 20220, upload-time = "2025-03-19T19:35:05.351Z" } wheels = [ @@ -6234,21 +5478,6 @@ version = "0.4.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, - { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, - { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, - { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, - { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, - { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, - { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" }, - { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, - { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, - { url = 
"https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, - { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, - { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, - { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, - { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, - { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, { url = 
"https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, @@ -6332,8 +5561,7 @@ name = "proto-plus" version = "1.27.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, ] sdist = { url = "https://files.pythonhosted.org/packages/3a/02/8832cde80e7380c600fbf55090b6ab7b62bd6825dbedde6d6657c15a1f8e/proto_plus-1.27.1.tar.gz", hash = "sha256:912a7460446625b792f6448bade9e55cd4e41e6ac10e27009ef71a7f317fa147", size = 56929, upload-time = "2026-02-02T17:34:49.035Z" } wheels = [ @@ -6344,45 +5572,16 @@ wheels = [ name = "protobuf" version = "5.29.6" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", - "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - 
"python_full_version < '3.12' and sys_platform == 'linux'", -] sdist = { url = "https://files.pythonhosted.org/packages/7e/57/394a763c103e0edf87f0938dafcd918d53b4c011dfc5c8ae80f3b0452dbb/protobuf-5.29.6.tar.gz", hash = "sha256:da9ee6a5424b6b30fd5e45c5ea663aef540ca95f9ad99d1e887e819cdf9b8723", size = 425623, upload-time = "2026-02-04T22:54:40.584Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/88/9ee58ff7863c479d6f8346686d4636dd4c415b0cbeed7a6a7d0617639c2a/protobuf-5.29.6-cp310-abi3-win32.whl", hash = "sha256:62e8a3114992c7c647bce37dcc93647575fc52d50e48de30c6fcb28a6a291eb1", size = 423357, upload-time = "2026-02-04T22:54:25.805Z" }, + { url = "https://files.pythonhosted.org/packages/1c/66/2dc736a4d576847134fb6d80bd995c569b13cdc7b815d669050bf0ce2d2c/protobuf-5.29.6-cp310-abi3-win_amd64.whl", hash = "sha256:7e6ad413275be172f67fdee0f43484b6de5a904cc1c3ea9804cb6fe2ff366eda", size = 435175, upload-time = "2026-02-04T22:54:28.592Z" }, + { url = "https://files.pythonhosted.org/packages/06/db/49b05966fd208ae3f44dcd33837b6243b4915c57561d730a43f881f24dea/protobuf-5.29.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:b5a169e664b4057183a34bdc424540e86eea47560f3c123a0d64de4e137f9269", size = 418619, upload-time = "2026-02-04T22:54:30.266Z" }, { url = "https://files.pythonhosted.org/packages/b7/d7/48cbf6b0c3c39761e47a99cb483405f0fde2be22cf00d71ef316ce52b458/protobuf-5.29.6-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:a8866b2cff111f0f863c1b3b9e7572dc7eaea23a7fae27f6fc613304046483e6", size = 320284, upload-time = "2026-02-04T22:54:31.782Z" }, { url = "https://files.pythonhosted.org/packages/e3/dd/cadd6ec43069247d91f6345fa7a0d2858bef6af366dbd7ba8f05d2c77d3b/protobuf-5.29.6-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:e3387f44798ac1106af0233c04fb8abf543772ff241169946f698b3a9a3d3ab9", size = 320478, upload-time = "2026-02-04T22:54:32.909Z" }, { url = 
"https://files.pythonhosted.org/packages/5a/cb/e3065b447186cb70aa65acc70c86baf482d82bf75625bf5a2c4f6919c6a3/protobuf-5.29.6-py3-none-any.whl", hash = "sha256:6b9edb641441b2da9fa8f428760fc136a49cf97a52076010cf22a2ff73438a86", size = 173126, upload-time = "2026-02-04T22:54:39.462Z" }, ] -[[package]] -name = "protobuf" -version = "6.33.5" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.12.*' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < '3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] -sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 
425769, upload-time = "2026-01-29T21:51:21.751Z" }, - { url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" }, - { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" }, - { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" }, -] - [[package]] name = "psutil" version = "7.2.2" @@ -6417,17 +5616,6 @@ version = "2.9.11" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452, upload-time = "2025-10-10T11:11:11.583Z" }, - { url = "https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 
3863957, upload-time = "2025-10-10T11:11:16.932Z" }, - { url = "https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" }, - { url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" }, - { url = "https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" }, - { url = "https://files.pythonhosted.org/packages/0d/61/4aa89eeb6d751f05178a13da95516c036e27468c5d4d2509bb1e15341c81/psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb", size = 3981881, upload-time = "2025-10-30T02:55:07.332Z" }, - { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = "2025-10-10T11:11:34.975Z" }, - { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = "2025-10-10T11:11:40.432Z" }, - { url = "https://files.pythonhosted.org/packages/6d/c2/782a3c64403d8ce35b5c50e1b684412cf94f171dc18111be8c976abd2de1/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f", size = 3043477, upload-time = "2025-10-30T02:55:11.182Z" }, - { url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = "2025-10-10T11:11:45.366Z" }, - { url = "https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" }, { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, { url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, @@ -6505,13 +5693,6 @@ version = "23.0.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/88/22/134986a4cc224d593c1afde5494d18ff629393d74cc2eddb176669f234a4/pyarrow-23.0.1.tar.gz", hash = "sha256:b8c5873e33440b2bc2f4a79d2b47017a89c5a24116c055625e6f2ee50523f019", size = 1167336, upload-time = "2026-02-16T10:14:12.39Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/41/8e6b6ef7e225d4ceead8459427a52afdc23379768f54dd3566014d7618c1/pyarrow-23.0.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6f0147ee9e0386f519c952cc670eb4a8b05caa594eeffe01af0e25f699e4e9bb", size = 34302230, upload-time = "2026-02-16T10:09:03.859Z" }, - { url = "https://files.pythonhosted.org/packages/bf/4a/1472c00392f521fea03ae93408bf445cc7bfa1ab81683faf9bc188e36629/pyarrow-23.0.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:0ae6e17c828455b6265d590100c295193f93cc5675eb0af59e49dbd00d2de350", size = 35850050, upload-time = "2026-02-16T10:09:11.877Z" }, - { url = "https://files.pythonhosted.org/packages/0c/b2/bd1f2f05ded56af7f54d702c8364c9c43cd6abb91b0e9933f3d77b4f4132/pyarrow-23.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:fed7020203e9ef273360b9e45be52a2a47d3103caf156a30ace5247ffb51bdbd", size = 44491918, upload-time = "2026-02-16T10:09:18.144Z" }, - { url = "https://files.pythonhosted.org/packages/0b/62/96459ef5b67957eac38a90f541d1c28833d1b367f014a482cb63f3b7cd2d/pyarrow-23.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:26d50dee49d741ac0e82185033488d28d35be4d763ae6f321f97d1140eb7a0e9", size = 47562811, upload-time = "2026-02-16T10:09:25.792Z" }, - { url = "https://files.pythonhosted.org/packages/7d/94/1170e235add1f5f45a954e26cd0e906e7e74e23392dcb560de471f7366ec/pyarrow-23.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:3c30143b17161310f151f4a2bcfe41b5ff744238c1039338779424e38579d701", size = 48183766, upload-time = "2026-02-16T10:09:34.645Z" }, - { url = "https://files.pythonhosted.org/packages/0e/2d/39a42af4570377b99774cdb47f63ee6c7da7616bd55b3d5001aa18edfe4f/pyarrow-23.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db2190fa79c80a23fdd29fef4b8992893f024ae7c17d2f5f4db7171fa30c2c78", size = 50607669, upload-time = "2026-02-16T10:09:44.153Z" }, - { url = "https://files.pythonhosted.org/packages/00/ca/db94101c187f3df742133ac837e93b1f269ebdac49427f8310ee40b6a58f/pyarrow-23.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:f00f993a8179e0e1c9713bcc0baf6d6c01326a406a9c23495ec1ba9c9ebf2919", size = 27527698, upload-time = "2026-02-16T10:09:50.263Z" }, { url = "https://files.pythonhosted.org/packages/9a/4b/4166bb5abbfe6f750fc60ad337c43ecf61340fa52ab386da6e8dbf9e63c4/pyarrow-23.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f4b0dbfa124c0bb161f8b5ebb40f1a680b70279aa0c9901d44a2b5a20806039f", size = 34214575, upload-time = "2026-02-16T10:09:56.225Z" }, { url = "https://files.pythonhosted.org/packages/e1/da/3f941e3734ac8088ea588b53e860baeddac8323ea40ce22e3d0baa865cc9/pyarrow-23.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:7707d2b6673f7de054e2e83d59f9e805939038eebe1763fe811ee8fa5c0cd1a7", size = 35832540, upload-time = "2026-02-16T10:10:03.428Z" }, { url = "https://files.pythonhosted.org/packages/88/7c/3d841c366620e906d54430817531b877ba646310296df42ef697308c2705/pyarrow-23.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:86ff03fb9f1a320266e0de855dee4b17da6794c595d207f89bba40d16b5c78b9", size = 44470940, upload-time = "2026-02-16T10:10:10.704Z" }, @@ -6576,20 +5757,6 @@ version = "1.4.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/aa/b8/4ed5c7ad5ec15b08d35cc79ace6145d5c1ae426e46435f4987379439dfea/pybase64-1.4.3.tar.gz", hash = 
"sha256:c2ed274c9e0ba9c8f9c4083cfe265e66dd679126cd9c2027965d807352f3f053", size = 137272, upload-time = "2025-12-06T13:27:04.013Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/fb/bb06a5b9885e7d853ac1e801c4d8abfdb4c8506deee33e53d55aa6690e67/pybase64-1.4.3-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f9ef0388878bc15a084bd9bf73ec1b2b4ee513d11009b1506375e10a7aae5032", size = 68331, upload-time = "2025-12-06T13:22:54.197Z" }, - { url = "https://files.pythonhosted.org/packages/64/15/8d60b9ec5e658185fc2ee3333e01a6e30d717cf677b24f47cbb3a859d13c/pybase64-1.4.3-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95a57cccf106352a72ed8bc8198f6820b16cc7d55aa3867a16dea7011ae7c218", size = 71370, upload-time = "2025-12-06T13:22:55.517Z" }, - { url = "https://files.pythonhosted.org/packages/ac/29/a3e5c1667cc8c38d025a4636855de0fc117fc62e2afeb033a3c6f12c6a22/pybase64-1.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cd1c47dfceb9c7bd3de210fb4e65904053ed2d7c9dce6d107f041ff6fbd7e21", size = 59834, upload-time = "2025-12-06T13:22:56.682Z" }, - { url = "https://files.pythonhosted.org/packages/a9/00/8ffcf9810bd23f3984698be161cf7edba656fd639b818039a7be1d6405d4/pybase64-1.4.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.whl", hash = "sha256:9fe9922698f3e2f72874b26890d53a051c431d942701bb3a37aae94da0b12107", size = 56652, upload-time = "2025-12-06T13:22:57.724Z" }, - { url = "https://files.pythonhosted.org/packages/81/62/379e347797cdea4ab686375945bc77ad8d039c688c0d4d0cfb09d247beb9/pybase64-1.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:af5f4bd29c86b59bb4375e0491d16ec8a67548fa99c54763aaedaf0b4b5a6632", size = 59382, upload-time = "2025-12-06T13:22:58.758Z" }, - { url = 
"https://files.pythonhosted.org/packages/c6/f2/9338ffe2f487086f26a2c8ca175acb3baa86fce0a756ff5670a0822bb877/pybase64-1.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c302f6ca7465262908131411226e02100f488f531bb5e64cb901aa3f439bccd9", size = 59990, upload-time = "2025-12-06T13:23:01.007Z" }, - { url = "https://files.pythonhosted.org/packages/f9/a4/85a6142b65b4df8625b337727aa81dc199642de3d09677804141df6ee312/pybase64-1.4.3-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:2f3f439fa4d7fde164ebbbb41968db7d66b064450ab6017c6c95cef0afa2b349", size = 54923, upload-time = "2025-12-06T13:23:02.369Z" }, - { url = "https://files.pythonhosted.org/packages/ac/00/e40215d25624012bf5b7416ca37f168cb75f6dd15acdb91ea1f2ea4dc4e7/pybase64-1.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a23c6866551043f8b681a5e1e0d59469148b2920a3b4fc42b1275f25ea4217a", size = 58664, upload-time = "2025-12-06T13:23:03.378Z" }, - { url = "https://files.pythonhosted.org/packages/b0/73/d7e19a63e795c13837f2356268d95dc79d1180e756f57ced742a1e52fdeb/pybase64-1.4.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:56e6526f8565642abc5f84338cc131ce298a8ccab696b19bdf76fa6d7dc592ef", size = 52338, upload-time = "2025-12-06T13:23:04.458Z" }, - { url = "https://files.pythonhosted.org/packages/f2/32/3c746d7a310b69bdd9df77ffc85c41b80bce00a774717596f869b0d4a20e/pybase64-1.4.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6a792a8b9d866ffa413c9687d9b611553203753987a3a582d68cbc51cf23da45", size = 68993, upload-time = "2025-12-06T13:23:05.526Z" }, - { url = "https://files.pythonhosted.org/packages/5d/b3/63cec68f9d6f6e4c0b438d14e5f1ef536a5fe63ce14b70733ac5e31d7ab8/pybase64-1.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:62ad29a5026bb22cfcd1ca484ec34b0a5ced56ddba38ceecd9359b2818c9c4f9", size = 58055, upload-time = "2025-12-06T13:23:06.931Z" }, - { url = 
"https://files.pythonhosted.org/packages/d5/cb/7acf7c3c06f9692093c07f109668725dc37fb9a3df0fa912b50add645195/pybase64-1.4.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:11b9d1d2d32ec358c02214363b8fc3651f6be7dd84d880ecd597a6206a80e121", size = 54430, upload-time = "2025-12-06T13:23:07.936Z" }, - { url = "https://files.pythonhosted.org/packages/33/39/4eb33ff35d173bfff4002e184ce8907f5d0a42d958d61cd9058ef3570179/pybase64-1.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0aebaa7f238caa0a0d373616016e2040c6c879ebce3ba7ab3c59029920f13640", size = 56272, upload-time = "2025-12-06T13:23:09.253Z" }, - { url = "https://files.pythonhosted.org/packages/19/97/a76d65c375a254e65b730c6f56bf528feca91305da32eceab8bcc08591e6/pybase64-1.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e504682b20c63c2b0c000e5f98a80ea867f8d97642e042a5a39818e44ba4d599", size = 70904, upload-time = "2025-12-06T13:23:10.336Z" }, { url = "https://files.pythonhosted.org/packages/43/1b/9a8cab0042b464e9a876d5c65fe5127445a2436da36fda64899b119b1a1b/pybase64-1.4.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f0b3f200c3e06316f6bebabd458b4e4bcd4c2ca26af7c0c766614d91968dee27", size = 68210, upload-time = "2025-12-06T13:23:18.813Z" }, { url = "https://files.pythonhosted.org/packages/62/f7/965b79ff391ad208b50e412b5d3205ccce372a2d27b7218ae86d5295b105/pybase64-1.4.3-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb632edfd132b3eaf90c39c89aa314beec4e946e210099b57d40311f704e11d4", size = 71599, upload-time = "2025-12-06T13:23:20.195Z" }, { url = "https://files.pythonhosted.org/packages/03/4b/a3b5175130b3810bbb8ccfa1edaadbd3afddb9992d877c8a1e2f274b476e/pybase64-1.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:356ef1d74648ce997f5a777cf8f1aefecc1c0b4fe6201e0ef3ec8a08170e1b54", size = 59922, upload-time = "2025-12-06T13:23:21.487Z" }, @@ 
-6666,13 +5833,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bd/b8/f544a2e37c778d59208966d4ef19742a0be37c12fc8149ff34483c176616/pybase64-1.4.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d94020ef09f624d841aa9a3a6029df8cf65d60d7a6d5c8687579fa68bd679b65", size = 58295, upload-time = "2025-12-06T13:25:20.822Z" }, { url = "https://files.pythonhosted.org/packages/03/99/1fae8a3b7ac181e36f6e7864a62d42d5b1f4fa7edf408c6711e28fba6b4d/pybase64-1.4.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:f64ce70d89942a23602dee910dec9b48e5edf94351e1b378186b74fcc00d7f66", size = 60960, upload-time = "2025-12-06T13:25:22.099Z" }, { url = "https://files.pythonhosted.org/packages/9d/9e/cd4c727742345ad8384569a4466f1a1428f4e5cc94d9c2ab2f53d30be3fe/pybase64-1.4.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8ea99f56e45c469818b9781903be86ba4153769f007ba0655fa3b46dc332803d", size = 74863, upload-time = "2025-12-06T13:25:23.442Z" }, - { url = "https://files.pythonhosted.org/packages/bf/44/d4b7adc7bf4fd5b52d8d099121760c450a52c390223806b873f0b6a2d551/pybase64-1.4.3-graalpy311-graalpy242_311_native-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a492518f3078a4e3faaef310697d21df9c6bc71908cebc8c2f6fbfa16d7d6b1f", size = 43227, upload-time = "2025-12-06T13:26:21.845Z" }, - { url = "https://files.pythonhosted.org/packages/08/86/2ba2d8734ef7939debeb52cf9952e457ba7aa226cae5c0e6dd631f9b851f/pybase64-1.4.3-graalpy311-graalpy242_311_native-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cae1a0f47784fd16df90d8acc32011c8d5fcdd9ab392c9ec49543e5f6a9c43a4", size = 35804, upload-time = "2025-12-06T13:26:23.149Z" }, { url = "https://files.pythonhosted.org/packages/fa/8f/43c3bb11ca9bacf81cb0b7a71500bb65b2eda6d5fe07433c09b543de97f3/pybase64-1.4.3-graalpy312-graalpy250_312_native-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:5c29a582b0ea3936d02bd6fe9bf674ab6059e6e45ab71c78404ab2c913224414", size = 43461, upload-time = "2025-12-06T13:26:28.906Z" }, { url = "https://files.pythonhosted.org/packages/2d/4c/2a5258329200be57497d3972b5308558c6de42e3749c6cc2aa1cbe34b25a/pybase64-1.4.3-graalpy312-graalpy250_312_native-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b6b664758c804fa919b4f1257aa8cf68e95db76fc331de5f70bfc3a34655afe1", size = 36058, upload-time = "2025-12-06T13:26:30.092Z" }, - { url = "https://files.pythonhosted.org/packages/d3/22/832a2f9e76cdf39b52e01e40d8feeb6a04cf105494f2c3e3126d0149717f/pybase64-1.4.3-pp311-pypy311_pp73-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:bd4d2293de9fd212e294c136cec85892460b17d24e8c18a6ba18750928037750", size = 40681, upload-time = "2025-12-06T13:26:43.782Z" }, - { url = "https://files.pythonhosted.org/packages/12/d7/6610f34a8972415fab3bb4704c174a1cc477bffbc3c36e526428d0f3957d/pybase64-1.4.3-pp311-pypy311_pp73-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af6d0d3a691911cc4c9a625f3ddcd3af720738c21be3d5c72de05629139d393", size = 41294, upload-time = "2025-12-06T13:26:44.936Z" }, - { url = "https://files.pythonhosted.org/packages/64/25/ed24400948a6c974ab1374a233cb7e8af0a5373cea0dd8a944627d17c34a/pybase64-1.4.3-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5cfc8c49a28322d82242088378f8542ce97459866ba73150b062a7073e82629d", size = 35447, upload-time = "2025-12-06T13:26:46.098Z" }, ] [[package]] @@ -6684,18 +5846,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/df/a0/9c823651872e6a0face3f0311de2a40c8bbcb9c8dcb15680bd019ac56ac7/pycares-5.0.1.tar.gz", hash = "sha256:5a3c249c830432631439815f9a818463416f2a8cbdb1e988e78757de9ae75081", size = 652222, upload-time = "2026-01-01T12:37:00.604Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/87/78/43b09f4b8e5fb8a6024661b458b48987abdb39304c78117b106b10a029f1/pycares-5.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c29ca77ff9712e20787201ca8e76ad89384771c0e058a0a4f3dc05afbc4b32de", size = 136177, upload-time = "2026-01-01T12:35:11.567Z" }, - { url = "https://files.pythonhosted.org/packages/19/05/194c0e039ff52b166b50e79ff166c61f931fbca2bf94fc0dbaaf39041518/pycares-5.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f11424bf5cf6226d0b136ed47daa58434e377c61b62d0100d1de7793f8e34a72", size = 130960, upload-time = "2026-01-01T12:35:12.828Z" }, - { url = "https://files.pythonhosted.org/packages/0d/84/5fce65cc058c5ab619c0dd1370d539667235a5565da72ca77f3f741cdc70/pycares-5.0.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d765afb52d579879f5c4f005763827d3b1eb86b23139e9614e6089c9f98db017", size = 220584, upload-time = "2026-01-01T12:35:14.005Z" }, - { url = "https://files.pythonhosted.org/packages/f6/74/d82304297308f6c24a17961bf589b53eefa5f7f2724158c842c67fa0b302/pycares-5.0.1-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ea0d57ba5add4bfbcc40cbdfa92bbb8a5ef0c4c21881e26c7229d9bdc92a4533", size = 252166, upload-time = "2026-01-01T12:35:15.293Z" }, - { url = "https://files.pythonhosted.org/packages/39/a2/0ead3ba4228a490b52eb44d43514dae172c90421bb30a3659516e5b251a2/pycares-5.0.1-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae9ec2aa3553d33e6220aeb1a05f4853fb83fce4cec3e0dea2dc970338ea47dc", size = 239085, upload-time = "2026-01-01T12:35:16.594Z" }, - { url = "https://files.pythonhosted.org/packages/26/ad/e59f173933f0e696a6afbbd63935114d1400524a72da4f2cbafc6002a398/pycares-5.0.1-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5c63fb2498b05e9f5670a1bf3b900c5d09343b3b6d5001a9714d593f9eb54de1", size = 222936, upload-time = "2026-01-01T12:35:17.521Z" }, - { url = 
"https://files.pythonhosted.org/packages/98/fa/d85bfe663a9c292efd8e699779027612c0c65ff50dc4cc9eb7a143613460/pycares-5.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:71316f7a87c15a8d32127ff01374dc2c969c37410693cc0cf6532590b7f18e7a", size = 223506, upload-time = "2026-01-01T12:35:18.535Z" }, - { url = "https://files.pythonhosted.org/packages/2a/6b/4c225a5b10a4c9f88891a20bfe363eca1b1ce7d5244b396e5683c6070998/pycares-5.0.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a2117dffbb78615bfdb41ad77b17038689e4e01c66f153649e80d268c6228b4f", size = 251633, upload-time = "2026-01-01T12:35:19.819Z" }, - { url = "https://files.pythonhosted.org/packages/26/ce/ba2349413b5197b72ec19c46e07f6be3a324f80a7b1579c7cbb1b82d6dc2/pycares-5.0.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7d7c4f5d8b88b586ef2288142b806250020e6490b9f2bd8fd5f634a78fd20fcf", size = 237703, upload-time = "2026-01-01T12:35:20.827Z" }, - { url = "https://files.pythonhosted.org/packages/84/2f/1fd794e6fca10d9e20569113d10a4f92cc2b4242d3eb45524419a37cca6b/pycares-5.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:433b9a4b5a7e10ef8aef0b957e6cd0bfc1bb5bc730d2729f04e93c91c25979c0", size = 222622, upload-time = "2026-01-01T12:35:22.518Z" }, - { url = "https://files.pythonhosted.org/packages/c9/07/7db7977649b210092a7e02d550fcebdfa69bc995c684a3b960c88a5dc4ce/pycares-5.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:cf2699883b88713670d3f9c0a1e44ac24c70aeace9f8c6aa7f0b9f222d5b08a5", size = 117438, upload-time = "2026-01-01T12:35:23.402Z" }, - { url = "https://files.pythonhosted.org/packages/fc/ca/f322ddaa8b3414667de8faeea944ce9d3ddfaf1455839f499a21fcea4cec/pycares-5.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:9528dc11749e5e098c996475b60f879e1db5a6cb3dd0cdc747530620bb1a8941", size = 108920, upload-time = "2026-01-01T12:35:24.599Z" }, { url = 
"https://files.pythonhosted.org/packages/75/67/e84ba11d3fec3bf1322c3b302c4df13c85e0a1bc48f16d65cd0f59ad9853/pycares-5.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2ee551be4f3f3ac814ac8547586c464c9035e914f5122a534d25de147fa745e1", size = 136241, upload-time = "2026-01-01T12:35:25.439Z" }, { url = "https://files.pythonhosted.org/packages/ce/ae/50fbb3b4e52b9f1d16a36ffabd051ef8b2106b3f0a0d1c1113904d187a9d/pycares-5.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:252d4e5a52a68f825eaa90e16b595f9baee22c760f51e286ab612c6829b96de3", size = 131069, upload-time = "2026-01-01T12:35:26.293Z" }, { url = "https://files.pythonhosted.org/packages/0e/ea/f431599f1ac42149ea4768e516db7cdae3a503a6646319ae63ab66da1486/pycares-5.0.1-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c1aa549b8c2f2e224215c793d660270778dcba9abc3b85abbc7c41eabe4f1e5", size = 221120, upload-time = "2026-01-01T12:35:27.143Z" }, @@ -6806,20 +5956,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, - { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, - { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, - { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, - { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, - { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, - { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, 
upload-time = "2025-11-04T13:39:44.553Z" }, - { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, - { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, - { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, - { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, - { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, - { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, { url = 
"https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, @@ -6876,22 +6012,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, - { url = 
"https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, - { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, - { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, - { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = 
"sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, - { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, - { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, - { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, - { url = 
"https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, - { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, - { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, - { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, ] [[package]] @@ -6909,7 +6033,7 @@ wheels = [ [package.optional-dependencies] pycountry = [ - { name = "pycountry", marker = "sys_platform == 'linux'" }, + { name = "pycountry" }, ] [[package]] @@ -7138,11 +6262,6 @@ version = "0.4.1" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/b6/34/b4e015b99031667a7b960f888889c5bd34ef585c85e1cb56a594b92836ac/pytokens-0.4.1.tar.gz", hash = "sha256:292052fe80923aae2260c073f822ceba21f3872ced9a68bb7953b348e561179a", size = 23015, upload-time = "2026-01-30T01:03:45.924Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/92/790ebe03f07b57e53b10884c329b9a1a308648fc083a6d4a39a10a28c8fc/pytokens-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d70e77c55ae8380c91c0c18dea05951482e263982911fc7410b1ffd1dadd3440", size = 160864, upload-time = "2026-01-30T01:02:57.882Z" }, - { url = "https://files.pythonhosted.org/packages/13/25/a4f555281d975bfdd1eba731450e2fe3a95870274da73fb12c40aeae7625/pytokens-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a58d057208cb9075c144950d789511220b07636dd2e4708d5645d24de666bdc", size = 248565, upload-time = "2026-01-30T01:02:59.912Z" }, - { url = "https://files.pythonhosted.org/packages/17/50/bc0394b4ad5b1601be22fa43652173d47e4c9efbf0044c62e9a59b747c56/pytokens-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b49750419d300e2b5a3813cf229d4e5a4c728dae470bcc89867a9ad6f25a722d", size = 260824, upload-time = "2026-01-30T01:03:01.471Z" }, - { url = "https://files.pythonhosted.org/packages/4e/54/3e04f9d92a4be4fc6c80016bc396b923d2a6933ae94b5f557c939c460ee0/pytokens-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9907d61f15bf7261d7e775bd5d7ee4d2930e04424bab1972591918497623a16", size = 264075, upload-time = "2026-01-30T01:03:04.143Z" }, - { url = "https://files.pythonhosted.org/packages/d1/1b/44b0326cb5470a4375f37988aea5d61b5cc52407143303015ebee94abfd6/pytokens-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:ee44d0f85b803321710f9239f335aafe16553b39106384cef8e6de40cb4ef2f6", size = 103323, upload-time = "2026-01-30T01:03:05.412Z" }, { url = 
"https://files.pythonhosted.org/packages/41/5d/e44573011401fb82e9d51e97f1290ceb377800fb4eed650b96f4753b499c/pytokens-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:140709331e846b728475786df8aeb27d24f48cbcf7bcd449f8de75cae7a45083", size = 160663, upload-time = "2026-01-30T01:03:06.473Z" }, { url = "https://files.pythonhosted.org/packages/f0/e6/5bbc3019f8e6f21d09c41f8b8654536117e5e211a85d89212d59cbdab381/pytokens-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d6c4268598f762bc8e91f5dbf2ab2f61f7b95bdc07953b602db879b3c8c18e1", size = 255626, upload-time = "2026-01-30T01:03:08.177Z" }, { url = "https://files.pythonhosted.org/packages/bf/3c/2d5297d82286f6f3d92770289fd439956b201c0a4fc7e72efb9b2293758e/pytokens-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24afde1f53d95348b5a0eb19488661147285ca4dd7ed752bbc3e1c6242a304d1", size = 269779, upload-time = "2026-01-30T01:03:09.756Z" }, @@ -7190,15 +6309,6 @@ version = "6.0.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, - { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = 
"2025-09-25T21:32:00.088Z" }, - { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, - { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, - { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, - { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, - { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, - { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = 
"2025-09-25T21:32:08.95Z" }, - { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, @@ -7248,16 +6358,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/04/0b/3c9baedbdf613ecaa7aa07027780b8867f57b6293b6ee50de316c9f3222b/pyzmq-27.1.0.tar.gz", hash = "sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540", size = 281750, upload-time = "2025-09-08T23:10:18.157Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/06/5d/305323ba86b284e6fcb0d842d6adaa2999035f70f8c38a9b6d21ad28c3d4/pyzmq-27.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:226b091818d461a3bef763805e75685e478ac17e9008f49fce2d3e52b3d58b86", size = 1333328, upload-time = "2025-09-08T23:07:45.946Z" }, - { url = 
"https://files.pythonhosted.org/packages/bd/a0/fc7e78a23748ad5443ac3275943457e8452da67fda347e05260261108cbc/pyzmq-27.1.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0790a0161c281ca9723f804871b4027f2e8b5a528d357c8952d08cd1a9c15581", size = 908803, upload-time = "2025-09-08T23:07:47.551Z" }, - { url = "https://files.pythonhosted.org/packages/7e/22/37d15eb05f3bdfa4abea6f6d96eb3bb58585fbd3e4e0ded4e743bc650c97/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c895a6f35476b0c3a54e3eb6ccf41bf3018de937016e6e18748317f25d4e925f", size = 668836, upload-time = "2025-09-08T23:07:49.436Z" }, - { url = "https://files.pythonhosted.org/packages/b1/c4/2a6fe5111a01005fc7af3878259ce17684fabb8852815eda6225620f3c59/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bbf8d3630bf96550b3be8e1fc0fea5cbdc8d5466c1192887bd94869da17a63e", size = 857038, upload-time = "2025-09-08T23:07:51.234Z" }, - { url = "https://files.pythonhosted.org/packages/cb/eb/bfdcb41d0db9cd233d6fb22dc131583774135505ada800ebf14dfb0a7c40/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:15c8bd0fe0dabf808e2d7a681398c4e5ded70a551ab47482067a572c054c8e2e", size = 1657531, upload-time = "2025-09-08T23:07:52.795Z" }, - { url = "https://files.pythonhosted.org/packages/ab/21/e3180ca269ed4a0de5c34417dfe71a8ae80421198be83ee619a8a485b0c7/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bafcb3dd171b4ae9f19ee6380dfc71ce0390fefaf26b504c0e5f628d7c8c54f2", size = 2034786, upload-time = "2025-09-08T23:07:55.047Z" }, - { url = "https://files.pythonhosted.org/packages/3b/b1/5e21d0b517434b7f33588ff76c177c5a167858cc38ef740608898cd329f2/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e829529fcaa09937189178115c49c504e69289abd39967cd8a4c215761373394", size = 1894220, upload-time = "2025-09-08T23:07:57.172Z" }, - { url = 
"https://files.pythonhosted.org/packages/03/f2/44913a6ff6941905efc24a1acf3d3cb6146b636c546c7406c38c49c403d4/pyzmq-27.1.0-cp311-cp311-win32.whl", hash = "sha256:6df079c47d5902af6db298ec92151db82ecb557af663098b92f2508c398bb54f", size = 567155, upload-time = "2025-09-08T23:07:59.05Z" }, - { url = "https://files.pythonhosted.org/packages/23/6d/d8d92a0eb270a925c9b4dd039c0b4dc10abc2fcbc48331788824ef113935/pyzmq-27.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:190cbf120fbc0fc4957b56866830def56628934a9d112aec0e2507aa6a032b97", size = 633428, upload-time = "2025-09-08T23:08:00.663Z" }, - { url = "https://files.pythonhosted.org/packages/ae/14/01afebc96c5abbbd713ecfc7469cfb1bc801c819a74ed5c9fad9a48801cb/pyzmq-27.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:eca6b47df11a132d1745eb3b5b5e557a7dae2c303277aa0e69c6ba91b8736e07", size = 559497, upload-time = "2025-09-08T23:08:02.15Z" }, { url = "https://files.pythonhosted.org/packages/92/e7/038aab64a946d535901103da16b953c8c9cc9c961dadcbf3609ed6428d23/pyzmq-27.1.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc", size = 1306279, upload-time = "2025-09-08T23:08:03.807Z" }, { url = "https://files.pythonhosted.org/packages/e8/5e/c3c49fdd0f535ef45eefcc16934648e9e59dace4a37ee88fc53f6cd8e641/pyzmq-27.1.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113", size = 895645, upload-time = "2025-09-08T23:08:05.301Z" }, { url = "https://files.pythonhosted.org/packages/f8/e5/b0b2504cb4e903a74dcf1ebae157f9e20ebb6ea76095f6cfffea28c42ecd/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233", size = 652574, upload-time = "2025-09-08T23:08:06.828Z" }, @@ -7290,26 +6390,21 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/c4/59/a5f38970f9bf07cee96128de79590bb354917914a9be11272cfc7ff26af0/pyzmq-27.1.0-cp314-cp314t-win32.whl", hash = "sha256:1f0b2a577fd770aa6f053211a55d1c47901f4d537389a034c690291485e5fe92", size = 587472, upload-time = "2025-09-08T23:08:58.18Z" }, { url = "https://files.pythonhosted.org/packages/70/d8/78b1bad170f93fcf5e3536e70e8fadac55030002275c9a29e8f5719185de/pyzmq-27.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:19c9468ae0437f8074af379e986c5d3d7d7bfe033506af442e8c879732bedbe0", size = 661401, upload-time = "2025-09-08T23:08:59.802Z" }, { url = "https://files.pythonhosted.org/packages/81/d6/4bfbb40c9a0b42fc53c7cf442f6385db70b40f74a783130c5d0a5aa62228/pyzmq-27.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dc5dbf68a7857b59473f7df42650c621d7e8923fb03fa74a526890f4d33cc4d7", size = 575170, upload-time = "2025-09-08T23:09:01.418Z" }, - { url = "https://files.pythonhosted.org/packages/4c/c6/c4dcdecdbaa70969ee1fdced6d7b8f60cfabe64d25361f27ac4665a70620/pyzmq-27.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:18770c8d3563715387139060d37859c02ce40718d1faf299abddcdcc6a649066", size = 836265, upload-time = "2025-09-08T23:09:49.376Z" }, - { url = "https://files.pythonhosted.org/packages/3e/79/f38c92eeaeb03a2ccc2ba9866f0439593bb08c5e3b714ac1d553e5c96e25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ac25465d42f92e990f8d8b0546b01c391ad431c3bf447683fdc40565941d0604", size = 800208, upload-time = "2025-09-08T23:09:51.073Z" }, - { url = "https://files.pythonhosted.org/packages/49/0e/3f0d0d335c6b3abb9b7b723776d0b21fa7f3a6c819a0db6097059aada160/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53b40f8ae006f2734ee7608d59ed661419f087521edbfc2149c3932e9c14808c", size = 567747, upload-time = "2025-09-08T23:09:52.698Z" }, - { url = 
"https://files.pythonhosted.org/packages/a1/cf/f2b3784d536250ffd4be70e049f3b60981235d70c6e8ce7e3ef21e1adb25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f605d884e7c8be8fe1aa94e0a783bf3f591b84c24e4bc4f3e7564c82ac25e271", size = 747371, upload-time = "2025-09-08T23:09:54.563Z" }, - { url = "https://files.pythonhosted.org/packages/01/1b/5dbe84eefc86f48473947e2f41711aded97eecef1231f4558f1f02713c12/pyzmq-27.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c9f7f6e13dff2e44a6afeaf2cf54cee5929ad64afaf4d40b50f93c58fc687355", size = 544862, upload-time = "2025-09-08T23:09:56.509Z" }, ] [[package]] name = "quack-kernels" -version = "0.2.10" +version = "0.2.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "apache-tvm-ffi", marker = "sys_platform == 'linux'" }, - { name = "nvidia-cutlass-dsl", marker = "sys_platform == 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "torch-c-dlpack-ext", marker = "sys_platform == 'linux'" }, + { name = "apache-tvm-ffi" }, + { name = "nvidia-cutlass-dsl" }, + { name = "torch" }, + { name = "torch-c-dlpack-ext" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/23/28/231c862500fec531080cc733e5766b46518edaefe0a068d46b276c380a25/quack_kernels-0.2.10.tar.gz", hash = "sha256:df86e981ea76542467ae2cd9ac606d587658e8d648a51c34dc0f2913a3e26bf6", size = 161102, upload-time = "2026-02-18T22:20:50.17Z" } +sdist = { url = "https://files.pythonhosted.org/packages/89/de/472a20a625495e31c33a99a30867c1d58335a1afa02dc30019f667702d1d/quack_kernels-0.2.5.tar.gz", hash = "sha256:06241a5962c09b4a2c27d4d21208e31790836fecde4373c6e9d874fdd88b5590", size = 152256, upload-time = "2026-01-31T09:07:09.998Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/10/ac/8f70ddd397aff1d606d7aa6fbe857a2bc58a817965099cd97d91264175a8/quack_kernels-0.2.10-py3-none-any.whl", hash = "sha256:a5b604c5cf28d9e601aae00488b6b603bb4060ccab8409a4443e72a649226f74", size = 165298, upload-time = "2026-02-18T22:20:48.978Z" }, + { url = "https://files.pythonhosted.org/packages/e4/7a/1a6d9997f979ce6985210a1783766b6c9b85bf6c21dcb990728526ca4d41/quack_kernels-0.2.5-py3-none-any.whl", hash = "sha256:5f7c246c8cb55c560f7601c952d60bddb4ba3e5c741220703a0c781a0aac3aa2", size = 156759, upload-time = "2026-01-31T09:07:08.989Z" }, ] [[package]] @@ -7317,18 +6412,16 @@ name = "ray" version = "2.54.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "click", marker = "sys_platform == 'linux'" }, - { name = "filelock", marker = "sys_platform == 'linux'" }, - { name = "jsonschema", marker = "sys_platform == 'linux'" }, - { name = "msgpack", marker = "sys_platform == 'linux'" }, - { name = "packaging", marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "pyyaml", marker = "sys_platform == 'linux'" }, - { name = "requests", marker = "sys_platform == 'linux'" }, + { name = "click" }, + { name = "filelock" }, + { name = "jsonschema" }, + { name = "msgpack" }, + { name = "packaging" }, + { name = "protobuf" }, + { name = "pyyaml" }, + { name = "requests" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/29/7871f4206e6b00a9bb784c16dad32ccd01e9df5a93545db92de220eb2871/ray-2.54.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:491ae56ab80d8822c4eaf4d5bb96dcf32a6231d8d7b76eb8034400eb9be1bb18", size = 72066630, upload-time = "2026-02-18T04:05:04.957Z" }, - { url = "https://files.pythonhosted.org/packages/1d/e8/d2c8ebd9cd945abc817b01ad02a29df78cdb86cd07d764587e16977389d0/ray-2.54.0-cp311-cp311-manylinux2014_x86_64.whl", hash = 
"sha256:928bb09245a3c6f7c3c113ba8eafc69f948da9602d7f33e8251ecdf97c157615", size = 72895723, upload-time = "2026-02-18T04:05:10.686Z" }, { url = "https://files.pythonhosted.org/packages/60/ad/e07aca3637e9c3ec4857ec4366208099cf8488ece8061a9925ba29b66382/ray-2.54.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:795ae21d6b764245d3f521bc5833446d58569e7dfde9c5777417eb285d87450f", size = 72107346, upload-time = "2026-02-18T04:05:27.999Z" }, { url = "https://files.pythonhosted.org/packages/9e/b9/cc5ea8460c3dc602e6b7198277a7c59ba2b8929374ab22efa8df9f3deac8/ray-2.54.0-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:a972afd5aa3dda99d0b2f369b5f62e5dd95865ab7d37bf2e0a0e0d2cfbd9b325", size = 72967230, upload-time = "2026-02-18T04:05:33.771Z" }, { url = "https://files.pythonhosted.org/packages/fd/8c/4a4a38eaec6e9614076a96967f58540f4f8d4aa0c793f43150c5df23cb9a/ray-2.54.0-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:8952c23a8aa94f10728c2d16e0dc3732d09aa0e6254801757ff494984a214f45", size = 72013826, upload-time = "2026-02-18T04:05:49.866Z" }, @@ -7337,7 +6430,7 @@ wheels = [ [package.optional-dependencies] cgraph = [ - { name = "cupy-cuda12x", marker = "sys_platform == 'linux'" }, + { name = "cupy-cuda12x", marker = "sys_platform != 'darwin'" }, ] [[package]] @@ -7360,22 +6453,6 @@ version = "2026.1.15" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/0b/86/07d5056945f9ec4590b518171c4254a5925832eb727b56d3c38a7476f316/regex-2026.1.15.tar.gz", hash = "sha256:164759aa25575cbc0651bef59a0b18353e54300d79ace8084c818ad8ac72b7d5", size = 414811, upload-time = "2026-01-14T23:18:02.775Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/c9/0c80c96eab96948363d270143138d671d5731c3a692b417629bf3492a9d6/regex-2026.1.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ae6020fb311f68d753b7efa9d4b9a5d47a5d6466ea0d5e3b5a471a960ea6e4a", size = 488168, upload-time = "2026-01-14T23:14:16.129Z" }, - 
{ url = "https://files.pythonhosted.org/packages/17/f0/271c92f5389a552494c429e5cc38d76d1322eb142fb5db3c8ccc47751468/regex-2026.1.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eddf73f41225942c1f994914742afa53dc0d01a6e20fe14b878a1b1edc74151f", size = 290636, upload-time = "2026-01-14T23:14:17.715Z" }, - { url = "https://files.pythonhosted.org/packages/a0/f9/5f1fd077d106ca5655a0f9ff8f25a1ab55b92128b5713a91ed7134ff688e/regex-2026.1.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e8cd52557603f5c66a548f69421310886b28b7066853089e1a71ee710e1cdc1", size = 288496, upload-time = "2026-01-14T23:14:19.326Z" }, - { url = "https://files.pythonhosted.org/packages/b5/e1/8f43b03a4968c748858ec77f746c286d81f896c2e437ccf050ebc5d3128c/regex-2026.1.15-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5170907244b14303edc5978f522f16c974f32d3aa92109fabc2af52411c9433b", size = 793503, upload-time = "2026-01-14T23:14:20.922Z" }, - { url = "https://files.pythonhosted.org/packages/8d/4e/a39a5e8edc5377a46a7c875c2f9a626ed3338cb3bb06931be461c3e1a34a/regex-2026.1.15-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2748c1ec0663580b4510bd89941a31560b4b439a0b428b49472a3d9944d11cd8", size = 860535, upload-time = "2026-01-14T23:14:22.405Z" }, - { url = "https://files.pythonhosted.org/packages/dc/1c/9dce667a32a9477f7a2869c1c767dc00727284a9fa3ff5c09a5c6c03575e/regex-2026.1.15-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2f2775843ca49360508d080eaa87f94fa248e2c946bbcd963bb3aae14f333413", size = 907225, upload-time = "2026-01-14T23:14:23.897Z" }, - { url = "https://files.pythonhosted.org/packages/a4/3c/87ca0a02736d16b6262921425e84b48984e77d8e4e572c9072ce96e66c30/regex-2026.1.15-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9ea2604370efc9a174c1b5dcc81784fb040044232150f7f33756049edfc9026", 
size = 800526, upload-time = "2026-01-14T23:14:26.039Z" }, - { url = "https://files.pythonhosted.org/packages/4b/ff/647d5715aeea7c87bdcbd2f578f47b415f55c24e361e639fe8c0cc88878f/regex-2026.1.15-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0dcd31594264029b57bf16f37fd7248a70b3b764ed9e0839a8f271b2d22c0785", size = 773446, upload-time = "2026-01-14T23:14:28.109Z" }, - { url = "https://files.pythonhosted.org/packages/af/89/bf22cac25cb4ba0fe6bff52ebedbb65b77a179052a9d6037136ae93f42f4/regex-2026.1.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c08c1f3e34338256732bd6938747daa3c0d5b251e04b6e43b5813e94d503076e", size = 783051, upload-time = "2026-01-14T23:14:29.929Z" }, - { url = "https://files.pythonhosted.org/packages/1e/f4/6ed03e71dca6348a5188363a34f5e26ffd5db1404780288ff0d79513bce4/regex-2026.1.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e43a55f378df1e7a4fa3547c88d9a5a9b7113f653a66821bcea4718fe6c58763", size = 854485, upload-time = "2026-01-14T23:14:31.366Z" }, - { url = "https://files.pythonhosted.org/packages/d9/9a/8e8560bd78caded8eb137e3e47612430a05b9a772caf60876435192d670a/regex-2026.1.15-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:f82110ab962a541737bd0ce87978d4c658f06e7591ba899192e2712a517badbb", size = 762195, upload-time = "2026-01-14T23:14:32.802Z" }, - { url = "https://files.pythonhosted.org/packages/38/6b/61fc710f9aa8dfcd764fe27d37edfaa023b1a23305a0d84fccd5adb346ea/regex-2026.1.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:27618391db7bdaf87ac6c92b31e8f0dfb83a9de0075855152b720140bda177a2", size = 845986, upload-time = "2026-01-14T23:14:34.898Z" }, - { url = "https://files.pythonhosted.org/packages/fd/2e/fbee4cb93f9d686901a7ca8d94285b80405e8c34fe4107f63ffcbfb56379/regex-2026.1.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bfb0d6be01fbae8d6655c8ca21b3b72458606c4aec9bbc932db758d47aba6db1", size = 788992, upload-time = "2026-01-14T23:14:37.116Z" }, - { url = 
"https://files.pythonhosted.org/packages/ed/14/3076348f3f586de64b1ab75a3fbabdaab7684af7f308ad43be7ef1849e55/regex-2026.1.15-cp311-cp311-win32.whl", hash = "sha256:b10e42a6de0e32559a92f2f8dc908478cc0fa02838d7dbe764c44dca3fa13569", size = 265893, upload-time = "2026-01-14T23:14:38.426Z" }, - { url = "https://files.pythonhosted.org/packages/0f/19/772cf8b5fc803f5c89ba85d8b1870a1ca580dc482aa030383a9289c82e44/regex-2026.1.15-cp311-cp311-win_amd64.whl", hash = "sha256:e9bf3f0bbdb56633c07d7116ae60a576f846efdd86a8848f8d62b749e1209ca7", size = 277840, upload-time = "2026-01-14T23:14:39.785Z" }, - { url = "https://files.pythonhosted.org/packages/78/84/d05f61142709474da3c0853222d91086d3e1372bcdab516c6fd8d80f3297/regex-2026.1.15-cp311-cp311-win_arm64.whl", hash = "sha256:41aef6f953283291c4e4e6850607bd71502be67779586a61472beacb315c97ec", size = 270374, upload-time = "2026-01-14T23:14:41.592Z" }, { url = "https://files.pythonhosted.org/packages/92/81/10d8cf43c807d0326efe874c1b79f22bfb0fb226027b0b19ebc26d301408/regex-2026.1.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4c8fcc5793dde01641a35905d6731ee1548f02b956815f8f1cab89e515a5bdf1", size = 489398, upload-time = "2026-01-14T23:14:43.741Z" }, { url = "https://files.pythonhosted.org/packages/90/b0/7c2a74e74ef2a7c32de724658a69a862880e3e4155cba992ba04d1c70400/regex-2026.1.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bfd876041a956e6a90ad7cdb3f6a630c07d491280bfeed4544053cd434901681", size = 291339, upload-time = "2026-01-14T23:14:45.183Z" }, { url = "https://files.pythonhosted.org/packages/19/4d/16d0773d0c818417f4cc20aa0da90064b966d22cd62a8c46765b5bd2d643/regex-2026.1.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9250d087bc92b7d4899ccd5539a1b2334e44eee85d848c4c1aef8e221d3f8c8f", size = 289003, upload-time = "2026-01-14T23:14:47.25Z" }, @@ -7531,21 +6608,6 @@ version = "0.7.6" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/e5/f5/8bed2310abe4ae04b67a38374a4d311dd85220f5d8da56f47ae9361be0b0/rignore-0.7.6.tar.gz", hash = "sha256:00d3546cd793c30cb17921ce674d2c8f3a4b00501cb0e3dd0e82217dbeba2671", size = 57140, upload-time = "2025-11-05T21:41:21.968Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/41/b6e2be3069ef3b7f24e35d2911bd6deb83d20ed5642ad81d5a6d1c015473/rignore-0.7.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:40be8226e12d6653abbebaffaea2885f80374c1c8f76fe5ca9e0cadd120a272c", size = 885285, upload-time = "2025-11-05T20:42:39.763Z" }, - { url = "https://files.pythonhosted.org/packages/52/66/ba7f561b6062402022887706a7f2b2c2e2e2a28f1e3839202b0a2f77e36d/rignore-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182f4e5e4064d947c756819446a7d4cdede8e756b8c81cf9e509683fe38778d7", size = 823882, upload-time = "2025-11-05T20:42:23.488Z" }, - { url = "https://files.pythonhosted.org/packages/f5/81/4087453df35a90b07370647b19017029324950c1b9137d54bf1f33843f17/rignore-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16b63047648a916a87be1e51bb5c009063f1b8b6f5afe4f04f875525507e63dc", size = 899362, upload-time = "2025-11-05T20:40:51.111Z" }, - { url = "https://files.pythonhosted.org/packages/fb/c9/390a8fdfabb76d71416be773bd9f162977bd483084f68daf19da1dec88a6/rignore-0.7.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ba5524f5178deca4d7695e936604ebc742acb8958f9395776e1fcb8133f8257a", size = 873633, upload-time = "2025-11-05T20:41:06.193Z" }, - { url = "https://files.pythonhosted.org/packages/df/c9/79404fcb0faa76edfbc9df0901f8ef18568d1104919ebbbad6d608c888d1/rignore-0.7.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:62020dbb89a1dd4b84ab3d60547b3b2eb2723641d5fb198463643f71eaaed57d", size = 1167633, upload-time = "2025-11-05T20:41:22.491Z" }, - { url = 
"https://files.pythonhosted.org/packages/6e/8d/b3466d32d445d158a0aceb80919085baaae495b1f540fb942f91d93b5e5b/rignore-0.7.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b34acd532769d5a6f153a52a98dcb81615c949ab11697ce26b2eb776af2e174d", size = 941434, upload-time = "2025-11-05T20:41:38.151Z" }, - { url = "https://files.pythonhosted.org/packages/e8/40/9cd949761a7af5bc27022a939c91ff622d29c7a0b66d0c13a863097dde2d/rignore-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c5e53b752f9de44dff7b3be3c98455ce3bf88e69d6dc0cf4f213346c5e3416c", size = 959461, upload-time = "2025-11-05T20:42:08.476Z" }, - { url = "https://files.pythonhosted.org/packages/b5/87/1e1a145731f73bdb7835e11f80da06f79a00d68b370d9a847de979575e6d/rignore-0.7.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25b3536d13a5d6409ce85f23936f044576eeebf7b6db1d078051b288410fc049", size = 985323, upload-time = "2025-11-05T20:41:52.735Z" }, - { url = "https://files.pythonhosted.org/packages/6c/31/1ecff992fc3f59c4fcdcb6c07d5f6c1e6dfb55ccda19c083aca9d86fa1c6/rignore-0.7.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e01cad2b0b92f6b1993f29fc01f23f2d78caf4bf93b11096d28e9d578eb08ce", size = 1079173, upload-time = "2025-11-05T21:40:12.007Z" }, - { url = "https://files.pythonhosted.org/packages/17/18/162eedadb4c2282fa4c521700dbf93c9b14b8842e8354f7d72b445b8d593/rignore-0.7.6-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5991e46ab9b4868334c9e372ab0892b0150f3f586ff2b1e314272caeb38aaedb", size = 1139012, upload-time = "2025-11-05T21:40:29.399Z" }, - { url = "https://files.pythonhosted.org/packages/78/96/a9ca398a8af74bb143ad66c2a31303c894111977e28b0d0eab03867f1b43/rignore-0.7.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6c8ae562e5d1246cba5eaeb92a47b2a279e7637102828dde41dcbe291f529a3e", size = 1118827, upload-time = "2025-11-05T21:40:46.6Z" }, - { url = 
"https://files.pythonhosted.org/packages/9f/22/1c1a65047df864def9a047dbb40bc0b580b8289a4280e62779cd61ae21f2/rignore-0.7.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aaf938530dcc0b47c4cfa52807aa2e5bfd5ca6d57a621125fe293098692f6345", size = 1128182, upload-time = "2025-11-05T21:41:04.239Z" }, - { url = "https://files.pythonhosted.org/packages/bd/f4/1526eb01fdc2235aca1fd9d0189bee4021d009a8dcb0161540238c24166e/rignore-0.7.6-cp311-cp311-win32.whl", hash = "sha256:166ebce373105dd485ec213a6a2695986346e60c94ff3d84eb532a237b24a4d5", size = 646547, upload-time = "2025-11-05T21:41:49.439Z" }, - { url = "https://files.pythonhosted.org/packages/7c/c8/dda0983e1845706beb5826459781549a840fe5a7eb934abc523e8cd17814/rignore-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:44f35ee844b1a8cea50d056e6a595190ce9d42d3cccf9f19d280ae5f3058973a", size = 727139, upload-time = "2025-11-05T21:41:34.367Z" }, - { url = "https://files.pythonhosted.org/packages/e3/47/eb1206b7bf65970d41190b879e1723fc6bbdb2d45e53565f28991a8d9d96/rignore-0.7.6-cp311-cp311-win_arm64.whl", hash = "sha256:14b58f3da4fa3d5c3fa865cab49821675371f5e979281c683e131ae29159a581", size = 657598, upload-time = "2025-11-05T21:41:23.758Z" }, { url = "https://files.pythonhosted.org/packages/0b/0e/012556ef3047a2628842b44e753bb15f4dc46806780ff090f1e8fe4bf1eb/rignore-0.7.6-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:03e82348cb7234f8d9b2834f854400ddbbd04c0f8f35495119e66adbd37827a8", size = 883488, upload-time = "2025-11-05T20:42:41.359Z" }, { url = "https://files.pythonhosted.org/packages/93/b0/d4f1f3fe9eb3f8e382d45ce5b0547ea01c4b7e0b4b4eb87bcd66a1d2b888/rignore-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9e624f6be6116ea682e76c5feb71ea91255c67c86cb75befe774365b2931961", size = 820411, upload-time = "2025-11-05T20:42:24.782Z" }, { url = 
"https://files.pythonhosted.org/packages/4a/c8/dea564b36dedac8de21c18e1851789545bc52a0c22ece9843444d5608a6a/rignore-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bda49950d405aa8d0ebe26af807c4e662dd281d926530f03f29690a2e07d649a", size = 897821, upload-time = "2025-11-05T20:40:52.613Z" }, @@ -7606,18 +6668,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fc/d3/18210222b37e87e36357f7b300b7d98c6dd62b133771e71ae27acba83a4f/rignore-0.7.6-cp314-cp314t-win32.whl", hash = "sha256:c1d8f117f7da0a4a96a8daef3da75bc090e3792d30b8b12cfadc240c631353f9", size = 647033, upload-time = "2025-11-05T21:42:00.095Z" }, { url = "https://files.pythonhosted.org/packages/3e/87/033eebfbee3ec7d92b3bb1717d8f68c88e6fc7de54537040f3b3a405726f/rignore-0.7.6-cp314-cp314t-win_amd64.whl", hash = "sha256:ca36e59408bec81de75d307c568c2d0d410fb880b1769be43611472c61e85c96", size = 725647, upload-time = "2025-11-05T21:41:44.449Z" }, { url = "https://files.pythonhosted.org/packages/79/62/b88e5879512c55b8ee979c666ee6902adc4ed05007226de266410ae27965/rignore-0.7.6-cp314-cp314t-win_arm64.whl", hash = "sha256:b83adabeb3e8cf662cabe1931b83e165b88c526fa6af6b3aa90429686e474896", size = 656035, upload-time = "2025-11-05T21:41:31.13Z" }, - { url = "https://files.pythonhosted.org/packages/82/78/a6250ff0c49a3cdb943910ada4116e708118e9b901c878cfae616c80a904/rignore-0.7.6-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a20b6fb61bcced9a83dfcca6599ad45182b06ba720cff7c8d891e5b78db5b65f", size = 886470, upload-time = "2025-11-05T20:42:52.314Z" }, - { url = "https://files.pythonhosted.org/packages/35/af/c69c0c51b8f9f7914d95c4ea91c29a2ac067572048cae95dd6d2efdbe05d/rignore-0.7.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:392dcabfecbe176c9ebbcb40d85a5e86a5989559c4f988c2741da7daf1b5be25", size = 825976, upload-time = "2025-11-05T20:42:35.118Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/d2/1b264f56132264ea609d3213ab603d6a27016b19559a1a1ede1a66a03dcd/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22baa462abdc36fdd5a5e2dae423107723351b85ff093762f9261148b9d0a04a", size = 899739, upload-time = "2025-11-05T20:41:01.518Z" }, - { url = "https://files.pythonhosted.org/packages/55/e4/b3c5dfdd8d8a10741dfe7199ef45d19a0e42d0c13aa377c83bd6caf65d90/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53fb28882d2538cb2d231972146c4927a9d9455e62b209f85d634408c4103538", size = 874843, upload-time = "2025-11-05T20:41:17.687Z" }, - { url = "https://files.pythonhosted.org/packages/cc/10/d6f3750233881a2a154cefc9a6a0a9b19da526b19f7f08221b552c6f827d/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87409f7eeb1103d6b77f3472a3a0d9a5953e3ae804a55080bdcb0120ee43995b", size = 1170348, upload-time = "2025-11-05T20:41:34.21Z" }, - { url = "https://files.pythonhosted.org/packages/6e/10/ad98ca05c9771c15af734cee18114a3c280914b6e34fde9ffea2e61e88aa/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:684014e42e4341ab3ea23a203551857fcc03a7f8ae96ca3aefb824663f55db32", size = 942315, upload-time = "2025-11-05T20:41:48.508Z" }, - { url = "https://files.pythonhosted.org/packages/de/00/ab5c0f872acb60d534e687e629c17e0896c62da9b389c66d3aa16b817aa8/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77356ebb01ba13f8a425c3d30fcad40e57719c0e37670d022d560884a30e4767", size = 961047, upload-time = "2025-11-05T20:42:19.403Z" }, - { url = "https://files.pythonhosted.org/packages/b8/86/3030fdc363a8f0d1cd155b4c453d6db9bab47a24fcc64d03f61d9d78fe6a/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6cbd8a48abbd3747a6c830393cd578782fab5d43f4deea48c5f5e344b8fed2b0", size = 986090, upload-time = 
"2025-11-05T20:42:03.581Z" }, - { url = "https://files.pythonhosted.org/packages/33/b8/133aa4002cee0ebbb39362f94e4898eec7fbd09cec9fcbce1cd65b355b7f/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2673225dcec7f90497e79438c35e34638d0d0391ccea3cbb79bfb9adc0dc5bd7", size = 1079656, upload-time = "2025-11-05T21:40:24.89Z" }, - { url = "https://files.pythonhosted.org/packages/67/56/36d5d34210e5e7dfcd134eed8335b19e80ae940ee758f493e4f2b344dd70/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:c081f17290d8a2b96052b79207622aa635686ea39d502b976836384ede3d303c", size = 1139789, upload-time = "2025-11-05T21:40:42.119Z" }, - { url = "https://files.pythonhosted.org/packages/6b/5b/bb4f9420802bf73678033a4a55ab1bede36ce2e9b41fec5f966d83d932b3/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:57e8327aacc27f921968cb2a174f9e47b084ce9a7dd0122c8132d22358f6bd79", size = 1120308, upload-time = "2025-11-05T21:40:59.402Z" }, - { url = "https://files.pythonhosted.org/packages/ce/8b/a1299085b28a2f6135e30370b126e3c5055b61908622f2488ade67641479/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d8955b57e42f2a5434670d5aa7b75eaf6e74602ccd8955dddf7045379cd762fb", size = 1129444, upload-time = "2025-11-05T21:41:17.906Z" }, ] [[package]] @@ -7626,21 +6676,6 @@ version = "0.30.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/6e/f964e88b3d2abee2a82c1ac8366da848fce1c6d834dc2132c3fda3970290/rpds_py-0.30.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a2bffea6a4ca9f01b3f8e548302470306689684e61602aa3d141e34da06cf425", size = 370157, upload-time = 
"2025-11-30T20:21:53.789Z" }, - { url = "https://files.pythonhosted.org/packages/94/ba/24e5ebb7c1c82e74c4e4f33b2112a5573ddc703915b13a073737b59b86e0/rpds_py-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc4f992dfe1e2bc3ebc7444f6c7051b4bc13cd8e33e43511e8ffd13bf407010d", size = 359676, upload-time = "2025-11-30T20:21:55.475Z" }, - { url = "https://files.pythonhosted.org/packages/84/86/04dbba1b087227747d64d80c3b74df946b986c57af0a9f0c98726d4d7a3b/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:422c3cb9856d80b09d30d2eb255d0754b23e090034e1deb4083f8004bd0761e4", size = 389938, upload-time = "2025-11-30T20:21:57.079Z" }, - { url = "https://files.pythonhosted.org/packages/42/bb/1463f0b1722b7f45431bdd468301991d1328b16cffe0b1c2918eba2c4eee/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07ae8a593e1c3c6b82ca3292efbe73c30b61332fd612e05abee07c79359f292f", size = 402932, upload-time = "2025-11-30T20:21:58.47Z" }, - { url = "https://files.pythonhosted.org/packages/99/ee/2520700a5c1f2d76631f948b0736cdf9b0acb25abd0ca8e889b5c62ac2e3/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f90dd7557b6bd57f40abe7747e81e0c0b119bef015ea7726e69fe550e394a4", size = 525830, upload-time = "2025-11-30T20:21:59.699Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ad/bd0331f740f5705cc555a5e17fdf334671262160270962e69a2bdef3bf76/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99b47d6ad9a6da00bec6aabe5a6279ecd3c06a329d4aa4771034a21e335c3a97", size = 412033, upload-time = "2025-11-30T20:22:00.991Z" }, - { url = "https://files.pythonhosted.org/packages/f8/1e/372195d326549bb51f0ba0f2ecb9874579906b97e08880e7a65c3bef1a99/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33f559f3104504506a44bb666b93a33f5d33133765b0c216a5bf2f1e1503af89", size = 390828, upload-time = 
"2025-11-30T20:22:02.723Z" }, - { url = "https://files.pythonhosted.org/packages/ab/2b/d88bb33294e3e0c76bc8f351a3721212713629ffca1700fa94979cb3eae8/rpds_py-0.30.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:946fe926af6e44f3697abbc305ea168c2c31d3e3ef1058cf68f379bf0335a78d", size = 404683, upload-time = "2025-11-30T20:22:04.367Z" }, - { url = "https://files.pythonhosted.org/packages/50/32/c759a8d42bcb5289c1fac697cd92f6fe01a018dd937e62ae77e0e7f15702/rpds_py-0.30.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:495aeca4b93d465efde585977365187149e75383ad2684f81519f504f5c13038", size = 421583, upload-time = "2025-11-30T20:22:05.814Z" }, - { url = "https://files.pythonhosted.org/packages/2b/81/e729761dbd55ddf5d84ec4ff1f47857f4374b0f19bdabfcf929164da3e24/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9a0ca5da0386dee0655b4ccdf46119df60e0f10da268d04fe7cc87886872ba7", size = 572496, upload-time = "2025-11-30T20:22:07.713Z" }, - { url = "https://files.pythonhosted.org/packages/14/f6/69066a924c3557c9c30baa6ec3a0aa07526305684c6f86c696b08860726c/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d6d1cc13664ec13c1b84241204ff3b12f9bb82464b8ad6e7a5d3486975c2eed", size = 598669, upload-time = "2025-11-30T20:22:09.312Z" }, - { url = "https://files.pythonhosted.org/packages/5f/48/905896b1eb8a05630d20333d1d8ffd162394127b74ce0b0784ae04498d32/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3896fa1be39912cf0757753826bc8bdc8ca331a28a7c4ae46b7a21280b06bb85", size = 561011, upload-time = "2025-11-30T20:22:11.309Z" }, - { url = "https://files.pythonhosted.org/packages/22/16/cd3027c7e279d22e5eb431dd3c0fbc677bed58797fe7581e148f3f68818b/rpds_py-0.30.0-cp311-cp311-win32.whl", hash = "sha256:55f66022632205940f1827effeff17c4fa7ae1953d2b74a8581baaefb7d16f8c", size = 221406, upload-time = "2025-11-30T20:22:13.101Z" }, - { url = 
"https://files.pythonhosted.org/packages/fa/5b/e7b7aa136f28462b344e652ee010d4de26ee9fd16f1bfd5811f5153ccf89/rpds_py-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:a51033ff701fca756439d641c0ad09a41d9242fa69121c7d8769604a0a629825", size = 236024, upload-time = "2025-11-30T20:22:14.853Z" }, - { url = "https://files.pythonhosted.org/packages/14/a6/364bba985e4c13658edb156640608f2c9e1d3ea3c81b27aa9d889fff0e31/rpds_py-0.30.0-cp311-cp311-win_arm64.whl", hash = "sha256:47b0ef6231c58f506ef0b74d44e330405caa8428e770fec25329ed2cb971a229", size = 229069, upload-time = "2025-11-30T20:22:16.577Z" }, { url = "https://files.pythonhosted.org/packages/03/e7/98a2f4ac921d82f33e03f3835f5bf3a4a40aa1bfdc57975e74a97b2b4bdd/rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad", size = 375086, upload-time = "2025-11-30T20:22:17.93Z" }, { url = "https://files.pythonhosted.org/packages/4d/a1/bca7fd3d452b272e13335db8d6b0b3ecde0f90ad6f16f3328c6fb150c889/rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05", size = 359053, upload-time = "2025-11-30T20:22:19.297Z" }, { url = "https://files.pythonhosted.org/packages/65/1c/ae157e83a6357eceff62ba7e52113e3ec4834a84cfe07fa4b0757a7d105f/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28", size = 390763, upload-time = "2025-11-30T20:22:21.661Z" }, @@ -7714,18 +6749,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030, upload-time = "2025-11-30T20:24:10.956Z" }, { url = 
"https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570, upload-time = "2025-11-30T20:24:12.735Z" }, { url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" }, - { url = "https://files.pythonhosted.org/packages/69/71/3f34339ee70521864411f8b6992e7ab13ac30d8e4e3309e07c7361767d91/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c2262bdba0ad4fc6fb5545660673925c2d2a5d9e2e0fb603aad545427be0fc58", size = 372292, upload-time = "2025-11-30T20:24:16.537Z" }, - { url = "https://files.pythonhosted.org/packages/57/09/f183df9b8f2d66720d2ef71075c59f7e1b336bec7ee4c48f0a2b06857653/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ee6af14263f25eedc3bb918a3c04245106a42dfd4f5c2285ea6f997b1fc3f89a", size = 362128, upload-time = "2025-11-30T20:24:18.086Z" }, - { url = "https://files.pythonhosted.org/packages/7a/68/5c2594e937253457342e078f0cc1ded3dd7b2ad59afdbf2d354869110a02/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3adbb8179ce342d235c31ab8ec511e66c73faa27a47e076ccc92421add53e2bb", size = 391542, upload-time = "2025-11-30T20:24:20.092Z" }, - { url = "https://files.pythonhosted.org/packages/49/5c/31ef1afd70b4b4fbdb2800249f34c57c64beb687495b10aec0365f53dfc4/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:250fa00e9543ac9b97ac258bd37367ff5256666122c2d0f2bc97577c60a1818c", size = 404004, upload-time = "2025-11-30T20:24:22.231Z" }, - { url = 
"https://files.pythonhosted.org/packages/e3/63/0cfbea38d05756f3440ce6534d51a491d26176ac045e2707adc99bb6e60a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9854cf4f488b3d57b9aaeb105f06d78e5529d3145b1e4a41750167e8c213c6d3", size = 527063, upload-time = "2025-11-30T20:24:24.302Z" }, - { url = "https://files.pythonhosted.org/packages/42/e6/01e1f72a2456678b0f618fc9a1a13f882061690893c192fcad9f2926553a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:993914b8e560023bc0a8bf742c5f303551992dcb85e247b1e5c7f4a7d145bda5", size = 413099, upload-time = "2025-11-30T20:24:25.916Z" }, - { url = "https://files.pythonhosted.org/packages/b8/25/8df56677f209003dcbb180765520c544525e3ef21ea72279c98b9aa7c7fb/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58edca431fb9b29950807e301826586e5bbf24163677732429770a697ffe6738", size = 392177, upload-time = "2025-11-30T20:24:27.834Z" }, - { url = "https://files.pythonhosted.org/packages/4a/b4/0a771378c5f16f8115f796d1f437950158679bcd2a7c68cf251cfb00ed5b/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:dea5b552272a944763b34394d04577cf0f9bd013207bc32323b5a89a53cf9c2f", size = 406015, upload-time = "2025-11-30T20:24:29.457Z" }, - { url = "https://files.pythonhosted.org/packages/36/d8/456dbba0af75049dc6f63ff295a2f92766b9d521fa00de67a2bd6427d57a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ba3af48635eb83d03f6c9735dfb21785303e73d22ad03d489e88adae6eab8877", size = 423736, upload-time = "2025-11-30T20:24:31.22Z" }, - { url = "https://files.pythonhosted.org/packages/13/64/b4d76f227d5c45a7e0b796c674fd81b0a6c4fbd48dc29271857d8219571c/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:dff13836529b921e22f15cb099751209a60009731a68519630a24d61f0b1b30a", size = 573981, upload-time = "2025-11-30T20:24:32.934Z" }, - { url 
= "https://files.pythonhosted.org/packages/20/91/092bacadeda3edf92bf743cc96a7be133e13a39cdbfd7b5082e7ab638406/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b151685b23929ab7beec71080a8889d4d6d9fa9a983d213f07121205d48e2c4", size = 599782, upload-time = "2025-11-30T20:24:35.169Z" }, - { url = "https://files.pythonhosted.org/packages/d1/b7/b95708304cd49b7b6f82fdd039f1748b66ec2b21d6a45180910802f1abf1/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e", size = 562191, upload-time = "2025-11-30T20:24:36.853Z" }, ] [[package]] @@ -7782,8 +6805,7 @@ dependencies = [ { name = "aiohttp", extra = ["speedups"] }, { name = "aiohttp-retry" }, { name = "backoff" }, - { name = "boto3", version = "1.41.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "boto3", version = "1.42.52", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, + { name = "boto3" }, { name = "click" }, { name = "colorama" }, { name = "cryptography" }, @@ -7809,87 +6831,21 @@ wheels = [ name = "s3fs" version = "0.4.2" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.12.*' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - 
"python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < '3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] dependencies = [ - { name = "botocore", version = "1.42.52", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, - { name = "fsspec", marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, + { name = "botocore" }, + { name = "fsspec" }, ] sdist = { url = "https://files.pythonhosted.org/packages/d9/9a/504cb277632c4d325beabbd03bb43778f0decb9be22d9e0e6c62f44540c7/s3fs-0.4.2.tar.gz", hash = "sha256:2ca5de8dc18ad7ad350c0bd01aef0406aa5d0fff78a561f0f710f9d9858abdd0", size = 57527, upload-time = "2020-03-31T15:24:26.388Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/b8/e4/b8fc59248399d2482b39340ec9be4bb2493846ac23641b43115a7e5cd675/s3fs-0.4.2-py3-none-any.whl", hash = "sha256:91c1dfb45e5217bd441a7a560946fe865ced6225ff7eb0fb459fe6e601a95ed3", size = 19791, upload-time = "2020-03-31T15:24:24.952Z" }, ] -[[package]] -name = "s3fs" -version = "2025.9.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", -] -dependencies = [ - { name = "aiobotocore", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "aiohttp", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, - { name = "fsspec", marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/ee/f3/8e6371436666aedfd16e63ff68a51b8a8fcf5f33a0eee33c35e0b2476b27/s3fs-2025.9.0.tar.gz", hash = "sha256:6d44257ef19ea64968d0720744c4af7a063a05f5c1be0e17ce943bef7302bc30", size = 77823, upload-time = "2025-09-02T19:18:21.781Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/37/b3/ca7d58ca25b1bb6df57e6cbd0ca8d6437a4b9ce1cd35adc8a6b2949c113b/s3fs-2025.9.0-py3-none-any.whl", hash = "sha256:c33c93d48f66ed440dbaf6600be149cdf8beae4b6f8f0201a209c5801aeb7e30", size = 30319, upload-time = "2025-09-02T19:18:20.563Z" }, -] - -[[package]] -name = "s3transfer" -version = "0.15.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", -] -dependencies = [ - { name = "botocore", version = "1.41.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14' and sys_platform == 'linux'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ca/bb/940d6af975948c1cc18f44545ffb219d3c35d78ec972b42ae229e8e37e08/s3transfer-0.15.0.tar.gz", hash = "sha256:d36fac8d0e3603eff9b5bfa4282c7ce6feb0301a633566153cbd0b93d11d8379", size = 152185, upload-time = "2025-11-20T20:28:56.327Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/e1/5ef25f52973aa12a19cf4e1375d00932d7fb354ffd310487ba7d44225c1a/s3transfer-0.15.0-py3-none-any.whl", hash = "sha256:6f8bf5caa31a0865c4081186689db1b2534cef721d104eb26101de4b9d6a5852", size = 85984, upload-time = "2025-11-20T20:28:55.046Z" }, -] - [[package]] name = "s3transfer" version = "0.16.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform 
== 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.12.*' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < '3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] dependencies = [ - { name = "botocore", version = "1.42.52", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14' or sys_platform != 'linux'" }, + { name = "botocore" }, ] sdist = { url = "https://files.pythonhosted.org/packages/05/04/74127fc843314818edfa81b5540e26dd537353b123a4edc563109d8f17dd/s3transfer-0.16.0.tar.gz", hash = "sha256:8e990f13268025792229cd52fa10cb7163744bf56e719e0b9cb925ab79abf920", size = 153827, upload-time = "2025-12-01T02:30:59.114Z" } wheels = [ @@ -7927,12 +6883,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/56/3e/9cca699f3486ce6bc12ff46dc2031f1ec8eb9ccc9a320fdaf925f1417426/scipy-1.17.0.tar.gz", hash = "sha256:2591060c8e648d8b96439e111ac41fd8342fdeff1876be2e19dea3fe8930454e", size = 30396830, upload-time = "2026-01-10T21:34:23.009Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/bd/f5af70c28c6da2227e510875cadf64879855193a687fb19951f0f44cfd6b/scipy-1.17.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:fc02c37a5639ee67d8fb646ffded6d793c06c5622d36b35cfa8fe5ececb8f042", size = 32862414, upload-time = "2026-01-10T21:24:52.566Z" }, - { url = "https://files.pythonhosted.org/packages/ef/df/df1457c4df3826e908879fe3d76bc5b6e60aae45f4ee42539512438cfd5d/scipy-1.17.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dac97a27520d66c12a34fd90a4fe65f43766c18c0d6e1c0a80f114d2260080e4", size = 35112380, upload-time = "2026-01-10T21:24:58.433Z" }, - { url = "https://files.pythonhosted.org/packages/5f/bb/88e2c16bd1dd4de19d80d7c5e238387182993c2fb13b4b8111e3927ad422/scipy-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb7446a39b3ae0fe8f416a9a3fdc6fba3f11c634f680f16a239c5187bc487c0", size = 34922676, upload-time = "2026-01-10T21:25:04.287Z" }, - { url = "https://files.pythonhosted.org/packages/02/ba/5120242cc735f71fc002cff0303d536af4405eb265f7c60742851e7ccfe9/scipy-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:474da16199f6af66601a01546144922ce402cb17362e07d82f5a6cf8f963e449", size = 37507599, upload-time = "2026-01-10T21:25:09.851Z" }, - { url = "https://files.pythonhosted.org/packages/52/c8/08629657ac6c0da198487ce8cd3de78e02cfde42b7f34117d56a3fe249dc/scipy-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:255c0da161bd7b32a6c898e7891509e8a9289f0b1c6c7d96142ee0d2b114c2ea", size = 36380284, upload-time = "2026-01-10T21:25:15.632Z" }, - { url = "https://files.pythonhosted.org/packages/6c/4a/465f96d42c6f33ad324a40049dfd63269891db9324aa66c4a1c108c6f994/scipy-1.17.0-cp311-cp311-win_arm64.whl", hash = "sha256:85b0ac3ad17fa3be50abd7e69d583d98792d7edc08367e01445a1e2076005379", size = 24370427, upload-time = "2026-01-10T21:25:20.514Z" }, { url = "https://files.pythonhosted.org/packages/4a/69/7c347e857224fcaf32a34a05183b9d8a7aca25f8f2d10b8a698b8388561a/scipy-1.17.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5194c445d0a1c7a6c1a4a4681b6b7c71baad98ff66d96b949097e7513c9d6742", size = 32724197, 
upload-time = "2026-01-10T21:25:44.084Z" }, { url = "https://files.pythonhosted.org/packages/d1/fe/66d73b76d378ba8cc2fe605920c0c75092e3a65ae746e1e767d9d020a75a/scipy-1.17.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9eeb9b5f5997f75507814ed9d298ab23f62cf79f5a3ef90031b1ee2506abdb5b", size = 35009148, upload-time = "2026-01-10T21:25:50.591Z" }, { url = "https://files.pythonhosted.org/packages/af/07/07dec27d9dc41c18d8c43c69e9e413431d20c53a0339c388bcf72f353c4b/scipy-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:40052543f7bbe921df4408f46003d6f01c6af109b9e2c8a66dd1cf6cf57f7d5d", size = 34798766, upload-time = "2026-01-10T21:25:59.41Z" }, @@ -7998,14 +6948,6 @@ version = "0.2.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/15/15/2e7a025fc62d764b151ae6d0f2a92f8081755ebe8d4a64099accc6f77ba6/sentencepiece-0.2.1.tar.gz", hash = "sha256:8138cec27c2f2282f4a34d9a016e3374cd40e5c6e9cb335063db66a0a3b71fad", size = 3228515, upload-time = "2025-08-12T07:00:51.718Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d8/15/46afbab00733d81788b64be430ca1b93011bb9388527958e26cc31832de5/sentencepiece-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6356d0986b8b8dc351b943150fcd81a1c6e6e4d439772e8584c64230e58ca987", size = 1942560, upload-time = "2025-08-12T06:59:25.82Z" }, - { url = "https://files.pythonhosted.org/packages/fa/79/7c01b8ef98a0567e9d84a4e7a910f8e7074fcbf398a5cd76f93f4b9316f9/sentencepiece-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f8ba89a3acb3dc1ae90f65ec1894b0b9596fdb98ab003ff38e058f898b39bc7", size = 1325385, upload-time = "2025-08-12T06:59:27.722Z" }, - { url = "https://files.pythonhosted.org/packages/bb/88/2b41e07bd24f33dcf2f18ec3b74247aa4af3526bad8907b8727ea3caba03/sentencepiece-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:02593eca45440ef39247cee8c47322a34bdcc1d8ae83ad28ba5a899a2cf8d79a", size = 1253319, 
upload-time = "2025-08-12T06:59:29.306Z" }, - { url = "https://files.pythonhosted.org/packages/a0/54/38a1af0c6210a3c6f95aa46d23d6640636d020fba7135cd0d9a84ada05a7/sentencepiece-0.2.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a0d15781a171d188b661ae4bde1d998c303f6bd8621498c50c671bd45a4798e", size = 1316162, upload-time = "2025-08-12T06:59:30.914Z" }, - { url = "https://files.pythonhosted.org/packages/ef/66/fb191403ade791ad2c3c1e72fe8413e63781b08cfa3aa4c9dfc536d6e795/sentencepiece-0.2.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f5a3e0d9f445ed9d66c0fec47d4b23d12cfc858b407a03c194c1b26c2ac2a63", size = 1387785, upload-time = "2025-08-12T06:59:32.491Z" }, - { url = "https://files.pythonhosted.org/packages/a9/2d/3bd9b08e70067b2124518b308db6a84a4f8901cc8a4317e2e4288cdd9b4d/sentencepiece-0.2.1-cp311-cp311-win32.whl", hash = "sha256:6d297a1748d429ba8534eebe5535448d78b8acc32d00a29b49acf28102eeb094", size = 999555, upload-time = "2025-08-12T06:59:34.475Z" }, - { url = "https://files.pythonhosted.org/packages/32/b8/f709977f5fda195ae1ea24f24e7c581163b6f142b1005bc3d0bbfe4d7082/sentencepiece-0.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:82d9ead6591015f009cb1be1cb1c015d5e6f04046dbb8c9588b931e869a29728", size = 1054617, upload-time = "2025-08-12T06:59:36.461Z" }, - { url = "https://files.pythonhosted.org/packages/7a/40/a1fc23be23067da0f703709797b464e8a30a1c78cc8a687120cd58d4d509/sentencepiece-0.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:39f8651bd10974eafb9834ce30d9bcf5b73e1fc798a7f7d2528f9820ca86e119", size = 1033877, upload-time = "2025-08-12T06:59:38.391Z" }, { url = "https://files.pythonhosted.org/packages/4a/be/32ce495aa1d0e0c323dcb1ba87096037358edee539cac5baf8755a6bd396/sentencepiece-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:57cae326c8727de58c85977b175af132a7138d84c764635d7e71bbee7e774133", size = 1943152, upload-time = "2025-08-12T06:59:40.048Z" }, { url = 
"https://files.pythonhosted.org/packages/88/7e/ff23008899a58678e98c6ff592bf4d368eee5a71af96d0df6b38a039dd4f/sentencepiece-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:56dd39a3c4d6493db3cdca7e8cc68c6b633f0d4195495cbadfcf5af8a22d05a6", size = 1325651, upload-time = "2025-08-12T06:59:41.536Z" }, { url = "https://files.pythonhosted.org/packages/19/84/42eb3ce4796777a1b5d3699dfd4dca85113e68b637f194a6c8d786f16a04/sentencepiece-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9381351182ff9888cc80e41c632e7e274b106f450de33d67a9e8f6043da6f76", size = 1253645, upload-time = "2025-08-12T06:59:42.903Z" }, @@ -8067,16 +7009,6 @@ version = "1.3.7" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/8d/48/49393a96a2eef1ab418b17475fb92b8fcfad83d099e678751b05472e69de/setproctitle-1.3.7.tar.gz", hash = "sha256:bc2bc917691c1537d5b9bca1468437176809c7e11e5694ca79a9ca12345dcb9e", size = 27002, upload-time = "2025-09-05T12:51:25.278Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/cd/1b7ba5cad635510720ce19d7122154df96a2387d2a74217be552887c93e5/setproctitle-1.3.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a600eeb4145fb0ee6c287cb82a2884bd4ec5bbb076921e287039dcc7b7cc6dd0", size = 18085, upload-time = "2025-09-05T12:49:22.183Z" }, - { url = "https://files.pythonhosted.org/packages/8f/1a/b2da0a620490aae355f9d72072ac13e901a9fec809a6a24fc6493a8f3c35/setproctitle-1.3.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97a090fed480471bb175689859532709e28c085087e344bca45cf318034f70c4", size = 13097, upload-time = "2025-09-05T12:49:23.322Z" }, - { url = "https://files.pythonhosted.org/packages/18/2e/bd03ff02432a181c1787f6fc2a678f53b7dacdd5ded69c318fe1619556e8/setproctitle-1.3.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1607b963e7b53e24ec8a2cb4e0ab3ae591d7c6bf0a160feef0551da63452b37f", size = 32191, upload-time = "2025-09-05T12:49:24.567Z" 
}, - { url = "https://files.pythonhosted.org/packages/28/78/1e62fc0937a8549f2220445ed2175daacee9b6764c7963b16148119b016d/setproctitle-1.3.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a20fb1a3974e2dab857870cf874b325b8705605cb7e7e8bcbb915bca896f52a9", size = 33203, upload-time = "2025-09-05T12:49:25.871Z" }, - { url = "https://files.pythonhosted.org/packages/a0/3c/65edc65db3fa3df400cf13b05e9d41a3c77517b4839ce873aa6b4043184f/setproctitle-1.3.7-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f8d961bba676e07d77665204f36cffaa260f526e7b32d07ab3df6a2c1dfb44ba", size = 34963, upload-time = "2025-09-05T12:49:27.044Z" }, - { url = "https://files.pythonhosted.org/packages/a1/32/89157e3de997973e306e44152522385f428e16f92f3cf113461489e1e2ee/setproctitle-1.3.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:db0fd964fbd3a9f8999b502f65bd2e20883fdb5b1fae3a424e66db9a793ed307", size = 32398, upload-time = "2025-09-05T12:49:28.909Z" }, - { url = "https://files.pythonhosted.org/packages/4a/18/77a765a339ddf046844cb4513353d8e9dcd8183da9cdba6e078713e6b0b2/setproctitle-1.3.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:db116850fcf7cca19492030f8d3b4b6e231278e8fe097a043957d22ce1bdf3ee", size = 33657, upload-time = "2025-09-05T12:49:30.323Z" }, - { url = "https://files.pythonhosted.org/packages/6b/63/f0b6205c64d74d2a24a58644a38ec77bdbaa6afc13747e75973bf8904932/setproctitle-1.3.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:316664d8b24a5c91ee244460bdaf7a74a707adaa9e14fbe0dc0a53168bb9aba1", size = 31836, upload-time = "2025-09-05T12:49:32.309Z" }, - { url = "https://files.pythonhosted.org/packages/ba/51/e1277f9ba302f1a250bbd3eedbbee747a244b3cc682eb58fb9733968f6d8/setproctitle-1.3.7-cp311-cp311-win32.whl", hash = "sha256:b74774ca471c86c09b9d5037c8451fff06bb82cd320d26ae5a01c758088c0d5d", size = 12556, upload-time = "2025-09-05T12:49:33.529Z" }, - { url = 
"https://files.pythonhosted.org/packages/b6/7b/822a23f17e9003dfdee92cd72758441ca2a3680388da813a371b716fb07f/setproctitle-1.3.7-cp311-cp311-win_amd64.whl", hash = "sha256:acb9097213a8dd3410ed9f0dc147840e45ca9797785272928d4be3f0e69e3be4", size = 13243, upload-time = "2025-09-05T12:49:34.553Z" }, { url = "https://files.pythonhosted.org/packages/fb/f0/2dc88e842077719d7384d86cc47403e5102810492b33680e7dadcee64cd8/setproctitle-1.3.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2dc99aec591ab6126e636b11035a70991bc1ab7a261da428491a40b84376654e", size = 18049, upload-time = "2025-09-05T12:49:36.241Z" }, { url = "https://files.pythonhosted.org/packages/f0/b4/50940504466689cda65680c9e9a1e518e5750c10490639fa687489ac7013/setproctitle-1.3.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdd8aa571b7aa39840fdbea620e308a19691ff595c3a10231e9ee830339dd798", size = 13079, upload-time = "2025-09-05T12:49:38.088Z" }, { url = "https://files.pythonhosted.org/packages/d0/99/71630546b9395b095f4082be41165d1078204d1696c2d9baade3de3202d0/setproctitle-1.3.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2906b6c7959cdb75f46159bf0acd8cc9906cf1361c9e1ded0d065fe8f9039629", size = 32932, upload-time = "2025-09-05T12:49:39.271Z" }, @@ -8127,9 +7059,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e7/e3/54b496ac724e60e61cc3447f02690105901ca6d90da0377dffe49ff99fc7/setproctitle-1.3.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:1fae595d032b30dab4d659bece20debd202229fce12b55abab978b7f30783d73", size = 33958, upload-time = "2025-09-05T12:50:39.841Z" }, { url = "https://files.pythonhosted.org/packages/ea/a8/c84bb045ebf8c6fdc7f7532319e86f8380d14bbd3084e6348df56bdfe6fd/setproctitle-1.3.7-cp314-cp314t-win32.whl", hash = "sha256:02432f26f5d1329ab22279ff863c83589894977063f59e6c4b4845804a08f8c2", size = 12745, upload-time = "2025-09-05T12:50:41.377Z" }, { url = 
"https://files.pythonhosted.org/packages/08/b6/3a5a4f9952972791a9114ac01dfc123f0df79903577a3e0a7a404a695586/setproctitle-1.3.7-cp314-cp314t-win_amd64.whl", hash = "sha256:cbc388e3d86da1f766d8fc2e12682e446064c01cea9f88a88647cfe7c011de6a", size = 13469, upload-time = "2025-09-05T12:50:42.67Z" }, - { url = "https://files.pythonhosted.org/packages/c3/5b/5e1c117ac84e3cefcf8d7a7f6b2461795a87e20869da065a5c087149060b/setproctitle-1.3.7-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:b1cac6a4b0252b8811d60b6d8d0f157c0fdfed379ac89c25a914e6346cf355a1", size = 12587, upload-time = "2025-09-05T12:51:21.195Z" }, - { url = "https://files.pythonhosted.org/packages/73/02/b9eadc226195dcfa90eed37afe56b5dd6fa2f0e5220ab8b7867b8862b926/setproctitle-1.3.7-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f1704c9e041f2b1dc38f5be4552e141e1432fba3dd52c72eeffd5bc2db04dc65", size = 14286, upload-time = "2025-09-05T12:51:22.61Z" }, - { url = "https://files.pythonhosted.org/packages/28/26/1be1d2a53c2a91ec48fa2ff4a409b395f836798adf194d99de9c059419ea/setproctitle-1.3.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:b08b61976ffa548bd5349ce54404bf6b2d51bd74d4f1b241ed1b0f25bce09c3a", size = 13282, upload-time = "2025-09-05T12:51:24.094Z" }, ] [[package]] @@ -8236,8 +7165,7 @@ cudo = [ { name = "greenlet" }, { name = "grpcio" }, { name = "passlib" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, { name = "pyjwt" }, { name = "sqlalchemy-adapter" }, ] @@ -8251,8 +7179,7 @@ do = [ { name = "greenlet" }, { name = "grpcio" }, { name = "passlib" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", 
version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, { name = "pydo" }, { name = "pyjwt" }, { name = "sqlalchemy-adapter" }, @@ -8265,8 +7192,7 @@ fluidstack = [ { name = "greenlet" }, { name = "grpcio" }, { name = "passlib" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, { name = "pyjwt" }, { name = "sqlalchemy-adapter" }, ] @@ -8280,8 +7206,7 @@ gcp = [ { name = "greenlet" }, { name = "grpcio" }, { name = "passlib" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, { name = "pyjwt" }, { name = "pyopenssl" }, { name = "sqlalchemy-adapter" }, @@ -8295,8 +7220,7 @@ kubernetes = [ { name = "grpcio" }, { name = "kubernetes" }, { name = "passlib" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, { name = "pyjwt" }, { name = "python-dateutil" }, { name = "sqlalchemy-adapter" }, @@ -8310,8 +7234,7 @@ lambda = [ { name = "greenlet" }, { name = "grpcio" }, { name = "passlib" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, { name = "pyjwt" }, 
{ name = "sqlalchemy-adapter" }, ] @@ -8323,8 +7246,7 @@ paperspace = [ { name = "greenlet" }, { name = "grpcio" }, { name = "passlib" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, { name = "pyjwt" }, { name = "sqlalchemy-adapter" }, ] @@ -8336,8 +7258,7 @@ runpod = [ { name = "greenlet" }, { name = "grpcio" }, { name = "passlib" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, { name = "pyjwt" }, { name = "runpod" }, { name = "sqlalchemy-adapter" }, @@ -8400,13 +7321,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/06/aa/9ce0f3e7a9829ead5c8ce549392f33a12c4555a6c0609bb27d882e9c7ddf/sqlalchemy-2.0.46.tar.gz", hash = "sha256:cf36851ee7219c170bb0793dbc3da3e80c582e04a5437bc601bfe8c85c9216d7", size = 9865393, upload-time = "2026-01-21T18:03:45.119Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/ac/b42ad16800d0885105b59380ad69aad0cce5a65276e269ce2729a2343b6a/sqlalchemy-2.0.46-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:261c4b1f101b4a411154f1da2b76497d73abbfc42740029205d4d01fa1052684", size = 2154851, upload-time = "2026-01-21T18:27:30.54Z" }, - { url = "https://files.pythonhosted.org/packages/a0/60/d8710068cb79f64d002ebed62a7263c00c8fd95f4ebd4b5be8f7ca93f2bc/sqlalchemy-2.0.46-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:181903fe8c1b9082995325f1b2e84ac078b1189e2819380c2303a5f90e114a62", size = 3311241, upload-time = "2026-01-21T18:32:33.45Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/0f/20c71487c7219ab3aa7421c7c62d93824c97c1460f2e8bb72404b0192d13/sqlalchemy-2.0.46-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:590be24e20e2424a4c3c1b0835e9405fa3d0af5823a1a9fc02e5dff56471515f", size = 3310741, upload-time = "2026-01-21T18:44:57.887Z" }, - { url = "https://files.pythonhosted.org/packages/65/80/d26d00b3b249ae000eee4db206fcfc564bf6ca5030e4747adf451f4b5108/sqlalchemy-2.0.46-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7568fe771f974abadce52669ef3a03150ff03186d8eb82613bc8adc435a03f01", size = 3263116, upload-time = "2026-01-21T18:32:35.044Z" }, - { url = "https://files.pythonhosted.org/packages/da/ee/74dda7506640923821340541e8e45bd3edd8df78664f1f2e0aae8077192b/sqlalchemy-2.0.46-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf7e1e78af38047e08836d33502c7a278915698b7c2145d045f780201679999", size = 3285327, upload-time = "2026-01-21T18:44:59.254Z" }, - { url = "https://files.pythonhosted.org/packages/9f/25/6dcf8abafff1389a21c7185364de145107b7394ecdcb05233815b236330d/sqlalchemy-2.0.46-cp311-cp311-win32.whl", hash = "sha256:9d80ea2ac519c364a7286e8d765d6cd08648f5b21ca855a8017d9871f075542d", size = 2114564, upload-time = "2026-01-21T18:33:15.85Z" }, - { url = "https://files.pythonhosted.org/packages/93/5f/e081490f8523adc0088f777e4ebad3cac21e498ec8a3d4067074e21447a1/sqlalchemy-2.0.46-cp311-cp311-win_amd64.whl", hash = "sha256:585af6afe518732d9ccd3aea33af2edaae4a7aa881af5d8f6f4fe3a368699597", size = 2139233, upload-time = "2026-01-21T18:33:17.528Z" }, { url = "https://files.pythonhosted.org/packages/b6/35/d16bfa235c8b7caba3730bba43e20b1e376d2224f407c178fbf59559f23e/sqlalchemy-2.0.46-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a9a72b0da8387f15d5810f1facca8f879de9b85af8c645138cba61ea147968c", size = 2153405, upload-time = "2026-01-21T19:05:54.143Z" }, { url = 
"https://files.pythonhosted.org/packages/06/6c/3192e24486749862f495ddc6584ed730c0c994a67550ec395d872a2ad650/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2347c3f0efc4de367ba00218e0ae5c4ba2306e47216ef80d6e31761ac97cb0b9", size = 3334702, upload-time = "2026-01-21T18:46:45.384Z" }, { url = "https://files.pythonhosted.org/packages/ea/a2/b9f33c8d68a3747d972a0bb758c6b63691f8fb8a49014bc3379ba15d4274/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9094c8b3197db12aa6f05c51c05daaad0a92b8c9af5388569847b03b1007fb1b", size = 3347664, upload-time = "2026-01-21T18:40:09.979Z" }, @@ -8457,8 +7371,8 @@ name = "sse-starlette" version = "3.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "anyio", marker = "sys_platform == 'linux'" }, - { name = "starlette", marker = "sys_platform == 'linux'" }, + { name = "anyio" }, + { name = "starlette" }, ] sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/00d280c03ffd39aaee0e86ec81e2d3b9253036a0f93f51d10503adef0e65/sse_starlette-3.2.0.tar.gz", hash = "sha256:8127594edfb51abe44eac9c49e59b0b01f1039d0c7461c6fd91d4e03b70da422", size = 27253, upload-time = "2026-01-17T13:11:05.62Z" } wheels = [ @@ -8560,8 +7474,7 @@ dependencies = [ { name = "numpy" }, { name = "packaging" }, { name = "pillow" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, { name = "setuptools" }, { name = "tensorboard-data-server" }, { name = "werkzeug" }, @@ -8590,11 +7503,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/43/f6/e2403fc05b97ba74ad408a98a42c288e6e1b8eacc23780c153b0e5166179/tensorstore-0.1.81.tar.gz", hash = 
"sha256:687546192ea6f6c8ae28d18f13103336f68017d928b9f5a00325e9b0548d9c25", size = 7120819, upload-time = "2026-02-06T18:56:12.535Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cd/df/f472bd0dee801d7e33c53335ad0fcde9c71e5f9324241faa0a6b4be4270a/tensorstore-0.1.81-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:f64fb510f293079f9e5c63cb227e8a76904655a32912fc107c1e63bd8dc3e187", size = 16501390, upload-time = "2026-02-06T18:55:13.678Z" }, - { url = "https://files.pythonhosted.org/packages/5a/93/5f40c51d7b15d3574b1788a251dd4e3abd0415dab71811e126d2da5e826b/tensorstore-0.1.81-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4282587598885ff447f08369ac9bb681a65e224888cfa8ef8f3dd63544759e6c", size = 14535592, upload-time = "2026-02-06T18:55:16.44Z" }, - { url = "https://files.pythonhosted.org/packages/76/48/b7adcc8eca502ce8050c18cea066ca0c0122df7a686e10da6470e55456b4/tensorstore-0.1.81-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9b4ea06038f6912bb6ed8a89db0c31e4e3d1b2404f3365dc756e4bc42bd6a89c", size = 19038732, upload-time = "2026-02-06T18:55:18.924Z" }, - { url = "https://files.pythonhosted.org/packages/40/b0/99294895b030bd7d9ebc06e7ed523d0c09ab65667e031f8a67923f398f86/tensorstore-0.1.81-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51d59f7db9cdae02fce9d347300c0ccfb8265052945757e95592a265eb620b15", size = 21038447, upload-time = "2026-02-06T18:55:21.085Z" }, - { url = "https://files.pythonhosted.org/packages/32/e6/1ce977baf09aa3889f10f04460b588a6c8876ea441e51090c671f0400a6f/tensorstore-0.1.81-cp311-cp311-win_amd64.whl", hash = "sha256:fdb9579a729cccc02127cab5abf26f57a0e27968ba65c9c548ad058f5a45417f", size = 13221673, upload-time = "2026-02-06T18:55:23.195Z" }, { url = "https://files.pythonhosted.org/packages/85/82/00037db699f74d792efe2696305ddd6932e04306899e3701824a7f7de961/tensorstore-0.1.81-cp312-cp312-macosx_10_14_x86_64.whl", hash = 
"sha256:7aefa1e3eadca804bce05215184c9cde29205ac2f3b443ca15a4e1846d31af4e", size = 16521245, upload-time = "2026-02-06T18:55:25.559Z" }, { url = "https://files.pythonhosted.org/packages/86/2e/1deca1b955cb959eec13fd342ffaa2fd84e4770b4e2bcb95a2f541875a52/tensorstore-0.1.81-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7e001d3edc6758eb5dc80556da9e945c1381f0529102fcc0301358ba6b9b70ed", size = 14543561, upload-time = "2026-02-06T18:55:27.624Z" }, { url = "https://files.pythonhosted.org/packages/6c/e4/b4343eae773f72a8777f82c5328191a06d8a5195e62105c14b7dcc49823f/tensorstore-0.1.81-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c27e07f4e91e6dc6a0878e13e2c5931d1716196b67b0df927f2f571de2576e9", size = 19043982, upload-time = "2026-02-06T18:55:30.076Z" }, @@ -8626,13 +7534,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" }, - { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" }, - { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = 
"sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" }, - { url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" }, - { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" }, - { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" }, - { url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" }, { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = 
"2025-10-06T20:21:53.782Z" }, { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, @@ -8723,15 +7624,6 @@ version = "2.4.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, - { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" }, - { url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" }, - { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", 
size = 243007, upload-time = "2026-01-11T11:21:49.456Z" }, - { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" }, - { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, - { url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, - { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, - { url = "https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, { url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, { url = 
"https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, @@ -8789,64 +7681,16 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b5/11/87d6d29fb5d237229d67973a6c9e06e048f01cf4994dee194ab0ea841814/tomlkit-0.14.0-py3-none-any.whl", hash = "sha256:592064ed85b40fa213469f81ac584f67a4f2992509a7c3ea2d632208623a3680", size = 39310, upload-time = "2026-01-13T01:14:51.965Z" }, ] -[[package]] -name = "torch" -version = "2.9.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.12.*' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < '3.12' and sys_platform == 
'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] -dependencies = [ - { name = "filelock", marker = "sys_platform != 'linux'" }, - { name = "fsspec", marker = "sys_platform != 'linux'" }, - { name = "jinja2", marker = "sys_platform != 'linux'" }, - { name = "networkx", marker = "sys_platform != 'linux'" }, - { name = "setuptools", marker = "python_full_version >= '3.12' and sys_platform != 'linux'" }, - { name = "sympy", marker = "sys_platform != 'linux'" }, - { name = "typing-extensions", marker = "sys_platform != 'linux'" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/47/cc/7a2949e38dfe3244c4df21f0e1c27bce8aedd6c604a587dd44fc21017cb4/torch-2.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:0d06b30a9207b7c3516a9e0102114024755a07045f0c1d2f2a56b1819ac06bcb", size = 110973074, upload-time = "2025-11-12T15:21:39.958Z" }, - { url = "https://files.pythonhosted.org/packages/1e/ce/7d251155a783fb2c1bb6837b2b7023c622a2070a0a72726ca1df47e7ea34/torch-2.9.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:52347912d868653e1528b47cafaf79b285b98be3f4f35d5955389b1b95224475", size = 74463887, upload-time = "2025-11-12T15:20:36.611Z" }, - { url = "https://files.pythonhosted.org/packages/b1/1a/64f5769025db846a82567fa5b7d21dba4558a7234ee631712ee4771c436c/torch-2.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:81a285002d7b8cfd3fdf1b98aa8df138d41f1a8334fd9ea37511517cedf43083", size = 110940568, upload-time = "2025-11-12T15:21:18.689Z" }, - { url = "https://files.pythonhosted.org/packages/6e/ab/07739fd776618e5882661d04c43f5b5586323e2f6a2d7d84aac20d8f20bd/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:c0d25d1d8e531b8343bea0ed811d5d528958f1dcbd37e7245bc686273177ad7e", size = 74479191, upload-time = "2025-11-12T15:21:25.816Z" }, - { url = 
"https://files.pythonhosted.org/packages/a6/47/c7843d69d6de8938c1cbb1eba426b1d48ddf375f101473d3e31a5fc52b74/torch-2.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:545844cc16b3f91e08ce3b40e9c2d77012dd33a48d505aed34b7740ed627a1b2", size = 110944162, upload-time = "2025-11-12T15:21:53.151Z" }, - { url = "https://files.pythonhosted.org/packages/28/0e/2a37247957e72c12151b33a01e4df651d9d155dd74d8cfcbfad15a79b44a/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5be4bf7496f1e3ffb1dd44b672adb1ac3f081f204c5ca81eba6442f5f634df8e", size = 74830751, upload-time = "2025-11-12T15:21:43.792Z" }, - { url = "https://files.pythonhosted.org/packages/1f/9f/6986b83a53b4d043e36f3f898b798ab51f7f20fdf1a9b01a2720f445043d/torch-2.9.1-cp313-cp313t-win_amd64.whl", hash = "sha256:2e1c42c0ae92bf803a4b2409fdfed85e30f9027a66887f5e7dcdbc014c7531db", size = 111176995, upload-time = "2025-11-12T15:22:01.618Z" }, - { url = "https://files.pythonhosted.org/packages/40/60/71c698b466dd01e65d0e9514b5405faae200c52a76901baf6906856f17e4/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:2c14b3da5df416cf9cb5efab83aa3056f5b8cd8620b8fde81b4987ecab730587", size = 74480347, upload-time = "2025-11-12T15:21:57.648Z" }, - { url = "https://files.pythonhosted.org/packages/48/50/c4b5112546d0d13cc9eaa1c732b823d676a9f49ae8b6f97772f795874a03/torch-2.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1edee27a7c9897f4e0b7c14cfc2f3008c571921134522d5b9b5ec4ebbc69041a", size = 74433245, upload-time = "2025-11-12T15:22:39.027Z" }, - { url = "https://files.pythonhosted.org/packages/63/5d/e8d4e009e52b6b2cf1684bde2a6be157b96fb873732542fb2a9a99e85a83/torch-2.9.1-cp314-cp314-win_amd64.whl", hash = "sha256:d187566a2cdc726fc80138c3cdb260970fab1c27e99f85452721f7759bbd554d", size = 110934845, upload-time = "2025-11-12T15:22:48.367Z" }, - { url = "https://files.pythonhosted.org/packages/bd/b2/2d15a52516b2ea3f414643b8de68fa4cb220d3877ac8b1028c83dc8ca1c4/torch-2.9.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:cb10896a1f7fedaddbccc2017ce6ca9ecaaf990f0973bdfcf405439750118d2c", size = 74823558, upload-time = "2025-11-12T15:22:43.392Z" }, - { url = "https://files.pythonhosted.org/packages/db/2b/f7818f6ec88758dfd21da46b6cd46af9d1b3433e53ddbb19ad1e0da17f9b/torch-2.9.1-cp314-cp314t-win_amd64.whl", hash = "sha256:c88d3299ddeb2b35dcc31753305612db485ab6f1823e37fb29451c8b2732b87e", size = 111163659, upload-time = "2025-11-12T15:23:20.009Z" }, -] - [[package]] name = "torch" version = "2.10.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", - "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform == 'linux'", -] dependencies = [ - { name = "cuda-bindings", version = "12.9.4", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "filelock", marker = "sys_platform == 'linux'" }, - { name = "fsspec", marker = "sys_platform == 'linux'" }, - { name = "jinja2", marker = "sys_platform == 'linux'" }, - { name = "networkx", marker = "sys_platform == 'linux'" }, + { name = "cuda-bindings", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "filelock" }, + { name = "fsspec" }, + { name = "jinja2" }, + { name = "networkx" }, { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, @@ -8862,30 +7706,39 @@ dependencies = [ { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nvshmem-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 
'linux'" }, { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "setuptools", marker = "python_full_version >= '3.12' and sys_platform == 'linux'" }, - { name = "sympy", marker = "sys_platform == 'linux'" }, - { name = "triton", version = "3.6.0", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "typing-extensions", marker = "sys_platform == 'linux'" }, + { name = "setuptools" }, + { name = "sympy" }, + { name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "typing-extensions" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/36/ab/7b562f1808d3f65414cd80a4f7d4bb00979d9355616c034c171249e1a303/torch-2.10.0-3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac5bdcbb074384c66fa160c15b1ead77839e3fe7ed117d667249afce0acabfac", size = 915518691, upload-time = "2026-03-11T14:15:43.147Z" }, + { url = "https://files.pythonhosted.org/packages/d3/54/a2ba279afcca44bbd320d4e73675b282fcee3d81400ea1b53934efca6462/torch-2.10.0-2-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:13ec4add8c3faaed8d13e0574f5cd4a323c11655546f91fbe6afa77b57423574", size = 79498202, upload-time = "2026-02-10T21:44:52.603Z" }, + { url = "https://files.pythonhosted.org/packages/ec/23/2c9fe0c9c27f7f6cb865abcea8a4568f29f00acaeadfc6a37f6801f84cb4/torch-2.10.0-2-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:e521c9f030a3774ed770a9c011751fb47c4d12029a3d6522116e48431f2ff89e", size = 79498254, upload-time = "2026-02-10T21:44:44.095Z" }, { url = "https://files.pythonhosted.org/packages/b3/7a/abada41517ce0011775f0f4eacc79659bc9bc6c361e6bfe6f7052a6b9363/torch-2.10.0-3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:98c01b8bb5e3240426dcde1446eed6f40c778091c8544767ef1168fc663a05a6", size = 915622781, upload-time = "2026-03-11T14:17:11.354Z" }, { url = 
"https://files.pythonhosted.org/packages/ab/c6/4dfe238342ffdcec5aef1c96c457548762d33c40b45a1ab7033bb26d2ff2/torch-2.10.0-3-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:80b1b5bfe38eb0e9f5ff09f206dcac0a87aadd084230d4a36eea5ec5232c115b", size = 915627275, upload-time = "2026-03-11T14:16:11.325Z" }, { url = "https://files.pythonhosted.org/packages/d8/f0/72bf18847f58f877a6a8acf60614b14935e2f156d942483af1ffc081aea0/torch-2.10.0-3-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:46b3574d93a2a8134b3f5475cfb98e2eb46771794c57015f6ad1fb795ec25e49", size = 915523474, upload-time = "2026-03-11T14:17:44.422Z" }, { url = "https://files.pythonhosted.org/packages/f4/39/590742415c3030551944edc2ddc273ea1fdfe8ffb2780992e824f1ebee98/torch-2.10.0-3-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:b1d5e2aba4eb7f8e87fbe04f86442887f9167a35f092afe4c237dfcaaef6e328", size = 915632474, upload-time = "2026-03-11T14:15:13.666Z" }, { url = "https://files.pythonhosted.org/packages/b6/8e/34949484f764dde5b222b7fe3fede43e4a6f0da9d7f8c370bb617d629ee2/torch-2.10.0-3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:0228d20b06701c05a8f978357f657817a4a63984b0c90745def81c18aedfa591", size = 915523882, upload-time = "2026-03-11T14:14:46.311Z" }, - { url = "https://files.pythonhosted.org/packages/78/89/f5554b13ebd71e05c0b002f95148033e730d3f7067f67423026cc9c69410/torch-2.10.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3282d9febd1e4e476630a099692b44fdc214ee9bf8ee5377732d9d9dfe5712e4", size = 145992610, upload-time = "2026-01-21T16:25:26.327Z" }, - { url = "https://files.pythonhosted.org/packages/ae/30/a3a2120621bf9c17779b169fc17e3dc29b230c29d0f8222f499f5e159aa8/torch-2.10.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a2f9edd8dbc99f62bc4dfb78af7bf89499bca3d753423ac1b4e06592e467b763", size = 915607863, upload-time = "2026-01-21T16:25:06.696Z" }, { url = 
"https://files.pythonhosted.org/packages/cc/af/758e242e9102e9988969b5e621d41f36b8f258bb4a099109b7a4b4b50ea4/torch-2.10.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:5fd4117d89ffd47e3dcc71e71a22efac24828ad781c7e46aaaf56bf7f2796acf", size = 145996088, upload-time = "2026-01-21T16:24:44.171Z" }, { url = "https://files.pythonhosted.org/packages/23/8e/3c74db5e53bff7ed9e34c8123e6a8bfef718b2450c35eefab85bb4a7e270/torch-2.10.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:787124e7db3b379d4f1ed54dd12ae7c741c16a4d29b49c0226a89bea50923ffb", size = 915711952, upload-time = "2026-01-21T16:23:53.503Z" }, + { url = "https://files.pythonhosted.org/packages/6e/01/624c4324ca01f66ae4c7cd1b74eb16fb52596dce66dbe51eff95ef9e7a4c/torch-2.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:2c66c61f44c5f903046cc696d088e21062644cbe541c7f1c4eaae88b2ad23547", size = 113757972, upload-time = "2026-01-21T16:24:39.516Z" }, + { url = "https://files.pythonhosted.org/packages/c9/5c/dee910b87c4d5c0fcb41b50839ae04df87c1cfc663cf1b5fca7ea565eeaa/torch-2.10.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:6d3707a61863d1c4d6ebba7be4ca320f42b869ee657e9b2c21c736bf17000294", size = 79498198, upload-time = "2026-01-21T16:24:34.704Z" }, { url = "https://files.pythonhosted.org/packages/c9/6f/f2e91e34e3fcba2e3fc8d8f74e7d6c22e74e480bbd1db7bc8900fdf3e95c/torch-2.10.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:5c4d217b14741e40776dd7074d9006fd28b8a97ef5654db959d8635b2fe5f29b", size = 146004247, upload-time = "2026-01-21T16:24:29.335Z" }, { url = "https://files.pythonhosted.org/packages/98/fb/5160261aeb5e1ee12ee95fe599d0541f7c976c3701d607d8fc29e623229f/torch-2.10.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:6b71486353fce0f9714ca0c9ef1c850a2ae766b409808acd58e9678a3edb7738", size = 915716445, upload-time = "2026-01-21T16:22:45.353Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/16/502fb1b41e6d868e8deb5b0e3ae926bbb36dab8ceb0d1b769b266ad7b0c3/torch-2.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:c2ee399c644dc92ef7bc0d4f7e74b5360c37cdbe7c5ba11318dda49ffac2bc57", size = 113757050, upload-time = "2026-01-21T16:24:19.204Z" }, + { url = "https://files.pythonhosted.org/packages/1a/0b/39929b148f4824bc3ad6f9f72a29d4ad865bcf7ebfc2fa67584773e083d2/torch-2.10.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:3202429f58309b9fa96a614885eace4b7995729f44beb54d3e4a47773649d382", size = 79851305, upload-time = "2026-01-21T16:24:09.209Z" }, { url = "https://files.pythonhosted.org/packages/d8/14/21fbce63bc452381ba5f74a2c0a959fdf5ad5803ccc0c654e752e0dbe91a/torch-2.10.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:aae1b29cd68e50a9397f5ee897b9c24742e9e306f88a807a27d617f07adb3bd8", size = 146005472, upload-time = "2026-01-21T16:22:29.022Z" }, { url = "https://files.pythonhosted.org/packages/54/fd/b207d1c525cb570ef47f3e9f836b154685011fce11a2f444ba8a4084d042/torch-2.10.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:6021db85958db2f07ec94e1bc77212721ba4920c12a18dc552d2ae36a3eb163f", size = 915612644, upload-time = "2026-01-21T16:21:47.019Z" }, + { url = "https://files.pythonhosted.org/packages/36/53/0197f868c75f1050b199fe58f9bf3bf3aecac9b4e85cc9c964383d745403/torch-2.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff43db38af76fda183156153983c9a096fc4c78d0cd1e07b14a2314c7f01c2c8", size = 113997015, upload-time = "2026-01-21T16:23:00.767Z" }, + { url = "https://files.pythonhosted.org/packages/0e/13/e76b4d9c160e89fff48bf16b449ea324bda84745d2ab30294c37c2434c0d/torch-2.10.0-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:cdf2a523d699b70d613243211ecaac14fe9c5df8a0b0a9c02add60fb2a413e0f", size = 79498248, upload-time = "2026-01-21T16:23:09.315Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/93/716b5ac0155f1be70ed81bacc21269c3ece8dba0c249b9994094110bfc51/torch-2.10.0-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:bf0d9ff448b0218e0433aeb198805192346c4fd659c852370d5cc245f602a06a", size = 79464992, upload-time = "2026-01-21T16:23:05.162Z" }, { url = "https://files.pythonhosted.org/packages/69/2b/51e663ff190c9d16d4a8271203b71bc73a16aa7619b9f271a69b9d4a936b/torch-2.10.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:233aed0659a2503b831d8a67e9da66a62c996204c0bba4f4c442ccc0c68a3f60", size = 146018567, upload-time = "2026-01-21T16:22:23.393Z" }, { url = "https://files.pythonhosted.org/packages/5e/cd/4b95ef7f293b927c283db0b136c42be91c8ec6845c44de0238c8c23bdc80/torch-2.10.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:682497e16bdfa6efeec8cde66531bc8d1fbbbb4d8788ec6173c089ed3cc2bfe5", size = 915721646, upload-time = "2026-01-21T16:21:16.983Z" }, + { url = "https://files.pythonhosted.org/packages/56/97/078a007208f8056d88ae43198833469e61a0a355abc0b070edd2c085eb9a/torch-2.10.0-cp314-cp314-win_amd64.whl", hash = "sha256:6528f13d2a8593a1a412ea07a99812495bec07e9224c28b2a25c0a30c7da025c", size = 113752373, upload-time = "2026-01-21T16:22:13.471Z" }, + { url = "https://files.pythonhosted.org/packages/d8/94/71994e7d0d5238393df9732fdab607e37e2b56d26a746cb59fdb415f8966/torch-2.10.0-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:f5ab4ba32383061be0fb74bda772d470140a12c1c3b58a0cfbf3dae94d164c28", size = 79850324, upload-time = "2026-01-21T16:22:09.494Z" }, { url = "https://files.pythonhosted.org/packages/e2/65/1a05346b418ea8ccd10360eef4b3e0ce688fba544e76edec26913a8d0ee0/torch-2.10.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:716b01a176c2a5659c98f6b01bf868244abdd896526f1c692712ab36dbaf9b63", size = 146006482, upload-time = "2026-01-21T16:22:18.42Z" }, { url = 
"https://files.pythonhosted.org/packages/1d/b9/5f6f9d9e859fc3235f60578fa64f52c9c6e9b4327f0fe0defb6de5c0de31/torch-2.10.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:d8f5912ba938233f86361e891789595ff35ca4b4e2ac8fe3670895e5976731d6", size = 915613050, upload-time = "2026-01-21T16:20:49.035Z" }, + { url = "https://files.pythonhosted.org/packages/66/4d/35352043ee0eaffdeff154fad67cd4a31dbed7ff8e3be1cc4549717d6d51/torch-2.10.0-cp314-cp314t-win_amd64.whl", hash = "sha256:71283a373f0ee2c89e0f0d5f446039bdabe8dbc3c9ccf35f0f784908b0acd185", size = 113995816, upload-time = "2026-01-21T16:22:05.312Z" }, ] [[package]] @@ -8893,18 +7746,22 @@ name = "torch-c-dlpack-ext" version = "0.1.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, ] sdist = { url = "https://files.pythonhosted.org/packages/37/de/921b6491efce5c389a5ef9bbed3d2d6660005840dae488124173180859ab/torch_c_dlpack_ext-0.1.5.tar.gz", hash = "sha256:d06f0357d575d22a168cc77acb9020fc4bae30968ceb6718a055dcbe92bacabe", size = 12913, upload-time = "2026-01-12T11:25:08.484Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/e1/64e1e579d107064785549e70758e38a42376ab7e73d86897ed4beab10e74/torch_c_dlpack_ext-0.1.5-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fba674110e1fab0b176bb5a28223e157db65c90767d4ba74abdbee9f537b0e9d", size = 440949, upload-time = "2026-01-12T11:24:39.716Z" }, - { url = "https://files.pythonhosted.org/packages/64/5c/3e1382a620824f92920ab3fae132d8fb4e85898284c99e0c6a7764e452ce/torch_c_dlpack_ext-0.1.5-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3448c4f0d64104d0b2e58080a7efa72304a04960c18f338024b80b13cd3eca26", size = 897768, upload-time = "2026-01-12T11:24:41.209Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/67/10d236698525d7b7db4d74ec0a4b01f5b2db33968995fdd9ac6b4635e327/torch_c_dlpack_ext-0.1.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:c0f2bd51fcd99c0e5b50314e1985f2728c4941bfa821f065e6c30951d1f995ca", size = 5291237, upload-time = "2026-01-12T11:24:44.011Z" }, { url = "https://files.pythonhosted.org/packages/87/06/8d760997307a5c3be4384424667bf31aae0a42060838c532c7d846516175/torch_c_dlpack_ext-0.1.5-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3562ee411258676f9c38b8ad39306d1c8d027b6a86f6a87c920d2d009a9d1510", size = 443069, upload-time = "2026-01-12T11:24:45.451Z" }, { url = "https://files.pythonhosted.org/packages/e2/79/a914539b4785f3e44f891aa012a886edb8bc10fe081c440981c57543ce21/torch_c_dlpack_ext-0.1.5-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e6f9da4bb9af70e27facc777458be62e10dbbbddda7672d16138db0553c5a524", size = 897846, upload-time = "2026-01-12T11:24:48.168Z" }, + { url = "https://files.pythonhosted.org/packages/3a/e6/7d7a97a3953208d6d6ce749180c34d1dab48464ded9a76cecabe9d021ce6/torch_c_dlpack_ext-0.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:670fbbab70123cc228bed41693a3720757af57a0ad22669063c9db25321e8f55", size = 1482855, upload-time = "2026-01-12T11:24:49.581Z" }, + { url = "https://files.pythonhosted.org/packages/ca/c6/65346a201d921b616731311fc9941f15137672b444cebdad702cb52ccee0/torch_c_dlpack_ext-0.1.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:74acea2ed395cadda63342845b9e9ee7cd4537846223dacfb4431b4610109265", size = 1993243, upload-time = "2026-01-12T11:24:51.079Z" }, { url = "https://files.pythonhosted.org/packages/fd/ec/faf10be09a5812b1c5ec9922b53fb5def5fc4080b81a653b9347bb169ebb/torch_c_dlpack_ext-0.1.5-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49f1e99d13c64e22dac0a34a1560e9e5a398a49a9fa81df83053e04fde6ec5bd", size = 443798, upload-time = "2026-01-12T11:24:52.754Z" }, { url = 
"https://files.pythonhosted.org/packages/2d/68/f434b48700f3e04f33882f54d8d3910327b935f55e14ec49da7d607bf470/torch_c_dlpack_ext-0.1.5-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:debe62e5ef93e631065d6b9f6e60d3d39bae6b89fa1b25d9523f40b3efbf8aba", size = 755004, upload-time = "2026-01-12T11:24:54.004Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/cc64e563f05ea99bd79bdb43f71f0f46452d3acd734da4843ede5fc73a35/torch_c_dlpack_ext-0.1.5-cp313-cp313-win_amd64.whl", hash = "sha256:30e3eab616dbc81dfdb7492aca557be551a9163ba9b585f97394a42b336b113a", size = 999126, upload-time = "2026-01-12T11:24:55.44Z" }, + { url = "https://files.pythonhosted.org/packages/96/5e/449324ca8e81573e650b6851fc31c1038f750d1de85d0b185d788e1c7a3a/torch_c_dlpack_ext-0.1.5-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:cac94a4905d391889e679a8da31e46dc325af5d55d13b7c70c0ce3d71d1ced6d", size = 1982154, upload-time = "2026-01-12T11:24:58.038Z" }, { url = "https://files.pythonhosted.org/packages/20/62/11c05b99f69aa5152bca0313e0dfa6d125a020cf890dc888ef009aa7891c/torch_c_dlpack_ext-0.1.5-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a58fdf45fb0bda7bc459632cec891570f31c11636d5851c825cf308ec8b73c2", size = 163825, upload-time = "2026-01-12T11:24:59.474Z" }, { url = "https://files.pythonhosted.org/packages/15/b5/be613cd8e71c9982bd07af530f86c5a7f30df7831d14cec5414857af7149/torch_c_dlpack_ext-0.1.5-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b985a324c68241cf83a9474b28015524b66775b12a91930dd4c0760aa628d01", size = 171740, upload-time = "2026-01-12T11:25:00.776Z" }, + { url = "https://files.pythonhosted.org/packages/5c/11/52e291f1659e2ec70a09f5ca4ad27e015eb4f0a1371ae68d23a9fbd1c704/torch_c_dlpack_ext-0.1.5-cp314-cp314-win_amd64.whl", hash = "sha256:d794e19fa3f330ab7a29987c07e031fc08e4953aec516d35701d0827863e356b", size = 277086, upload-time = "2026-01-12T11:25:01.901Z" }, ] [[package]] @@ -8921,11 
+7778,9 @@ name = "torchaudio" version = "2.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/b7/c66dc34a27441d78997e20d0ffe2f5ad73db9f7b1267511be255bb94ac9b/torchaudio-2.10.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:87c841a21e82703ebd4a29170c4e60c25a2b47312dc212930087ad58965ac0c8", size = 391843, upload-time = "2026-01-21T16:28:43.093Z" }, - { url = "https://files.pythonhosted.org/packages/13/ae/a2a34a64947c4fa4a61b4c86d8f36fbcb4ebfec30fdde140267db260f96c/torchaudio-2.10.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:b2c77fb9114dd463dc805560bf55a1ac2a52e219794cc32b7b32cf2aeffd2826", size = 1894140, upload-time = "2026-01-21T16:28:35.892Z" }, { url = "https://files.pythonhosted.org/packages/ea/3f/df620439a76ece170472d41438d11a1545d5db5dc9f1eaeab8c6e055a328/torchaudio-2.10.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:42b148a0921a3721abd1f6ae098b1ec9f89703e555c4f7a0d44da87b8decbcb9", size = 391973, upload-time = "2026-01-21T16:28:39.732Z" }, { url = "https://files.pythonhosted.org/packages/98/25/e55a30d7138f8fe56ed006df25b0a3c27681f0ec7bc9989e1778e6d559c3/torchaudio-2.10.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:0e77b2956448d63790a99beed0b74ac8b8cd3a94dcdd9ad01974411078f46278", size = 1895234, upload-time = "2026-01-21T16:28:37.034Z" }, { url = "https://files.pythonhosted.org/packages/49/fd/831c2595c81b17141180ca11ab3c0836cc544ef13e15aa0e7b2cb619e582/torchaudio-2.10.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:5bc39ff3ea341097ce1ab023dd88c9dd8ca5f96ebf48821e7d23766137bb55d7", size = 392757, upload-time = "2026-01-21T16:28:33.631Z" }, @@ -8938,72 +7793,36 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/e1/68/e37e8fbbae986afa80f8851e08fc017eb8ae5f7b398ee28ed92303da163e/torchaudio-2.10.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:f7aa33a8198e87949896e16ea245ea731906445becdf10130e8823c68494a94a", size = 1897289, upload-time = "2026-01-21T16:28:17.059Z" }, ] -[[package]] -name = "torchvision" -version = "0.24.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.12.*' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < '3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] -dependencies = [ - { name = "numpy", marker = "sys_platform != 'linux'" }, - { name = "pillow", marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/69/30f5f03752aa1a7c23931d2519b31e557f3f10af5089d787cddf3b903ecf/torchvision-0.24.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:056c525dc875f18fe8e9c27079ada166a7b2755cea5a2199b0bc7f1f8364e600", size = 1891436, upload-time = "2025-11-12T15:25:04.3Z" }, - { url = "https://files.pythonhosted.org/packages/fa/bb/cfc6a6f6ccc84a534ed1fdf029ae5716dd6ff04e57ed9dc2dab38bf652d5/torchvision-0.24.1-cp311-cp311-win_amd64.whl", hash = "sha256:a9308cdd37d8a42e14a3e7fd9d271830c7fecb150dd929b642f3c1460514599a", size = 4037588, upload-time = "2025-11-12T15:25:14.402Z" }, - { url = "https://files.pythonhosted.org/packages/f0/af/18e2c6b9538a045f60718a0c5a058908ccb24f88fde8e6f0fc12d5ff7bd3/torchvision-0.24.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e48bf6a8ec95872eb45763f06499f87bd2fb246b9b96cb00aae260fda2f96193", size = 1891433, upload-time = "2025-11-12T15:25:03.232Z" }, - { url = "https://files.pythonhosted.org/packages/69/98/16e583f59f86cd59949f59d52bfa8fc286f86341a229a9d15cbe7a694f0c/torchvision-0.24.1-cp312-cp312-win_amd64.whl", hash = "sha256:4aa6cb806eb8541e92c9b313e96192c6b826e9eb0042720e2fa250d021079952", size = 4302006, upload-time = "2025-11-12T15:25:16.184Z" }, - { url = "https://files.pythonhosted.org/packages/e4/97/ab40550f482577f2788304c27220e8ba02c63313bd74cf2f8920526aac20/torchvision-0.24.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:8a6696db7fb71eadb2c6a48602106e136c785642e598eb1533e0b27744f2cce6", size = 1891435, upload-time = "2025-11-12T15:25:28.642Z" }, - { url = "https://files.pythonhosted.org/packages/5c/ec/54a96ae9ab6a0dd66d4bba27771f892e36478a9c3489fa56e51c70abcc4d/torchvision-0.24.1-cp313-cp313-win_amd64.whl", hash = "sha256:16274823b93048e0a29d83415166a2e9e0bf4e1b432668357b657612a4802864", size = 4319808, upload-time = "2025-11-12T15:25:17.318Z" }, - { url = "https://files.pythonhosted.org/packages/d5/f3/a90a389a7e547f3eb8821b13f96ea7c0563cdefbbbb60a10e08dda9720ff/torchvision-0.24.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e3f96208b4bef54cd60e415545f5200346a65024e04f29a26cd0006dbf9e8e66", size = 2005342, upload-time = "2025-11-12T15:25:11.871Z" }, 
- { url = "https://files.pythonhosted.org/packages/4f/2b/ba02e4261369c3798310483028495cf507e6cb3f394f42e4796981ecf3a7/torchvision-0.24.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d83e16d70ea85d2f196d678bfb702c36be7a655b003abed84e465988b6128938", size = 4251604, upload-time = "2025-11-12T15:25:34.069Z" }, - { url = "https://files.pythonhosted.org/packages/42/84/577b2cef8f32094add5f52887867da4c2a3e6b4261538447e9b48eb25812/torchvision-0.24.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cccf4b4fec7fdfcd3431b9ea75d1588c0a8596d0333245dafebee0462abe3388", size = 2005319, upload-time = "2025-11-12T15:25:23.827Z" }, - { url = "https://files.pythonhosted.org/packages/6d/ba/8fae3525b233e109317ce6a9c1de922ab2881737b029a7e88021f81e068f/torchvision-0.24.1-cp314-cp314-win_amd64.whl", hash = "sha256:18f9cb60e64b37b551cd605a3d62c15730c086362b40682d23e24b616a697d41", size = 4234459, upload-time = "2025-11-12T15:25:19.859Z" }, - { url = "https://files.pythonhosted.org/packages/50/33/481602c1c72d0485d4b3a6b48c9534b71c2957c9d83bf860eb837bf5a620/torchvision-0.24.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ec9d7379c519428395e4ffda4dbb99ec56be64b0a75b95989e00f9ec7ae0b2d7", size = 2005336, upload-time = "2025-11-12T15:25:27.225Z" }, - { url = "https://files.pythonhosted.org/packages/d6/ab/e2bcc7c2f13d882a58f8b30ff86f794210b075736587ea50f8c545834f8a/torchvision-0.24.1-cp314-cp314t-win_amd64.whl", hash = "sha256:480b271d6edff83ac2e8d69bbb4cf2073f93366516a50d48f140ccfceedb002e", size = 4335190, upload-time = "2025-11-12T15:25:35.745Z" }, -] - [[package]] name = "torchvision" version = "0.25.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", - "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform == 'linux'", -] dependencies = [ - { name = "numpy", marker = 
"sys_platform == 'linux'" }, - { name = "pillow", marker = "sys_platform == 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "numpy" }, + { name = "pillow" }, + { name = "torch" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/ae/e9/f143cd71232430de1f547ceab840f68c55e127d72558b1061a71d0b193cd/torchvision-0.25.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f49964f96644dbac2506dffe1a0a7ec0f2bf8cf7a588c3319fed26e6329ffdf3", size = 2344808, upload-time = "2026-01-21T16:27:43.191Z" }, - { url = "https://files.pythonhosted.org/packages/43/ae/ad5d6165797de234c9658752acb4fce65b78a6a18d82efdf8367c940d8da/torchvision-0.25.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:153c0d2cbc34b7cf2da19d73450f24ba36d2b75ec9211b9962b5022fb9e4ecee", size = 8070752, upload-time = "2026-01-21T16:27:33.748Z" }, + { url = "https://files.pythonhosted.org/packages/56/3a/6ea0d73f49a9bef38a1b3a92e8dd455cea58470985d25635beab93841748/torchvision-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2abe430c90b1d5e552680037d68da4eb80a5852ebb1c811b2b89d299b10573b", size = 1874920, upload-time = "2026-01-21T16:27:45.348Z" }, { url = "https://files.pythonhosted.org/packages/51/f8/c0e1ef27c66e15406fece94930e7d6feee4cb6374bbc02d945a630d6426e/torchvision-0.25.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:b75deafa2dfea3e2c2a525559b04783515e3463f6e830cb71de0fb7ea36fe233", size = 2344556, upload-time = "2026-01-21T16:27:40.125Z" }, { url = "https://files.pythonhosted.org/packages/68/2f/f24b039169db474e8688f649377de082a965fbf85daf4e46c44412f1d15a/torchvision-0.25.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f25aa9e380865b11ea6e9d99d84df86b9cc959f1a007cd966fc6f1ab2ed0e248", size = 8072351, upload-time = "2026-01-21T16:27:21.074Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/16/8f650c2e288977cf0f8f85184b90ee56ed170a4919347fc74ee99286ed6f/torchvision-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:f9c55ae8d673ab493325d1267cbd285bb94d56f99626c00ac4644de32a59ede3", size = 4303059, upload-time = "2026-01-21T16:27:11.08Z" }, + { url = "https://files.pythonhosted.org/packages/f5/5b/1562a04a6a5a4cf8cf40016a0cdeda91ede75d6962cff7f809a85ae966a5/torchvision-0.25.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:24e11199e4d84ba9c5ee7825ebdf1cd37ce8deec225117f10243cae984ced3ec", size = 1874918, upload-time = "2026-01-21T16:27:39.02Z" }, { url = "https://files.pythonhosted.org/packages/36/b1/3d6c42f62c272ce34fcce609bb8939bdf873dab5f1b798fd4e880255f129/torchvision-0.25.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:5f271136d2d2c0b7a24c5671795c6e4fd8da4e0ea98aeb1041f62bc04c4370ef", size = 2309106, upload-time = "2026-01-21T16:27:30.624Z" }, { url = "https://files.pythonhosted.org/packages/c7/60/59bb9c8b67cce356daeed4cb96a717caa4f69c9822f72e223a0eae7a9bd9/torchvision-0.25.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:855c0dc6d37f462482da7531c6788518baedca1e0847f3df42a911713acdfe52", size = 8071522, upload-time = "2026-01-21T16:27:29.392Z" }, + { url = "https://files.pythonhosted.org/packages/32/a5/9a9b1de0720f884ea50dbf9acb22cbe5312e51d7b8c4ac6ba9b51efd9bba/torchvision-0.25.0-cp313-cp313-win_amd64.whl", hash = "sha256:cef0196be31be421f6f462d1e9da1101be7332d91984caa6f8022e6c78a5877f", size = 4321911, upload-time = "2026-01-21T16:27:35.195Z" }, + { url = "https://files.pythonhosted.org/packages/52/99/dca81ed21ebaeff2b67cc9f815a20fdaa418b69f5f9ea4c6ed71721470db/torchvision-0.25.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a8f8061284395ce31bcd460f2169013382ccf411148ceb2ee38e718e9860f5a7", size = 1896209, upload-time = "2026-01-21T16:27:32.159Z" }, { url = 
"https://files.pythonhosted.org/packages/28/cc/2103149761fdb4eaed58a53e8437b2d716d48f05174fab1d9fcf1e2a2244/torchvision-0.25.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:146d02c9876858420adf41f3189fe90e3d6a409cbfa65454c09f25fb33bf7266", size = 2310735, upload-time = "2026-01-21T16:27:22.327Z" }, { url = "https://files.pythonhosted.org/packages/76/ad/f4c985ad52ddd3b22711c588501be1b330adaeaf6850317f66751711b78c/torchvision-0.25.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:c4d395cb2c4a2712f6eb93a34476cdf7aae74bb6ea2ea1917f858e96344b00aa", size = 8089557, upload-time = "2026-01-21T16:27:27.666Z" }, + { url = "https://files.pythonhosted.org/packages/63/cc/0ea68b5802e5e3c31f44b307e74947bad5a38cc655231d845534ed50ddb8/torchvision-0.25.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5e6b449e9fa7d642142c0e27c41e5a43b508d57ed8e79b7c0a0c28652da8678c", size = 4344260, upload-time = "2026-01-21T16:27:17.018Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1f/fa839532660e2602b7e704d65010787c5bb296258b44fa8b9c1cd6175e7d/torchvision-0.25.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:620a236288d594dcec7634c754484542dc0a5c1b0e0b83a34bda5e91e9b7c3a1", size = 1896193, upload-time = "2026-01-21T16:27:24.785Z" }, { url = "https://files.pythonhosted.org/packages/80/ed/d51889da7ceaf5ff7a0574fb28f9b6b223df19667265395891f81b364ab3/torchvision-0.25.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:0b5e7f50002a8145a98c5694a018e738c50e2972608310c7e88e1bd4c058f6ce", size = 2309331, upload-time = "2026-01-21T16:27:19.97Z" }, { url = "https://files.pythonhosted.org/packages/90/a5/f93fcffaddd8f12f9e812256830ec9c9ca65abbf1bc369379f9c364d1ff4/torchvision-0.25.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:632db02300e83793812eee4f61ae6a2686dab10b4cfd628b620dc47747aa9d03", size = 8088713, upload-time = "2026-01-21T16:27:15.281Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/eb/d0096eed5690d962853213f2ee00d91478dfcb586b62dbbb449fb8abc3a6/torchvision-0.25.0-cp314-cp314-win_amd64.whl", hash = "sha256:d1abd5ed030c708f5dbf4812ad5f6fbe9384b63c40d6bd79f8df41a4a759a917", size = 4325058, upload-time = "2026-01-21T16:27:26.165Z" }, + { url = "https://files.pythonhosted.org/packages/97/36/96374a4c7ab50dea9787ce987815614ccfe988a42e10ac1a2e3e5b60319a/torchvision-0.25.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad9a8a5877782944d99186e4502a614770fe906626d76e9cd32446a0ac3075f2", size = 1896207, upload-time = "2026-01-21T16:27:23.383Z" }, { url = "https://files.pythonhosted.org/packages/b5/e2/7abb10a867db79b226b41da419b63b69c0bd5b82438c4a4ed50e084c552f/torchvision-0.25.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:40a122c3cf4d14b651f095e0f672b688dde78632783fc5cd3d4d5e4f6a828563", size = 2310741, upload-time = "2026-01-21T16:27:18.712Z" }, { url = "https://files.pythonhosted.org/packages/08/e6/0927784e6ffc340b6676befde1c60260bd51641c9c574b9298d791a9cda4/torchvision-0.25.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:846890161b825b38aa85fc37fb3ba5eea74e7091ff28bab378287111483b6443", size = 8089772, upload-time = "2026-01-21T16:27:14.048Z" }, + { url = "https://files.pythonhosted.org/packages/b6/37/e7ca4ec820d434c0f23f824eb29f0676a0c3e7a118f1514f5b949c3356da/torchvision-0.25.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f07f01d27375ad89d72aa2b3f2180f07da95dd9d2e4c758e015c0acb2da72977", size = 4425879, upload-time = "2026-01-21T16:27:12.579Z" }, ] [[package]] @@ -9083,18 +7902,16 @@ wheels = [ [[package]] name = "transformer-engine-torch" version = "2.11.0" -source = { registry = "https://pypi.org/simple" } +source = { git = "https://github.com/NVIDIA/TransformerEngine.git?subdirectory=transformer_engine%2Fpytorch&tag=v2.11#c188b533cc3721ca9c6bbfd26148f5cf60108c25" } dependencies = [ { name = "einops" }, { name = "onnx" }, { name = "onnxscript" }, { name = "packaging" }, { name 
= "pydantic" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, { name = "transformer-engine-cu12" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/09/42/068a40f5b213a3a8899e3885eb178776662897abed03cd725953d1106c39/transformer_engine_torch-2.11.0.tar.gz", hash = "sha256:b58d6322bdf885dfab0646da572aff9cf090b332ad470559aa58883c231e1816", size = 242065, upload-time = "2026-01-02T09:58:58.423Z" } [[package]] name = "transformers" @@ -9117,38 +7934,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6a/6b/2f416568b3c4c91c96e5a365d164f8a4a4a88030aa8ab4644181fdadce97/transformers-4.57.3-py3-none-any.whl", hash = "sha256:c77d353a4851b1880191603d36acb313411d3577f6e2897814f333841f7003f4", size = 11993463, upload-time = "2025-11-25T15:51:26.493Z" }, ] -[[package]] -name = "triton" -version = "3.5.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.12.*' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < 
'3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", -] - [[package]] name = "triton" version = "3.6.0" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14' and sys_platform == 'linux'", - "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform == 'linux'", -] wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/2c/96f92f3c60387e14cc45aed49487f3486f89ea27106c1b1376913c62abe4/triton-3.6.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49df5ef37379c0c2b5c0012286f80174fcf0e073e5ade1ca9a86c36814553651", size = 176081190, upload-time = "2026-01-20T16:16:00.523Z" }, - { url = "https://files.pythonhosted.org/packages/e0/12/b05ba554d2c623bffa59922b94b0775673de251f468a9609bc9e45de95e9/triton-3.6.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8e323d608e3a9bfcc2d9efcc90ceefb764a82b99dea12a86d643c72539ad5d3", size = 188214640, upload-time = "2026-01-20T16:00:35.869Z" }, { url = "https://files.pythonhosted.org/packages/17/5d/08201db32823bdf77a0e2b9039540080b2e5c23a20706ddba942924ebcd6/triton-3.6.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:374f52c11a711fd062b4bfbb201fd9ac0a5febd28a96fb41b4a0f51dde3157f4", size = 176128243, upload-time = "2026-01-20T16:16:07.857Z" }, { url = "https://files.pythonhosted.org/packages/ab/a8/cdf8b3e4c98132f965f88c2313a4b493266832ad47fb52f23d14d4f86bb5/triton-3.6.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74caf5e34b66d9f3a429af689c1c7128daba1d8208df60e81106b115c00d6fca", size = 188266850, upload-time = "2026-01-20T16:00:43.041Z" }, { url = 
"https://files.pythonhosted.org/packages/3c/12/34d71b350e89a204c2c7777a9bba0dcf2f19a5bfdd70b57c4dbc5ffd7154/triton-3.6.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448e02fe6dc898e9e5aa89cf0ee5c371e99df5aa5e8ad976a80b93334f3494fd", size = 176133521, upload-time = "2026-01-20T16:16:13.321Z" }, @@ -9166,7 +7956,6 @@ name = "triton-windows" version = "3.6.0.post25" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/49/b8/2ce283452b0b9e0d239c7833626750befe94d5bbed18fb9449dcc5fa494e/triton_windows-3.6.0.post25-cp311-cp311-win_amd64.whl", hash = "sha256:5dabf103499825379c9ba877da46a4c34296466a628b539249482ab6d970708e", size = 47381466, upload-time = "2026-01-26T03:21:21.541Z" }, { url = "https://files.pythonhosted.org/packages/66/b1/9744fc17eded50644ffb95f3f4b1ffd1f42d646d6e0a811d92e43834865e/triton_windows-3.6.0.post25-cp312-cp312-win_amd64.whl", hash = "sha256:8361375ee4b5e0a4fe7a3c7fc2fde368ce74237396d8ff95c2e26983dd32e342", size = 47382693, upload-time = "2026-01-26T03:21:28.157Z" }, { url = "https://files.pythonhosted.org/packages/e5/cb/1f5f738cf8f6b8c6d475a92422251228a16ca2ee6f872d0f63c761f02896/triton_windows-3.6.0.post25-cp313-cp313-win_amd64.whl", hash = "sha256:d22e5f6f4896b43037d811910e2fcc5ff5f057b78f6094ab28999e4a21997b76", size = 47383937, upload-time = "2026-01-26T03:21:35.071Z" }, { url = "https://files.pythonhosted.org/packages/c7/d3/58ad68518e04a97ce0549cad98eccbafac01ddba640379776a58b513020b/triton_windows-3.6.0.post25-cp314-cp314-win_amd64.whl", hash = "sha256:6f4c4775b22cfb18e9c60aead83deb7b9b970624ae3c13cd26b9be80b5cb8cd8", size = 48566374, upload-time = "2026-01-26T03:21:41.743Z" }, @@ -9308,17 +8097,6 @@ version = "5.11.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/43/d9/3f17e3c5773fb4941c68d9a37a47b1a79c9649d6c56aefbed87cc409d18a/ujson-5.11.0.tar.gz", hash = 
"sha256:e204ae6f909f099ba6b6b942131cee359ddda2b6e4ea39c12eb8b991fe2010e0", size = 7156583, upload-time = "2025-08-20T11:57:02.452Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/da/ea/80346b826349d60ca4d612a47cdf3533694e49b45e9d1c07071bb867a184/ujson-5.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d7c46cb0fe5e7056b9acb748a4c35aa1b428025853032540bb7e41f46767321f", size = 55248, upload-time = "2025-08-20T11:55:19.033Z" }, - { url = "https://files.pythonhosted.org/packages/57/df/b53e747562c89515e18156513cc7c8ced2e5e3fd6c654acaa8752ffd7cd9/ujson-5.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8951bb7a505ab2a700e26f691bdfacf395bc7e3111e3416d325b513eea03a58", size = 53156, upload-time = "2025-08-20T11:55:20.174Z" }, - { url = "https://files.pythonhosted.org/packages/41/b8/ab67ec8c01b8a3721fd13e5cb9d85ab2a6066a3a5e9148d661a6870d6293/ujson-5.11.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952c0be400229940248c0f5356514123d428cba1946af6fa2bbd7503395fef26", size = 57657, upload-time = "2025-08-20T11:55:21.296Z" }, - { url = "https://files.pythonhosted.org/packages/7b/c7/fb84f27cd80a2c7e2d3c6012367aecade0da936790429801803fa8d4bffc/ujson-5.11.0-cp311-cp311-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:94fcae844f1e302f6f8095c5d1c45a2f0bfb928cccf9f1b99e3ace634b980a2a", size = 59779, upload-time = "2025-08-20T11:55:22.772Z" }, - { url = "https://files.pythonhosted.org/packages/5d/7c/48706f7c1e917ecb97ddcfb7b1d756040b86ed38290e28579d63bd3fcc48/ujson-5.11.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7e0ec1646db172beb8d3df4c32a9d78015e671d2000af548252769e33079d9a6", size = 57284, upload-time = "2025-08-20T11:55:24.01Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ce/48877c6eb4afddfd6bd1db6be34456538c07ca2d6ed233d3f6c6efc2efe8/ujson-5.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:da473b23e3a54448b008d33f742bcd6d5fb2a897e42d1fc6e7bf306ea5d18b1b", size = 1036395, upload-time = "2025-08-20T11:55:25.725Z" }, - { url = "https://files.pythonhosted.org/packages/8b/7a/2c20dc97ad70cd7c31ad0596ba8e2cf8794d77191ba4d1e0bded69865477/ujson-5.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:aa6b3d4f1c0d3f82930f4cbd7fe46d905a4a9205a7c13279789c1263faf06dba", size = 1195731, upload-time = "2025-08-20T11:55:27.915Z" }, - { url = "https://files.pythonhosted.org/packages/15/f5/ca454f2f6a2c840394b6f162fff2801450803f4ff56c7af8ce37640b8a2a/ujson-5.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4843f3ab4fe1cc596bb7e02228ef4c25d35b4bb0809d6a260852a4bfcab37ba3", size = 1088710, upload-time = "2025-08-20T11:55:29.426Z" }, - { url = "https://files.pythonhosted.org/packages/fe/d3/9ba310e07969bc9906eb7548731e33a0f448b122ad9705fed699c9b29345/ujson-5.11.0-cp311-cp311-win32.whl", hash = "sha256:e979fbc469a7f77f04ec2f4e853ba00c441bf2b06720aa259f0f720561335e34", size = 39648, upload-time = "2025-08-20T11:55:31.194Z" }, - { url = "https://files.pythonhosted.org/packages/57/f7/da05b4a8819f1360be9e71fb20182f0bb3ec611a36c3f213f4d20709e099/ujson-5.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:683f57f0dd3acdd7d9aff1de0528d603aafcb0e6d126e3dc7ce8b020a28f5d01", size = 43717, upload-time = "2025-08-20T11:55:32.241Z" }, - { url = "https://files.pythonhosted.org/packages/9a/cc/f3f9ac0f24f00a623a48d97dc3814df5c2dc368cfb00031aa4141527a24b/ujson-5.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:7855ccea3f8dad5e66d8445d754fc1cf80265a4272b5f8059ebc7ec29b8d0835", size = 38402, upload-time = "2025-08-20T11:55:33.641Z" }, { url = "https://files.pythonhosted.org/packages/b9/ef/a9cb1fce38f699123ff012161599fb9f2ff3f8d482b4b18c43a2dc35073f/ujson-5.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7895f0d2d53bd6aea11743bd56e3cb82d729980636cd0ed9b89418bf66591702", size = 55434, upload-time = "2025-08-20T11:55:34.987Z" }, { url = 
"https://files.pythonhosted.org/packages/b1/05/dba51a00eb30bd947791b173766cbed3492269c150a7771d2750000c965f/ujson-5.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12b5e7e22a1fe01058000d1b317d3b65cc3daf61bd2ea7a2b76721fe160fa74d", size = 53190, upload-time = "2025-08-20T11:55:36.384Z" }, { url = "https://files.pythonhosted.org/packages/03/3c/fd11a224f73fbffa299fb9644e425f38b38b30231f7923a088dd513aabb4/ujson-5.11.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0180a480a7d099082501cad1fe85252e4d4bf926b40960fb3d9e87a3a6fbbc80", size = 57600, upload-time = "2025-08-20T11:55:37.692Z" }, @@ -9363,12 +8141,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/30/ed/5a057199fb0a5deabe0957073a1c1c1c02a3e99476cd03daee98ea21fa57/ujson-5.11.0-cp314-cp314t-win32.whl", hash = "sha256:aa6d7a5e09217ff93234e050e3e380da62b084e26b9f2e277d2606406a2fc2e5", size = 41859, upload-time = "2025-08-20T11:56:30.495Z" }, { url = "https://files.pythonhosted.org/packages/aa/03/b19c6176bdf1dc13ed84b886e99677a52764861b6cc023d5e7b6ebda249d/ujson-5.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:48055e1061c1bb1f79e75b4ac39e821f3f35a9b82de17fce92c3140149009bec", size = 46183, upload-time = "2025-08-20T11:56:31.574Z" }, { url = "https://files.pythonhosted.org/packages/5d/ca/a0413a3874b2dc1708b8796ca895bf363292f9c70b2e8ca482b7dbc0259d/ujson-5.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:1194b943e951092db611011cb8dbdb6cf94a3b816ed07906e14d3bc6ce0e90ab", size = 40264, upload-time = "2025-08-20T11:56:32.773Z" }, - { url = "https://files.pythonhosted.org/packages/50/17/30275aa2933430d8c0c4ead951cc4fdb922f575a349aa0b48a6f35449e97/ujson-5.11.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:abae0fb58cc820092a0e9e8ba0051ac4583958495bfa5262a12f628249e3b362", size = 51206, upload-time = "2025-08-20T11:56:48.797Z" }, - { url = 
"https://files.pythonhosted.org/packages/c3/15/42b3924258eac2551f8f33fa4e35da20a06a53857ccf3d4deb5e5d7c0b6c/ujson-5.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fac6c0649d6b7c3682a0a6e18d3de6857977378dce8d419f57a0b20e3d775b39", size = 48907, upload-time = "2025-08-20T11:56:50.136Z" }, - { url = "https://files.pythonhosted.org/packages/94/7e/0519ff7955aba581d1fe1fb1ca0e452471250455d182f686db5ac9e46119/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b42c115c7c6012506e8168315150d1e3f76e7ba0f4f95616f4ee599a1372bbc", size = 50319, upload-time = "2025-08-20T11:56:51.63Z" }, - { url = "https://files.pythonhosted.org/packages/74/cf/209d90506b7d6c5873f82c5a226d7aad1a1da153364e9ebf61eff0740c33/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:86baf341d90b566d61a394869ce77188cc8668f76d7bb2c311d77a00f4bdf844", size = 56584, upload-time = "2025-08-20T11:56:52.89Z" }, - { url = "https://files.pythonhosted.org/packages/e9/97/bd939bb76943cb0e1d2b692d7e68629f51c711ef60425fa5bb6968037ecd/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4598bf3965fc1a936bd84034312bcbe00ba87880ef1ee33e33c1e88f2c398b49", size = 51588, upload-time = "2025-08-20T11:56:54.054Z" }, - { url = "https://files.pythonhosted.org/packages/52/5b/8c5e33228f7f83f05719964db59f3f9f276d272dc43752fa3bbf0df53e7b/ujson-5.11.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:416389ec19ef5f2013592f791486bef712ebce0cd59299bf9df1ba40bb2f6e04", size = 43835, upload-time = "2025-08-20T11:56:55.237Z" }, ] [[package]] @@ -9385,18 +8157,14 @@ dependencies = [ { name = "numpy" }, { name = "packaging" }, { name = "peft" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 
'linux'" }, + { name = "protobuf" }, { name = "psutil" }, { name = "sentencepiece" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "torchvision", version = "0.24.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torchvision", version = "0.25.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch" }, + { name = "torchvision" }, { name = "tqdm" }, { name = "transformers" }, - { name = "triton", version = "3.5.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux' and 'linux' in sys_platform" }, - { name = "triton", version = "3.6.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux' and 'linux' in sys_platform" }, + { name = "triton", marker = "'linux' in sys_platform" }, { name = "triton-windows", marker = "(platform_machine == 'AMD64' and sys_platform == 'win32') or (platform_machine == 'x86_64' and sys_platform == 'win32')" }, { name = "trl" }, { name = "tyro" }, @@ -9426,18 +8194,15 @@ dependencies = [ { name = "packaging" }, { name = "peft" }, { name = "pillow" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, { name = "psutil" }, { name = "regex" }, { name = "sentencepiece" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, 
+ { name = "torch" }, { name = "torchao" }, { name = "tqdm" }, { name = "transformers" }, - { name = "triton", version = "3.5.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux' and 'linux' in sys_platform" }, - { name = "triton", version = "3.6.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux' and 'linux' in sys_platform" }, + { name = "triton", marker = "'linux' in sys_platform" }, { name = "trl" }, { name = "typing-extensions" }, { name = "tyro" }, @@ -9498,13 +8263,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e7/9b/e5e99b324b1b5f0c62882230455786df0bc66f67eff3b452447e703f45d2/uuid_utils-0.14.0-cp39-abi3-win32.whl", hash = "sha256:ec2fd80adf8e0e6589d40699e6f6df94c93edcc16dd999be0438dd007c77b151", size = 177319, upload-time = "2026-01-20T20:37:04.208Z" }, { url = "https://files.pythonhosted.org/packages/d3/28/2c7d417ea483b6ff7820c948678fdf2ac98899dc7e43bb15852faa95acaf/uuid_utils-0.14.0-cp39-abi3-win_amd64.whl", hash = "sha256:efe881eb43a5504fad922644cb93d725fd8a6a6d949bd5a4b4b7d1a1587c7fd1", size = 182566, upload-time = "2026-01-20T20:37:16.868Z" }, { url = "https://files.pythonhosted.org/packages/b8/86/49e4bdda28e962fbd7266684171ee29b3d92019116971d58783e51770745/uuid_utils-0.14.0-cp39-abi3-win_arm64.whl", hash = "sha256:32b372b8fd4ebd44d3a219e093fe981af4afdeda2994ee7db208ab065cfcd080", size = 182809, upload-time = "2026-01-20T20:37:05.139Z" }, - { url = "https://files.pythonhosted.org/packages/f1/03/1f1146e32e94d1f260dfabc81e1649102083303fb4ad549775c943425d9a/uuid_utils-0.14.0-pp311-pypy311_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:762e8d67992ac4d2454e24a141a1c82142b5bde10409818c62adbe9924ebc86d", size = 587430, upload-time = "2026-01-20T20:37:24.998Z" }, - { url = 
"https://files.pythonhosted.org/packages/87/ba/d5a7469362594d885fd9219fe9e851efbe65101d3ef1ef25ea321d7ce841/uuid_utils-0.14.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:40be5bf0b13aa849d9062abc86c198be6a25ff35316ce0b89fc25f3bac6d525e", size = 298106, upload-time = "2026-01-20T20:37:23.896Z" }, - { url = "https://files.pythonhosted.org/packages/8a/11/3dafb2a5502586f59fd49e93f5802cd5face82921b3a0f3abb5f357cb879/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:191a90a6f3940d1b7322b6e6cceff4dd533c943659e0a15f788674407856a515", size = 333423, upload-time = "2026-01-20T20:37:17.828Z" }, - { url = "https://files.pythonhosted.org/packages/7c/f2/c8987663f0cdcf4d717a36d85b5db2a5589df0a4e129aa10f16f4380ef48/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4aa4525f4ad82f9d9c842f9a3703f1539c1808affbaec07bb1b842f6b8b96aa5", size = 338659, upload-time = "2026-01-20T20:37:14.286Z" }, - { url = "https://files.pythonhosted.org/packages/d1/c8/929d81665d83f0b2ffaecb8e66c3091a50f62c7cb5b65e678bd75a96684e/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdbd82ff20147461caefc375551595ecf77ebb384e46267f128aca45a0f2cdfc", size = 467029, upload-time = "2026-01-20T20:37:08.277Z" }, - { url = "https://files.pythonhosted.org/packages/8e/a0/27d7daa1bfed7163f4ccaf52d7d2f4ad7bb1002a85b45077938b91ee584f/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff57e8a5d540006ce73cf0841a643d445afe78ba12e75ac53a95ca2924a56be", size = 333298, upload-time = "2026-01-20T20:37:07.271Z" }, - { url = "https://files.pythonhosted.org/packages/63/d4/acad86ce012b42ce18a12f31ee2aa3cbeeb98664f865f05f68c882945913/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fd9112ca96978361201e669729784f26c71fecc9c13a7f8a07162c31bd4d1e2", size = 359217, 
upload-time = "2026-01-20T20:36:59.687Z" }, ] [[package]] @@ -9562,12 +8320,6 @@ version = "0.22.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/d5/69900f7883235562f1f50d8184bb7dd84a2fb61e9ec63f3782546fdbd057/uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9", size = 1352420, upload-time = "2025-10-16T22:16:21.187Z" }, - { url = "https://files.pythonhosted.org/packages/a8/73/c4e271b3bce59724e291465cc936c37758886a4868787da0278b3b56b905/uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77", size = 748677, upload-time = "2025-10-16T22:16:22.558Z" }, - { url = "https://files.pythonhosted.org/packages/86/94/9fb7fad2f824d25f8ecac0d70b94d0d48107ad5ece03769a9c543444f78a/uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21", size = 3753819, upload-time = "2025-10-16T22:16:23.903Z" }, - { url = "https://files.pythonhosted.org/packages/74/4f/256aca690709e9b008b7108bc85fba619a2bc37c6d80743d18abad16ee09/uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702", size = 3804529, upload-time = "2025-10-16T22:16:25.246Z" }, - { url = "https://files.pythonhosted.org/packages/7f/74/03c05ae4737e871923d21a76fe28b6aad57f5c03b6e6bfcfa5ad616013e4/uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733", size = 3621267, upload-time = "2025-10-16T22:16:26.819Z" }, - { url = "https://files.pythonhosted.org/packages/75/be/f8e590fe61d18b4a92070905497aec4c0e64ae1761498cad09023f3f4b3e/uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473", size = 3723105, upload-time = "2025-10-16T22:16:28.252Z" }, { url = "https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" }, { url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" }, { url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413, upload-time = "2025-10-16T22:16:31.644Z" }, @@ -9613,73 +8365,73 @@ name = "vllm" version = "0.17.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "aiohttp", marker = "sys_platform == 'linux'" }, - { name = "anthropic", marker = "sys_platform == 'linux'" }, - { name = "blake3", marker = "sys_platform == 'linux'" }, - { name = "cachetools", marker = "sys_platform == 'linux'" }, - { name = "cbor2", marker = "sys_platform == 'linux'" }, - { name = "cloudpickle", marker = "sys_platform == 'linux'" }, - { name = "compressed-tensors", marker = "sys_platform == 'linux'" }, - { name 
= "depyf", marker = "sys_platform == 'linux'" }, - { name = "diskcache", marker = "sys_platform == 'linux'" }, - { name = "einops", marker = "sys_platform == 'linux'" }, - { name = "fastapi", extra = ["standard"], marker = "sys_platform == 'linux'" }, - { name = "filelock", marker = "sys_platform == 'linux'" }, - { name = "flashinfer-python", version = "0.6.4", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "gguf", marker = "sys_platform == 'linux'" }, - { name = "grpcio", marker = "sys_platform == 'linux'" }, - { name = "grpcio-reflection", marker = "sys_platform == 'linux'" }, - { name = "ijson", marker = "sys_platform == 'linux'" }, - { name = "kaldi-native-fbank", marker = "sys_platform == 'linux'" }, - { name = "lark", marker = "sys_platform == 'linux'" }, - { name = "llguidance", marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'arm64' and sys_platform == 'linux') or (platform_machine == 'ppc64le' and sys_platform == 'linux') or (platform_machine == 's390x' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" }, - { name = "lm-format-enforcer", marker = "sys_platform == 'linux'" }, - { name = "mcp", marker = "sys_platform == 'linux'" }, - { name = "mistral-common", extra = ["image"], marker = "sys_platform == 'linux'" }, - { name = "model-hosting-container-standards", marker = "sys_platform == 'linux'" }, - { name = "msgspec", marker = "sys_platform == 'linux'" }, - { name = "ninja", marker = "sys_platform == 'linux'" }, - { name = "numba", marker = "sys_platform == 'linux'" }, - { name = "numpy", marker = "sys_platform == 'linux'" }, - { name = "nvidia-cutlass-dsl", marker = "sys_platform == 'linux'" }, - { name = "openai", marker = "sys_platform == 'linux'" }, - { name = "openai-harmony", marker = "sys_platform == 'linux'" }, - { name = "opencv-python-headless", marker = "sys_platform == 'linux'" }, - { name = 
"opentelemetry-api", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-exporter-otlp", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-sdk", marker = "sys_platform == 'linux'" }, - { name = "opentelemetry-semantic-conventions-ai", marker = "sys_platform == 'linux'" }, - { name = "outlines-core", marker = "sys_platform == 'linux'" }, - { name = "partial-json-parser", marker = "sys_platform == 'linux'" }, - { name = "pillow", marker = "sys_platform == 'linux'" }, - { name = "prometheus-client", marker = "sys_platform == 'linux'" }, - { name = "prometheus-fastapi-instrumentator", marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "psutil", marker = "sys_platform == 'linux'" }, - { name = "py-cpuinfo", marker = "sys_platform == 'linux'" }, - { name = "pybase64", marker = "sys_platform == 'linux'" }, - { name = "pydantic", marker = "sys_platform == 'linux'" }, - { name = "python-json-logger", marker = "sys_platform == 'linux'" }, - { name = "pyyaml", marker = "sys_platform == 'linux'" }, - { name = "pyzmq", marker = "sys_platform == 'linux'" }, - { name = "quack-kernels", marker = "sys_platform == 'linux'" }, - { name = "ray", extra = ["cgraph"], marker = "sys_platform == 'linux'" }, - { name = "regex", marker = "sys_platform == 'linux'" }, - { name = "requests", marker = "sys_platform == 'linux'" }, - { name = "sentencepiece", marker = "sys_platform == 'linux'" }, - { name = "setproctitle", marker = "sys_platform == 'linux'" }, - { name = "setuptools", marker = "python_full_version >= '3.12' and sys_platform == 'linux'" }, - { name = "six", marker = "python_full_version >= '3.12' and sys_platform == 'linux'" }, - { name = "tiktoken", marker = "sys_platform == 'linux'" }, - { name = "tokenizers", marker = "sys_platform == 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = 
"https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "torchaudio", marker = "sys_platform == 'linux'" }, - { name = "torchvision", version = "0.25.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "tqdm", marker = "sys_platform == 'linux'" }, - { name = "transformers", marker = "sys_platform == 'linux'" }, - { name = "typing-extensions", marker = "sys_platform == 'linux'" }, - { name = "watchfiles", marker = "sys_platform == 'linux'" }, - { name = "xgrammar", marker = "(platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'arm64' and sys_platform == 'linux') or (platform_machine == 'ppc64le' and sys_platform == 'linux') or (platform_machine == 's390x' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" }, + { name = "aiohttp" }, + { name = "anthropic" }, + { name = "blake3" }, + { name = "cachetools" }, + { name = "cbor2" }, + { name = "cloudpickle" }, + { name = "compressed-tensors" }, + { name = "depyf" }, + { name = "diskcache" }, + { name = "einops" }, + { name = "fastapi", extra = ["standard"] }, + { name = "filelock" }, + { name = "flashinfer-python" }, + { name = "gguf" }, + { name = "grpcio" }, + { name = "grpcio-reflection" }, + { name = "ijson" }, + { name = "kaldi-native-fbank" }, + { name = "lark" }, + { name = "llguidance", marker = "platform_machine == 'aarch64' or platform_machine == 'arm64' or platform_machine == 'ppc64le' or platform_machine == 's390x' or platform_machine == 'x86_64'" }, + { name = "lm-format-enforcer" }, + { name = "mcp" }, + { name = "mistral-common", extra = ["image"] }, + { name = "model-hosting-container-standards" }, + { name = "msgspec" }, + { name = "ninja" }, + { name = "numba" }, + { name = "numpy" }, + { name = "nvidia-cutlass-dsl" }, + { name = "openai" }, + { name = "openai-harmony" }, + { name = "opencv-python-headless" }, + { name = "opentelemetry-api" }, + { name = 
"opentelemetry-exporter-otlp" }, + { name = "opentelemetry-sdk" }, + { name = "opentelemetry-semantic-conventions-ai" }, + { name = "outlines-core" }, + { name = "partial-json-parser" }, + { name = "pillow" }, + { name = "prometheus-client" }, + { name = "prometheus-fastapi-instrumentator" }, + { name = "protobuf" }, + { name = "psutil" }, + { name = "py-cpuinfo" }, + { name = "pybase64" }, + { name = "pydantic" }, + { name = "python-json-logger" }, + { name = "pyyaml" }, + { name = "pyzmq" }, + { name = "quack-kernels" }, + { name = "ray", extra = ["cgraph"] }, + { name = "regex" }, + { name = "requests" }, + { name = "sentencepiece" }, + { name = "setproctitle" }, + { name = "setuptools" }, + { name = "six" }, + { name = "tiktoken" }, + { name = "tokenizers" }, + { name = "torch" }, + { name = "torchaudio" }, + { name = "torchvision" }, + { name = "tqdm" }, + { name = "transformers" }, + { name = "typing-extensions" }, + { name = "watchfiles" }, + { name = "xgrammar", marker = "platform_machine == 'aarch64' or platform_machine == 'arm64' or platform_machine == 'ppc64le' or platform_machine == 's390x' or platform_machine == 'x86_64'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/13/d5/af83a4262ca4d5692a93b3c322ae954e3e6c4e23f8f9db3ab87bd79c919e/vllm-0.17.0.tar.gz", hash = "sha256:b0b62e58ef4eb633ef371f2726976372cf6dfcb7ff2ea9ddf7194c1930d5629a", size = 30541311, upload-time = "2026-03-07T03:54:54.333Z" } wheels = [ @@ -9696,8 +8448,7 @@ dependencies = [ { name = "gitpython" }, { name = "packaging" }, { name = "platformdirs" }, - { name = "protobuf", version = "5.29.6", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "protobuf", version = "6.33.5", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "protobuf" }, { name = "pydantic" }, { name = "pyyaml" }, { name = "requests" }, @@ -9723,9 +8474,6 @@ version = "6.0.0" source = { registry = 
"https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/24/d9be5cd6642a6aa68352ded4b4b10fb0d7889cb7f45814fb92cecd35f101/watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c", size = 96393, upload-time = "2024-11-01T14:06:31.756Z" }, - { url = "https://files.pythonhosted.org/packages/63/7a/6013b0d8dbc56adca7fdd4f0beed381c59f6752341b12fa0886fa7afc78b/watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2", size = 88392, upload-time = "2024-11-01T14:06:32.99Z" }, - { url = "https://files.pythonhosted.org/packages/d1/40/b75381494851556de56281e053700e46bff5b37bf4c7267e858640af5a7f/watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c", size = 89019, upload-time = "2024-11-01T14:06:34.963Z" }, { url = "https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" }, { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" }, { url = 
"https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" }, @@ -9753,19 +8501,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" }, - { url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" }, - { url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" }, - { url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" }, - { url = 
"https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" }, - { url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" }, - { url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" }, - { url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" }, - { url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = "2025-10-14T15:04:41.789Z" }, - { url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473, upload-time = "2025-10-14T15:04:43.624Z" }, - { url = "https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598, upload-time = "2025-10-14T15:04:44.516Z" }, - { url = "https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210, upload-time = "2025-10-14T15:04:45.883Z" }, { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, { url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, @@ -9825,10 +8560,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" }, { url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" }, { url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, - { url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" }, - { url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" }, - { url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" }, - { url = 
"https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" }, ] [[package]] @@ -9904,15 +8635,6 @@ version = "16.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f2/db/de907251b4ff46ae804ad0409809504153b3f30984daf82a1d84a9875830/websockets-16.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:31a52addea25187bde0797a97d6fc3d2f92b6f72a9370792d65a6e84615ac8a8", size = 177340, upload-time = "2026-01-10T09:22:34.539Z" }, - { url = "https://files.pythonhosted.org/packages/f3/fa/abe89019d8d8815c8781e90d697dec52523fb8ebe308bf11664e8de1877e/websockets-16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:417b28978cdccab24f46400586d128366313e8a96312e4b9362a4af504f3bbad", size = 175022, upload-time = "2026-01-10T09:22:36.332Z" }, - { url = "https://files.pythonhosted.org/packages/58/5d/88ea17ed1ded2079358b40d31d48abe90a73c9e5819dbcde1606e991e2ad/websockets-16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af80d74d4edfa3cb9ed973a0a5ba2b2a549371f8a741e0800cb07becdd20f23d", size = 175319, upload-time = "2026-01-10T09:22:37.602Z" }, - { url = "https://files.pythonhosted.org/packages/d2/ae/0ee92b33087a33632f37a635e11e1d99d429d3d323329675a6022312aac2/websockets-16.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:08d7af67b64d29823fed316505a89b86705f2b7981c07848fb5e3ea3020c1abe", size = 184631, upload-time 
= "2026-01-10T09:22:38.789Z" }, - { url = "https://files.pythonhosted.org/packages/c8/c5/27178df583b6c5b31b29f526ba2da5e2f864ecc79c99dae630a85d68c304/websockets-16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7be95cfb0a4dae143eaed2bcba8ac23f4892d8971311f1b06f3c6b78952ee70b", size = 185870, upload-time = "2026-01-10T09:22:39.893Z" }, - { url = "https://files.pythonhosted.org/packages/87/05/536652aa84ddc1c018dbb7e2c4cbcd0db884580bf8e95aece7593fde526f/websockets-16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d6297ce39ce5c2e6feb13c1a996a2ded3b6832155fcfc920265c76f24c7cceb5", size = 185361, upload-time = "2026-01-10T09:22:41.016Z" }, - { url = "https://files.pythonhosted.org/packages/6d/e2/d5332c90da12b1e01f06fb1b85c50cfc489783076547415bf9f0a659ec19/websockets-16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c1b30e4f497b0b354057f3467f56244c603a79c0d1dafce1d16c283c25f6e64", size = 184615, upload-time = "2026-01-10T09:22:42.442Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/d3f9576691cae9253b51555f841bc6600bf0a983a461c79500ace5a5b364/websockets-16.0-cp311-cp311-win32.whl", hash = "sha256:5f451484aeb5cafee1ccf789b1b66f535409d038c56966d6101740c1614b86c6", size = 178246, upload-time = "2026-01-10T09:22:43.654Z" }, - { url = "https://files.pythonhosted.org/packages/54/67/eaff76b3dbaf18dcddabc3b8c1dba50b483761cccff67793897945b37408/websockets-16.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7f0659570eefb578dacde98e24fb60af35350193e4f56e11190787bee77dac", size = 178684, upload-time = "2026-01-10T09:22:44.941Z" }, { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, { url = 
"https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, @@ -9949,11 +8671,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, - { url = "https://files.pythonhosted.org/packages/72/07/c98a68571dcf256e74f1f816b8cc5eae6eb2d3d5cfa44d37f801619d9166/websockets-16.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:349f83cd6c9a415428ee1005cadb5c2c56f4389bc06a9af16103c3bc3dcc8b7d", size = 174947, upload-time = "2026-01-10T09:23:36.166Z" }, - { url = 
"https://files.pythonhosted.org/packages/7e/52/93e166a81e0305b33fe416338be92ae863563fe7bce446b0f687b9df5aea/websockets-16.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:4a1aba3340a8dca8db6eb5a7986157f52eb9e436b74813764241981ca4888f03", size = 175260, upload-time = "2026-01-10T09:23:37.409Z" }, - { url = "https://files.pythonhosted.org/packages/56/0c/2dbf513bafd24889d33de2ff0368190a0e69f37bcfa19009ef819fe4d507/websockets-16.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f4a32d1bd841d4bcbffdcb3d2ce50c09c3909fbead375ab28d0181af89fd04da", size = 176071, upload-time = "2026-01-10T09:23:39.158Z" }, - { url = "https://files.pythonhosted.org/packages/a5/8f/aea9c71cc92bf9b6cc0f7f70df8f0b420636b6c96ef4feee1e16f80f75dd/websockets-16.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0298d07ee155e2e9fda5be8a9042200dd2e3bb0b8a38482156576f863a9d457c", size = 176968, upload-time = "2026-01-10T09:23:41.031Z" }, - { url = "https://files.pythonhosted.org/packages/9a/3f/f70e03f40ffc9a30d817eef7da1be72ee4956ba8d7255c399a01b135902a/websockets-16.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767", size = 178735, upload-time = "2026-01-10T09:23:42.259Z" }, { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, ] @@ -9999,16 +8716,6 @@ version = "1.17.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = 
"2025-08-12T05:53:21.714Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7", size = 53482, upload-time = "2025-08-12T05:51:45.79Z" }, - { url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85", size = 38674, upload-time = "2025-08-12T05:51:34.629Z" }, - { url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f", size = 38959, upload-time = "2025-08-12T05:51:56.074Z" }, - { url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311", size = 82376, upload-time = "2025-08-12T05:52:32.134Z" }, - { url = "https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1", size = 83604, upload-time = "2025-08-12T05:52:11.663Z" }, - { url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5", size = 82782, upload-time = "2025-08-12T05:52:12.626Z" }, - { url = 
"https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2", size = 82076, upload-time = "2025-08-12T05:52:33.168Z" }, - { url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89", size = 36457, upload-time = "2025-08-12T05:53:03.936Z" }, - { url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77", size = 38745, upload-time = "2025-08-12T05:53:02.885Z" }, - { url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a", size = 36806, upload-time = "2025-08-12T05:52:53.368Z" }, { url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" }, { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" }, { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" }, @@ -10061,13 +8768,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/08/d5/25f7b19af3a2cb4000cac4f9e5525a40bec79f4f5d0ac9b517c0544586a0/xattr-1.3.0.tar.gz", hash = "sha256:30439fabd7de0787b27e9a6e1d569c5959854cb322f64ce7380fedbfa5035036", size = 17148, upload-time = "2025-10-13T22:16:47.353Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/64/292426ad5653e72c6e1325bbff22868a20077290d967cebb9c0624ad08b6/xattr-1.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:331a51bf8f20c27822f44054b0d760588462d3ed472d5e52ba135cf0bea510e8", size = 23448, upload-time = "2025-10-13T22:15:59.229Z" }, - { url = "https://files.pythonhosted.org/packages/63/84/6539fbe620da8e5927406e76b9c8abad8953025d5f578d792747c38a8c0e/xattr-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:196360f068b74fa0132a8c6001ce1333f095364b8f43b6fd8cdaf2f18741ef89", size = 18553, upload-time = "2025-10-13T22:16:00.151Z" }, - { url = "https://files.pythonhosted.org/packages/cc/bb/c1c2e24a49f8d13ff878fb85aabc42ea1b2f98ce08d8205b9661d517a9cc/xattr-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:405d2e4911d37f2b9400fa501acd920fe0c97fe2b2ec252cb23df4b59c000811", size = 18848, upload-time = "2025-10-13T22:16:01.046Z" }, - { url = "https://files.pythonhosted.org/packages/02/c2/a60aad150322b217dfe33695d8d9f32bc01e8f300641b6ba4b73f4b3c03f/xattr-1.3.0-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4ae3a66ae1effd40994f64defeeaa97da369406485e60bfb421f2d781be3b75d", size = 38547, upload-time = "2025-10-13T22:16:01.973Z" }, - { url = "https://files.pythonhosted.org/packages/c6/58/2eca142bad4ea0a2be6b58d3122d0acce310c4e53fa7defd168202772178/xattr-1.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:69cd3bfe779f7ba87abe6473fdfa428460cf9e78aeb7e390cfd737b784edf1b5", size = 38753, upload-time = "2025-10-13T22:16:03.244Z" }, - { url = "https://files.pythonhosted.org/packages/2b/50/d032e5254c2c27d36bdb02abdf2735db6768a441f0e3d0f139e0f9f56638/xattr-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c5742ca61761a99ae0c522f90a39d5fb8139280f27b254e3128482296d1df2db", size = 38054, upload-time = "2025-10-13T22:16:04.656Z" }, - { url = "https://files.pythonhosted.org/packages/04/24/458a306439aabe0083ca0a7b14c3e6a800ab9782b5ec0bdcec4ec9f3dc6c/xattr-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a04ada131e9bdfd32db3ab1efa9f852646f4f7c9d6fde0596c3825c67161be3", size = 37562, upload-time = "2025-10-13T22:16:05.97Z" }, { url = "https://files.pythonhosted.org/packages/bf/78/00bdc9290066173e53e1e734d8d8e1a84a6faa9c66aee9df81e4d9aeec1c/xattr-1.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:dd4e63614722d183e81842cb237fd1cc978d43384166f9fe22368bfcb187ebe5", size = 23476, upload-time = "2025-10-13T22:16:06.942Z" }, { url = "https://files.pythonhosted.org/packages/53/16/5243722294eb982514fa7b6b87a29dfb7b29b8e5e1486500c5babaf6e4b3/xattr-1.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:995843ef374af73e3370b0c107319611f3cdcdb6d151d629449efecad36be4c4", size = 18556, upload-time = "2025-10-13T22:16:08.209Z" }, { url = "https://files.pythonhosted.org/packages/d6/5c/d7ab0e547bea885b55f097206459bd612cefb652c5fc1f747130cbc0d42c/xattr-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fa23a25220e29d956cedf75746e3df6cc824cc1553326d6516479967c540e386", size = 18869, upload-time = "2025-10-13T22:16:10.319Z" }, @@ -10104,21 +8804,18 @@ version = "0.0.33.post2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version == '3.13.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", 
"python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version == '3.13.*' and sys_platform == 'emscripten'", - "python_full_version == '3.12.*' and sys_platform == 'emscripten'", "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'emscripten'", "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < '3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version < '3.13' and sys_platform == 'win32'", + "python_full_version < '3.13' and sys_platform == 'emscripten'", + "python_full_version < '3.13' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", ] dependencies = [ { name = "numpy", marker = "sys_platform != 'linux'" }, - { name = "torch", version = "2.9.1", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux'" }, + { name = "torch", marker = "sys_platform != 'linux'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0b/69/403e963d35f1b0c52a1b3127e0bc4e94e7e50ecee8c6684a8abe40e6638e/xformers-0.0.33.post2.tar.gz", hash = "sha256:647ddf26578d2b8643230467ef1f0fbfef0bbe556a546bd27a70d4855d3433e1", size = 14783914, upload-time = "2025-12-04T18:52:42.572Z" } wheels = [ @@ -10132,12 +8829,11 @@ source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'linux'", "python_full_version == '3.13.*' and sys_platform == 
'linux'", - "python_full_version == '3.12.*' and sys_platform == 'linux'", - "python_full_version < '3.12' and sys_platform == 'linux'", + "python_full_version < '3.13' and sys_platform == 'linux'", ] dependencies = [ { name = "numpy", marker = "sys_platform == 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, + { name = "torch", marker = "sys_platform == 'linux'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/de/5a/6e27734bd793adc44d0b8d294e67cfacf4ec590572c1aef51d683fc7a791/xformers-0.0.35.tar.gz", hash = "sha256:f7fc183a58e4bf0e2ae339a18fb1b1d4a37854c0f2545b4f360fef001646ab76", size = 4258182, upload-time = "2026-02-20T20:33:05.417Z" } wheels = [ @@ -10149,17 +8845,15 @@ name = "xgrammar" version = "0.1.29" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", marker = "sys_platform == 'linux'" }, - { name = "pydantic", marker = "sys_platform == 'linux'" }, - { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'linux'" }, - { name = "transformers", marker = "sys_platform == 'linux'" }, - { name = "triton", version = "3.6.0", source = { registry = "https://pypi.org/simple" }, marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "typing-extensions", marker = "sys_platform == 'linux'" }, + { name = "numpy" }, + { name = "pydantic" }, + { name = "torch" }, + { name = "transformers" }, + { name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "typing-extensions" }, ] sdist = { url = "https://files.pythonhosted.org/packages/02/a3/70dbe3ffd331a1e7e1ad5a95690a4086e6c7cdb8089f5c7eda712219ccec/xgrammar-0.1.29.tar.gz", hash = "sha256:cf195afa81b489eebf35d4c6f37f27136d05420739ab4a6f7f065c938d7e4baa", size = 2321317, upload-time = "2025-12-19T08:23:54.53Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/87/0b/b5e5c99ce13a9d378a940cda07c5a08b50cc7efb66936c6ac8fa8232a0d5/xgrammar-0.1.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51bcfd63bd48a0b26209ffd2143a42067518559355ec9e4e574cef2ae74fac7c", size = 34699408, upload-time = "2025-12-19T08:23:16.906Z" }, - { url = "https://files.pythonhosted.org/packages/a3/a0/4ebc1b3f5af79a3f73d0566034758f3fbcd9c64174646314a9a6f7cc1d27/xgrammar-0.1.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e27b50cf8c565845295a8263a4a0790c00a7c1fd783e76222fc0f575654d6f56", size = 34903461, upload-time = "2025-12-19T08:23:19.556Z" }, { url = "https://files.pythonhosted.org/packages/57/94/18793c64bf0368075a34c06e196bf002f1e6ab0aee332268f44e8d356d5a/xgrammar-0.1.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eb370a16b27a683e5f2b9e429ab41440c69977d4a504849ed61831b94cc704c", size = 34705239, upload-time = "2025-12-19T08:23:28.369Z" }, { url = "https://files.pythonhosted.org/packages/3e/da/4c14e3e00be698009b52700f15326a23272b4b00475939b6acc86b151188/xgrammar-0.1.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79e6e4f5cd33be77418cf91efc482f2b3d773d309891224383bc8a4948ad7b07", size = 34906135, upload-time = "2025-12-19T08:23:30.838Z" }, { url = "https://files.pythonhosted.org/packages/e9/c5/e4965c9921e7bb6061f246ae7f8c7b9b1dfc21262248100c2f9b398b361e/xgrammar-0.1.29-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb22aea775971f7d8c4d0e193257ebeb71b68acd9d36af3331ca5fd4d9a46991", size = 34904126, upload-time = "2025-12-19T08:23:38.335Z" }, @@ -10171,21 +8865,6 @@ version = "3.6.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", 
size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a", size = 32844, upload-time = "2025-10-02T14:34:14.037Z" }, - { url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa", size = 30809, upload-time = "2025-10-02T14:34:15.484Z" }, - { url = "https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248", size = 194665, upload-time = "2025-10-02T14:34:16.541Z" }, - { url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62", size = 213550, upload-time = "2025-10-02T14:34:17.878Z" }, - { url = "https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f", size = 212384, upload-time = "2025-10-02T14:34:19.182Z" }, - { url = "https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e", size = 445749, upload-time = "2025-10-02T14:34:20.659Z" }, - { url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8", size = 193880, upload-time = "2025-10-02T14:34:22.431Z" }, - { url = "https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0", size = 210912, upload-time = "2025-10-02T14:34:23.937Z" }, - { url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77", size = 198654, upload-time = "2025-10-02T14:34:25.644Z" }, - { url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c", size = 210867, upload-time = "2025-10-02T14:34:27.203Z" }, - { url = "https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b", size = 414012, upload-time = "2025-10-02T14:34:28.409Z" }, - { url = "https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3", size = 191409, 
upload-time = "2025-10-02T14:34:29.696Z" }, - { url = "https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl", hash = "sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd", size = 30574, upload-time = "2025-10-02T14:34:31.028Z" }, - { url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef", size = 31481, upload-time = "2025-10-02T14:34:32.062Z" }, - { url = "https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7", size = 27861, upload-time = "2025-10-02T14:34:33.555Z" }, { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, { url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, @@ -10261,11 +8940,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" }, { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" }, { url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, - { url = "https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0", size = 30662, upload-time = "2025-10-02T14:37:01.743Z" }, - { url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296", size = 41056, upload-time = "2025-10-02T14:37:02.879Z" }, - { url = "https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13", size = 36251, upload-time = "2025-10-02T14:37:04.44Z" }, - { url = 
"https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd", size = 32481, upload-time = "2025-10-02T14:37:05.869Z" }, - { url = "https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d", size = 31565, upload-time = "2025-10-02T14:37:06.966Z" }, ] [[package]] @@ -10279,22 +8953,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511", size = 141607, upload-time = "2025-10-06T14:09:16.298Z" }, - { url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6", size = 94027, upload-time = "2025-10-06T14:09:17.786Z" }, - { url = "https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028", size = 94963, upload-time = "2025-10-06T14:09:19.662Z" }, - { url = 
"https://files.pythonhosted.org/packages/68/fe/2c1f674960c376e29cb0bec1249b117d11738db92a6ccc4a530b972648db/yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d", size = 368406, upload-time = "2025-10-06T14:09:21.402Z" }, - { url = "https://files.pythonhosted.org/packages/95/26/812a540e1c3c6418fec60e9bbd38e871eaba9545e94fa5eff8f4a8e28e1e/yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503", size = 336581, upload-time = "2025-10-06T14:09:22.98Z" }, - { url = "https://files.pythonhosted.org/packages/0b/f5/5777b19e26fdf98563985e481f8be3d8a39f8734147a6ebf459d0dab5a6b/yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65", size = 388924, upload-time = "2025-10-06T14:09:24.655Z" }, - { url = "https://files.pythonhosted.org/packages/86/08/24bd2477bd59c0bbd994fe1d93b126e0472e4e3df5a96a277b0a55309e89/yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e", size = 392890, upload-time = "2025-10-06T14:09:26.617Z" }, - { url = "https://files.pythonhosted.org/packages/46/00/71b90ed48e895667ecfb1eaab27c1523ee2fa217433ed77a73b13205ca4b/yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d", size = 365819, upload-time = "2025-10-06T14:09:28.544Z" }, - { url = "https://files.pythonhosted.org/packages/30/2d/f715501cae832651d3282387c6a9236cd26bd00d0ff1e404b3dc52447884/yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7", size = 363601, upload-time = "2025-10-06T14:09:30.568Z" }, - { url = "https://files.pythonhosted.org/packages/f8/f9/a678c992d78e394e7126ee0b0e4e71bd2775e4334d00a9278c06a6cce96a/yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967", size = 358072, upload-time = "2025-10-06T14:09:32.528Z" }, - { url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed", size = 385311, upload-time = "2025-10-06T14:09:34.634Z" }, - { url = "https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6", size = 381094, upload-time = "2025-10-06T14:09:36.268Z" }, - { url = "https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e", size = 370944, upload-time = "2025-10-06T14:09:37.872Z" }, - { url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca", size = 81804, upload-time = "2025-10-06T14:09:39.359Z" }, - { url = "https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b", size = 86858, upload-time = "2025-10-06T14:09:41.068Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376", size = 81637, upload-time = "2025-10-06T14:09:42.712Z" }, { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, @@ -10393,23 +9051,6 @@ version = "0.25.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254, upload-time = "2025-09-14T22:16:26.137Z" }, - { url = 
"https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559, upload-time = "2025-09-14T22:16:27.973Z" }, - { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020, upload-time = "2025-09-14T22:16:29.523Z" }, - { url = "https://files.pythonhosted.org/packages/3b/13/2b7ed68bd85e69a2069bcc72141d378f22cae5a0f3b353a2c8f50ef30c1b/zstandard-0.25.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a", size = 5058126, upload-time = "2025-09-14T22:16:31.811Z" }, - { url = "https://files.pythonhosted.org/packages/c9/dd/fdaf0674f4b10d92cb120ccff58bbb6626bf8368f00ebfd2a41ba4a0dc99/zstandard-0.25.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc", size = 5405390, upload-time = "2025-09-14T22:16:33.486Z" }, - { url = "https://files.pythonhosted.org/packages/0f/67/354d1555575bc2490435f90d67ca4dd65238ff2f119f30f72d5cde09c2ad/zstandard-0.25.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6", size = 5452914, upload-time = "2025-09-14T22:16:35.277Z" }, - { url = "https://files.pythonhosted.org/packages/bb/1f/e9cfd801a3f9190bf3e759c422bbfd2247db9d7f3d54a56ecde70137791a/zstandard-0.25.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072", size = 5559635, upload-time = 
"2025-09-14T22:16:37.141Z" }, - { url = "https://files.pythonhosted.org/packages/21/88/5ba550f797ca953a52d708c8e4f380959e7e3280af029e38fbf47b55916e/zstandard-0.25.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277", size = 5048277, upload-time = "2025-09-14T22:16:38.807Z" }, - { url = "https://files.pythonhosted.org/packages/46/c0/ca3e533b4fa03112facbe7fbe7779cb1ebec215688e5df576fe5429172e0/zstandard-0.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313", size = 5574377, upload-time = "2025-09-14T22:16:40.523Z" }, - { url = "https://files.pythonhosted.org/packages/12/9b/3fb626390113f272abd0799fd677ea33d5fc3ec185e62e6be534493c4b60/zstandard-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097", size = 4961493, upload-time = "2025-09-14T22:16:43.3Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d3/23094a6b6a4b1343b27ae68249daa17ae0651fcfec9ed4de09d14b940285/zstandard-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778", size = 5269018, upload-time = "2025-09-14T22:16:45.292Z" }, - { url = "https://files.pythonhosted.org/packages/8c/a7/bb5a0c1c0f3f4b5e9d5b55198e39de91e04ba7c205cc46fcb0f95f0383c1/zstandard-0.25.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065", size = 5443672, upload-time = "2025-09-14T22:16:47.076Z" }, - { url = "https://files.pythonhosted.org/packages/27/22/503347aa08d073993f25109c36c8d9f029c7d5949198050962cb568dfa5e/zstandard-0.25.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa", size = 5822753, upload-time = "2025-09-14T22:16:49.316Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/be/94267dc6ee64f0f8ba2b2ae7c7a2df934a816baaa7291db9e1aa77394c3c/zstandard-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7", size = 5366047, upload-time = "2025-09-14T22:16:51.328Z" }, - { url = "https://files.pythonhosted.org/packages/7b/a3/732893eab0a3a7aecff8b99052fecf9f605cf0fb5fb6d0290e36beee47a4/zstandard-0.25.0-cp311-cp311-win32.whl", hash = "sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4", size = 436484, upload-time = "2025-09-14T22:16:55.005Z" }, - { url = "https://files.pythonhosted.org/packages/43/a3/c6155f5c1cce691cb80dfd38627046e50af3ee9ddc5d0b45b9b063bfb8c9/zstandard-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2", size = 506183, upload-time = "2025-09-14T22:16:52.753Z" }, - { url = "https://files.pythonhosted.org/packages/8c/3e/8945ab86a0820cc0e0cdbf38086a92868a9172020fdab8a03ac19662b0e5/zstandard-0.25.0-cp311-cp311-win_arm64.whl", hash = "sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137", size = 462533, upload-time = "2025-09-14T22:16:53.878Z" }, { url = "https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" }, { url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" }, { url = 
"https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" }, From 45e32f53cb964e55b134f1a7aad1ef936978c532 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Fri, 20 Mar 2026 22:38:41 +0000 Subject: [PATCH 22/28] Del held packed tensors so dir can be removed. Plus small typing changes. --- src/art/megatron/cute_grouped_lora_quack.py | 9 ++++++++- src/art/megatron/train.py | 4 +++- tests/integration/test_lora_quack_cutover.py | 19 ++++++++++++++++--- 3 files changed, 27 insertions(+), 5 deletions(-) diff --git a/src/art/megatron/cute_grouped_lora_quack.py b/src/art/megatron/cute_grouped_lora_quack.py index 70f7da89..c4ad6528 100644 --- a/src/art/megatron/cute_grouped_lora_quack.py +++ b/src/art/megatron/cute_grouped_lora_quack.py @@ -1,5 +1,7 @@ from __future__ import annotations +from typing import Any, cast + from quack.gemm import gemm as quack_gemm import torch @@ -227,10 +229,15 @@ def forward( return out @staticmethod - def backward(ctx, grad_out: torch.Tensor): + def backward(ctx, *grad_outputs: Any): + if len(grad_outputs) != 1: + raise RuntimeError( + f"Expected exactly one gradient output, got {len(grad_outputs)}" + ) x, a_t_eff, b_t_eff, tmp, expert_offsets = ctx.saved_tensors effective_rank = ctx.effective_rank actual_rank = ctx.actual_rank + grad_out = cast(torch.Tensor, grad_outputs[0]) grad_out_c = grad_out.contiguous() grad_tmp = _varlen_quack_gemm( diff --git a/src/art/megatron/train.py b/src/art/megatron/train.py index 312f108a..13396f32 100644 --- a/src/art/megatron/train.py +++ b/src/art/megatron/train.py @@ -611,7 +611,7 @@ def _run_service_loop(runtime: TrainingRuntime) -> None: runtime.rank, "Loading packed tensors from", 
job.disk_packed_tensors["dir"] ) packed_tensors = packed_tensors_from_dir(**job.disk_packed_tensors) - template = select_indexed_inputs(packed_tensors, 0) + template = _clone_packed_tensors(select_indexed_inputs(packed_tensors, 0)) zero_template = _zero_contribution_inputs(template) num_sequences = job.disk_packed_tensors["num_sequences"] global_grad_accumulation_sequences = config.grad_accumulation_sequences @@ -685,6 +685,8 @@ def _run_service_loop(runtime: TrainingRuntime) -> None: offload_to_cpu(runtime.model, runtime.optimizer, runtime.rank, offload_state) del packed_tensors + del template + del zero_template del adapter_model if "micro_inputs" in locals(): del micro_inputs diff --git a/tests/integration/test_lora_quack_cutover.py b/tests/integration/test_lora_quack_cutover.py index 2e96b4b7..ba496a2f 100644 --- a/tests/integration/test_lora_quack_cutover.py +++ b/tests/integration/test_lora_quack_cutover.py @@ -8,6 +8,12 @@ from art.megatron.lora import LoRA +def _require_grad(grad: torch.Tensor | None, *, name: str) -> torch.Tensor: + if grad is None: + raise AssertionError(f"{name}.grad unexpectedly None") + return grad + + def _eager_grouped_lora( x: torch.Tensor, a_t: torch.Tensor, @@ -77,7 +83,14 @@ def test_lora_grouped_forward_cutover_matches_reference(rank: int) -> None: got_loss = (got_out.float() * loss_grad.float()).sum() / max(1, loss_grad.numel()) got_loss.backward() + x_ref_grad = _require_grad(x_ref.grad, name="x_ref") + x_test_grad = _require_grad(x_test.grad, name="x_test") + a_ref_grad = _require_grad(a_ref.grad, name="a_ref") + a_test_grad = _require_grad(lora.A_T.grad, name="lora.A_T") + b_ref_grad = _require_grad(b_ref.grad, name="b_ref") + b_test_grad = _require_grad(lora.B_T.grad, name="lora.B_T") + assert torch.allclose(ref_out, got_out.detach(), atol=5e-2, rtol=5e-2) - assert torch.allclose(x_ref.grad, x_test.grad, atol=5e-2, rtol=5e-2) - assert torch.allclose(a_ref.grad, lora.A_T.grad, atol=5e-2, rtol=5e-2) - assert 
torch.allclose(b_ref.grad, lora.B_T.grad, atol=5e-2, rtol=5e-2) + assert torch.allclose(x_ref_grad, x_test_grad, atol=5e-2, rtol=5e-2) + assert torch.allclose(a_ref_grad, a_test_grad, atol=5e-2, rtol=5e-2) + assert torch.allclose(b_ref_grad, b_test_grad, atol=5e-2, rtol=5e-2) From a77bd7c93102f8c443318cbe3e01a92781b54a64 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Sat, 21 Mar 2026 04:28:25 +0000 Subject: [PATCH 23/28] Fuse LoRA scale into QuACK grouped GEMM --- src/art/megatron/cute_grouped_lora_quack.py | 14 +++++++++++++- src/art/megatron/lora.py | 7 +++++-- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/src/art/megatron/cute_grouped_lora_quack.py b/src/art/megatron/cute_grouped_lora_quack.py index c4ad6528..d75ae53a 100644 --- a/src/art/megatron/cute_grouped_lora_quack.py +++ b/src/art/megatron/cute_grouped_lora_quack.py @@ -132,6 +132,7 @@ def _varlen_quack_gemm( expert_offsets: torch.Tensor, tile_m: int, tile_n: int, + alpha: float = 1.0, ) -> torch.Tensor: out = torch.empty( a.shape[0], @@ -150,6 +151,7 @@ def _varlen_quack_gemm( cluster_M=1, cluster_N=1, persistent=True, + alpha=alpha, cu_seqlens_m=expert_offsets, ) return out @@ -165,6 +167,7 @@ def _varlen_quack_gemm_k( expert_offsets: torch.Tensor, tile_m: int, tile_n: int, + alpha: float = 1.0, ) -> torch.Tensor: out = torch.empty( batch_count, @@ -184,6 +187,7 @@ def _varlen_quack_gemm_k( cluster_M=1, cluster_N=1, persistent=True, + alpha=alpha, cu_seqlens_k=expert_offsets, ) return out @@ -197,6 +201,7 @@ def forward( a_t: torch.Tensor, b_t: torch.Tensor, counts: torch.Tensor, + scale: float, ) -> torch.Tensor: expert_offsets = _build_expert_offsets(counts, device=x.device) actual_rank = a_t.shape[-1] @@ -221,11 +226,13 @@ def forward( expert_offsets=expert_offsets, tile_m=64, tile_n=_matmul_tile_n(b_t.shape[-1]), + alpha=scale, ) ctx.save_for_backward(x, a_t_eff, b_t_eff, tmp, expert_offsets) ctx.actual_rank = actual_rank ctx.effective_rank = effective_rank + ctx.scale = scale 
return out @staticmethod @@ -237,6 +244,7 @@ def backward(ctx, *grad_outputs: Any): x, a_t_eff, b_t_eff, tmp, expert_offsets = ctx.saved_tensors effective_rank = ctx.effective_rank actual_rank = ctx.actual_rank + scale = ctx.scale grad_out = cast(torch.Tensor, grad_outputs[0]) grad_out_c = grad_out.contiguous() @@ -247,6 +255,7 @@ def backward(ctx, *grad_outputs: Any): expert_offsets=expert_offsets, tile_m=64, tile_n=_proj_tile_n(effective_rank), + alpha=scale, ) grad_x = _varlen_quack_gemm( grad_tmp, @@ -275,12 +284,14 @@ def backward(ctx, *grad_outputs: Any): expert_offsets=expert_offsets, tile_m=_grad_b_tile_m(effective_rank), tile_n=_matmul_tile_n(b_t_eff.shape[-1]), + alpha=scale, ) return ( grad_x, grad_a_eff[:, :, :actual_rank].contiguous(), grad_b_eff[:, :actual_rank, :].contiguous(), None, + None, ) @@ -289,6 +300,7 @@ def quack_grouped_lora( a_t: torch.Tensor, b_t: torch.Tensor, counts: list[int] | torch.Tensor, + scale: float = 1.0, ) -> torch.Tensor: """Run grouped LoRA with the QuACK varlen GEMM backend. @@ -302,4 +314,4 @@ def quack_grouped_lora( synchronization in the hot path. """ counts_tensor = _validate_inputs(x, a_t, b_t, counts) - return _QuackGroupedLoraFn.apply(x, a_t, b_t, counts_tensor) + return _QuackGroupedLoraFn.apply(x, a_t, b_t, counts_tensor, scale) diff --git a/src/art/megatron/lora.py b/src/art/megatron/lora.py index 0842bee8..916ee309 100644 --- a/src/art/megatron/lora.py +++ b/src/art/megatron/lora.py @@ -367,8 +367,11 @@ def forward( # If no tokens routed locally, return zeros. 
if isinstance(bsz, torch.Tensor) and int(torch.count_nonzero(bsz)) == 0: return x.new_zeros((x.shape[0], self.B_T.shape[-1])) - return quack_grouped_lora(x, self.A_T, self.B_T, bsz) * self.scale - return ((x @ self.A_T) @ self.B_T) * self.scale + return quack_grouped_lora(x, self.A_T, self.B_T, bsz, scale=self.scale) + out = (x @ self.A_T) @ self.B_T + if self.scale == 1.0: + return out + return out * self.scale class SelfAttentionLinearProjLoRA(torch.nn.Module): From 8b83fb24b4e2d6cc97aba23f2168d41899f98680 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Sat, 21 Mar 2026 04:58:25 +0000 Subject: [PATCH 24/28] Avoid grad_out copy in QuACK LoRA backward --- src/art/megatron/cute_grouped_lora_quack.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/art/megatron/cute_grouped_lora_quack.py b/src/art/megatron/cute_grouped_lora_quack.py index d75ae53a..49b5bae5 100644 --- a/src/art/megatron/cute_grouped_lora_quack.py +++ b/src/art/megatron/cute_grouped_lora_quack.py @@ -246,10 +246,12 @@ def backward(ctx, *grad_outputs: Any): actual_rank = ctx.actual_rank scale = ctx.scale grad_out = cast(torch.Tensor, grad_outputs[0]) - grad_out_c = grad_out.contiguous() + assert grad_out.stride(-1) == 1, ( + "QuACK grouped LoRA backward requires grad_out stride(-1) == 1" + ) grad_tmp = _varlen_quack_gemm( - grad_out_c, + grad_out, b_t_eff.contiguous(), out_features=effective_rank, expert_offsets=expert_offsets, @@ -277,7 +279,7 @@ def backward(ctx, *grad_outputs: Any): ) grad_b_eff = _varlen_quack_gemm_k( tmp.transpose(0, 1), - grad_out_c.transpose(0, 1), + grad_out.transpose(0, 1), batch_count=b_t_eff.shape[0], out_shape_m=effective_rank, out_shape_n=b_t_eff.shape[-1], From f39a5b2b57b96ad745bfb6827692483a40dbc904 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Mon, 23 Mar 2026 17:48:56 +0000 Subject: [PATCH 25/28] Fuse MoE FC1 gate and up LoRA paths --- src/art/megatron/cute_grouped_lora_quack.py | 278 ++++++++++++++++++- src/art/megatron/lora.py | 50 +++- 
tests/integration/test_lora_quack_cutover.py | 117 ++++++++ 3 files changed, 435 insertions(+), 10 deletions(-) diff --git a/src/art/megatron/cute_grouped_lora_quack.py b/src/art/megatron/cute_grouped_lora_quack.py index 49b5bae5..dde72ae0 100644 --- a/src/art/megatron/cute_grouped_lora_quack.py +++ b/src/art/megatron/cute_grouped_lora_quack.py @@ -86,6 +86,45 @@ def _validate_inputs( return counts +def _validate_dual_inputs( + x: torch.Tensor, + gate_a_t: torch.Tensor, + gate_b_t: torch.Tensor, + up_a_t: torch.Tensor, + up_b_t: torch.Tensor, + tokens_per_expert: list[int] | torch.Tensor, +) -> torch.Tensor: + counts = _validate_inputs(x, gate_a_t, gate_b_t, tokens_per_expert) + if up_a_t.ndim != 3: + raise ValueError(f"up_a_t must be 3D, got shape {tuple(up_a_t.shape)}") + if up_b_t.ndim != 3: + raise ValueError(f"up_b_t must be 3D, got shape {tuple(up_b_t.shape)}") + up_rank = up_a_t.shape[-1] + _validate_supported_rank(up_rank) + if up_b_t.shape[-2] != up_rank: + raise ValueError( + f"Expected up_b_t rank dim {up_rank}, got shape {tuple(up_b_t.shape)}" + ) + if up_a_t.shape[0] != gate_a_t.shape[0] or up_b_t.shape[0] != gate_b_t.shape[0]: + raise ValueError( + "Gate and up tensors must have the same number of experts, " + f"got gate={gate_a_t.shape[0]} up={up_a_t.shape[0]}" + ) + if up_a_t.shape[1] != x.shape[1]: + raise ValueError( + f"up_a_t input dim must match x.shape[1], got {up_a_t.shape[1]} and {x.shape[1]}" + ) + if up_a_t.device != x.device or up_b_t.device != x.device: + raise ValueError( + "x, up_a_t, and up_b_t must be CUDA tensors on the same device" + ) + if up_a_t.dtype != x.dtype or up_b_t.dtype != x.dtype: + raise ValueError( + f"Dtype mismatch: x={x.dtype}, up_a_t={up_a_t.dtype}, up_b_t={up_b_t.dtype}" + ) + return counts + + def _effective_rank(rank: int) -> int: if rank in _PADDED_LOW_RANKS: return _PADDED_LOW_RANK_TARGET @@ -133,13 +172,24 @@ def _varlen_quack_gemm( tile_m: int, tile_n: int, alpha: float = 1.0, + out: torch.Tensor | None = 
None, ) -> torch.Tensor: - out = torch.empty( - a.shape[0], - out_features, - device=a.device, - dtype=a.dtype, - ) + if out is None: + out = torch.empty( + a.shape[0], + out_features, + device=a.device, + dtype=a.dtype, + ) + else: + if out.shape != (a.shape[0], out_features): + raise ValueError( + f"Expected output shape {(a.shape[0], out_features)}, got {tuple(out.shape)}" + ) + if out.device != a.device or out.dtype != a.dtype: + raise ValueError( + f"Output tensor must match input device/dtype, got {out.device}/{out.dtype}" + ) quack_gemm( a, b, @@ -297,6 +347,197 @@ def backward(ctx, *grad_outputs: Any): ) +class _QuackGroupedLoraDualFn(torch.autograd.Function): + @staticmethod + def forward( + ctx, + x: torch.Tensor, + gate_a_t: torch.Tensor, + gate_b_t: torch.Tensor, + up_a_t: torch.Tensor, + up_b_t: torch.Tensor, + counts: torch.Tensor, + scale_gate: float, + scale_up: float, + ) -> torch.Tensor: + expert_offsets = _build_expert_offsets(counts, device=x.device) + gate_actual_rank = gate_a_t.shape[-1] + up_actual_rank = up_a_t.shape[-1] + gate_effective_rank = _effective_rank(gate_actual_rank) + up_effective_rank = _effective_rank(up_actual_rank) + + gate_a_t_eff = _pad_a_t(gate_a_t, gate_effective_rank) + up_a_t_eff = _pad_a_t(up_a_t, up_effective_rank) + gate_b_t_eff = _pad_b_t(gate_b_t, gate_effective_rank) + up_b_t_eff = _pad_b_t(up_b_t, up_effective_rank) + + a_cat_eff = torch.cat((gate_a_t_eff, up_a_t_eff), dim=-1).contiguous() + proj_weights = a_cat_eff.permute(0, 2, 1).contiguous() + gate_apply_weights = gate_b_t_eff.permute(0, 2, 1).contiguous() + up_apply_weights = up_b_t_eff.permute(0, 2, 1).contiguous() + + total_effective_rank = gate_effective_rank + up_effective_rank + tmp_cat = _varlen_quack_gemm( + x.contiguous(), + proj_weights, + out_features=total_effective_rank, + expert_offsets=expert_offsets, + tile_m=64, + tile_n=_proj_tile_n(total_effective_rank), + ) + tmp_gate, tmp_up = torch.split( + tmp_cat, [gate_effective_rank, 
up_effective_rank], dim=1 + ) + + gate_out_features = gate_b_t.shape[-1] + up_out_features = up_b_t.shape[-1] + out = torch.empty( + x.shape[0], + gate_out_features + up_out_features, + device=x.device, + dtype=x.dtype, + ) + _varlen_quack_gemm( + tmp_gate, + gate_apply_weights, + out_features=gate_out_features, + expert_offsets=expert_offsets, + tile_m=64, + tile_n=_matmul_tile_n(gate_out_features), + alpha=scale_gate, + out=out[:, :gate_out_features], + ) + _varlen_quack_gemm( + tmp_up, + up_apply_weights, + out_features=up_out_features, + expert_offsets=expert_offsets, + tile_m=64, + tile_n=_matmul_tile_n(up_out_features), + alpha=scale_up, + out=out[:, gate_out_features:], + ) + + ctx.save_for_backward( + x, + a_cat_eff, + gate_b_t_eff, + up_b_t_eff, + tmp_cat, + expert_offsets, + ) + ctx.gate_actual_rank = gate_actual_rank + ctx.up_actual_rank = up_actual_rank + ctx.gate_effective_rank = gate_effective_rank + ctx.up_effective_rank = up_effective_rank + ctx.gate_out_features = gate_out_features + ctx.scale_gate = scale_gate + ctx.scale_up = scale_up + return out + + @staticmethod + def backward(ctx, *grad_outputs: Any): + if len(grad_outputs) != 1: + raise RuntimeError( + f"Expected exactly one gradient output, got {len(grad_outputs)}" + ) + x, a_cat_eff, gate_b_t_eff, up_b_t_eff, tmp_cat, expert_offsets = ( + ctx.saved_tensors + ) + gate_actual_rank = ctx.gate_actual_rank + up_actual_rank = ctx.up_actual_rank + gate_effective_rank = ctx.gate_effective_rank + up_effective_rank = ctx.up_effective_rank + gate_out_features = ctx.gate_out_features + scale_gate = ctx.scale_gate + scale_up = ctx.scale_up + + grad_out = cast(torch.Tensor, grad_outputs[0]) + assert grad_out.stride(-1) == 1, ( + "QuACK grouped FC1 dual LoRA backward requires grad_out stride(-1) == 1" + ) + grad_gate = grad_out[:, :gate_out_features] + grad_up = grad_out[:, gate_out_features:] + tmp_gate, tmp_up = torch.split( + tmp_cat, [gate_effective_rank, up_effective_rank], dim=1 + ) + + 
grad_tmp_gate = _varlen_quack_gemm( + grad_gate, + gate_b_t_eff.contiguous(), + out_features=gate_effective_rank, + expert_offsets=expert_offsets, + tile_m=64, + tile_n=_proj_tile_n(gate_effective_rank), + alpha=scale_gate, + ) + grad_tmp_up = _varlen_quack_gemm( + grad_up, + up_b_t_eff.contiguous(), + out_features=up_effective_rank, + expert_offsets=expert_offsets, + tile_m=64, + tile_n=_proj_tile_n(up_effective_rank), + alpha=scale_up, + ) + grad_tmp_cat = torch.cat((grad_tmp_gate, grad_tmp_up), dim=1).contiguous() + + total_effective_rank = gate_effective_rank + up_effective_rank + grad_x = _varlen_quack_gemm( + grad_tmp_cat, + a_cat_eff.contiguous(), + out_features=x.shape[-1], + expert_offsets=expert_offsets, + tile_m=64, + tile_n=_matmul_tile_n(x.shape[-1]), + ) + grad_a_cat_eff = _varlen_quack_gemm_k( + x.transpose(0, 1), + grad_tmp_cat.transpose(0, 1), + batch_count=a_cat_eff.shape[0], + out_shape_m=a_cat_eff.shape[1], + out_shape_n=total_effective_rank, + expert_offsets=expert_offsets, + tile_m=_grad_a_tile_m(total_effective_rank), + tile_n=_proj_tile_n(total_effective_rank), + ) + grad_b_gate_eff = _varlen_quack_gemm_k( + tmp_gate.transpose(0, 1), + grad_gate.transpose(0, 1), + batch_count=gate_b_t_eff.shape[0], + out_shape_m=gate_effective_rank, + out_shape_n=gate_b_t_eff.shape[-1], + expert_offsets=expert_offsets, + tile_m=_grad_b_tile_m(gate_effective_rank), + tile_n=_matmul_tile_n(gate_b_t_eff.shape[-1]), + alpha=scale_gate, + ) + grad_b_up_eff = _varlen_quack_gemm_k( + tmp_up.transpose(0, 1), + grad_up.transpose(0, 1), + batch_count=up_b_t_eff.shape[0], + out_shape_m=up_effective_rank, + out_shape_n=up_b_t_eff.shape[-1], + expert_offsets=expert_offsets, + tile_m=_grad_b_tile_m(up_effective_rank), + tile_n=_matmul_tile_n(up_b_t_eff.shape[-1]), + alpha=scale_up, + ) + grad_a_gate_eff, grad_a_up_eff = torch.split( + grad_a_cat_eff, [gate_effective_rank, up_effective_rank], dim=2 + ) + return ( + grad_x, + grad_a_gate_eff[:, :, 
:gate_actual_rank].contiguous(), + grad_b_gate_eff[:, :gate_actual_rank, :].contiguous(), + grad_a_up_eff[:, :, :up_actual_rank].contiguous(), + grad_b_up_eff[:, :up_actual_rank, :].contiguous(), + None, + None, + None, + ) + + def quack_grouped_lora( x: torch.Tensor, a_t: torch.Tensor, @@ -317,3 +558,28 @@ def quack_grouped_lora( """ counts_tensor = _validate_inputs(x, a_t, b_t, counts) return _QuackGroupedLoraFn.apply(x, a_t, b_t, counts_tensor, scale) + + +def quack_grouped_lora_dual( + x: torch.Tensor, + gate_a_t: torch.Tensor, + gate_b_t: torch.Tensor, + up_a_t: torch.Tensor, + up_b_t: torch.Tensor, + counts: list[int] | torch.Tensor, + *, + scale_gate: float = 1.0, + scale_up: float = 1.0, +) -> torch.Tensor: + """Run grouped FC1 gate/up LoRA with a shared QuACK projection path.""" + counts_tensor = _validate_dual_inputs(x, gate_a_t, gate_b_t, up_a_t, up_b_t, counts) + return _QuackGroupedLoraDualFn.apply( + x, + gate_a_t, + gate_b_t, + up_a_t, + up_b_t, + counts_tensor, + scale_gate, + scale_up, + ) diff --git a/src/art/megatron/lora.py b/src/art/megatron/lora.py index 916ee309..5d24377d 100644 --- a/src/art/megatron/lora.py +++ b/src/art/megatron/lora.py @@ -20,7 +20,7 @@ from pydantic import BaseModel, ConfigDict import torch -from .cute_grouped_lora_quack import quack_grouped_lora +from .cute_grouped_lora_quack import quack_grouped_lora, quack_grouped_lora_dual ShardDomain = Literal["tp", "expert_tp"] GradSyncDomain = Literal["tp_default", "expert_tp"] @@ -572,6 +572,34 @@ def forward(self, x: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor | None]: return linear_output + adapter_output, bias +class FusedExpertsFC1LoRA(torch.nn.Module): + def __init__(self, adapter_model_prefix: str) -> None: + super().__init__() + self.adapter_model_prefix = f"{adapter_model_prefix}.fused_fc1" + + def forward( + self, + x: torch.Tensor, + tokens_per_expert: list[int] | torch.Tensor, + gate_a_t: torch.Tensor, + gate_b_t: torch.Tensor, + up_a_t: torch.Tensor, + up_b_t: 
torch.Tensor, + scale_gate: float, + scale_up: float, + ) -> torch.Tensor: + return quack_grouped_lora_dual( + x, + gate_a_t, + gate_b_t, + up_a_t, + up_b_t, + tokens_per_expert, + scale_gate=scale_gate, + scale_up=scale_up, + ) + + class MLPExpertsLinearFC1LoRA(torch.nn.Module): def __init__( self, @@ -598,6 +626,7 @@ def __init__( alpha=alpha, num_local_experts=num_local_experts, ) + self.fused_lora = FusedExpertsFC1LoRA(adapter_model_prefix=adapter_model_prefix) @staticmethod def _build_fc1_lora( @@ -644,9 +673,22 @@ def forward( self, x: torch.Tensor, tokens_per_expert: list[int] | torch.Tensor ) -> tuple[torch.Tensor, torch.Tensor | None]: base_out, bias_out = self.linear_fc1(x, tokens_per_expert) - gate_out = self.gate_lora(x, tokens_per_expert=tokens_per_expert) - up_out = self.up_lora(x, tokens_per_expert=tokens_per_expert) - adapter_out = torch.cat([gate_out, up_out], dim=1) + counts = tokens_per_expert + if isinstance(counts, list): + counts = torch.tensor(counts, dtype=torch.int64, device="cpu") + if isinstance(counts, torch.Tensor) and int(torch.count_nonzero(counts)) == 0: + adapter_out = x.new_zeros((x.shape[0], self.linear_fc1.out_features)) + else: + adapter_out = self.fused_lora( + x, + counts, + self.gate_lora.A_T, + self.gate_lora.B_T, + self.up_lora.A_T, + self.up_lora.B_T, + self.gate_lora.scale, + self.up_lora.scale, + ) return base_out + adapter_out, bias_out diff --git a/tests/integration/test_lora_quack_cutover.py b/tests/integration/test_lora_quack_cutover.py index ba496a2f..380f146d 100644 --- a/tests/integration/test_lora_quack_cutover.py +++ b/tests/integration/test_lora_quack_cutover.py @@ -5,6 +5,7 @@ pytest.importorskip("quack") +from art.megatron.cute_grouped_lora_quack import quack_grouped_lora_dual from art.megatron.lora import LoRA @@ -37,6 +38,34 @@ def _eager_grouped_lora( return torch.cat(outputs, dim=0) * scale +def _eager_grouped_lora_dual( + x: torch.Tensor, + gate_a_t: torch.Tensor, + gate_b_t: torch.Tensor, + up_a_t: 
torch.Tensor, + up_b_t: torch.Tensor, + counts: torch.Tensor, + *, + scale_gate: float, + scale_up: float, +) -> torch.Tensor: + outputs: list[torch.Tensor] = [] + start = 0 + for expert_idx, token_count in enumerate(counts.tolist()): + if token_count == 0: + continue + stop = start + int(token_count) + gate_out = x[start:stop] @ gate_a_t[expert_idx] @ gate_b_t[expert_idx] + up_out = x[start:stop] @ up_a_t[expert_idx] @ up_b_t[expert_idx] + outputs.append(torch.cat((gate_out * scale_gate, up_out * scale_up), dim=1)) + start = stop + if start != x.shape[0]: + raise RuntimeError( + f"Grouped split mismatch: consumed {start} rows for shape {tuple(x.shape)}" + ) + return torch.cat(outputs, dim=0) + + @pytest.mark.parametrize("rank", [1, 4, 16]) def test_lora_grouped_forward_cutover_matches_reference(rank: int) -> None: if not torch.cuda.is_available(): @@ -94,3 +123,91 @@ def test_lora_grouped_forward_cutover_matches_reference(rank: int) -> None: assert torch.allclose(x_ref_grad, x_test_grad, atol=5e-2, rtol=5e-2) assert torch.allclose(a_ref_grad, a_test_grad, atol=5e-2, rtol=5e-2) assert torch.allclose(b_ref_grad, b_test_grad, atol=5e-2, rtol=5e-2) + + +@pytest.mark.parametrize("rank", [1, 4, 16]) +def test_lora_grouped_dual_forward_cutover_matches_reference(rank: int) -> None: + if not torch.cuda.is_available(): + pytest.skip("CUDA is required for the LoRA QuACK cutover test.") + + device = torch.device("cuda:0") + torch.manual_seed(20260324 + rank) + + counts = torch.tensor([32, 0, 16, 24], dtype=torch.int64) + total_tokens = int(counts.sum().item()) + x = torch.randn(total_tokens, 64, device=device, dtype=torch.bfloat16) * 0.05 + gate_a_t = torch.randn(4, 64, rank, device=device, dtype=torch.bfloat16) * 0.05 + gate_b_t = torch.randn(4, rank, 64, device=device, dtype=torch.bfloat16) * 0.05 + up_a_t = torch.randn(4, 64, rank, device=device, dtype=torch.bfloat16) * 0.05 + up_b_t = torch.randn(4, rank, 64, device=device, dtype=torch.bfloat16) * 0.05 + loss_grad = 
torch.randn(total_tokens, 128, device=device, dtype=torch.bfloat16) + scale_gate = 2.0 + scale_up = 3.0 + + x_ref = x.detach().clone().requires_grad_(True) + gate_a_ref = gate_a_t.detach().clone().requires_grad_(True) + gate_b_ref = gate_b_t.detach().clone().requires_grad_(True) + up_a_ref = up_a_t.detach().clone().requires_grad_(True) + up_b_ref = up_b_t.detach().clone().requires_grad_(True) + ref_out = _eager_grouped_lora_dual( + x_ref, + gate_a_ref, + gate_b_ref, + up_a_ref, + up_b_ref, + counts, + scale_gate=scale_gate, + scale_up=scale_up, + ) + ref_loss = (ref_out.float() * loss_grad.float()).sum() / max(1, loss_grad.numel()) + ref_loss.backward() + + x_test = x.detach().clone().requires_grad_(True) + gate_a_test = gate_a_t.detach().clone().requires_grad_(True) + gate_b_test = gate_b_t.detach().clone().requires_grad_(True) + up_a_test = up_a_t.detach().clone().requires_grad_(True) + up_b_test = up_b_t.detach().clone().requires_grad_(True) + got_out = quack_grouped_lora_dual( + x_test, + gate_a_test, + gate_b_test, + up_a_test, + up_b_test, + counts, + scale_gate=scale_gate, + scale_up=scale_up, + ) + got_loss = (got_out.float() * loss_grad.float()).sum() / max(1, loss_grad.numel()) + got_loss.backward() + + assert torch.allclose(ref_out, got_out.detach(), atol=5e-2, rtol=5e-2) + assert torch.allclose( + _require_grad(x_ref.grad, name="x_ref"), + _require_grad(x_test.grad, name="x_test"), + atol=5e-2, + rtol=5e-2, + ) + assert torch.allclose( + _require_grad(gate_a_ref.grad, name="gate_a_ref"), + _require_grad(gate_a_test.grad, name="gate_a_test"), + atol=5e-2, + rtol=5e-2, + ) + assert torch.allclose( + _require_grad(gate_b_ref.grad, name="gate_b_ref"), + _require_grad(gate_b_test.grad, name="gate_b_test"), + atol=5e-2, + rtol=5e-2, + ) + assert torch.allclose( + _require_grad(up_a_ref.grad, name="up_a_ref"), + _require_grad(up_a_test.grad, name="up_a_test"), + atol=5e-2, + rtol=5e-2, + ) + assert torch.allclose( + _require_grad(up_b_ref.grad, 
name="up_b_ref"), + _require_grad(up_b_test.grad, name="up_b_test"), + atol=5e-2, + rtol=5e-2, + ) From 92858a9636f5ac9de0456a6260552b7088896ada Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Mon, 23 Mar 2026 18:10:35 +0000 Subject: [PATCH 26/28] Tune QuACK low-rank tiles and rank contract --- dev/tune_quack_lora_tiles.py | 184 +++++++++++++++++++ src/art/megatron/cute_grouped_lora_quack.py | 42 ++++- tests/integration/test_lora_quack_cutover.py | 42 ++++- 3 files changed, 258 insertions(+), 10 deletions(-) create mode 100644 dev/tune_quack_lora_tiles.py diff --git a/dev/tune_quack_lora_tiles.py b/dev/tune_quack_lora_tiles.py new file mode 100644 index 00000000..b044b244 --- /dev/null +++ b/dev/tune_quack_lora_tiles.py @@ -0,0 +1,184 @@ +#!/usr/bin/env python3 +"""Offline tuner for QuACK grouped LoRA tile heuristics on the ART layer bench.""" + +from __future__ import annotations + +import argparse +from collections.abc import Iterator +from contextlib import contextmanager +import gc +import itertools +import json +import os +from pathlib import Path +import sys +from typing import Any + +import torch + +REPO_ROOT = Path(__file__).resolve().parents[1] +ART_SRC_ROOT = REPO_ROOT / "src" + + +def _resolve_art_harness_root() -> Path: + for candidate in REPO_ROOT.parents: + maybe_root = candidate / "projects" / "art_harness" + if maybe_root.is_dir(): + return maybe_root + raise RuntimeError( + "Unable to locate projects/art_harness from the current worktree." 
+ ) + + +ART_HARNESS_ROOT = _resolve_art_harness_root() + +if str(ART_HARNESS_ROOT) not in sys.path: + sys.path.insert(0, str(ART_HARNESS_ROOT)) + +import art_harness.layer_benches.bench_moe_lora as bench + +ENV_NAMES = { + "proj_tile_n": "ART_QUACK_PROJ_TILE_N", + "matmul_tile_n": "ART_QUACK_MATMUL_TILE_N", + "grad_a_tile_m": "ART_QUACK_GRAD_A_TILE_M", + "grad_b_tile_m": "ART_QUACK_GRAD_B_TILE_M", +} + + +def _parse_csv_ints(raw: str) -> list[int]: + values = [int(part.strip()) for part in raw.split(",") if part.strip()] + if not values: + raise ValueError(f"Expected at least one integer in '{raw}'") + for value in values: + if value <= 0: + raise ValueError(f"Expected positive integers, got {value}") + return values + + +def _parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser( + description="Tune QuACK grouped LoRA tile heuristics against the ART layer bench." + ) + parser.add_argument("--batch", type=int, default=1) + parser.add_argument("--seq-len", type=int, default=65536) + parser.add_argument("--hidden-size", type=int, default=2048) + parser.add_argument("--ffn-hidden-size", type=int, default=768) + parser.add_argument("--num-experts", type=int, default=128) + parser.add_argument("--top-k", type=int, default=8) + parser.add_argument("--lora-rank", type=int, default=1) + parser.add_argument("--dtype", type=str, default="bf16") + parser.add_argument("--warmup", type=int, default=6) + parser.add_argument("--iters", type=int, default=12) + parser.add_argument("--seed", type=int, default=0) + parser.add_argument("--proj-tile-n", type=str, default="32,64,128") + parser.add_argument("--matmul-tile-n", type=str, default="64,128") + parser.add_argument("--grad-a-tile-m", type=str, default="64,128") + parser.add_argument("--grad-b-tile-m", type=str, default="64,128") + parser.add_argument("--top-results", type=int, default=5) + parser.add_argument("--output-json", type=Path, default=None) + return parser.parse_args() + + +@contextmanager +def 
_tile_env(config: dict[str, int]) -> Iterator[None]: + previous = {name: os.environ.get(name) for name in ENV_NAMES.values()} + try: + for key, value in config.items(): + os.environ[ENV_NAMES[key]] = str(value) + yield + finally: + for name, old_value in previous.items(): + if old_value is None: + os.environ.pop(name, None) + else: + os.environ[name] = old_value + + +def _run_config(args: argparse.Namespace, config: dict[str, int]) -> dict[str, Any]: + bench.ART_WORKTREE_SRC = ART_SRC_ROOT + with _tile_env(config): + gc.collect() + torch.cuda.empty_cache() + torch.cuda.reset_peak_memory_stats() + result = bench.benchmark(args) + peak_alloc = torch.cuda.max_memory_allocated() + peak_reserved = torch.cuda.max_memory_reserved() + return { + "config": config, + "timing_ms": result["timing_ms"], + "timed_module_breakdown_ms": result["timed_module_breakdown_ms"], + "flops": result["flops"], + "peak_memory_gib": { + "allocated": peak_alloc / (1024**3), + "reserved": peak_reserved / (1024**3), + }, + } + + +def main() -> None: + if not torch.cuda.is_available(): + raise SystemExit("CUDA is required for QuACK tile tuning.") + + cli = _parse_args() + bench_args = argparse.Namespace( + batch=cli.batch, + seq_len=cli.seq_len, + hidden_size=cli.hidden_size, + ffn_hidden_size=cli.ffn_hidden_size, + num_experts=cli.num_experts, + top_k=cli.top_k, + lora_rank=cli.lora_rank, + dtype=cli.dtype, + warmup=cli.warmup, + iters=cli.iters, + peak_tflops=None, + seed=cli.seed, + ) + + configs: list[dict[str, int]] = [] + for proj_tile_n, matmul_tile_n, grad_a_tile_m, grad_b_tile_m in itertools.product( + _parse_csv_ints(cli.proj_tile_n), + _parse_csv_ints(cli.matmul_tile_n), + _parse_csv_ints(cli.grad_a_tile_m), + _parse_csv_ints(cli.grad_b_tile_m), + ): + configs.append( + { + "proj_tile_n": proj_tile_n, + "matmul_tile_n": matmul_tile_n, + "grad_a_tile_m": grad_a_tile_m, + "grad_b_tile_m": grad_b_tile_m, + } + ) + + results: list[dict[str, Any]] = [] + for config in configs: + try: + 
payload = _run_config(bench_args, config) + except Exception as exc: + payload = {"config": config, "error": repr(exc)} + results.append(payload) + print(json.dumps(payload, sort_keys=True), flush=True) + + successful = [item for item in results if "timing_ms" in item] + successful.sort(key=lambda item: float(item["timing_ms"]["total_mean"])) + summary = { + "search_space": { + "proj_tile_n": _parse_csv_ints(cli.proj_tile_n), + "matmul_tile_n": _parse_csv_ints(cli.matmul_tile_n), + "grad_a_tile_m": _parse_csv_ints(cli.grad_a_tile_m), + "grad_b_tile_m": _parse_csv_ints(cli.grad_b_tile_m), + }, + "benchmark_config": vars(bench_args), + "top_results": successful[: cli.top_results], + "num_successful": len(successful), + "num_total": len(results), + } + if cli.output_json is not None: + cli.output_json.parent.mkdir(parents=True, exist_ok=True) + cli.output_json.write_text(json.dumps(summary, indent=2, sort_keys=True)) + print(json.dumps(summary, indent=2, sort_keys=True)) + + +if __name__ == "__main__": + main() diff --git a/src/art/megatron/cute_grouped_lora_quack.py b/src/art/megatron/cute_grouped_lora_quack.py index dde72ae0..a9bcb0c2 100644 --- a/src/art/megatron/cute_grouped_lora_quack.py +++ b/src/art/megatron/cute_grouped_lora_quack.py @@ -1,22 +1,34 @@ from __future__ import annotations +import os from typing import Any, cast from quack.gemm import gemm as quack_gemm import torch _PADDED_LOW_RANK_TARGET = 8 -_PADDED_LOW_RANKS = frozenset({1, 2, 4}) -_SUPPORTED_RANKS = frozenset({1, 2, 4, 8, 16, 32, 64, 128}) -def _validate_supported_rank(rank: int) -> None: - if rank not in _SUPPORTED_RANKS: +def _validate_rank(rank: int) -> None: + if rank <= 0: + raise ValueError(f"Grouped LoRA QuACK backend requires rank > 0, got {rank}") + if rank >= _PADDED_LOW_RANK_TARGET and rank % _PADDED_LOW_RANK_TARGET != 0: raise ValueError( - f"Grouped LoRA QuACK backend only supports ranks {sorted(_SUPPORTED_RANKS)}, got {rank}" + "Grouped LoRA QuACK backend requires rank < 8 or a 
multiple of 8, " + f"got {rank}" ) +def _env_positive_int(name: str) -> int | None: + raw = os.environ.get(name) + if raw is None: + return None + value = int(raw) + if value <= 0: + raise ValueError(f"{name} must be > 0, got {value}") + return value + + def _tokens_per_expert_to_tensor( tokens_per_expert: list[int] | torch.Tensor, ) -> torch.Tensor: @@ -58,7 +70,7 @@ def _validate_inputs( if b_t.ndim != 3: raise ValueError(f"b_t must be 3D, got shape {tuple(b_t.shape)}") rank = a_t.shape[-1] - _validate_supported_rank(rank) + _validate_rank(rank) if b_t.shape[-2] != rank: raise ValueError(f"Expected b_t rank dim {rank}, got shape {tuple(b_t.shape)}") if a_t.shape[0] != b_t.shape[0]: @@ -100,7 +112,7 @@ def _validate_dual_inputs( if up_b_t.ndim != 3: raise ValueError(f"up_b_t must be 3D, got shape {tuple(up_b_t.shape)}") up_rank = up_a_t.shape[-1] - _validate_supported_rank(up_rank) + _validate_rank(up_rank) if up_b_t.shape[-2] != up_rank: raise ValueError( f"Expected up_b_t rank dim {up_rank}, got shape {tuple(up_b_t.shape)}" @@ -126,7 +138,7 @@ def _validate_dual_inputs( def _effective_rank(rank: int) -> int: - if rank in _PADDED_LOW_RANKS: + if rank < _PADDED_LOW_RANK_TARGET: return _PADDED_LOW_RANK_TARGET return rank @@ -148,18 +160,32 @@ def _pad_b_t(b_t: torch.Tensor, effective_rank: int) -> torch.Tensor: def _proj_tile_n(rank: int) -> int: + override = _env_positive_int("ART_QUACK_PROJ_TILE_N") + if override is not None: + return override + if rank <= 32: + return 32 return 64 if rank <= 64 else 128 def _matmul_tile_n(out_features: int) -> int: + override = _env_positive_int("ART_QUACK_MATMUL_TILE_N") + if override is not None: + return override return 128 if out_features >= 128 else 64 def _grad_a_tile_m(rank: int) -> int: + override = _env_positive_int("ART_QUACK_GRAD_A_TILE_M") + if override is not None: + return override return 128 def _grad_b_tile_m(rank: int) -> int: + override = _env_positive_int("ART_QUACK_GRAD_B_TILE_M") + if override is not None: + 
return override return 64 if rank <= 64 else 128 diff --git a/tests/integration/test_lora_quack_cutover.py b/tests/integration/test_lora_quack_cutover.py index 380f146d..d5259b93 100644 --- a/tests/integration/test_lora_quack_cutover.py +++ b/tests/integration/test_lora_quack_cutover.py @@ -66,7 +66,7 @@ def _eager_grouped_lora_dual( return torch.cat(outputs, dim=0) -@pytest.mark.parametrize("rank", [1, 4, 16]) +@pytest.mark.parametrize("rank", [1, 3, 7, 16, 24]) def test_lora_grouped_forward_cutover_matches_reference(rank: int) -> None: if not torch.cuda.is_available(): pytest.skip("CUDA is required for the LoRA QuACK cutover test.") @@ -125,7 +125,7 @@ def test_lora_grouped_forward_cutover_matches_reference(rank: int) -> None: assert torch.allclose(b_ref_grad, b_test_grad, atol=5e-2, rtol=5e-2) -@pytest.mark.parametrize("rank", [1, 4, 16]) +@pytest.mark.parametrize("rank", [1, 3, 7, 16, 24]) def test_lora_grouped_dual_forward_cutover_matches_reference(rank: int) -> None: if not torch.cuda.is_available(): pytest.skip("CUDA is required for the LoRA QuACK cutover test.") @@ -211,3 +211,41 @@ def test_lora_grouped_dual_forward_cutover_matches_reference(rank: int) -> None: atol=5e-2, rtol=5e-2, ) + + +def test_lora_grouped_cutover_rejects_rank_12() -> None: + if not torch.cuda.is_available(): + pytest.skip("CUDA is required for the LoRA QuACK cutover test.") + + device = torch.device("cuda:0") + counts = torch.tensor([32, 0, 16, 24], dtype=torch.int64) + x = torch.randn(int(counts.sum().item()), 64, device=device, dtype=torch.bfloat16) + + lora = LoRA( + adapter_model_prefix="test.{expert}", + in_features=64, + out_features=64, + rank=12, + alpha=32, + dtype=torch.bfloat16, + device=device, + num_local_experts=4, + ) + with pytest.raises(ValueError, match="rank < 8 or a multiple of 8"): + _ = lora(x, tokens_per_expert=counts) + + gate_a_t = torch.randn(4, 64, 12, device=device, dtype=torch.bfloat16) + gate_b_t = torch.randn(4, 12, 64, device=device, 
dtype=torch.bfloat16) + up_a_t = torch.randn(4, 64, 12, device=device, dtype=torch.bfloat16) + up_b_t = torch.randn(4, 12, 64, device=device, dtype=torch.bfloat16) + with pytest.raises(ValueError, match="rank < 8 or a multiple of 8"): + _ = quack_grouped_lora_dual( + x, + gate_a_t, + gate_b_t, + up_a_t, + up_b_t, + counts, + scale_gate=2.0, + scale_up=3.0, + ) From 8cc45b834fc8215f2ccd02de32f017fadffe1a51 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Mon, 23 Mar 2026 18:33:06 +0000 Subject: [PATCH 27/28] Inline FC1 QuACK dual call --- src/art/megatron/lora.py | 38 +++----------------- tests/integration/test_lora_quack_cutover.py | 38 -------------------- 2 files changed, 5 insertions(+), 71 deletions(-) diff --git a/src/art/megatron/lora.py b/src/art/megatron/lora.py index 5d24377d..f5d803ab 100644 --- a/src/art/megatron/lora.py +++ b/src/art/megatron/lora.py @@ -572,34 +572,6 @@ def forward(self, x: torch.Tensor) -> tuple[torch.Tensor, torch.Tensor | None]: return linear_output + adapter_output, bias -class FusedExpertsFC1LoRA(torch.nn.Module): - def __init__(self, adapter_model_prefix: str) -> None: - super().__init__() - self.adapter_model_prefix = f"{adapter_model_prefix}.fused_fc1" - - def forward( - self, - x: torch.Tensor, - tokens_per_expert: list[int] | torch.Tensor, - gate_a_t: torch.Tensor, - gate_b_t: torch.Tensor, - up_a_t: torch.Tensor, - up_b_t: torch.Tensor, - scale_gate: float, - scale_up: float, - ) -> torch.Tensor: - return quack_grouped_lora_dual( - x, - gate_a_t, - gate_b_t, - up_a_t, - up_b_t, - tokens_per_expert, - scale_gate=scale_gate, - scale_up=scale_up, - ) - - class MLPExpertsLinearFC1LoRA(torch.nn.Module): def __init__( self, @@ -626,7 +598,7 @@ def __init__( alpha=alpha, num_local_experts=num_local_experts, ) - self.fused_lora = FusedExpertsFC1LoRA(adapter_model_prefix=adapter_model_prefix) + self.uses_direct_quack_grouped_lora_dual = True @staticmethod def _build_fc1_lora( @@ -679,15 +651,15 @@ def forward( if isinstance(counts, 
torch.Tensor) and int(torch.count_nonzero(counts)) == 0: adapter_out = x.new_zeros((x.shape[0], self.linear_fc1.out_features)) else: - adapter_out = self.fused_lora( + adapter_out = quack_grouped_lora_dual( x, - counts, self.gate_lora.A_T, self.gate_lora.B_T, self.up_lora.A_T, self.up_lora.B_T, - self.gate_lora.scale, - self.up_lora.scale, + counts, + scale_gate=self.gate_lora.scale, + scale_up=self.up_lora.scale, ) return base_out + adapter_out, bias_out diff --git a/tests/integration/test_lora_quack_cutover.py b/tests/integration/test_lora_quack_cutover.py index d5259b93..77ecd42c 100644 --- a/tests/integration/test_lora_quack_cutover.py +++ b/tests/integration/test_lora_quack_cutover.py @@ -211,41 +211,3 @@ def test_lora_grouped_dual_forward_cutover_matches_reference(rank: int) -> None: atol=5e-2, rtol=5e-2, ) - - -def test_lora_grouped_cutover_rejects_rank_12() -> None: - if not torch.cuda.is_available(): - pytest.skip("CUDA is required for the LoRA QuACK cutover test.") - - device = torch.device("cuda:0") - counts = torch.tensor([32, 0, 16, 24], dtype=torch.int64) - x = torch.randn(int(counts.sum().item()), 64, device=device, dtype=torch.bfloat16) - - lora = LoRA( - adapter_model_prefix="test.{expert}", - in_features=64, - out_features=64, - rank=12, - alpha=32, - dtype=torch.bfloat16, - device=device, - num_local_experts=4, - ) - with pytest.raises(ValueError, match="rank < 8 or a multiple of 8"): - _ = lora(x, tokens_per_expert=counts) - - gate_a_t = torch.randn(4, 64, 12, device=device, dtype=torch.bfloat16) - gate_b_t = torch.randn(4, 12, 64, device=device, dtype=torch.bfloat16) - up_a_t = torch.randn(4, 64, 12, device=device, dtype=torch.bfloat16) - up_b_t = torch.randn(4, 12, 64, device=device, dtype=torch.bfloat16) - with pytest.raises(ValueError, match="rank < 8 or a multiple of 8"): - _ = quack_grouped_lora_dual( - x, - gate_a_t, - gate_b_t, - up_a_t, - up_b_t, - counts, - scale_gate=2.0, - scale_up=3.0, - ) From 
649410861616e7569c41592890ac14365aceaf14 Mon Sep 17 00:00:00 2001 From: FurtherAI Date: Tue, 24 Mar 2026 01:10:52 +0000 Subject: [PATCH 28/28] Revert unnecessary python 3.12 requirement. --- .python-version | 2 +- pyproject.toml | 4 +- uv.lock | 959 ++++++++++++++++++++++++++++++++++++++++++++++-- 3 files changed, 940 insertions(+), 25 deletions(-) diff --git a/.python-version b/.python-version index e4fba218..2c073331 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.12 +3.11 diff --git a/pyproject.toml b/pyproject.toml index c3e27fe6..7dca3c89 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "openpipe-art" version = "0.5.17" description = "The OpenPipe Agent Reinforcement Training (ART) library" readme = "README.md" -requires-python = ">=3.12" +requires-python = ">=3.11" dependencies = [ "openai>=2.14.0", "typer>=0.15.2", @@ -159,7 +159,7 @@ requires-dist = [ ] [tool.ty.environment] -python-version = "3.12" +python-version = "3.11" [tool.ty.rules] # Ignore unused-ignore-comment warnings because they vary depending on whether diff --git a/uv.lock b/uv.lock index 077c42ca..a5be8922 100644 --- a/uv.lock +++ b/uv.lock @@ -1,19 +1,23 @@ version = 1 revision = 3 -requires-python = ">=3.12" +requires-python = ">=3.11" resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'linux'", "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version < '3.13' and sys_platform == 'linux'", + "python_full_version == '3.12.*' and sys_platform == 'linux'", "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", "python_full_version == '3.13.*' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", "python_full_version == '3.13.*' and sys_platform == 'emscripten'", + 
"python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.13' and sys_platform == 'win32'", - "python_full_version < '3.13' and sys_platform == 'emscripten'", - "python_full_version < '3.13' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'emscripten'", + "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version < '3.12' and sys_platform == 'linux'", + "python_full_version < '3.12' and sys_platform == 'win32'", + "python_full_version < '3.12' and sys_platform == 'emscripten'", + "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", ] [manifest] @@ -118,6 +122,23 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/4c/a164164834f03924d9a29dc3acd9e7ee58f95857e0b467f6d04298594ebb/aiohttp-3.13.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b", size = 746051, upload-time = "2026-01-03T17:29:43.287Z" }, + { url = "https://files.pythonhosted.org/packages/82/71/d5c31390d18d4f58115037c432b7e0348c60f6f53b727cad33172144a112/aiohttp-3.13.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64", size = 
499234, upload-time = "2026-01-03T17:29:44.822Z" }, + { url = "https://files.pythonhosted.org/packages/0e/c9/741f8ac91e14b1d2e7100690425a5b2b919a87a5075406582991fb7de920/aiohttp-3.13.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea", size = 494979, upload-time = "2026-01-03T17:29:46.405Z" }, + { url = "https://files.pythonhosted.org/packages/75/b5/31d4d2e802dfd59f74ed47eba48869c1c21552c586d5e81a9d0d5c2ad640/aiohttp-3.13.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a", size = 1748297, upload-time = "2026-01-03T17:29:48.083Z" }, + { url = "https://files.pythonhosted.org/packages/1a/3e/eefad0ad42959f226bb79664826883f2687d602a9ae2941a18e0484a74d3/aiohttp-3.13.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540", size = 1707172, upload-time = "2026-01-03T17:29:49.648Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3a/54a64299fac2891c346cdcf2aa6803f994a2e4beeaf2e5a09dcc54acc842/aiohttp-3.13.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b", size = 1805405, upload-time = "2026-01-03T17:29:51.244Z" }, + { url = "https://files.pythonhosted.org/packages/6c/70/ddc1b7169cf64075e864f64595a14b147a895a868394a48f6a8031979038/aiohttp-3.13.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3", size = 1899449, upload-time = "2026-01-03T17:29:53.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/7e/6815aab7d3a56610891c76ef79095677b8b5be6646aaf00f69b221765021/aiohttp-3.13.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1", size = 1748444, upload-time = "2026-01-03T17:29:55.484Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f2/073b145c4100da5511f457dc0f7558e99b2987cf72600d42b559db856fbc/aiohttp-3.13.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3", size = 1606038, upload-time = "2026-01-03T17:29:57.179Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c1/778d011920cae03ae01424ec202c513dc69243cf2db303965615b81deeea/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440", size = 1724156, upload-time = "2026-01-03T17:29:58.914Z" }, + { url = "https://files.pythonhosted.org/packages/0e/cb/3419eabf4ec1e9ec6f242c32b689248365a1cf621891f6f0386632525494/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7", size = 1722340, upload-time = "2026-01-03T17:30:01.962Z" }, + { url = "https://files.pythonhosted.org/packages/7a/e5/76cf77bdbc435bf233c1f114edad39ed4177ccbfab7c329482b179cff4f4/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c", size = 1783041, upload-time = "2026-01-03T17:30:03.609Z" }, + { url = "https://files.pythonhosted.org/packages/9d/d4/dd1ca234c794fd29c057ce8c0566b8ef7fd6a51069de5f06fa84b9a1971c/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51", size = 1596024, upload-time = "2026-01-03T17:30:05.132Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/58/4345b5f26661a6180afa686c473620c30a66afdf120ed3dd545bbc809e85/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4", size = 1804590, upload-time = "2026-01-03T17:30:07.135Z" }, + { url = "https://files.pythonhosted.org/packages/7b/06/05950619af6c2df7e0a431d889ba2813c9f0129cec76f663e547a5ad56f2/aiohttp-3.13.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29", size = 1740355, upload-time = "2026-01-03T17:30:09.083Z" }, + { url = "https://files.pythonhosted.org/packages/3e/80/958f16de79ba0422d7c1e284b2abd0c84bc03394fbe631d0a39ffa10e1eb/aiohttp-3.13.3-cp311-cp311-win32.whl", hash = "sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239", size = 433701, upload-time = "2026-01-03T17:30:10.869Z" }, + { url = "https://files.pythonhosted.org/packages/dc/f2/27cdf04c9851712d6c1b99df6821a6623c3c9e55956d4b1e318c337b5a48/aiohttp-3.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f", size = 457678, upload-time = "2026-01-03T17:30:12.719Z" }, { url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" }, { url = "https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" }, { url = 
"https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" }, @@ -318,6 +339,12 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/e3/e9/a13952726228fa6282154ecf927092396bc759739e5e045019f6ab92f3ca/apache_tvm_ffi-0.1.8.post2.tar.gz", hash = "sha256:4513e38852894f290172ecfefcbc18d34e817fd29c16a0f1770e130c82b4067e", size = 2441111, upload-time = "2026-01-13T18:11:27.864Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/86/7db24692281d80204d07d77346ad4cb87f6183f1364ed94311993a47ed1a/apache_tvm_ffi-0.1.8.post2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:40f5fba3e06617f16888a0fdaf7ab4049841ff6e741644be822400438b771fe7", size = 1840013, upload-time = "2026-01-13T18:10:33.724Z" }, + { url = "https://files.pythonhosted.org/packages/cf/cc/fbaef883c6ba8e2c56ffcca997f2c076d1c14787799a62f39bd52c7126d5/apache_tvm_ffi-0.1.8.post2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9eb6d228fa22b6a5da140d761962f022a154746c91fe7608c49062deaf671f9f", size = 1995159, upload-time = "2026-01-13T18:10:35.727Z" }, + { url = "https://files.pythonhosted.org/packages/49/08/f1e984e3573d0cbd6d53f3f73a12691fba153afc529fbd506d78e739b330/apache_tvm_ffi-0.1.8.post2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:581c0acf845859be0cc26ac79f3663a83393b662c97c7125ebb78f0228b69d96", size = 2068543, upload-time = "2026-01-13T18:10:39.12Z" }, + { url = "https://files.pythonhosted.org/packages/35/1f/5336d430a133cf66ca9dac8ae9b6e25d8b99275a6687656421a1deee9f1b/apache_tvm_ffi-0.1.8.post2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:beadc7bb480ae02d02e2108543f6f4b4170d77e361ab3ccb43697d174ec185b0", size = 1939018, upload-time = 
"2026-01-13T18:10:40.621Z" }, + { url = "https://files.pythonhosted.org/packages/5f/67/969c66a27a128cf738d0c068e0d4451d691d8197929c797cbe8e59c6cfc9/apache_tvm_ffi-0.1.8.post2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e593d191c7ca0726ebcd3b024a4bc8140694fdfce2e7b02493f38ad5c4c9ecf7", size = 2053068, upload-time = "2026-01-13T18:10:43.241Z" }, + { url = "https://files.pythonhosted.org/packages/d4/f1/84881a799d227fdc4a61fbf0cb8d5ceb6a72ad788fa9070e5853ed9759b6/apache_tvm_ffi-0.1.8.post2-cp311-cp311-win_amd64.whl", hash = "sha256:1c685f19d0f26d9356c7c77a1cb652a3632ec9ee6cd21aa1d8cfb968743ec1fd", size = 1809557, upload-time = "2026-01-13T18:10:44.743Z" }, { url = "https://files.pythonhosted.org/packages/12/8b/a39d6c6eb1a87f6003e2717695cc6d44cc65ccd57dae5a0af944c0d25751/apache_tvm_ffi-0.1.8.post2-cp312-abi3-macosx_11_0_arm64.whl", hash = "sha256:c13ec7fc8f255767998b301ace0cd1e7d17ba76b48ffeb97ca9eb22a3314e250", size = 1811882, upload-time = "2026-01-13T18:10:46.317Z" }, { url = "https://files.pythonhosted.org/packages/8e/3a/7b1c9edcaeaebb945038144896cf17eb828a40b6ace0371823e133132664/apache_tvm_ffi-0.1.8.post2-cp312-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c78b4caf17304a1f47881bccdb2f9ac24d98b3b7fbe761a6dd4fd0585934d96", size = 1967259, upload-time = "2026-01-13T18:10:47.851Z" }, { url = "https://files.pythonhosted.org/packages/6c/b6/463602f57dda2e1c69165c044c07061cd59404593f313a427a3ad9c02cf3/apache_tvm_ffi-0.1.8.post2-cp312-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4a48da3fa8f47130f3502134f01e97044388c5217e7b91be4b0acec4feab81a0", size = 2044821, upload-time = "2026-01-13T18:10:49.396Z" }, @@ -373,6 +400,14 @@ version = "0.31.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/fe/cc/d18065ce2380d80b1bcce927c24a2642efd38918e33fd724bc4bca904877/asyncpg-0.31.0.tar.gz", hash = 
"sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735", size = 993667, upload-time = "2025-11-24T23:27:00.812Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/08/17/cc02bc49bc350623d050fa139e34ea512cd6e020562f2a7312a7bcae4bc9/asyncpg-0.31.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eee690960e8ab85063ba93af2ce128c0f52fd655fdff9fdb1a28df01329f031d", size = 643159, upload-time = "2025-11-24T23:25:36.443Z" }, + { url = "https://files.pythonhosted.org/packages/a4/62/4ded7d400a7b651adf06f49ea8f73100cca07c6df012119594d1e3447aa6/asyncpg-0.31.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2657204552b75f8288de08ca60faf4a99a65deef3a71d1467454123205a88fab", size = 638157, upload-time = "2025-11-24T23:25:37.89Z" }, + { url = "https://files.pythonhosted.org/packages/d6/5b/4179538a9a72166a0bf60ad783b1ef16efb7960e4d7b9afe9f77a5551680/asyncpg-0.31.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a429e842a3a4b4ea240ea52d7fe3f82d5149853249306f7ff166cb9948faa46c", size = 2918051, upload-time = "2025-11-24T23:25:39.461Z" }, + { url = "https://files.pythonhosted.org/packages/e6/35/c27719ae0536c5b6e61e4701391ffe435ef59539e9360959240d6e47c8c8/asyncpg-0.31.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0807be46c32c963ae40d329b3a686356e417f674c976c07fa49f1b30303f109", size = 2972640, upload-time = "2025-11-24T23:25:41.512Z" }, + { url = "https://files.pythonhosted.org/packages/43/f4/01ebb9207f29e645a64699b9ce0eefeff8e7a33494e1d29bb53736f7766b/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e5d5098f63beeae93512ee513d4c0c53dc12e9aa2b7a1af5a81cddf93fe4e4da", size = 2851050, upload-time = "2025-11-24T23:25:43.153Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f4/03ff1426acc87be0f4e8d40fa2bff5c3952bef0080062af9efc2212e3be8/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:37fc6c00a814e18eef51833545d1891cac9aa69140598bb076b4cd29b3e010b9", size = 2962574, upload-time = "2025-11-24T23:25:44.942Z" }, + { url = "https://files.pythonhosted.org/packages/c7/39/cc788dfca3d4060f9d93e67be396ceec458dfc429e26139059e58c2c244d/asyncpg-0.31.0-cp311-cp311-win32.whl", hash = "sha256:5a4af56edf82a701aece93190cc4e094d2df7d33f6e915c222fb09efbb5afc24", size = 521076, upload-time = "2025-11-24T23:25:46.486Z" }, + { url = "https://files.pythonhosted.org/packages/28/fc/735af5384c029eb7f1ca60ccb8fa95521dbdaeef788edf4cecfc604c3cab/asyncpg-0.31.0-cp311-cp311-win_amd64.whl", hash = "sha256:480c4befbdf079c14c9ca43c8c5e1fe8b6296c96f1f927158d4f1e750aacc047", size = 584980, upload-time = "2025-11-24T23:25:47.938Z" }, { url = "https://files.pythonhosted.org/packages/2a/a6/59d0a146e61d20e18db7396583242e32e0f120693b67a8de43f1557033e2/asyncpg-0.31.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b44c31e1efc1c15188ef183f287c728e2046abb1d26af4d20858215d50d91fad", size = 662042, upload-time = "2025-11-24T23:25:49.578Z" }, { url = "https://files.pythonhosted.org/packages/36/01/ffaa189dcb63a2471720615e60185c3f6327716fdc0fc04334436fbb7c65/asyncpg-0.31.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0c89ccf741c067614c9b5fc7f1fc6f3b61ab05ae4aaa966e6fd6b93097c7d20d", size = 638504, upload-time = "2025-11-24T23:25:51.501Z" }, { url = "https://files.pythonhosted.org/packages/9f/62/3f699ba45d8bd24c5d65392190d19656d74ff0185f42e19d0bbd973bb371/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:12b3b2e39dc5470abd5e98c8d3373e4b1d1234d9fbdedf538798b2c13c64460a", size = 3426241, upload-time = "2025-11-24T23:25:53.278Z" }, @@ -422,6 +457,13 @@ version = "15.1.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/e9/c3/83e6e73d1592bc54436eae0bc61704ae0cff0c3cfbde7b58af9ed67ebb49/av-15.1.0.tar.gz", hash = "sha256:39cda2dc810e11c1938f8cb5759c41d6b630550236b3365790e67a313660ec85", size = 3774192, 
upload-time = "2025-08-30T04:41:56.076Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/47/58/4e44cf6939be7aba96a4abce024e1be11ba7539ecac74d09369b8c03aa05/av-15.1.0-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b785948762a8d45fc58fc24a20251496829ace1817e9a7a508a348d6de2182c3", size = 21767323, upload-time = "2025-08-30T04:39:37.989Z" }, + { url = "https://files.pythonhosted.org/packages/9b/f6/a946544cdb49f6d892d2761b1d61a8bc6ce912fe57ba06769bdc640c0a7f/av-15.1.0-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:9c7131494a3a318612b4ee4db98fe5bc50eb705f6b6536127c7ab776c524fd8b", size = 26946268, upload-time = "2025-08-30T04:39:40.601Z" }, + { url = "https://files.pythonhosted.org/packages/70/7c/b33513c0af73d0033af59a98f035b521c5b93445a6af7e9efbf41a6e8383/av-15.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2b9623ae848625c59213b610c8665817924f913580c7c5c91e0dc18936deb00d", size = 38062118, upload-time = "2025-08-30T04:39:43.928Z" }, + { url = "https://files.pythonhosted.org/packages/5e/95/31b7fb34f9fea7c7389240364194f4f56ad2d460095038cc720f50a90bb3/av-15.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c8ef597087db560514617143532b1fafc4825ebb2dda9a22418f548b113a0cc7", size = 39571086, upload-time = "2025-08-30T04:39:47.109Z" }, + { url = "https://files.pythonhosted.org/packages/e7/b0/7b0b45474a4e90c35c11d0032947d8b3c7386872957ce29c6f12add69a74/av-15.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:08eac47a90ebae1e2bd5935f400dd515166019bab4ff5b03c4625fa6ac3a0a5e", size = 40112634, upload-time = "2025-08-30T04:39:50.981Z" }, + { url = "https://files.pythonhosted.org/packages/aa/04/038b94bc9a1ee10a451c867d4a2fc91e845f83bfc2dae9df25893abcb57f/av-15.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d3f66ff200ea166e606cb3c5cb1bd2fc714effbec2e262a5d67ce60450c8234a", size = 40878695, upload-time = "2025-08-30T04:39:54.493Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/3d/9f8f96c0deeaaf648485a3dbd1699b2f0580f2ce8a36cb616c0138ba7615/av-15.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:57b99544d91121b8bea570e4ddf61700f679a6b677c1f37966bc1a22e1d4cd5c", size = 31335683, upload-time = "2025-08-30T04:39:57.861Z" }, { url = "https://files.pythonhosted.org/packages/d1/58/de78b276d20db6ffcd4371283df771721a833ba525a3d57e753d00a9fe79/av-15.1.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:40c5df37f4c354ab8190c6fd68dab7881d112f527906f64ca73da4c252a58cee", size = 21760991, upload-time = "2025-08-30T04:40:00.801Z" }, { url = "https://files.pythonhosted.org/packages/56/cc/45f85775304ae60b66976360d82ba5b152ad3fd91f9267d5020a51e9a828/av-15.1.0-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:af455ce65ada3d361f80c90c810d9bced4db5655ab9aa513024d6c71c5c476d5", size = 26953097, upload-time = "2025-08-30T04:40:03.998Z" }, { url = "https://files.pythonhosted.org/packages/f3/f8/2d781e5e71d02fc829487e775ccb1185e72f95340d05f2e84eb57a11e093/av-15.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:86226d2474c80c3393fa07a9c366106029ae500716098b72b3ec3f67205524c3", size = 38319710, upload-time = "2025-08-30T04:40:07.701Z" }, @@ -516,12 +558,38 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148, upload-time = "2022-10-05T19:19:30.546Z" }, ] +[[package]] +name = "backports-tarfile" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/86/72/cd9b395f25e290e633655a100af28cb253e4393396264a98bd5f5951d50f/backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991", size = 86406, upload-time = "2024-05-28T17:01:54.731Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b9/fa/123043af240e49752f1c4bd24da5053b6bd00cad78c2be53c0d1e8b975bc/backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34", size = 30181, upload-time = "2024-05-28T17:01:53.112Z" }, +] + [[package]] name = "backports-zstd" version = "1.3.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f4/b1/36a5182ce1d8ef9ef32bff69037bd28b389bbdb66338f8069e61da7028cb/backports_zstd-1.3.0.tar.gz", hash = "sha256:e8b2d68e2812f5c9970cabc5e21da8b409b5ed04e79b4585dbffa33e9b45ebe2", size = 997138, upload-time = "2025-12-29T17:28:06.143Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/28/ed31a0e35feb4538a996348362051b52912d50f00d25c2d388eccef9242c/backports_zstd-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:249f90b39d3741c48620021a968b35f268ca70e35f555abeea9ff95a451f35f9", size = 435660, upload-time = "2025-12-29T17:25:55.207Z" }, + { url = "https://files.pythonhosted.org/packages/00/0d/3db362169d80442adda9dd563c4f0bb10091c8c1c9a158037f4ecd53988e/backports_zstd-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b0e71e83e46154a9d3ced6d4de9a2fea8207ee1e4832aeecf364dc125eda305c", size = 362056, upload-time = "2025-12-29T17:25:56.729Z" }, + { url = "https://files.pythonhosted.org/packages/bd/00/b67ba053a7d6f6dbe2f8a704b7d3a5e01b1d2e2e8edbc9b634f2702ef73c/backports_zstd-1.3.0-cp311-cp311-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:cbc6193acd21f96760c94dd71bf32b161223e8503f5277acb0a5ab54e5598957", size = 505957, upload-time = "2025-12-29T17:25:57.941Z" }, + { url = "https://files.pythonhosted.org/packages/6f/3e/2667c0ddb53ddf28667e330bf9fe92e8e17705a481c9b698e283120565f7/backports_zstd-1.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:1df583adc0ae84a8d13d7139f42eade6d90182b1dd3e0d28f7df3c564b9fd55d", size = 475569, upload-time = "2025-12-29T17:25:59.075Z" }, + { url = "https://files.pythonhosted.org/packages/eb/86/4052473217bd954ccdffda5f7264a0e99e7c4ecf70c0f729845c6a45fc5a/backports_zstd-1.3.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d833fc23aa3cc2e05aeffc7cfadd87b796654ad3a7fb214555cda3f1db2d4dc2", size = 581196, upload-time = "2025-12-29T17:26:00.508Z" }, + { url = "https://files.pythonhosted.org/packages/e5/bd/064f6fdb61db3d2c473159ebc844243e650dc032de0f8208443a00127925/backports_zstd-1.3.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:142178fe981061f1d2a57c5348f2cd31a3b6397a35593e7a17dbda817b793a7f", size = 640888, upload-time = "2025-12-29T17:26:02.134Z" }, + { url = "https://files.pythonhosted.org/packages/d8/09/0822403f40932a165a4f1df289d41653683019e4fd7a86b63ed20e9b6177/backports_zstd-1.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5eed0a09a163f3a8125a857cb031be87ed052e4a47bc75085ed7fca786e9bb5b", size = 491100, upload-time = "2025-12-29T17:26:03.418Z" }, + { url = "https://files.pythonhosted.org/packages/a6/a3/f5ac28d74039b7e182a780809dc66b9dbfc893186f5d5444340bba135389/backports_zstd-1.3.0-cp311-cp311-manylinux_2_34_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:60aa483fef5843749e993dde01229e5eedebca8c283023d27d6bf6800d1d4ce3", size = 565071, upload-time = "2025-12-29T17:26:05.022Z" }, + { url = "https://files.pythonhosted.org/packages/e1/ac/50209aeb92257a642ee987afa1e61d5b6731ab6bf0bff70905856e5aede6/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ea0886c1b619773544546e243ed73f6d6c2b1ae3c00c904ccc9903a352d731e1", size = 481519, upload-time = "2025-12-29T17:26:06.255Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/1f/b06f64199fb4b2e9437cedbf96d0155ca08aeec35fe81d41065acd44762e/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5e137657c830a5ce99be40a1d713eb1d246bae488ada28ff0666ac4387aebdd5", size = 509465, upload-time = "2025-12-29T17:26:07.602Z" }, + { url = "https://files.pythonhosted.org/packages/f4/37/2c365196e61c8fffbbc930ffd69f1ada7aa1c7210857b3e565031c787ac6/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94048c8089755e482e4b34608029cf1142523a625873c272be2b1c9253871a72", size = 585552, upload-time = "2025-12-29T17:26:08.911Z" }, + { url = "https://files.pythonhosted.org/packages/93/8d/c2c4f448bb6b6c9df17410eaedce415e8db0eb25b60d09a3d22a98294d09/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:d339c1ec40485e97e600eb9a285fb13169dbf44c5094b945788a62f38b96e533", size = 562893, upload-time = "2025-12-29T17:26:10.566Z" }, + { url = "https://files.pythonhosted.org/packages/74/e8/2110d4d39115130f7514cbbcec673a885f4052bb68d15e41bc96a7558856/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8aeee9210c54cf8bf83f4d263a6d0d6e7a0298aeb5a14a0a95e90487c5c3157c", size = 631462, upload-time = "2025-12-29T17:26:11.99Z" }, + { url = "https://files.pythonhosted.org/packages/b9/a8/d64b59ae0714fdace14e43873f794eff93613e35e3e85eead33a4f44cd80/backports_zstd-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba7114a3099e5ea05cbb46568bd0e08bca2ca11e12c6a7b563a24b86b2b4a67f", size = 495125, upload-time = "2025-12-29T17:26:13.218Z" }, + { url = "https://files.pythonhosted.org/packages/ef/d8/bcff0a091fcf27172c57ae463e49d8dec6dc31e01d7e7bf1ae3aad9c3566/backports_zstd-1.3.0-cp311-cp311-win32.whl", hash = "sha256:08dfdfb85da5915383bfae680b6ac10ab5769ab22e690f9a854320720011ae8e", size = 288664, upload-time = "2025-12-29T17:26:14.791Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/1a/379061e2abf8c3150ad51c1baab9ac723e01cf7538860a6a74c48f8b73ee/backports_zstd-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8aac2e7cdcc8f310c16f98a0062b48d0a081dbb82862794f4f4f5bdafde30a4", size = 313633, upload-time = "2025-12-29T17:26:16.31Z" }, + { url = "https://files.pythonhosted.org/packages/35/e7/eca40858883029fc716660106069b23253e2ec5fd34e86b4101c8cfe864b/backports_zstd-1.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:440ef1be06e82dc0d69dbb57177f2ce98bbd2151013ee7e551e2f2b54caa6120", size = 288814, upload-time = "2025-12-29T17:26:17.571Z" }, { url = "https://files.pythonhosted.org/packages/72/d4/356da49d3053f4bc50e71a8535631b57bc9ca4e8c6d2442e073e0ab41c44/backports_zstd-1.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f4a292e357f3046d18766ce06d990ccbab97411708d3acb934e63529c2ea7786", size = 435972, upload-time = "2025-12-29T17:26:18.752Z" }, { url = "https://files.pythonhosted.org/packages/30/8f/dbe389e60c7e47af488520f31a4aa14028d66da5bf3c60d3044b571eb906/backports_zstd-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fb4c386f38323698991b38edcc9c091d46d4713f5df02a3b5c80a28b40e289ea", size = 362124, upload-time = "2025-12-29T17:26:19.995Z" }, { url = "https://files.pythonhosted.org/packages/55/4b/173beafc99e99e7276ce008ef060b704471e75124c826bc5e2092815da37/backports_zstd-1.3.0-cp312-cp312-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:f52523d2bdada29e653261abdc9cfcecd9e5500d305708b7e37caddb24909d4e", size = 506378, upload-time = "2025-12-29T17:26:21.855Z" }, @@ -573,6 +641,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/32/04/cfab76878f360f124dbb533779e1e4603c801a0f5ada72ae5c742b7c4d7d/backports_zstd-1.3.0-cp313-cp313t-win32.whl", hash = "sha256:7d3f0f2499d2049ec53d2674c605a4b3052c217cc7ee49c05258046411685adc", size = 289389, upload-time = "2025-12-29T17:27:22.287Z" }, { url = 
"https://files.pythonhosted.org/packages/cb/ff/dbcfb6c9c922ab6d98f3d321e7d0c7b34ecfa26f3ca71d930fe1ef639737/backports_zstd-1.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:eb2f8fab0b1ea05148394cb34a9e543a43477178765f2d6e7c84ed332e34935e", size = 314776, upload-time = "2025-12-29T17:27:23.458Z" }, { url = "https://files.pythonhosted.org/packages/01/4b/82e4baae3117806639fe1c693b1f2f7e6133a7cefd1fa2e38018c8edcd68/backports_zstd-1.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c66ad9eb5bfbe28c2387b7fc58ddcdecfb336d6e4e60bcba1694a906c1f21a6c", size = 289315, upload-time = "2025-12-29T17:27:24.601Z" }, + { url = "https://files.pythonhosted.org/packages/9a/d9/8c9c246e5ea79a4f45d551088b11b61f2dc7efcdc5dbe6df3be84a506e0c/backports_zstd-1.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:968167d29f012cee7b112ad031a8925e484e97e99288e55e4d62962c3a1013e3", size = 409666, upload-time = "2025-12-29T17:27:57.37Z" }, + { url = "https://files.pythonhosted.org/packages/a4/4f/a55b33c314ca8c9074e99daab54d04c5d212070ae7dbc435329baf1b139e/backports_zstd-1.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8f6fc7d62b71083b574193dd8fb3a60e6bb34880cc0132aad242943af301f7a", size = 339199, upload-time = "2025-12-29T17:27:58.542Z" }, + { url = "https://files.pythonhosted.org/packages/9d/13/ce31bd048b1c88d0f65d7af60b6cf89cfbed826c7c978f0ebca9a8a71cfc/backports_zstd-1.3.0-pp311-pypy311_pp73-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:e0f2eca6aac280fdb77991ad3362487ee91a7fb064ad40043fb5a0bf5a376943", size = 420332, upload-time = "2025-12-29T17:28:00.332Z" }, + { url = "https://files.pythonhosted.org/packages/cf/80/c0cdbc533d0037b57248588403a3afb050b2a83b8c38aa608e31b3a4d600/backports_zstd-1.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:676eb5e177d4ef528cf3baaeea4fffe05f664e4dd985d3ac06960ef4619c81a9", size = 393879, upload-time = "2025-12-29T17:28:01.57Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/38/c97428867cac058ed196ccaeddfdf82ecd43b8a65965f2950a6e7547e77a/backports_zstd-1.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:199eb9bd8aca6a9d489c41a682fad22c587dffe57b613d0fe6d492d0d38ce7c5", size = 413842, upload-time = "2025-12-29T17:28:03.113Z" }, + { url = "https://files.pythonhosted.org/packages/8d/ec/6247be6536668fe1c7dfae3eaa9c94b00b956b716957c0fc986ba78c3cc4/backports_zstd-1.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:2524bd6777a828d5e7ccd7bd1a57f9e7007ae654fc2bd1bc1a207f6428674e4a", size = 299684, upload-time = "2025-12-29T17:28:04.856Z" }, ] [[package]] @@ -600,6 +674,19 @@ version = "3.8.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/95/06/92fdc84448d324ab8434b78e65caf4fb4c6c90b4f8ad9bdd4c8021bfaf1e/bitarray-3.8.0.tar.gz", hash = "sha256:3eae38daffd77c9621ae80c16932eea3fb3a4af141fb7cc724d4ad93eff9210d", size = 151991, upload-time = "2025-11-02T21:41:15.117Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/7d/63558f1d0eb09217a3d30c1c847890879973e224a728fcff9391fab999b8/bitarray-3.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:25b9cff6c9856bc396232e2f609ea0c5ec1a8a24c500cee4cca96ba8a3cd50b6", size = 148502, upload-time = "2025-11-02T21:39:09.993Z" }, + { url = "https://files.pythonhosted.org/packages/5e/7b/f957ad211cb0172965b5f0881b67b99e2b6d41512af0a1001f44a44ddf4a/bitarray-3.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d9984017314da772f5f7460add7a0301a4ffc06c72c2998bb16c300a6253607", size = 145484, upload-time = "2025-11-02T21:39:10.904Z" }, + { url = "https://files.pythonhosted.org/packages/9f/dc/897973734f14f91467a3a795a4624752238053ecffaec7c8bbda1e363fda/bitarray-3.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbbbfbb7d039b20d289ce56b1beb46138d65769d04af50c199c6ac4cb6054d52", 
size = 330909, upload-time = "2025-11-02T21:39:12.276Z" }, + { url = "https://files.pythonhosted.org/packages/67/be/24b4b792426d92de289e73e09682915d567c2e69d47e8857586cbdc865d0/bitarray-3.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1f723e260c35e1c7c57a09d3a6ebe681bd56c83e1208ae3ce1869b7c0d10d4f", size = 358469, upload-time = "2025-11-02T21:39:13.766Z" }, + { url = "https://files.pythonhosted.org/packages/3e/0e/2eda69a7a59a6998df8fb57cc9d1e0e62888c599fb5237b0a8b479a01afb/bitarray-3.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cbd1660fb48827381ce3a621a4fdc237959e1cd4e98b098952a8f624a0726425", size = 369131, upload-time = "2025-11-02T21:39:15.041Z" }, + { url = "https://files.pythonhosted.org/packages/f7/7b/8a372d6635a6b2622477b2f96a569b2cd0318a62bc95a4a2144c7942c987/bitarray-3.8.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:df6d7bf3e15b7e6e202a16ff4948a51759354016026deb04ab9b5acbbe35e096", size = 337089, upload-time = "2025-11-02T21:39:16.124Z" }, + { url = "https://files.pythonhosted.org/packages/93/f0/8eca934dbe5dee47a0e5ef44eeb72e85acacc8097c27cd164337bc4ec5d3/bitarray-3.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d5c931ec1c03111718cabf85f6012bb2815fa0ce578175567fa8d6f2cc15d3b4", size = 328504, upload-time = "2025-11-02T21:39:17.321Z" }, + { url = "https://files.pythonhosted.org/packages/88/dd/928b8e23a9950f8a8bfc42bc1e7de41f4e27f57de01a716308be5f683c2b/bitarray-3.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:41b53711f89008ba2de62e4c2d2260a8b357072fd4f18e1351b28955db2719dc", size = 356461, upload-time = "2025-11-02T21:39:18.396Z" }, + { url = "https://files.pythonhosted.org/packages/a9/93/4fb58417aff47fa2fe1874a39c9346b589a1d78c93a9cb24cccede5dc737/bitarray-3.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:4f298daaaea58d45e245a132d6d2bdfb6f856da50dc03d75ebb761439fb626cf", size = 353008, upload-time = "2025-11-02T21:39:19.828Z" }, + { url = "https://files.pythonhosted.org/packages/da/54/aa04e4a7b45aa5913f08ee377d43319b0979925e3c0407882eb29df3be66/bitarray-3.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:30989a2451b693c3f9359d91098a744992b5431a0be4858f1fdf0ec76b457125", size = 334048, upload-time = "2025-11-02T21:39:20.924Z" }, + { url = "https://files.pythonhosted.org/packages/da/52/e851f41076df014c05d6ac1ce34fbf7db5fa31241da3e2f09bb2be9e283d/bitarray-3.8.0-cp311-cp311-win32.whl", hash = "sha256:e5aed4754895942ae15ffa48c52d181e1c1463236fda68d2dba29c03aa61786b", size = 142907, upload-time = "2025-11-02T21:39:22.312Z" }, + { url = "https://files.pythonhosted.org/packages/28/01/db0006148b1dd13b4ac2686df8fa57d12f5887df313a506e939af0cb0997/bitarray-3.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:22c540ed20167d3dbb1e2d868ca935180247d620c40eace90efa774504a40e3b", size = 149670, upload-time = "2025-11-02T21:39:23.341Z" }, + { url = "https://files.pythonhosted.org/packages/7b/ea/b7d55ee269b1426f758a535c9ec2a07c056f20f403fa981685c3c8b4798c/bitarray-3.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:84b52b2cf77bb7f703d16c4007b021078dbbe6cf8ffb57abe81a7bacfc175ef2", size = 146709, upload-time = "2025-11-02T21:39:24.343Z" }, { url = "https://files.pythonhosted.org/packages/82/a0/0c41d893eda756315491adfdbf9bc928aee3d377a7f97a8834d453aa5de1/bitarray-3.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2fcbe9b3a5996b417e030aa33a562e7e20dfc86271e53d7e841fc5df16268b8", size = 148575, upload-time = "2025-11-02T21:39:25.718Z" }, { url = "https://files.pythonhosted.org/packages/0e/30/12ab2f4a4429bd844b419c37877caba93d676d18be71354fbbeb21d9f4cc/bitarray-3.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cd761d158f67e288fd0ebe00c3b158095ce80a4bc7c32b60c7121224003ba70d", size = 145454, upload-time = "2025-11-02T21:39:26.695Z" }, { url = 
"https://files.pythonhosted.org/packages/26/58/314b3e3f219533464e120f0c51ac5123e7b1c1b91f725a4073fb70c5a858/bitarray-3.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c394a3f055b49f92626f83c1a0b6d6cd2c628f1ccd72481c3e3c6aa4695f3b20", size = 332949, upload-time = "2025-11-02T21:39:27.801Z" }, @@ -696,6 +783,11 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/13/88/560b11e521c522440af991d46848a2bde64b5f7202ec14e1f46f9509d328/black-26.1.0.tar.gz", hash = "sha256:d294ac3340eef9c9eb5d29288e96dc719ff269a88e27b396340459dd85da4c58", size = 658785, upload-time = "2026-01-18T04:50:11.993Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/30/83/f05f22ff13756e1a8ce7891db517dbc06200796a16326258268f4658a745/black-26.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3cee1487a9e4c640dc7467aaa543d6c0097c391dc8ac74eb313f2fbf9d7a7cb5", size = 1831956, upload-time = "2026-01-18T04:59:21.38Z" }, + { url = "https://files.pythonhosted.org/packages/7d/f2/b2c570550e39bedc157715e43927360312d6dd677eed2cc149a802577491/black-26.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d62d14ca31c92adf561ebb2e5f2741bf8dea28aef6deb400d49cca011d186c68", size = 1672499, upload-time = "2026-01-18T04:59:23.257Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d7/990d6a94dc9e169f61374b1c3d4f4dd3037e93c2cc12b6f3b12bc663aa7b/black-26.1.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb1dafbbaa3b1ee8b4550a84425aac8874e5f390200f5502cf3aee4a2acb2f14", size = 1735431, upload-time = "2026-01-18T04:59:24.729Z" }, + { url = "https://files.pythonhosted.org/packages/36/1c/cbd7bae7dd3cb315dfe6eeca802bb56662cc92b89af272e014d98c1f2286/black-26.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:101540cb2a77c680f4f80e628ae98bd2bd8812fb9d72ade4f8995c5ff019e82c", size = 1400468, upload-time = "2026-01-18T04:59:27.381Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/b1/9fe6132bb2d0d1f7094613320b56297a108ae19ecf3041d9678aec381b37/black-26.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:6f3977a16e347f1b115662be07daa93137259c711e526402aa444d7a88fdc9d4", size = 1207332, upload-time = "2026-01-18T04:59:28.711Z" }, { url = "https://files.pythonhosted.org/packages/f5/13/710298938a61f0f54cdb4d1c0baeb672c01ff0358712eddaf29f76d32a0b/black-26.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6eeca41e70b5f5c84f2f913af857cf2ce17410847e1d54642e658e078da6544f", size = 1878189, upload-time = "2026-01-18T04:59:30.682Z" }, { url = "https://files.pythonhosted.org/packages/79/a6/5179beaa57e5dbd2ec9f1c64016214057b4265647c62125aa6aeffb05392/black-26.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dd39eef053e58e60204f2cdf059e2442e2eb08f15989eefe259870f89614c8b6", size = 1700178, upload-time = "2026-01-18T04:59:32.387Z" }, { url = "https://files.pythonhosted.org/packages/8c/04/c96f79d7b93e8f09d9298b333ca0d31cd9b2ee6c46c274fd0f531de9dc61/black-26.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9459ad0d6cd483eacad4c6566b0f8e42af5e8b583cee917d90ffaa3778420a0a", size = 1777029, upload-time = "2026-01-18T04:59:33.767Z" }, @@ -718,8 +810,19 @@ wheels = [ name = "blake3" version = "1.0.8" source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, +] sdist = { url = "https://files.pythonhosted.org/packages/75/aa/abcd75e9600987a0bc6cfe9b6b2ff3f0e2cb08c170addc6e76035b5c4cb3/blake3-1.0.8.tar.gz", hash = "sha256:513cc7f0f5a7c035812604c2c852a0c1468311345573de647e310aca4ab165ba", size = 117308, upload-time = "2025-10-14T06:47:48.83Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/0a/515209b0c282c360e249b89cd85350d97cfd55fadbb4df736c67b77b27a1/blake3-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7fcfe81b3ae3fb5d2e88be0d3259603ff95f0d5ed69f655c28fdaef31e49a470", size = 371092, upload-time = "2025-10-14T06:45:34.062Z" }, + { url = "https://files.pythonhosted.org/packages/a0/33/9d342a2bf5817f006bbe947335e5d387327541ea47590854947befd01251/blake3-1.0.8-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:58ce8d45a5bb5326482de72ea1969a378634236186a970fef63058a5b7b8b435", size = 374859, upload-time = "2025-10-14T06:45:35.262Z" }, + { url = "https://files.pythonhosted.org/packages/5b/fc/ea4bef850a7ec9fbb383503fd3c56056dd9fa44e10c3bc61050ab7b2bac0/blake3-1.0.8-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83605dbf43f581d8b7175b7f3bfe5388bad5a7c6ac175c9c11d669da31133f4b", size = 448585, upload-time = "2025-10-14T06:45:36.542Z" }, + { url = "https://files.pythonhosted.org/packages/a5/67/167a65a4c431715407d07b1b8b1367698a3ad88e7260edb85f0c5293f08a/blake3-1.0.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b5573b052777142b2cecc453d022c3f21aa4aba75011258410bb98f41c1a727", size = 507519, upload-time = "2025-10-14T06:45:37.814Z" }, + { url = "https://files.pythonhosted.org/packages/32/e2/0886e192d634b264c613b0fbf380745b39992b424a0effc00ef08783644e/blake3-1.0.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe1b02ab49bfd969ef50b9f17482a2011c77536654af21807ba5c2674e0bb2a0", size = 393645, upload-time = "2025-10-14T06:45:39.146Z" }, + { url = "https://files.pythonhosted.org/packages/fc/3b/7fb2fe615448caaa5f6632b2c7551117b38ccac747a3a5769181e9751641/blake3-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7780666dc6be809b49442d6d5ce06fdbe33024a87560b58471103ec17644682", size = 387640, upload-time = "2025-10-14T06:45:40.546Z" }, + { url = "https://files.pythonhosted.org/packages/bc/8c/2bfc942c6c97cb3d20f341859343bb86ee20af723fedfc886373e606079b/blake3-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:af394b50c6aa0b1b957a99453d1ee440ef67cd2d1b5669c731647dc723de8a3a", size = 550316, upload-time = "2025-10-14T06:45:42.003Z" }, + { url = "https://files.pythonhosted.org/packages/7e/75/0252be37620699b79dbaa799c9b402d63142a131d16731df4ef09d135dd7/blake3-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c63ece266a43014cf29e772a82857cd8e90315ae3ed53e3c5204851596edd5f2", size = 554463, upload-time = "2025-10-14T06:45:43.22Z" }, { url = "https://files.pythonhosted.org/packages/ee/7d/85a4c0782f613de23d114a7a78fcce270f75b193b3ff3493a0de24ba104a/blake3-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:269f255b110840e52b6ce9db02217e39660ebad3e34ddd5bca8b8d378a77e4e1", size = 371296, upload-time = "2025-10-14T06:45:49.674Z" }, { url = "https://files.pythonhosted.org/packages/e3/20/488475254976ed93fab57c67aa80d3b40df77f7d9db6528c9274bff53e08/blake3-1.0.8-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66ca28a673025c40db3eba21a9cac52f559f83637efa675b3f6bd8683f0415f3", size = 374516, upload-time = "2025-10-14T06:45:51.23Z" }, { url = "https://files.pythonhosted.org/packages/7b/21/2a1c47fedb77fb396512677ec6d46caf42ac6e9a897db77edd0a2a46f7bb/blake3-1.0.8-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb04966537777af56c1f399b35525aa70a1225816e121ff95071c33c0f7abca", size = 447911, upload-time = "2025-10-14T06:45:52.637Z" }, @@ -823,6 +926,16 @@ version = "1.2.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f7/16/c92ca344d646e71a43b8bb353f0a6490d7f6e06210f8554c8f874e454285/brotli-1.2.0.tar.gz", hash = "sha256:e310f77e41941c13340a95976fe66a8a95b01e783d430eeaf7a2f87e0a57dd0a", size = 7388632, upload-time = "2025-11-05T18:39:42.86Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/ef/f285668811a9e1ddb47a18cb0b437d5fc2760d537a2fe8a57875ad6f8448/brotli-1.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:15b33fe93cedc4caaff8a0bd1eb7e3dab1c61bb22a0bf5bdfdfd97cd7da79744", size = 863110, upload-time = "2025-11-05T18:38:12.978Z" }, + { url = "https://files.pythonhosted.org/packages/50/62/a3b77593587010c789a9d6eaa527c79e0848b7b860402cc64bc0bc28a86c/brotli-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:898be2be399c221d2671d29eed26b6b2713a02c2119168ed914e7d00ceadb56f", size = 445438, upload-time = "2025-11-05T18:38:14.208Z" }, + { url = "https://files.pythonhosted.org/packages/cd/e1/7fadd47f40ce5549dc44493877db40292277db373da5053aff181656e16e/brotli-1.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:350c8348f0e76fff0a0fd6c26755d2653863279d086d3aa2c290a6a7251135dd", size = 1534420, upload-time = "2025-11-05T18:38:15.111Z" }, + { url = "https://files.pythonhosted.org/packages/12/8b/1ed2f64054a5a008a4ccd2f271dbba7a5fb1a3067a99f5ceadedd4c1d5a7/brotli-1.2.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1ad3fda65ae0d93fec742a128d72e145c9c7a99ee2fcd667785d99eb25a7fe", size = 1632619, upload-time = "2025-11-05T18:38:16.094Z" }, + { url = "https://files.pythonhosted.org/packages/89/5a/7071a621eb2d052d64efd5da2ef55ecdac7c3b0c6e4f9d519e9c66d987ef/brotli-1.2.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:40d918bce2b427a0c4ba189df7a006ac0c7277c180aee4617d99e9ccaaf59e6a", size = 1426014, upload-time = "2025-11-05T18:38:17.177Z" }, + { url = "https://files.pythonhosted.org/packages/26/6d/0971a8ea435af5156acaaccec1a505f981c9c80227633851f2810abd252a/brotli-1.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2a7f1d03727130fc875448b65b127a9ec5d06d19d0148e7554384229706f9d1b", size = 1489661, upload-time = "2025-11-05T18:38:18.41Z" }, + { url = "https://files.pythonhosted.org/packages/f3/75/c1baca8b4ec6c96a03ef8230fab2a785e35297632f402ebb1e78a1e39116/brotli-1.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:9c79f57faa25d97900bfb119480806d783fba83cd09ee0b33c17623935b05fa3", size = 1599150, upload-time = "2025-11-05T18:38:19.792Z" }, + { url = "https://files.pythonhosted.org/packages/0d/1a/23fcfee1c324fd48a63d7ebf4bac3a4115bdb1b00e600f80f727d850b1ae/brotli-1.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:844a8ceb8483fefafc412f85c14f2aae2fb69567bf2a0de53cdb88b73e7c43ae", size = 1493505, upload-time = "2025-11-05T18:38:20.913Z" }, + { url = "https://files.pythonhosted.org/packages/36/e5/12904bbd36afeef53d45a84881a4810ae8810ad7e328a971ebbfd760a0b3/brotli-1.2.0-cp311-cp311-win32.whl", hash = "sha256:aa47441fa3026543513139cb8926a92a8e305ee9c71a6209ef7a97d91640ea03", size = 334451, upload-time = "2025-11-05T18:38:21.94Z" }, + { url = "https://files.pythonhosted.org/packages/02/8b/ecb5761b989629a4758c394b9301607a5880de61ee2ee5fe104b87149ebc/brotli-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:022426c9e99fd65d9475dce5c195526f04bb8be8907607e27e747893f6ee3e24", size = 369035, upload-time = "2025-11-05T18:38:22.941Z" }, { url = "https://files.pythonhosted.org/packages/11/ee/b0a11ab2315c69bb9b45a2aaed022499c9c24a205c3a49c3513b541a7967/brotli-1.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:35d382625778834a7f3061b15423919aa03e4f5da34ac8e02c074e4b75ab4f84", size = 861543, upload-time = "2025-11-05T18:38:24.183Z" }, { url = "https://files.pythonhosted.org/packages/e1/2f/29c1459513cd35828e25531ebfcbf3e92a5e49f560b1777a9af7203eb46e/brotli-1.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a61c06b334bd99bc5ae84f1eeb36bfe01400264b3c352f968c6e30a10f9d08b", size = 444288, upload-time = "2025-11-05T18:38:25.139Z" }, { url = "https://files.pythonhosted.org/packages/3d/6f/feba03130d5fceadfa3a1bb102cb14650798c848b1df2a808356f939bb16/brotli-1.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:acec55bb7c90f1dfc476126f9711a8e81c9af7fb617409a9ee2953115343f08d", size = 1528071, upload-time = 
"2025-11-05T18:38:26.081Z" }, @@ -874,6 +987,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e4/9c/d51486bf366fc7d6735f0e46b5b96ca58dc005b250263525a1eea3cd5d21/brotlicffi-1.2.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:33cfb408d0cff64cd50bef268c0fed397c46fbb53944aa37264148614a62e990", size = 1536547, upload-time = "2025-11-21T18:17:45.729Z" }, { url = "https://files.pythonhosted.org/packages/1b/37/293a9a0a7caf17e6e657668bebb92dfe730305999fe8c0e2703b8888789c/brotlicffi-1.2.0.0-cp38-abi3-win32.whl", hash = "sha256:23e5c912fdc6fd37143203820230374d24babd078fc054e18070a647118158f6", size = 343085, upload-time = "2025-11-21T18:17:48.887Z" }, { url = "https://files.pythonhosted.org/packages/07/6b/6e92009df3b8b7272f85a0992b306b61c34b7ea1c4776643746e61c380ac/brotlicffi-1.2.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:f139a7cdfe4ae7859513067b736eb44d19fae1186f9e99370092f6915216451b", size = 378586, upload-time = "2025-11-21T18:17:50.531Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ec/52488a0563f1663e2ccc75834b470650f4b8bcdea3132aef3bf67219c661/brotlicffi-1.2.0.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fa102a60e50ddbd08de86a63431a722ea216d9bc903b000bf544149cc9b823dc", size = 402002, upload-time = "2025-11-21T18:17:51.76Z" }, + { url = "https://files.pythonhosted.org/packages/e4/63/d4aea4835fd97da1401d798d9b8ba77227974de565faea402f520b37b10f/brotlicffi-1.2.0.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d3c4332fc808a94e8c1035950a10d04b681b03ab585ce897ae2a360d479037c", size = 406447, upload-time = "2025-11-21T18:17:53.614Z" }, + { url = "https://files.pythonhosted.org/packages/62/4e/5554ecb2615ff035ef8678d4e419549a0f7a28b3f096b272174d656749fb/brotlicffi-1.2.0.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:fb4eb5830026b79a93bf503ad32b2c5257315e9ffc49e76b2715cffd07c8e3db", size = 402521, upload-time = "2025-11-21T18:17:54.875Z" }, + { url = "https://files.pythonhosted.org/packages/b5/d3/b07f8f125ac52bbee5dc00ef0d526f820f67321bf4184f915f17f50a4657/brotlicffi-1.2.0.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3832c66e00d6d82087f20a972b2fc03e21cd99ef22705225a6f8f418a9158ecc", size = 374730, upload-time = "2025-11-21T18:17:56.334Z" }, ] [[package]] @@ -914,6 +1031,10 @@ version = "5.8.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d9/8e/8b4fdde28e42ffcd741a37f4ffa9fb59cd4fe01625b544dfcfd9ccb54f01/cbor2-5.8.0.tar.gz", hash = "sha256:b19c35fcae9688ac01ef75bad5db27300c2537eb4ee00ed07e05d8456a0d4931", size = 107825, upload-time = "2025-12-30T18:44:22.455Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/58/17/f664201080b2a7d0f57c16c8e9e5922013b92f202e294863ec7e75b7ff7f/cbor2-5.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fddee9103a17d7bed5753f0c7fc6663faa506eb953e50d8287804eccf7b048e6", size = 268316, upload-time = "2025-12-30T18:43:37.161Z" }, + { url = "https://files.pythonhosted.org/packages/d0/e1/072745b4ff01afe9df2cd627f8fc51a1acedb5d3d1253765625d2929db91/cbor2-5.8.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8d2ea26fad620aba5e88d7541be8b10c5034a55db9a23809b7cb49f36803f05b", size = 258874, upload-time = "2025-12-30T18:43:38.878Z" }, + { url = "https://files.pythonhosted.org/packages/a7/10/61c262b886d22b62c56e8aac6d10fa06d0953c997879ab882a31a624952b/cbor2-5.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:de68b4b310b072b082d317adc4c5e6910173a6d9455412e6183d72c778d1f54c", size = 261971, upload-time = "2025-12-30T18:43:40.401Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/42/b7862f5e64364b10ad120ea53e87ec7e891fb268cb99c572348e647cf7e9/cbor2-5.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:418d2cf0e03e90160fa1474c05a40fe228bbb4a92d1628bdbbd13a48527cb34d", size = 254151, upload-time = "2025-12-30T18:43:41.938Z" }, { url = "https://files.pythonhosted.org/packages/38/81/0d0cf0796fe8081492a61c45278f03def21a929535a492dd97c8438f5dbe/cbor2-5.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:518c118a5e00001854adb51f3164e647aa99b6a9877d2a733a28cb5c0a4d6857", size = 286242, upload-time = "2025-12-30T18:43:47.026Z" }, { url = "https://files.pythonhosted.org/packages/7b/a9/fdab6c10190cfb8d639e01f2b168f2406fc847a2a6bc00e7de78c3381d0a/cbor2-5.8.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cff2a1999e49cd51c23d1b6786a012127fd8f722c5946e82bd7ab3eb307443f3", size = 285412, upload-time = "2025-12-30T18:43:48.563Z" }, { url = "https://files.pythonhosted.org/packages/31/59/746a8e630996217a3afd523f583fcf7e3d16640d63f9a03f0f4e4f74b5b1/cbor2-5.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c4492160212374973cdc14e46f0565f2462721ef922b40f7ea11e7d613dfb2a", size = 278041, upload-time = "2025-12-30T18:43:49.92Z" }, @@ -947,6 +1068,19 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, @@ -1010,6 +1144,22 @@ version = "3.4.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, + { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, + { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, + { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, + { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", 
size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, @@ -1133,6 +1283,17 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = 
"sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/91/2e/c4390a31919d8a78b90e8ecf87cd4b4c4f05a5b48d05ec17db8e5404c6f4/contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1", size = 288773, upload-time = "2025-07-26T12:01:02.277Z" }, + { url = "https://files.pythonhosted.org/packages/0d/44/c4b0b6095fef4dc9c420e041799591e3b63e9619e3044f7f4f6c21c0ab24/contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381", size = 270149, upload-time = "2025-07-26T12:01:04.072Z" }, + { url = "https://files.pythonhosted.org/packages/30/2e/dd4ced42fefac8470661d7cb7e264808425e6c5d56d175291e93890cce09/contourpy-1.3.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:929ddf8c4c7f348e4c0a5a3a714b5c8542ffaa8c22954862a46ca1813b667ee7", size = 329222, upload-time = "2025-07-26T12:01:05.688Z" }, + { url = "https://files.pythonhosted.org/packages/f2/74/cc6ec2548e3d276c71389ea4802a774b7aa3558223b7bade3f25787fafc2/contourpy-1.3.3-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9e999574eddae35f1312c2b4b717b7885d4edd6cb46700e04f7f02db454e67c1", size = 377234, upload-time = "2025-07-26T12:01:07.054Z" }, + { url = "https://files.pythonhosted.org/packages/03/b3/64ef723029f917410f75c09da54254c5f9ea90ef89b143ccadb09df14c15/contourpy-1.3.3-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf67e0e3f482cb69779dd3061b534eb35ac9b17f163d851e2a547d56dba0a3a", size = 380555, upload-time = "2025-07-26T12:01:08.801Z" }, + { url = "https://files.pythonhosted.org/packages/5f/4b/6157f24ca425b89fe2eb7e7be642375711ab671135be21e6faa100f7448c/contourpy-1.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:51e79c1f7470158e838808d4a996fa9bac72c498e93d8ebe5119bc1e6becb0db", size = 355238, upload-time = "2025-07-26T12:01:10.319Z" }, + { url = "https://files.pythonhosted.org/packages/98/56/f914f0dd678480708a04cfd2206e7c382533249bc5001eb9f58aa693e200/contourpy-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:598c3aaece21c503615fd59c92a3598b428b2f01bfb4b8ca9c4edeecc2438620", size = 1326218, upload-time = "2025-07-26T12:01:12.659Z" }, + { url = "https://files.pythonhosted.org/packages/fb/d7/4a972334a0c971acd5172389671113ae82aa7527073980c38d5868ff1161/contourpy-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:322ab1c99b008dad206d406bb61d014cf0174df491ae9d9d0fac6a6fda4f977f", size = 1392867, upload-time = "2025-07-26T12:01:15.533Z" }, + { url = "https://files.pythonhosted.org/packages/75/3e/f2cc6cd56dc8cff46b1a56232eabc6feea52720083ea71ab15523daab796/contourpy-1.3.3-cp311-cp311-win32.whl", hash = "sha256:fd907ae12cd483cd83e414b12941c632a969171bf90fc937d0c9f268a31cafff", size = 183677, upload-time = "2025-07-26T12:01:17.088Z" }, + { url = "https://files.pythonhosted.org/packages/98/4b/9bd370b004b5c9d8045c6c33cf65bae018b27aca550a3f657cdc99acdbd8/contourpy-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:3519428f6be58431c56581f1694ba8e50626f2dd550af225f82fb5f5814d2a42", size = 225234, upload-time = "2025-07-26T12:01:18.256Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b6/71771e02c2e004450c12b1120a5f488cad2e4d5b590b1af8bad060360fe4/contourpy-1.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:15ff10bfada4bf92ec8b31c62bf7c1834c244019b4a33095a68000d7075df470", size = 193123, upload-time = "2025-07-26T12:01:19.848Z" }, { url = "https://files.pythonhosted.org/packages/be/45/adfee365d9ea3d853550b2e735f9d66366701c65db7855cd07621732ccfc/contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb", size = 293419, upload-time = "2025-07-26T12:01:21.16Z" }, { url = 
"https://files.pythonhosted.org/packages/53/3e/405b59cfa13021a56bba395a6b3aca8cec012b45bf177b0eaf7a202cde2c/contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6", size = 273979, upload-time = "2025-07-26T12:01:22.448Z" }, { url = "https://files.pythonhosted.org/packages/d4/1c/a12359b9b2ca3a845e8f7f9ac08bdf776114eb931392fcad91743e2ea17b/contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7", size = 332653, upload-time = "2025-07-26T12:01:24.155Z" }, @@ -1188,6 +1349,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/93/8a/68a4ec5c55a2971213d29a9374913f7e9f18581945a7a31d1a39b5d2dfe5/contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae", size = 202428, upload-time = "2025-07-26T12:02:48.691Z" }, { url = "https://files.pythonhosted.org/packages/fa/96/fd9f641ffedc4fa3ace923af73b9d07e869496c9cc7a459103e6e978992f/contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc", size = 250331, upload-time = "2025-07-26T12:02:50.137Z" }, { url = "https://files.pythonhosted.org/packages/ae/8c/469afb6465b853afff216f9528ffda78a915ff880ed58813ba4faf4ba0b6/contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b", size = 203831, upload-time = "2025-07-26T12:02:51.449Z" }, + { url = "https://files.pythonhosted.org/packages/a5/29/8dcfe16f0107943fa92388c23f6e05cff0ba58058c4c95b00280d4c75a14/contourpy-1.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd5dfcaeb10f7b7f9dc8941717c6c2ade08f587be2226222c12b25f0483ed497", size = 278809, upload-time = "2025-07-26T12:02:52.74Z" }, + { url = 
"https://files.pythonhosted.org/packages/85/a9/8b37ef4f7dafeb335daee3c8254645ef5725be4d9c6aa70b50ec46ef2f7e/contourpy-1.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0c1fc238306b35f246d61a1d416a627348b5cf0648648a031e14bb8705fcdfe8", size = 261593, upload-time = "2025-07-26T12:02:54.037Z" }, + { url = "https://files.pythonhosted.org/packages/0a/59/ebfb8c677c75605cc27f7122c90313fd2f375ff3c8d19a1694bda74aaa63/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f9aad7de812d6541d29d2bbf8feb22ff7e1c299523db288004e3157ff4674e", size = 302202, upload-time = "2025-07-26T12:02:55.947Z" }, + { url = "https://files.pythonhosted.org/packages/3c/37/21972a15834d90bfbfb009b9d004779bd5a07a0ec0234e5ba8f64d5736f4/contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed3657edf08512fc3fe81b510e35c2012fbd3081d2e26160f27ca28affec989", size = 329207, upload-time = "2025-07-26T12:02:57.468Z" }, + { url = "https://files.pythonhosted.org/packages/0c/58/bd257695f39d05594ca4ad60df5bcb7e32247f9951fd09a9b8edb82d1daa/contourpy-1.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3d1a3799d62d45c18bafd41c5fa05120b96a28079f2393af559b843d1a966a77", size = 225315, upload-time = "2025-07-26T12:02:58.801Z" }, ] [[package]] @@ -1196,6 +1362,21 @@ version = "7.13.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/ad/b59e5b451cf7172b8d1043dc0fa718f23aab379bc1521ee13d4bd9bfa960/coverage-7.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d490ba50c3f35dd7c17953c68f3270e7ccd1c6642e2d2afe2d8e720b98f5a053", size = 219278, upload-time = 
"2026-02-09T12:56:31.673Z" }, + { url = "https://files.pythonhosted.org/packages/f1/17/0cb7ca3de72e5f4ef2ec2fa0089beafbcaaaead1844e8b8a63d35173d77d/coverage-7.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19bc3c88078789f8ef36acb014d7241961dbf883fd2533d18cb1e7a5b4e28b11", size = 219783, upload-time = "2026-02-09T12:56:33.104Z" }, + { url = "https://files.pythonhosted.org/packages/ab/63/325d8e5b11e0eaf6d0f6a44fad444ae58820929a9b0de943fa377fe73e85/coverage-7.13.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3998e5a32e62fdf410c0dbd3115df86297995d6e3429af80b8798aad894ca7aa", size = 250200, upload-time = "2026-02-09T12:56:34.474Z" }, + { url = "https://files.pythonhosted.org/packages/76/53/c16972708cbb79f2942922571a687c52bd109a7bd51175aeb7558dff2236/coverage-7.13.4-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8e264226ec98e01a8e1054314af91ee6cde0eacac4f465cc93b03dbe0bce2fd7", size = 252114, upload-time = "2026-02-09T12:56:35.749Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c2/7ab36d8b8cc412bec9ea2d07c83c48930eb4ba649634ba00cb7e4e0f9017/coverage-7.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3aa4e7b9e416774b21797365b358a6e827ffadaaca81b69ee02946852449f00", size = 254220, upload-time = "2026-02-09T12:56:37.796Z" }, + { url = "https://files.pythonhosted.org/packages/d6/4d/cf52c9a3322c89a0e6febdfbc83bb45c0ed3c64ad14081b9503adee702e7/coverage-7.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:71ca20079dd8f27fcf808817e281e90220475cd75115162218d0e27549f95fef", size = 256164, upload-time = "2026-02-09T12:56:39.016Z" }, + { url = "https://files.pythonhosted.org/packages/78/e9/eb1dd17bd6de8289df3580e967e78294f352a5df8a57ff4671ee5fc3dcd0/coverage-7.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:e2f25215f1a359ab17320b47bcdaca3e6e6356652e8256f2441e4ef972052903", size = 250325, upload-time = "2026-02-09T12:56:40.668Z" }, + { url = "https://files.pythonhosted.org/packages/71/07/8c1542aa873728f72267c07278c5cc0ec91356daf974df21335ccdb46368/coverage-7.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d65b2d373032411e86960604dc4edac91fdfb5dca539461cf2cbe78327d1e64f", size = 251913, upload-time = "2026-02-09T12:56:41.97Z" }, + { url = "https://files.pythonhosted.org/packages/74/d7/c62e2c5e4483a748e27868e4c32ad3daa9bdddbba58e1bc7a15e252baa74/coverage-7.13.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94eb63f9b363180aff17de3e7c8760c3ba94664ea2695c52f10111244d16a299", size = 249974, upload-time = "2026-02-09T12:56:43.323Z" }, + { url = "https://files.pythonhosted.org/packages/98/9f/4c5c015a6e98ced54efd0f5cf8d31b88e5504ecb6857585fc0161bb1e600/coverage-7.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e856bf6616714c3a9fbc270ab54103f4e685ba236fa98c054e8f87f266c93505", size = 253741, upload-time = "2026-02-09T12:56:45.155Z" }, + { url = "https://files.pythonhosted.org/packages/bd/59/0f4eef89b9f0fcd9633b5d350016f54126ab49426a70ff4c4e87446cabdc/coverage-7.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:65dfcbe305c3dfe658492df2d85259e0d79ead4177f9ae724b6fb245198f55d6", size = 249695, upload-time = "2026-02-09T12:56:46.636Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2c/b7476f938deb07166f3eb281a385c262675d688ff4659ad56c6c6b8e2e70/coverage-7.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b507778ae8a4c915436ed5c2e05b4a6cecfa70f734e19c22a005152a11c7b6a9", size = 250599, upload-time = "2026-02-09T12:56:48.13Z" }, + { url = "https://files.pythonhosted.org/packages/b8/34/c3420709d9846ee3785b9f2831b4d94f276f38884032dca1457fa83f7476/coverage-7.13.4-cp311-cp311-win32.whl", hash = "sha256:784fc3cf8be001197b652d51d3fd259b1e2262888693a4636e18879f613a62a9", size = 221780, upload-time = "2026-02-09T12:56:50.479Z" 
}, + { url = "https://files.pythonhosted.org/packages/61/08/3d9c8613079d2b11c185b865de9a4c1a68850cfda2b357fae365cf609f29/coverage-7.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:2421d591f8ca05b308cf0092807308b2facbefe54af7c02ac22548b88b95c98f", size = 222715, upload-time = "2026-02-09T12:56:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/18/1a/54c3c80b2f056164cc0a6cdcb040733760c7c4be9d780fe655f356f433e4/coverage-7.13.4-cp311-cp311-win_arm64.whl", hash = "sha256:79e73a76b854d9c6088fe5d8b2ebe745f8681c55f7397c3c0a016192d681045f", size = 221385, upload-time = "2026-02-09T12:56:53.194Z" }, { url = "https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" }, { url = "https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" }, { url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" }, @@ -1311,6 +1492,9 @@ dependencies = [ { name = "cuda-pathfinder" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/2b/ebcbb60aa6dba830474cd360c42e10282f7a343c0a1f58d24fbd3b7c2d77/cuda_bindings-12.9.4-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a6a429dc6c13148ff1e27c44f40a3dd23203823e637b87fd0854205195988306", size = 11840604, upload-time = "2025-10-21T14:51:34.565Z" 
}, + { url = "https://files.pythonhosted.org/packages/45/e7/b47792cc2d01c7e1d37c32402182524774dadd2d26339bd224e0e913832e/cuda_bindings-12.9.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c912a3d9e6b6651853eed8eed96d6800d69c08e94052c292fec3f282c5a817c9", size = 12210593, upload-time = "2025-10-21T14:51:36.574Z" }, + { url = "https://files.pythonhosted.org/packages/dd/be/90d32049e06abcfba4b2e7df1dbcb5e16215c8852eef0cd8b25f38a66bd4/cuda_bindings-12.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:443b0875916879c2e4c3722941e25e42d5ab9bcbf34c9e83404fb100fa1f6913", size = 11490933, upload-time = "2025-10-21T14:51:38.792Z" }, { url = "https://files.pythonhosted.org/packages/0c/c2/65bfd79292b8ff18be4dd7f7442cea37bcbc1a228c1886f1dea515c45b67/cuda_bindings-12.9.4-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:694ba35023846625ef471257e6b5a4bc8af690f961d197d77d34b1d1db393f56", size = 11760260, upload-time = "2025-10-21T14:51:40.79Z" }, { url = "https://files.pythonhosted.org/packages/a9/c1/dabe88f52c3e3760d861401bb994df08f672ec893b8f7592dc91626adcf3/cuda_bindings-12.9.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fda147a344e8eaeca0c6ff113d2851ffca8f7dfc0a6c932374ee5c47caa649c8", size = 12151019, upload-time = "2025-10-21T14:51:43.167Z" }, { url = "https://files.pythonhosted.org/packages/df/6b/9c1b1a6c01392bfdd758e9486f52a1a72bc8f49e98f9355774ef98b5fb4e/cuda_bindings-12.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:696ca75d249ddf287d01b9a698b8e2d8a05046495a9c051ca15659dc52d17615", size = 11586961, upload-time = "2025-10-21T14:51:45.394Z" }, @@ -1372,6 +1556,8 @@ dependencies = [ { name = "numpy" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/a1/15c59a9abe6720be3a7c3f59bf1ff7ba1ceb5f44869b5cb80b3e60cea170/cupy_cuda12x-14.0.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:e0c8a6b9430313cd5576c473254bbfee1eff4503b00b097ac34e45f4b45af05b", size = 146240038, 
upload-time = "2026-02-17T00:51:34.109Z" }, + { url = "https://files.pythonhosted.org/packages/63/12/a59be7517c2c8f05a31cbf4a84597b8d686050acf99e77b1f95fcc0da327/cupy_cuda12x-14.0.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:60b4a0e88171ec811f282c8b0ddf2ea51aa335e5739a638f2dc7438a81410833", size = 135081905, upload-time = "2026-02-17T00:51:39.969Z" }, { url = "https://files.pythonhosted.org/packages/37/f6/c561e31d37655d4b9c4e53d43314ed5a7ad715f6c12a329a256380b59c11/cupy_cuda12x-14.0.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:680b2d8be84dc6984ceae41000a0a5a272adef6c9e1650ee43afeb937c319233", size = 145577288, upload-time = "2026-02-17T00:51:49.173Z" }, { url = "https://files.pythonhosted.org/packages/58/c9/7d0a990e01e1082c857c7c869127e8f70f5be07d7600ca318b77844dfbfd/cupy_cuda12x-14.0.0-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:321953e346995fc5fb970b7eec05be7463b33628fd0673a54f7f206270786c68", size = 134612571, upload-time = "2026-02-17T00:51:54.023Z" }, { url = "https://files.pythonhosted.org/packages/d3/f5/e882da76d77cbed5ba75a9c4a9deedfe3dcd17128407fea17599b87c949d/cupy_cuda12x-14.0.0-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:5a23a07a51391e4d46dc02b0cf05aa7eb1cb405e3fa8abcec65d8b07259c0ce7", size = 145123903, upload-time = "2026-02-17T00:52:02.841Z" }, @@ -1439,6 +1625,10 @@ version = "1.8.20" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/e0/b7/cd8080344452e4874aae67c40d8940e2b4d47b01601a8fd9f44786c757c7/debugpy-1.8.20.tar.gz", hash = "sha256:55bc8701714969f1ab89a6d5f2f3d40c36f91b2cbe2f65d98bf8196f6a6a2c33", size = 1645207, upload-time = "2026-01-29T23:03:28.199Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/51/56/c3baf5cbe4dd77427fd9aef99fcdade259ad128feeb8a786c246adb838e5/debugpy-1.8.20-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:eada6042ad88fa1571b74bd5402ee8b86eded7a8f7b827849761700aff171f1b", size = 2208318, upload-time 
= "2026-01-29T23:03:36.481Z" }, + { url = "https://files.pythonhosted.org/packages/9a/7d/4fa79a57a8e69fe0d9763e98d1110320f9ecd7f1f362572e3aafd7417c9d/debugpy-1.8.20-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:7de0b7dfeedc504421032afba845ae2a7bcc32ddfb07dae2c3ca5442f821c344", size = 3171493, upload-time = "2026-01-29T23:03:37.775Z" }, + { url = "https://files.pythonhosted.org/packages/7d/f2/1e8f8affe51e12a26f3a8a8a4277d6e60aa89d0a66512f63b1e799d424a4/debugpy-1.8.20-cp311-cp311-win32.whl", hash = "sha256:773e839380cf459caf73cc533ea45ec2737a5cc184cf1b3b796cd4fd98504fec", size = 5209240, upload-time = "2026-01-29T23:03:39.109Z" }, + { url = "https://files.pythonhosted.org/packages/d5/92/1cb532e88560cbee973396254b21bece8c5d7c2ece958a67afa08c9f10dc/debugpy-1.8.20-cp311-cp311-win_amd64.whl", hash = "sha256:1f7650546e0eded1902d0f6af28f787fa1f1dbdbc97ddabaf1cd963a405930cb", size = 5233481, upload-time = "2026-01-29T23:03:40.659Z" }, { url = "https://files.pythonhosted.org/packages/14/57/7f34f4736bfb6e00f2e4c96351b07805d83c9a7b33d28580ae01374430f7/debugpy-1.8.20-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:4ae3135e2089905a916909ef31922b2d733d756f66d87345b3e5e52b7a55f13d", size = 2550686, upload-time = "2026-01-29T23:03:42.023Z" }, { url = "https://files.pythonhosted.org/packages/ab/78/b193a3975ca34458f6f0e24aaf5c3e3da72f5401f6054c0dfd004b41726f/debugpy-1.8.20-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:88f47850a4284b88bd2bfee1f26132147d5d504e4e86c22485dfa44b97e19b4b", size = 4310588, upload-time = "2026-01-29T23:03:43.314Z" }, { url = "https://files.pythonhosted.org/packages/c1/55/f14deb95eaf4f30f07ef4b90a8590fc05d9e04df85ee379712f6fb6736d7/debugpy-1.8.20-cp312-cp312-win32.whl", hash = "sha256:4057ac68f892064e5f98209ab582abfee3b543fb55d2e87610ddc133a954d390", size = 5331372, upload-time = "2026-01-29T23:03:45.526Z" }, @@ -1586,6 +1776,13 @@ version = "1.4.4" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/36/9d/ab66a06e416d71b7bdcb9904cdf8d4db3379ef632bb8e9495646702d9718/duckdb-1.4.4.tar.gz", hash = "sha256:8bba52fd2acb67668a4615ee17ee51814124223de836d9e2fdcbc4c9021b3d3c", size = 18419763, upload-time = "2026-01-26T11:50:37.68Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/04/68/19233412033a2bc5a144a3f531f64e3548d4487251e3f16b56c31411a06f/duckdb-1.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5ba684f498d4e924c7e8f30dd157da8da34c8479746c5011b6c0e037e9c60ad2", size = 28883816, upload-time = "2026-01-26T11:49:01.009Z" }, + { url = "https://files.pythonhosted.org/packages/b3/3e/cec70e546c298ab76d80b990109e111068d82cca67942c42328eaa7d6fdb/duckdb-1.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5536eb952a8aa6ae56469362e344d4e6403cc945a80bc8c5c2ebdd85d85eb64b", size = 15339662, upload-time = "2026-01-26T11:49:04.058Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f0/cf4241a040ec4f571859a738007ec773b642fbc27df4cbcf34b0c32ea559/duckdb-1.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:47dd4162da6a2be59a0aef640eb08d6360df1cf83c317dcc127836daaf3b7f7c", size = 13670044, upload-time = "2026-01-26T11:49:06.627Z" }, + { url = "https://files.pythonhosted.org/packages/11/64/de2bb4ec1e35ec9ebf6090a95b930fc56934a0ad6f34a24c5972a14a77ef/duckdb-1.4.4-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6cb357cfa3403910e79e2eb46c8e445bb1ee2fd62e9e9588c6b999df4256abc1", size = 18409951, upload-time = "2026-01-26T11:49:09.808Z" }, + { url = "https://files.pythonhosted.org/packages/79/a2/ac0f5ee16df890d141304bcd48733516b7202c0de34cd3555634d6eb4551/duckdb-1.4.4-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c25d5b0febda02b7944e94fdae95aecf952797afc8cb920f677b46a7c251955", size = 20411739, upload-time = "2026-01-26T11:49:12.652Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/a2/9a3402edeedaecf72de05fe9ff7f0303d701b8dfc136aea4a4be1a5f7eee/duckdb-1.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:6703dd1bb650025b3771552333d305d62ddd7ff182de121483d4e042ea6e2e00", size = 12256972, upload-time = "2026-01-26T11:49:15.468Z" }, + { url = "https://files.pythonhosted.org/packages/f6/e6/052ea6dcdf35b259fd182eff3efd8d75a071de4010c9807556098df137b9/duckdb-1.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:bf138201f56e5d6fc276a25138341b3523e2f84733613fc43f02c54465619a95", size = 13006696, upload-time = "2026-01-26T11:49:18.054Z" }, { url = "https://files.pythonhosted.org/packages/58/33/beadaa69f8458afe466126f2c5ee48c4759cc9d5d784f8703d44e0b52c3c/duckdb-1.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ddcfd9c6ff234da603a1edd5fd8ae6107f4d042f74951b65f91bc5e2643856b3", size = 28896535, upload-time = "2026-01-26T11:49:21.232Z" }, { url = "https://files.pythonhosted.org/packages/76/66/82413f386df10467affc87f65bac095b7c88dbd9c767584164d5f4dc4cb8/duckdb-1.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6792ca647216bd5c4ff16396e4591cfa9b4a72e5ad7cdd312cec6d67e8431a7c", size = 15349716, upload-time = "2026-01-26T11:49:23.989Z" }, { url = "https://files.pythonhosted.org/packages/5d/8c/c13d396fd4e9bf970916dc5b4fea410c1b10fe531069aea65f1dcf849a71/duckdb-1.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1f8d55843cc940e36261689054f7dfb6ce35b1f5b0953b0d355b6adb654b0d52", size = 13672403, upload-time = "2026-01-26T11:49:26.741Z" }, @@ -1754,6 +1951,21 @@ version = "0.8.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/69/e7/f89d54fb04104114dd0552836dc2b47914f416cc0e200b409dd04a33de5e/fastar-0.8.0.tar.gz", hash = "sha256:f4d4d68dbf1c4c2808f0e730fac5843493fc849f70fe3ad3af60dfbaf68b9a12", size = 68524, upload-time = "2025-11-26T02:36:00.72Z" } wheels = [ + { url = 
"https://files.pythonhosted.org/packages/cd/15/1c764530b81b266f6d27d78d49b6bef22a73b3300cd83a280bfd244908c5/fastar-0.8.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cd9c0d3ebf7a0a6f642f771cf41b79f7c98d40a3072a8abe1174fbd9bd615bd3", size = 708427, upload-time = "2025-11-26T02:34:36.502Z" }, + { url = "https://files.pythonhosted.org/packages/41/fc/75d42c008516543219e4293e4d8ac55da57a5c63147484f10468bd1bc24e/fastar-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2875a077340fe4f8099bd3ed8fa90d9595e1ac3cd62ae19ab690d5bf550eeb35", size = 631740, upload-time = "2025-11-26T02:34:20.718Z" }, + { url = "https://files.pythonhosted.org/packages/50/8d/9632984f7824ed2210157dcebd8e9821ef6d4f2b28510d0516db6625ff9b/fastar-0.8.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a999263d9f87184bf2801833b2ecf105e03c0dd91cac78685673b70da564fd64", size = 871628, upload-time = "2025-11-26T02:33:49.279Z" }, + { url = "https://files.pythonhosted.org/packages/05/97/3eb6ea71b7544d45cd29cacb764ca23cde8ce0aed1a6a02251caa4c0a818/fastar-0.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c41111da56430f638cbfc498ebdcc7d30f63416e904b27b7695c29bd4889cb8", size = 765005, upload-time = "2025-11-26T02:32:45.833Z" }, + { url = "https://files.pythonhosted.org/packages/d6/45/3eb0ee945a0b5d5f9df7e7c25c037ce7fa441cd0b4d44f76d286e2f4396a/fastar-0.8.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3719541a12bb09ab1eae91d2c987a9b2b7d7149c52e7109ba6e15b74aabc49b1", size = 765587, upload-time = "2025-11-26T02:33:01.174Z" }, + { url = "https://files.pythonhosted.org/packages/51/bb/7defd6ec0d9570b1987d8ebde52d07d97f3f26e10b592fb3e12738eba39a/fastar-0.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a9b0fff8079b18acdface7ef1b7f522fd9a589f65ca4a1a0dd7c92a0886c2a2", size = 931150, upload-time = "2025-11-26T02:33:17.374Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/54/62e51e684dab347c61878afbf09e177029c1a91eb1e39ef244e6b3ef9efa/fastar-0.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac073576c1931959191cb20df38bab21dd152f66c940aa3ca8b22e39f753b2f3", size = 821354, upload-time = "2025-11-26T02:33:32.083Z" }, + { url = "https://files.pythonhosted.org/packages/53/a8/12708ea4d21e3cf9f485b2a67d44ce84d949a6eddcc9aa5b3d324585ab43/fastar-0.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:003b59a7c3e405b6a7bff8fab17d31e0ccbc7f06730a8f8ca1694eeea75f3c76", size = 821626, upload-time = "2025-11-26T02:34:05.685Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c4/1b4d3347c7a759853f963410bf6baf42fe014d587c50c39c8e145f4bf1a0/fastar-0.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a7b96748425efd9fc155cd920d65088a1b0d754421962418ea73413d02ff515a", size = 986187, upload-time = "2025-11-26T02:34:52.047Z" }, + { url = "https://files.pythonhosted.org/packages/dc/59/2dbe0dc2570764475e60030403738faa261a9d3bff16b08629c378ab939a/fastar-0.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:90957a30e64418b02df5b4d525bea50403d98a4b1f29143ce5914ddfa7e54ee4", size = 1041536, upload-time = "2025-11-26T02:35:08.926Z" }, + { url = "https://files.pythonhosted.org/packages/d9/0f/639b295669c7ca6fbc2b4be2a7832aaeac1a5e06923f15a8a6d6daecbc7d/fastar-0.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f6e784a8015623fbb7ccca1af372fd82cb511b408ddd2348dc929fc6e415df73", size = 1047149, upload-time = "2025-11-26T02:35:26.597Z" }, + { url = "https://files.pythonhosted.org/packages/cb/e7/23e3a19e06d261d1894f98eca9458f98c090c505a0c712dafc0ff1fc2965/fastar-0.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a03eaf287bbc93064688a1220580ce261e7557c8898f687f4d0b281c85b28d3c", size = 994992, upload-time = "2025-11-26T02:35:44.009Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/7a/3ea4726bae3ac9358d02107ae48f3e10ee186dbed554af79e00b7b498c44/fastar-0.8.0-cp311-cp311-win32.whl", hash = "sha256:661a47ed90762f419406c47e802f46af63a08254ba96abd1c8191e4ce967b665", size = 456449, upload-time = "2025-11-26T02:36:25.291Z" }, + { url = "https://files.pythonhosted.org/packages/cb/3c/0142bee993c431ee91cf5535e6e4b079ad491f620c215fcd79b7e5ffeb2b/fastar-0.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:b48abd6056fef7bc3d414aafb453c5b07fdf06d2df5a2841d650288a3aa1e9d3", size = 490863, upload-time = "2025-11-26T02:36:11.114Z" }, + { url = "https://files.pythonhosted.org/packages/3b/18/d119944f6bdbf6e722e204e36db86390ea45684a1bf6be6e3aa42abd471f/fastar-0.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:50c18788b3c6ffb85e176dcb8548bb8e54616a0519dcdbbfba66f6bbc4316933", size = 462230, upload-time = "2025-11-26T02:36:01.917Z" }, { url = "https://files.pythonhosted.org/packages/58/f1/5b2ff898abac7f1a418284aad285e3a4f68d189c572ab2db0f6c9079dd16/fastar-0.8.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f10d2adfe40f47ff228f4efaa32d409d732ded98580e03ed37c9535b5fc923d", size = 706369, upload-time = "2025-11-26T02:34:37.783Z" }, { url = "https://files.pythonhosted.org/packages/23/60/8046a386dca39154f80c927cbbeeb4b1c1267a3271bffe61552eb9995757/fastar-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b930da9d598e3bc69513d131f397e6d6be4643926ef3de5d33d1e826631eb036", size = 629097, upload-time = "2025-11-26T02:34:21.888Z" }, { url = "https://files.pythonhosted.org/packages/22/7e/1ae005addc789924a9268da2394d3bb5c6f96836f7e37b7e3d23c2362675/fastar-0.8.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9d210da2de733ca801de83e931012349d209f38b92d9630ccaa94bd445bdc9b8", size = 868938, upload-time = "2025-11-26T02:33:51.119Z" }, @@ -1814,6 +2026,18 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/c9/e2/dfa19a4b260b8ab3581b7484dcb80c09b25324f4daa6b6ae1c7640d1607a/fastar-0.8.0-cp314-cp314t-win32.whl", hash = "sha256:187f61dc739afe45ac8e47ed7fd1adc45d52eac110cf27d579155720507d6fbe", size = 455767, upload-time = "2025-11-26T02:36:34.758Z" }, { url = "https://files.pythonhosted.org/packages/51/47/df65c72afc1297797b255f90c4778b5d6f1f0f80282a134d5ab610310ed9/fastar-0.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:40e9d763cf8bf85ce2fa256e010aa795c0fe3d3bd1326d5c3084e6ce7857127e", size = 489971, upload-time = "2025-11-26T02:36:22.081Z" }, { url = "https://files.pythonhosted.org/packages/85/11/0aa8455af26f0ae89e42be67f3a874255ee5d7f0f026fc86e8d56f76b428/fastar-0.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:e59673307b6a08210987059a2bdea2614fe26e3335d0e5d1a3d95f49a05b1418", size = 460467, upload-time = "2025-11-26T02:36:07.978Z" }, + { url = "https://files.pythonhosted.org/packages/98/6e/6c46aa7f8c8734e7f96ee5141acd3877667ce66f34eea10703aa7571d191/fastar-0.8.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:998e3fa4b555b63eb134e6758437ed739ad1652fdd2a61dfe1dacbfddc35fe66", size = 710662, upload-time = "2025-11-26T02:34:47.593Z" }, + { url = "https://files.pythonhosted.org/packages/70/27/fd622442f2fbd4ff5459677987481ef1c60e077cb4e63a2ed4d8dce6f869/fastar-0.8.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:5f83e60d845091f3a12bc37f412774264d161576eaf810ed8b43567eb934b7e5", size = 634049, upload-time = "2025-11-26T02:34:32.365Z" }, + { url = "https://files.pythonhosted.org/packages/8f/ee/aa4d08aea25b5419a7277132e738ab1cd775f26aebddce11413b07e2fdff/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:299672e1c74d8b73c61684fac9159cfc063d35f4b165996a88facb0e26862cb5", size = 872055, upload-time = "2025-11-26T02:34:01.377Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/9a/2bf2f77aade575e67997e0c759fd55cb1c66b7a5b437b1cd0e97d8b241bc/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3d3a27066b84d015deab5faee78565509bb33b137896443e4144cb1be1a5f90", size = 766787, upload-time = "2025-11-26T02:32:57.161Z" }, + { url = "https://files.pythonhosted.org/packages/0b/90/23a3f6c252f11b10c70f854bce09abc61f71b5a0e6a4b0eac2bcb9a2c583/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef0bcf4385bbdd3c1acecce2d9ea7dab7cc9b8ee0581bbccb7ab11908a7ce288", size = 766861, upload-time = "2025-11-26T02:33:12.824Z" }, + { url = "https://files.pythonhosted.org/packages/76/bb/beeb9078380acd4484db5c957d066171695d9340e3526398eb230127b0c2/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f10ef62b6eda6cb6fd9ba8e1fe08a07d7b2bdcc8eaa00eb91566143b92ed7eee", size = 932667, upload-time = "2025-11-26T02:33:28.405Z" }, + { url = "https://files.pythonhosted.org/packages/f4/6d/b034cc637bd0ee638d5a85d08e941b0b8ffd44cf391fb751ba98233734f7/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c4f6c82a8ee98c17aa48585ee73b51c89c1b010e5c951af83e07c3436180e3fc", size = 822712, upload-time = "2025-11-26T02:33:44.27Z" }, + { url = "https://files.pythonhosted.org/packages/e2/2b/7d183c63f59227c4689792042d6647f2586a5e7273b55e81745063088d81/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6129067fcb86276635b5857010f4e9b9c7d5d15dd571bb03c6c1ed73c40fd92", size = 822659, upload-time = "2025-11-26T02:34:16.815Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f9/716e0cd9de2427fdf766bc68176f76226cd01fffef3a56c5046fa863f5f0/fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4cc9e77019e489f1ddac446b6a5b9dfb5c3d9abd142652c22a1d9415dbcc0e47", size = 987412, upload-time = 
"2025-11-26T02:35:04.259Z" }, + { url = "https://files.pythonhosted.org/packages/a4/b9/9a8c3fd59958c1c8027bc075af11722cdc62c4968bb277e841d131232289/fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:382bfe82c026086487cb17fee12f4c1e2b4e67ce230f2e04487d3e7ddfd69031", size = 1042911, upload-time = "2025-11-26T02:35:21.857Z" }, + { url = "https://files.pythonhosted.org/packages/e2/2f/c3f30963b47022134b8a231c12845f4d7cfba520f59bbc1a82468aea77c7/fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:908d2b9a1ff3d549cc304b32f95706a536da8f0bcb0bc0f9e4c1cce39b80e218", size = 1047464, upload-time = "2025-11-26T02:35:39.376Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8a/218ab6d9a2bab3b07718e6cd8405529600edc1e9c266320e8524c8f63251/fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1aa7dbde2d2d73eb5b6203d0f74875cb66350f0f1b4325b4839fc8fbbf5d074e", size = 997309, upload-time = "2025-11-26T02:35:57.722Z" }, ] [[package]] @@ -1831,6 +2055,17 @@ version = "0.14.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/c3/7d/d9daedf0f2ebcacd20d599928f8913e9d2aea1d56d2d355a93bfa2b611d7/fastuuid-0.14.0.tar.gz", hash = "sha256:178947fc2f995b38497a74172adee64fdeb8b7ec18f2a5934d037641ba265d26", size = 18232, upload-time = "2025-10-19T22:19:22.402Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/98/f3/12481bda4e5b6d3e698fbf525df4443cc7dce746f246b86b6fcb2fba1844/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:73946cb950c8caf65127d4e9a325e2b6be0442a224fd51ba3b6ac44e1912ce34", size = 516386, upload-time = "2025-10-19T22:42:40.176Z" }, + { url = "https://files.pythonhosted.org/packages/59/19/2fc58a1446e4d72b655648eb0879b04e88ed6fa70d474efcf550f640f6ec/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:12ac85024637586a5b69645e7ed986f7535106ed3013640a393a03e461740cb7", 
size = 264569, upload-time = "2025-10-19T22:25:50.977Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/3c74756e5b02c40cfcc8b1d8b5bac4edbd532b55917a6bcc9113550e99d1/fastuuid-0.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:05a8dde1f395e0c9b4be515b7a521403d1e8349443e7641761af07c7ad1624b1", size = 254366, upload-time = "2025-10-19T22:29:49.166Z" }, + { url = "https://files.pythonhosted.org/packages/52/96/d761da3fccfa84f0f353ce6e3eb8b7f76b3aa21fd25e1b00a19f9c80a063/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09378a05020e3e4883dfdab438926f31fea15fd17604908f3d39cbeb22a0b4dc", size = 278978, upload-time = "2025-10-19T22:35:41.306Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c2/f84c90167cc7765cb82b3ff7808057608b21c14a38531845d933a4637307/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbb0c4b15d66b435d2538f3827f05e44e2baafcc003dd7d8472dc67807ab8fd8", size = 279692, upload-time = "2025-10-19T22:25:36.997Z" }, + { url = "https://files.pythonhosted.org/packages/af/7b/4bacd03897b88c12348e7bd77943bac32ccf80ff98100598fcff74f75f2e/fastuuid-0.14.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd5a7f648d4365b41dbf0e38fe8da4884e57bed4e77c83598e076ac0c93995e7", size = 303384, upload-time = "2025-10-19T22:29:46.578Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a2/584f2c29641df8bd810d00c1f21d408c12e9ad0c0dafdb8b7b29e5ddf787/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c0a94245afae4d7af8c43b3159d5e3934c53f47140be0be624b96acd672ceb73", size = 460921, upload-time = "2025-10-19T22:36:42.006Z" }, + { url = "https://files.pythonhosted.org/packages/24/68/c6b77443bb7764c760e211002c8638c0c7cce11cb584927e723215ba1398/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b29e23c97e77c3a9514d70ce343571e469098ac7f5a269320a0f0b3e193ab36", size = 480575, upload-time = "2025-10-19T22:28:18.975Z" }, 
+ { url = "https://files.pythonhosted.org/packages/5a/87/93f553111b33f9bb83145be12868c3c475bf8ea87c107063d01377cc0e8e/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1e690d48f923c253f28151b3a6b4e335f2b06bf669c68a02665bc150b7839e94", size = 452317, upload-time = "2025-10-19T22:25:32.75Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8c/a04d486ca55b5abb7eaa65b39df8d891b7b1635b22db2163734dc273579a/fastuuid-0.14.0-cp311-cp311-win32.whl", hash = "sha256:a6f46790d59ab38c6aa0e35c681c0484b50dc0acf9e2679c005d61e019313c24", size = 154804, upload-time = "2025-10-19T22:24:15.615Z" }, + { url = "https://files.pythonhosted.org/packages/9c/b2/2d40bf00820de94b9280366a122cbaa60090c8cf59e89ac3938cf5d75895/fastuuid-0.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:e150eab56c95dc9e3fefc234a0eedb342fac433dacc273cd4d150a5b0871e1fa", size = 156099, upload-time = "2025-10-19T22:24:31.646Z" }, { url = "https://files.pythonhosted.org/packages/02/a2/e78fcc5df65467f0d207661b7ef86c5b7ac62eea337c0c0fcedbeee6fb13/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77e94728324b63660ebf8adb27055e92d2e4611645bf12ed9d88d30486471d0a", size = 510164, upload-time = "2025-10-19T22:31:45.635Z" }, { url = "https://files.pythonhosted.org/packages/2b/b3/c846f933f22f581f558ee63f81f29fa924acd971ce903dab1a9b6701816e/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:caa1f14d2102cb8d353096bc6ef6c13b2c81f347e6ab9d6fbd48b9dea41c153d", size = 261837, upload-time = "2025-10-19T22:38:38.53Z" }, { url = "https://files.pythonhosted.org/packages/54/ea/682551030f8c4fa9a769d9825570ad28c0c71e30cf34020b85c1f7ee7382/fastuuid-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d23ef06f9e67163be38cece704170486715b177f6baae338110983f99a72c070", size = 251370, upload-time = "2025-10-19T22:40:26.07Z" }, @@ -1958,6 +2193,14 @@ version = "4.61.1" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/ec/ca/cf17b88a8df95691275a3d77dc0a5ad9907f328ae53acbe6795da1b2f5ed/fonttools-4.61.1.tar.gz", hash = "sha256:6675329885c44657f826ef01d9e4fb33b9158e9d93c537d84ad8399539bc6f69", size = 3565756, upload-time = "2025-12-12T17:31:24.246Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/69/12/bf9f4eaa2fad039356cc627587e30ed008c03f1cebd3034376b5ee8d1d44/fonttools-4.61.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c6604b735bb12fef8e0efd5578c9fb5d3d8532d5001ea13a19cddf295673ee09", size = 2852213, upload-time = "2025-12-12T17:29:46.675Z" }, + { url = "https://files.pythonhosted.org/packages/ac/49/4138d1acb6261499bedde1c07f8c2605d1d8f9d77a151e5507fd3ef084b6/fonttools-4.61.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5ce02f38a754f207f2f06557523cd39a06438ba3aafc0639c477ac409fc64e37", size = 2401689, upload-time = "2025-12-12T17:29:48.769Z" }, + { url = "https://files.pythonhosted.org/packages/e5/fe/e6ce0fe20a40e03aef906af60aa87668696f9e4802fa283627d0b5ed777f/fonttools-4.61.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77efb033d8d7ff233385f30c62c7c79271c8885d5c9657d967ede124671bbdfb", size = 5058809, upload-time = "2025-12-12T17:29:51.701Z" }, + { url = "https://files.pythonhosted.org/packages/79/61/1ca198af22f7dd22c17ab86e9024ed3c06299cfdb08170640e9996d501a0/fonttools-4.61.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:75c1a6dfac6abd407634420c93864a1e274ebc1c7531346d9254c0d8f6ca00f9", size = 5036039, upload-time = "2025-12-12T17:29:53.659Z" }, + { url = "https://files.pythonhosted.org/packages/99/cc/fa1801e408586b5fce4da9f5455af8d770f4fc57391cd5da7256bb364d38/fonttools-4.61.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0de30bfe7745c0d1ffa2b0b7048fb7123ad0d71107e10ee090fa0b16b9452e87", size = 5034714, upload-time = "2025-12-12T17:29:55.592Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/aa/b7aeafe65adb1b0a925f8f25725e09f078c635bc22754f3fecb7456955b0/fonttools-4.61.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:58b0ee0ab5b1fc9921eccfe11d1435added19d6494dde14e323f25ad2bc30c56", size = 5158648, upload-time = "2025-12-12T17:29:57.861Z" }, + { url = "https://files.pythonhosted.org/packages/99/f9/08ea7a38663328881384c6e7777bbefc46fd7d282adfd87a7d2b84ec9d50/fonttools-4.61.1-cp311-cp311-win32.whl", hash = "sha256:f79b168428351d11e10c5aeb61a74e1851ec221081299f4cf56036a95431c43a", size = 2280681, upload-time = "2025-12-12T17:29:59.943Z" }, + { url = "https://files.pythonhosted.org/packages/07/ad/37dd1ae5fa6e01612a1fbb954f0927681f282925a86e86198ccd7b15d515/fonttools-4.61.1-cp311-cp311-win_amd64.whl", hash = "sha256:fe2efccb324948a11dd09d22136fe2ac8a97d6c1347cf0b58a911dcd529f66b7", size = 2331951, upload-time = "2025-12-12T17:30:02.254Z" }, { url = "https://files.pythonhosted.org/packages/6f/16/7decaa24a1bd3a70c607b2e29f0adc6159f36a7e40eaba59846414765fd4/fonttools-4.61.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f3cb4a569029b9f291f88aafc927dd53683757e640081ca8c412781ea144565e", size = 2851593, upload-time = "2025-12-12T17:30:04.225Z" }, { url = "https://files.pythonhosted.org/packages/94/98/3c4cb97c64713a8cf499b3245c3bf9a2b8fd16a3e375feff2aed78f96259/fonttools-4.61.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41a7170d042e8c0024703ed13b71893519a1a6d6e18e933e3ec7507a2c26a4b2", size = 2400231, upload-time = "2025-12-12T17:30:06.47Z" }, { url = "https://files.pythonhosted.org/packages/b7/37/82dbef0f6342eb01f54bca073ac1498433d6ce71e50c3c3282b655733b31/fonttools-4.61.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10d88e55330e092940584774ee5e8a6971b01fc2f4d3466a1d6c158230880796", size = 4954103, upload-time = "2025-12-12T17:30:08.432Z" }, @@ -1999,6 +2242,22 @@ version = "1.8.0" source = { registry = 
"https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = 
"2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, { url = 
"https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, @@ -2229,6 +2488,11 @@ version = "1.8.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/03/41/4b9c02f99e4c5fb477122cd5437403b552873f014616ac1d19ac8221a58d/google_crc32c-1.8.0.tar.gz", hash = "sha256:a428e25fb7691024de47fecfbff7ff957214da51eddded0da0ae0e0f03a2cf79", size = 14192, upload-time = "2025-12-16T00:35:25.142Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/ef/21ccfaab3d5078d41efe8612e0ed0bfc9ce22475de074162a91a25f7980d/google_crc32c-1.8.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:014a7e68d623e9a4222d663931febc3033c5c7c9730785727de2a81f87d5bab8", size = 31298, upload-time = "2025-12-16T00:20:32.241Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b8/f8413d3f4b676136e965e764ceedec904fe38ae8de0cdc52a12d8eb1096e/google_crc32c-1.8.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:86cfc00fe45a0ac7359e5214a1704e51a99e757d0272554874f419f79838c5f7", size = 30872, upload-time = "2025-12-16T00:33:58.785Z" }, + { url = "https://files.pythonhosted.org/packages/f6/fd/33aa4ec62b290477181c55bb1c9302c9698c58c0ce9a6ab4874abc8b0d60/google_crc32c-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:19b40d637a54cb71e0829179f6cb41835f0fbd9e8eb60552152a8b52c36cbe15", size = 33243, upload-time = 
"2025-12-16T00:40:21.46Z" }, + { url = "https://files.pythonhosted.org/packages/71/03/4820b3bd99c9653d1a5210cb32f9ba4da9681619b4d35b6a052432df4773/google_crc32c-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:17446feb05abddc187e5441a45971b8394ea4c1b6efd88ab0af393fd9e0a156a", size = 33608, upload-time = "2025-12-16T00:40:22.204Z" }, + { url = "https://files.pythonhosted.org/packages/7c/43/acf61476a11437bf9733fb2f70599b1ced11ec7ed9ea760fdd9a77d0c619/google_crc32c-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:71734788a88f551fbd6a97be9668a0020698e07b2bf5b3aa26a36c10cdfb27b2", size = 34439, upload-time = "2025-12-16T00:35:20.458Z" }, { url = "https://files.pythonhosted.org/packages/e9/5f/7307325b1198b59324c0fa9807cafb551afb65e831699f2ce211ad5c8240/google_crc32c-1.8.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:4b8286b659c1335172e39563ab0a768b8015e88e08329fa5321f774275fc3113", size = 31300, upload-time = "2025-12-16T00:21:56.723Z" }, { url = "https://files.pythonhosted.org/packages/21/8e/58c0d5d86e2220e6a37befe7e6a94dd2f6006044b1a33edf1ff6d9f7e319/google_crc32c-1.8.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:2a3dc3318507de089c5384cc74d54318401410f82aa65b2d9cdde9d297aca7cb", size = 30867, upload-time = "2025-12-16T00:38:31.302Z" }, { url = "https://files.pythonhosted.org/packages/ce/a9/a780cc66f86335a6019f557a8aaca8fbb970728f0efd2430d15ff1beae0e/google_crc32c-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14f87e04d613dfa218d6135e81b78272c3b904e2a7053b841481b38a7d901411", size = 33364, upload-time = "2025-12-16T00:40:22.96Z" }, @@ -2244,6 +2508,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/56/15/c25671c7aad70f8179d858c55a6ae8404902abe0cdcf32a29d581792b491/google_crc32c-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:b0d1a7afc6e8e4635564ba8aa5c0548e3173e41b6384d7711a9123165f582de2", size = 33381, upload-time = "2025-12-16T00:40:26.268Z" }, { url = "https://files.pythonhosted.org/packages/42/fa/f50f51260d7b0ef5d4898af122d8a7ec5a84e2984f676f746445f783705f/google_crc32c-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b3f68782f3cbd1bce027e48768293072813469af6a61a86f6bb4977a4380f21", size = 33734, upload-time = "2025-12-16T00:40:27.028Z" }, { url = "https://files.pythonhosted.org/packages/08/a5/7b059810934a09fb3ccb657e0843813c1fee1183d3bc2c8041800374aa2c/google_crc32c-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:d511b3153e7011a27ab6ee6bb3a5404a55b994dc1a7322c0b87b29606d9790e2", size = 34878, upload-time = "2025-12-16T00:35:23.142Z" }, + { url = "https://files.pythonhosted.org/packages/52/c5/c171e4d8c44fec1422d801a6d2e5d7ddabd733eeda505c79730ee9607f07/google_crc32c-1.8.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:87fa445064e7db928226b2e6f0d5304ab4cd0339e664a4e9a25029f384d9bb93", size = 28615, upload-time = "2025-12-16T00:40:29.298Z" }, + { url = "https://files.pythonhosted.org/packages/9c/97/7d75fe37a7a6ed171a2cf17117177e7aab7e6e0d115858741b41e9dd4254/google_crc32c-1.8.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f639065ea2042d5c034bf258a9f085eaa7af0cd250667c0635a3118e8f92c69c", size = 28800, upload-time = "2025-12-16T00:40:30.322Z" }, ] [[package]] @@ -2314,6 +2580,15 @@ version = "3.3.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/8a/99/1cd3411c56a410994669062bd73dd58270c00cc074cac15f385a1fd91f8a/greenlet-3.3.1.tar.gz", hash = "sha256:41848f3230b58c08bb43dee542e74a2a2e34d3c59dc3076cec9151aeeedcae98", size = 184690, upload-time = "2026-01-23T15:31:02.076Z" } wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ec/e8/2e1462c8fdbe0f210feb5ac7ad2d9029af8be3bf45bd9fa39765f821642f/greenlet-3.3.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:5fd23b9bc6d37b563211c6abbb1b3cab27db385a4449af5c32e932f93017080c", size = 274974, upload-time = "2026-01-23T15:31:02.891Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a8/530a401419a6b302af59f67aaf0b9ba1015855ea7e56c036b5928793c5bd/greenlet-3.3.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09f51496a0bfbaa9d74d36a52d2580d1ef5ed4fdfcff0a73730abfbbbe1403dd", size = 577175, upload-time = "2026-01-23T16:00:56.213Z" }, + { url = "https://files.pythonhosted.org/packages/8e/89/7e812bb9c05e1aaef9b597ac1d0962b9021d2c6269354966451e885c4e6b/greenlet-3.3.1-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb0feb07fe6e6a74615ee62a880007d976cf739b6669cce95daa7373d4fc69c5", size = 590401, upload-time = "2026-01-23T16:05:26.365Z" }, + { url = "https://files.pythonhosted.org/packages/70/ae/e2d5f0e59b94a2269b68a629173263fa40b63da32f5c231307c349315871/greenlet-3.3.1-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:67ea3fc73c8cd92f42467a72b75e8f05ed51a0e9b1d15398c913416f2dafd49f", size = 601161, upload-time = "2026-01-23T16:15:53.456Z" }, + { url = "https://files.pythonhosted.org/packages/5c/ae/8d472e1f5ac5efe55c563f3eabb38c98a44b832602e12910750a7c025802/greenlet-3.3.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:39eda9ba259cc9801da05351eaa8576e9aa83eb9411e8f0c299e05d712a210f2", size = 590272, upload-time = "2026-01-23T15:32:49.411Z" }, + { url = "https://files.pythonhosted.org/packages/a8/51/0fde34bebfcadc833550717eade64e35ec8738e6b097d5d248274a01258b/greenlet-3.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e2e7e882f83149f0a71ac822ebf156d902e7a5d22c9045e3e0d1daf59cee2cc9", size = 1550729, upload-time = "2026-01-23T16:04:20.867Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/c9/2fb47bee83b25b119d5a35d580807bb8b92480a54b68fef009a02945629f/greenlet-3.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80aa4d79eb5564f2e0a6144fcc744b5a37c56c4a92d60920720e99210d88db0f", size = 1615552, upload-time = "2026-01-23T15:33:45.743Z" }, + { url = "https://files.pythonhosted.org/packages/1f/54/dcf9f737b96606f82f8dd05becfb8d238db0633dd7397d542a296fe9cad3/greenlet-3.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:32e4ca9777c5addcbf42ff3915d99030d8e00173a56f80001fb3875998fe410b", size = 226462, upload-time = "2026-01-23T15:36:50.422Z" }, + { url = "https://files.pythonhosted.org/packages/91/37/61e1015cf944ddd2337447d8e97fb423ac9bc21f9963fb5f206b53d65649/greenlet-3.3.1-cp311-cp311-win_arm64.whl", hash = "sha256:da19609432f353fed186cc1b85e9440db93d489f198b4bdf42ae19cc9d9ac9b4", size = 225715, upload-time = "2026-01-23T15:33:17.298Z" }, { url = "https://files.pythonhosted.org/packages/f9/c8/9d76a66421d1ae24340dfae7e79c313957f6e3195c144d2c73333b5bfe34/greenlet-3.3.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:7e806ca53acf6d15a888405880766ec84721aa4181261cd11a457dfe9a7a4975", size = 276443, upload-time = "2026-01-23T15:30:10.066Z" }, { url = "https://files.pythonhosted.org/packages/81/99/401ff34bb3c032d1f10477d199724f5e5f6fbfb59816ad1455c79c1eb8e7/greenlet-3.3.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d842c94b9155f1c9b3058036c24ffb8ff78b428414a19792b2380be9cecf4f36", size = 597359, upload-time = "2026-01-23T16:00:57.394Z" }, { url = "https://files.pythonhosted.org/packages/2b/bc/4dcc0871ed557792d304f50be0f7487a14e017952ec689effe2180a6ff35/greenlet-3.3.1-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:20fedaadd422fa02695f82093f9a98bad3dab5fcda793c658b945fcde2ab27ba", size = 607805, upload-time = "2026-01-23T16:05:28.068Z" }, @@ -2360,6 +2635,16 @@ dependencies = [ ] sdist = { url = 
"https://files.pythonhosted.org/packages/06/8a/3d098f35c143a89520e568e6539cc098fcd294495910e359889ce8741c84/grpcio-1.78.0.tar.gz", hash = "sha256:7382b95189546f375c174f53a5fa873cef91c4b8005faa05cc5b3beea9c4f1c5", size = 12852416, upload-time = "2026-02-06T09:57:18.093Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/86/c7/d0b780a29b0837bf4ca9580904dfb275c1fc321ded7897d620af7047ec57/grpcio-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2777b783f6c13b92bd7b716667452c329eefd646bfb3f2e9dabea2e05dbd34f6", size = 5951525, upload-time = "2026-02-06T09:55:01.989Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b1/96920bf2ee61df85a9503cb6f733fe711c0ff321a5a697d791b075673281/grpcio-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:9dca934f24c732750389ce49d638069c3892ad065df86cb465b3fa3012b70c9e", size = 11830418, upload-time = "2026-02-06T09:55:04.462Z" }, + { url = "https://files.pythonhosted.org/packages/83/0c/7c1528f098aeb75a97de2bae18c530f56959fb7ad6c882db45d9884d6edc/grpcio-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:459ab414b35f4496138d0ecd735fed26f1318af5e52cb1efbc82a09f0d5aa911", size = 6524477, upload-time = "2026-02-06T09:55:07.111Z" }, + { url = "https://files.pythonhosted.org/packages/8d/52/e7c1f3688f949058e19a011c4e0dec973da3d0ae5e033909677f967ae1f4/grpcio-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:082653eecbdf290e6e3e2c276ab2c54b9e7c299e07f4221872380312d8cf395e", size = 7198266, upload-time = "2026-02-06T09:55:10.016Z" }, + { url = "https://files.pythonhosted.org/packages/e5/61/8ac32517c1e856677282c34f2e7812d6c328fa02b8f4067ab80e77fdc9c9/grpcio-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85f93781028ec63f383f6bc90db785a016319c561cc11151fbb7b34e0d012303", size = 6730552, upload-time = "2026-02-06T09:55:12.207Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/98/b8ee0158199250220734f620b12e4a345955ac7329cfd908d0bf0fda77f0/grpcio-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f12857d24d98441af6a1d5c87442d624411db486f7ba12550b07788f74b67b04", size = 7304296, upload-time = "2026-02-06T09:55:15.044Z" }, + { url = "https://files.pythonhosted.org/packages/bd/0f/7b72762e0d8840b58032a56fdbd02b78fc645b9fa993d71abf04edbc54f4/grpcio-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5397fff416b79e4b284959642a4e95ac4b0f1ece82c9993658e0e477d40551ec", size = 8288298, upload-time = "2026-02-06T09:55:17.276Z" }, + { url = "https://files.pythonhosted.org/packages/24/ae/ae4ce56bc5bb5caa3a486d60f5f6083ac3469228faa734362487176c15c5/grpcio-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbe6e89c7ffb48518384068321621b2a69cab509f58e40e4399fdd378fa6d074", size = 7730953, upload-time = "2026-02-06T09:55:19.545Z" }, + { url = "https://files.pythonhosted.org/packages/b5/6e/8052e3a28eb6a820c372b2eb4b5e32d195c661e137d3eca94d534a4cfd8a/grpcio-1.78.0-cp311-cp311-win32.whl", hash = "sha256:6092beabe1966a3229f599d7088b38dfc8ffa1608b5b5cdda31e591e6500f856", size = 4076503, upload-time = "2026-02-06T09:55:21.521Z" }, + { url = "https://files.pythonhosted.org/packages/08/62/f22c98c5265dfad327251fa2f840b591b1df5f5e15d88b19c18c86965b27/grpcio-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:1afa62af6e23f88629f2b29ec9e52ec7c65a7176c1e0a83292b93c76ca882558", size = 4799767, upload-time = "2026-02-06T09:55:24.107Z" }, { url = "https://files.pythonhosted.org/packages/4e/f4/7384ed0178203d6074446b3c4f46c90a22ddf7ae0b3aee521627f54cfc2a/grpcio-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f9ab915a267fc47c7e88c387a3a28325b58c898e23d4995f765728f4e3dedb97", size = 5913985, upload-time = "2026-02-06T09:55:26.832Z" }, { url = 
"https://files.pythonhosted.org/packages/81/ed/be1caa25f06594463f685b3790b320f18aea49b33166f4141bfdc2bfb236/grpcio-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3f8904a8165ab21e07e58bf3e30a73f4dffc7a1e0dbc32d51c61b5360d26f43e", size = 11811853, upload-time = "2026-02-06T09:55:29.224Z" }, { url = "https://files.pythonhosted.org/packages/24/a7/f06d151afc4e64b7e3cc3e872d331d011c279aaab02831e40a81c691fb65/grpcio-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:859b13906ce098c0b493af92142ad051bf64c7870fa58a123911c88606714996", size = 6475766, upload-time = "2026-02-06T09:55:31.825Z" }, @@ -2571,6 +2856,13 @@ version = "0.7.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521, upload-time = "2025-10-10T03:54:31.002Z" }, + { url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375, upload-time = "2025-10-10T03:54:31.941Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621, upload-time = 
"2025-10-10T03:54:33.176Z" }, + { url = "https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954, upload-time = "2025-10-10T03:54:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175, upload-time = "2025-10-10T03:54:35.942Z" }, + { url = "https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310, upload-time = "2025-10-10T03:54:37.1Z" }, + { url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875, upload-time = "2025-10-10T03:54:38.421Z" }, { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" }, { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" }, { url = 
"https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" }, @@ -2692,6 +2984,17 @@ version = "3.4.0.post0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/2d/30/7ab4b9e88e7946f6beef419f74edcc541df3ea562c7882257b4eaa82417d/ijson-3.4.0.post0.tar.gz", hash = "sha256:9aa02dc70bb245670a6ca7fba737b992aeeb4895360980622f7e568dbf23e41e", size = 67216, upload-time = "2025-10-10T05:29:25.62Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/ac/3d57249d4acba66a33eaef794edb5b2a2222ca449ae08800f8abe9286645/ijson-3.4.0.post0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b473112e72c0c506da425da3278367b6680f340ecc093084693a1e819d28435", size = 88278, upload-time = "2025-10-10T05:27:55.403Z" }, + { url = "https://files.pythonhosted.org/packages/12/fb/2d068d23d1a665f500282ceb6f2473952a95fc7107d739fd629b4ab41959/ijson-3.4.0.post0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:043f9b7cf9cc744263a78175e769947733710d2412d25180df44b1086b23ebd5", size = 59898, upload-time = "2025-10-10T05:27:56.361Z" }, + { url = "https://files.pythonhosted.org/packages/26/3d/8b14589dfb0e5dbb7bcf9063e53d3617c041cf315ff3dfa60945382237ce/ijson-3.4.0.post0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b55e49045f4c8031f3673f56662fd828dc9e8d65bd3b03a9420dda0d370e64ba", size = 59945, upload-time = "2025-10-10T05:27:57.581Z" }, + { url = "https://files.pythonhosted.org/packages/77/57/086a75094397d4b7584698a540a279689e12905271af78cdfc903bf9eaf8/ijson-3.4.0.post0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:11f13b73194ea2a5a8b4a2863f25b0b4624311f10db3a75747b510c4958179b0", size = 131318, 
upload-time = "2025-10-10T05:27:58.453Z" }, + { url = "https://files.pythonhosted.org/packages/df/35/7f61e9ce4a9ff1306ec581eb851f8a660439126d92ee595c6dc8084aac97/ijson-3.4.0.post0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:659acb2843433e080c271ecedf7d19c71adde1ee5274fc7faa2fec0a793f9f1c", size = 137990, upload-time = "2025-10-10T05:27:59.328Z" }, + { url = "https://files.pythonhosted.org/packages/59/bf/590bbc3c3566adce5e2f43ba5894520cbaf19a3e7f38c1250926ba67eee4/ijson-3.4.0.post0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:deda4cfcaafa72ca3fa845350045b1d0fef9364ec9f413241bb46988afbe6ee6", size = 134416, upload-time = "2025-10-10T05:28:00.317Z" }, + { url = "https://files.pythonhosted.org/packages/24/c1/fb719049851979df71f3e039d6f1a565d349c9cb1b29c0f8775d9db141b4/ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47352563e8c594360bacee2e0753e97025f0861234722d02faace62b1b6d2b2a", size = 138034, upload-time = "2025-10-10T05:28:01.627Z" }, + { url = "https://files.pythonhosted.org/packages/10/ce/ccda891f572876aaf2c43f0b2079e31d5b476c3ae53196187eab1a788eff/ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5a48b9486242d1295abe7fd0fbb6308867da5ca3f69b55c77922a93c2b6847aa", size = 132510, upload-time = "2025-10-10T05:28:03.141Z" }, + { url = "https://files.pythonhosted.org/packages/11/b5/ca8e64ab7cf5252f358e467be767630f085b5bbcd3c04333a3a5f36c3dd3/ijson-3.4.0.post0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9c0886234d1fae15cf4581a430bdba03d79251c1ab3b07e30aa31b13ef28d01c", size = 134907, upload-time = "2025-10-10T05:28:04.438Z" }, + { url = "https://files.pythonhosted.org/packages/93/14/63a4d5dc548690f29f0c2fc9cabd5ecbb37532547439c05f5b3b9ce73021/ijson-3.4.0.post0-cp311-cp311-win32.whl", hash = "sha256:fecae19b5187d92900c73debb3a979b0b3290a53f85df1f8f3c5ba7d1e9fb9cb", size = 52006, upload-time = 
"2025-10-10T05:28:05.424Z" }, + { url = "https://files.pythonhosted.org/packages/fa/bf/932740899e572a97f9be0c6cd64ebda557eae7701ac216fc284aba21786d/ijson-3.4.0.post0-cp311-cp311-win_amd64.whl", hash = "sha256:b39dbf87071f23a23c8077eea2ae7cfeeca9ff9ffec722dfc8b5f352e4dd729c", size = 54410, upload-time = "2025-10-10T05:28:06.264Z" }, { url = "https://files.pythonhosted.org/packages/7d/fe/3b6af0025288e769dbfa30485dae1b3bd3f33f00390f3ee532cbb1c33e9b/ijson-3.4.0.post0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b607a500fca26101be47d2baf7cddb457b819ab60a75ce51ed1092a40da8b2f9", size = 87847, upload-time = "2025-10-10T05:28:07.229Z" }, { url = "https://files.pythonhosted.org/packages/6e/a5/95ee2ca82f3b1a57892452f6e5087607d56c620beb8ce625475194568698/ijson-3.4.0.post0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4827d9874a6a81625412c59f7ca979a84d01f7f6bfb3c6d4dc4c46d0382b14e0", size = 59815, upload-time = "2025-10-10T05:28:08.448Z" }, { url = "https://files.pythonhosted.org/packages/51/8d/5a704ab3c17c55c21c86423458db8610626ca99cc9086a74dfeb7ee9054c/ijson-3.4.0.post0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4d4afec780881edb2a0d2dd40b1cdbe246e630022d5192f266172a0307986a7", size = 59648, upload-time = "2025-10-10T05:28:09.307Z" }, @@ -2747,6 +3050,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/15/f3/6419d1d5795a16591233d3aa3747b084e82c0c1d7184bdad9be638174560/ijson-3.4.0.post0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b982a3597b0439ce9c8f4cfc929d86c6ed43907908be1e8463a34dc35fe5b258", size = 204825, upload-time = "2025-10-10T05:29:04.242Z" }, { url = "https://files.pythonhosted.org/packages/1f/8d/a520e6902129c55fa94428ea0a22e8547540d5e7ca30f18b39594a5feea2/ijson-3.4.0.post0-cp314-cp314t-win32.whl", hash = "sha256:4e39bfdc36b0b460ef15a06550a6a385c64c81f7ac205ccff39bd45147918912", size = 55559, upload-time = "2025-10-10T05:29:05.681Z" }, { url = 
"https://files.pythonhosted.org/packages/20/67/0ac6dd0045957ba1270b7b1860864f7d8cea4062e70b1083134c587e5768/ijson-3.4.0.post0-cp314-cp314t-win_amd64.whl", hash = "sha256:17e45262a5ddef39894013fb1548ee7094e444c8389eb1a97f86708b19bea03e", size = 58238, upload-time = "2025-10-10T05:29:06.656Z" }, + { url = "https://files.pythonhosted.org/packages/43/66/27cfcea16e85b95e33814eae2052dab187206b8820cdd90aa39d32ffb441/ijson-3.4.0.post0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:add9242f886eae844a7410b84aee2bbb8bdc83c624f227cb1fdb2d0476a96cb1", size = 57029, upload-time = "2025-10-10T05:29:19.733Z" }, + { url = "https://files.pythonhosted.org/packages/b8/1b/df3f1561c6629241fb2f8bd7ea1da14e3c2dd16fe9d7cbc97120870ed09c/ijson-3.4.0.post0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:69718ed41710dfcaa7564b0af42abc05875d4f7aaa24627c808867ef32634bc7", size = 56523, upload-time = "2025-10-10T05:29:20.641Z" }, + { url = "https://files.pythonhosted.org/packages/39/0a/6c6a3221ddecf62b696fde0e864415237e05b9a36ab6685a606b8fb3b5a2/ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:636b6eca96c6c43c04629c6b37fad0181662eaacf9877c71c698485637f752f9", size = 70546, upload-time = "2025-10-10T05:29:21.526Z" }, + { url = "https://files.pythonhosted.org/packages/42/cb/edf69755e86a3a9f8b418efd60239cb308af46c7c8e12f869423f51c9851/ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb5e73028f6e63d27b3d286069fe350ed80a4ccc493b022b590fea4bb086710d", size = 70532, upload-time = "2025-10-10T05:29:22.718Z" }, + { url = "https://files.pythonhosted.org/packages/96/7e/c8730ea39b8712622cd5a1bdff676098208400e37bb92052ba52f93e2aa1/ijson-3.4.0.post0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:461acf4320219459dabe5ed90a45cb86c9ba8cc6d6db9dad0d9427d42f57794c", size = 67927, upload-time = 
"2025-10-10T05:29:23.596Z" }, + { url = "https://files.pythonhosted.org/packages/ec/f2/53b6e9bdd2a91202066764eaa74b572ba4dede0fe47a5a26f4de34b7541a/ijson-3.4.0.post0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a0fedf09c0f6ffa2a99e7e7fd9c5f3caf74e655c1ee015a0797383e99382ebc3", size = 54657, upload-time = "2025-10-10T05:29:24.482Z" }, ] [[package]] @@ -2852,6 +3161,7 @@ dependencies = [ { name = "pygments" }, { name = "stack-data" }, { name = "traitlets" }, + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a6/60/2111715ea11f39b1535bed6024b7dec7918b71e5e5d30855a5b503056b50/ipython-9.10.0.tar.gz", hash = "sha256:cd9e656be97618a0676d058134cd44e6dc7012c0e5cb36a9ce96a8c904adaf77", size = 4426526, upload-time = "2026-02-02T10:00:33.594Z" } wheels = [ @@ -2920,6 +3230,9 @@ wheels = [ name = "jaraco-context" version = "6.1.0" source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-tarfile", marker = "python_full_version < '3.12'" }, +] sdist = { url = "https://files.pythonhosted.org/packages/cb/9c/a788f5bb29c61e456b8ee52ce76dbdd32fd72cd73dd67bc95f42c7a8d13c/jaraco_context-6.1.0.tar.gz", hash = "sha256:129a341b0a85a7db7879e22acd66902fda67882db771754574338898b2d5d86f", size = 15850, upload-time = "2026-01-13T02:53:53.847Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/8d/48/aa685dbf1024c7bd82bede569e3a85f82c32fd3d79ba5fea578f0159571a/jaraco_context-6.1.0-py3-none-any.whl", hash = "sha256:a43b5ed85815223d0d3cfdb6d7ca0d2bc8946f28f30b6f3216bda070f68badda", size = 7065, upload-time = "2026-01-13T02:53:53.031Z" }, @@ -2976,6 +3289,19 @@ version = "0.13.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/0d/5e/4ec91646aee381d01cdb9974e30882c9cd3b8c5d1079d6b5ff4af522439a/jiter-0.13.0.tar.gz", hash = "sha256:f2839f9c2c7e2dffc1bc5929a510e14ce0a946be9365fd1219e7ef342dae14f4", size = 164847, 
upload-time = "2026-02-02T12:37:56.441Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/71/29/499f8c9eaa8a16751b1c0e45e6f5f1761d180da873d417996cc7bddc8eef/jiter-0.13.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ea026e70a9a28ebbdddcbcf0f1323128a8db66898a06eaad3a4e62d2f554d096", size = 311157, upload-time = "2026-02-02T12:35:37.758Z" }, + { url = "https://files.pythonhosted.org/packages/50/f6/566364c777d2ab450b92100bea11333c64c38d32caf8dc378b48e5b20c46/jiter-0.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66aa3e663840152d18cc8ff1e4faad3dd181373491b9cfdc6004b92198d67911", size = 319729, upload-time = "2026-02-02T12:35:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/73/dd/560f13ec5e4f116d8ad2658781646cca91b617ae3b8758d4a5076b278f70/jiter-0.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3524798e70655ff19aec58c7d05adb1f074fecff62da857ea9be2b908b6d701", size = 354766, upload-time = "2026-02-02T12:35:40.662Z" }, + { url = "https://files.pythonhosted.org/packages/7c/0d/061faffcfe94608cbc28a0d42a77a74222bdf5055ccdbe5fd2292b94f510/jiter-0.13.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec7e287d7fbd02cb6e22f9a00dd9c9cd504c40a61f2c61e7e1f9690a82726b4c", size = 362587, upload-time = "2026-02-02T12:35:42.025Z" }, + { url = "https://files.pythonhosted.org/packages/92/c9/c66a7864982fd38a9773ec6e932e0398d1262677b8c60faecd02ffb67bf3/jiter-0.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47455245307e4debf2ce6c6e65a717550a0244231240dcf3b8f7d64e4c2f22f4", size = 487537, upload-time = "2026-02-02T12:35:43.459Z" }, + { url = "https://files.pythonhosted.org/packages/6c/86/84eb4352cd3668f16d1a88929b5888a3fe0418ea8c1dfc2ad4e7bf6e069a/jiter-0.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ee9da221dca6e0429c2704c1b3655fe7b025204a71d4d9b73390c759d776d165", size = 373717, upload-time = 
"2026-02-02T12:35:44.928Z" }, + { url = "https://files.pythonhosted.org/packages/6e/09/9fe4c159358176f82d4390407a03f506a8659ed13ca3ac93a843402acecf/jiter-0.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24ab43126d5e05f3d53a36a8e11eb2f23304c6c1117844aaaf9a0aa5e40b5018", size = 362683, upload-time = "2026-02-02T12:35:46.636Z" }, + { url = "https://files.pythonhosted.org/packages/c9/5e/85f3ab9caca0c1d0897937d378b4a515cae9e119730563572361ea0c48ae/jiter-0.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9da38b4fedde4fb528c740c2564628fbab737166a0e73d6d46cb4bb5463ff411", size = 392345, upload-time = "2026-02-02T12:35:48.088Z" }, + { url = "https://files.pythonhosted.org/packages/12/4c/05b8629ad546191939e6f0c2f17e29f542a398f4a52fb987bc70b6d1eb8b/jiter-0.13.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0b34c519e17658ed88d5047999a93547f8889f3c1824120c26ad6be5f27b6cf5", size = 517775, upload-time = "2026-02-02T12:35:49.482Z" }, + { url = "https://files.pythonhosted.org/packages/4d/88/367ea2eb6bc582c7052e4baf5ddf57ebe5ab924a88e0e09830dfb585c02d/jiter-0.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2a6394e6af690d462310a86b53c47ad75ac8c21dc79f120714ea449979cb1d3", size = 551325, upload-time = "2026-02-02T12:35:51.104Z" }, + { url = "https://files.pythonhosted.org/packages/f3/12/fa377ffb94a2f28c41afaed093e0d70cfe512035d5ecb0cad0ae4792d35e/jiter-0.13.0-cp311-cp311-win32.whl", hash = "sha256:0f0c065695f616a27c920a56ad0d4fc46415ef8b806bf8fc1cacf25002bd24e1", size = 204709, upload-time = "2026-02-02T12:35:52.467Z" }, + { url = "https://files.pythonhosted.org/packages/cb/16/8e8203ce92f844dfcd3d9d6a5a7322c77077248dbb12da52d23193a839cd/jiter-0.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0733312953b909688ae3c2d58d043aa040f9f1a6a75693defed7bc2cc4bf2654", size = 204560, upload-time = "2026-02-02T12:35:53.925Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/26/97cc40663deb17b9e13c3a5cf29251788c271b18ee4d262c8f94798b8336/jiter-0.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:5d9b34ad56761b3bf0fbe8f7e55468704107608512350962d3317ffd7a4382d5", size = 189608, upload-time = "2026-02-02T12:35:55.304Z" }, { url = "https://files.pythonhosted.org/packages/2e/30/7687e4f87086829955013ca12a9233523349767f69653ebc27036313def9/jiter-0.13.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0a2bd69fc1d902e89925fc34d1da51b2128019423d7b339a45d9e99c894e0663", size = 307958, upload-time = "2026-02-02T12:35:57.165Z" }, { url = "https://files.pythonhosted.org/packages/c3/27/e57f9a783246ed95481e6749cc5002a8a767a73177a83c63ea71f0528b90/jiter-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f917a04240ef31898182f76a332f508f2cc4b57d2b4d7ad2dbfebbfe167eb505", size = 318597, upload-time = "2026-02-02T12:35:58.591Z" }, { url = "https://files.pythonhosted.org/packages/cf/52/e5719a60ac5d4d7c5995461a94ad5ef962a37c8bf5b088390e6fad59b2ff/jiter-0.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1e2b199f446d3e82246b4fd9236d7cb502dc2222b18698ba0d986d2fecc6152", size = 348821, upload-time = "2026-02-02T12:36:00.093Z" }, @@ -3032,6 +3358,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/47/66/eea81dfff765ed66c68fd2ed8c96245109e13c896c2a5015c7839c92367e/jiter-0.13.0-cp314-cp314t-win32.whl", hash = "sha256:24dc96eca9f84da4131cdf87a95e6ce36765c3b156fc9ae33280873b1c32d5f6", size = 201196, upload-time = "2026-02-02T12:37:19.101Z" }, { url = "https://files.pythonhosted.org/packages/ff/32/4ac9c7a76402f8f00d00842a7f6b83b284d0cf7c1e9d4227bc95aa6d17fa/jiter-0.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0a8d76c7524087272c8ae913f5d9d608bd839154b62c4322ef65723d2e5bb0b8", size = 204215, upload-time = "2026-02-02T12:37:20.495Z" }, { url = 
"https://files.pythonhosted.org/packages/f9/8e/7def204fea9f9be8b3c21a6f2dd6c020cf56c7d5ff753e0e23ed7f9ea57e/jiter-0.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2c26cf47e2cad140fa23b6d58d435a7c0161f5c514284802f25e87fddfe11024", size = 187152, upload-time = "2026-02-02T12:37:22.124Z" }, + { url = "https://files.pythonhosted.org/packages/79/b3/3c29819a27178d0e461a8571fb63c6ae38be6dc36b78b3ec2876bbd6a910/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b1cbfa133241d0e6bdab48dcdc2604e8ba81512f6bbd68ec3e8e1357dd3c316c", size = 307016, upload-time = "2026-02-02T12:37:42.755Z" }, + { url = "https://files.pythonhosted.org/packages/eb/ae/60993e4b07b1ac5ebe46da7aa99fdbb802eb986c38d26e3883ac0125c4e0/jiter-0.13.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:db367d8be9fad6e8ebbac4a7578b7af562e506211036cba2c06c3b998603c3d2", size = 305024, upload-time = "2026-02-02T12:37:44.774Z" }, + { url = "https://files.pythonhosted.org/packages/77/fa/2227e590e9cf98803db2811f172b2d6460a21539ab73006f251c66f44b14/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45f6f8efb2f3b0603092401dc2df79fa89ccbc027aaba4174d2d4133ed661434", size = 339337, upload-time = "2026-02-02T12:37:46.668Z" }, + { url = "https://files.pythonhosted.org/packages/2d/92/015173281f7eb96c0ef580c997da8ef50870d4f7f4c9e03c845a1d62ae04/jiter-0.13.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:597245258e6ad085d064780abfb23a284d418d3e61c57362d9449c6c7317ee2d", size = 346395, upload-time = "2026-02-02T12:37:48.09Z" }, { url = "https://files.pythonhosted.org/packages/80/60/e50fa45dd7e2eae049f0ce964663849e897300433921198aef94b6ffa23a/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:3d744a6061afba08dd7ae375dcde870cffb14429b7477e10f67e9e6d68772a0a", size = 305169, upload-time = "2026-02-02T12:37:50.376Z" }, { url = 
"https://files.pythonhosted.org/packages/d2/73/a009f41c5eed71c49bec53036c4b33555afcdee70682a18c6f66e396c039/jiter-0.13.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:ff732bd0a0e778f43d5009840f20b935e79087b4dc65bd36f1cd0f9b04b8ff7f", size = 303808, upload-time = "2026-02-02T12:37:52.092Z" }, { url = "https://files.pythonhosted.org/packages/c4/10/528b439290763bff3d939268085d03382471b442f212dca4ff5f12802d43/jiter-0.13.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab44b178f7981fcaea7e0a5df20e773c663d06ffda0198f1a524e91b2fde7e59", size = 337384, upload-time = "2026-02-02T12:37:53.582Z" }, @@ -3148,6 +3478,8 @@ version = "1.22.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/3a/2c/84076b352107ce12d56f28c313f1aca1be332d953dd96aec7b84976e6d53/kaldi-native-fbank-1.22.3.tar.gz", hash = "sha256:387bf87225c6b83c93ae652eeaef1b4d531994b6e398e7a77189de340674f9af", size = 71013, upload-time = "2025-10-09T02:31:21.487Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/53/720ffbe8b30de203570f397866334eb4c6364c9214699010f2086de911ff/kaldi_native_fbank-1.22.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48e5dd8e897bf4509be2c6eeb4bbab728eaaef1f214ae0510c96219c4253d17", size = 299054, upload-time = "2025-10-09T02:28:42.011Z" }, + { url = "https://files.pythonhosted.org/packages/52/3f/beb161e4fdf6710938ccf18418c147d87ba8f102903d6c6e4eda25588e22/kaldi_native_fbank-1.22.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce84c65779c9eed6ec02699797a4ba1859451977537a993be3ea8167a210ec3e", size = 321921, upload-time = "2025-10-09T02:31:21.646Z" }, { url = "https://files.pythonhosted.org/packages/43/28/6f4fd8953c0b3f30de4526fd024095032abcdc25b6736c77a891687c604e/kaldi_native_fbank-1.22.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:f5a44b4a83cf9bf13d3f77858928068b06d3ec2238c27ff2e39393fbf7749c9f", size = 298887, upload-time = "2025-10-09T02:30:53.739Z" }, { url = "https://files.pythonhosted.org/packages/84/90/01ef7331c52b1eaf9916f3f7a535155aac2e9e2ddad12a141613d92758c7/kaldi_native_fbank-1.22.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f16e74372fe9e20abb4183f98a8e2288d5ee4c48d04d94b6160311170e007661", size = 322002, upload-time = "2025-10-09T02:30:13.04Z" }, { url = "https://files.pythonhosted.org/packages/9a/72/adb11d27c545aca1db442da744ee430a6aae377a33574bfd2ec159dcf673/kaldi_native_fbank-1.22.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f74b85948328ab4b4c88522f98a59f83dd5295443b08483e945c7de2c35e5dcc", size = 299276, upload-time = "2025-10-09T02:30:38.1Z" }, @@ -3161,6 +3493,7 @@ name = "keyring" version = "25.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.12'" }, { name = "jaraco-classes" }, { name = "jaraco-context" }, { name = "jaraco-functools" }, @@ -3179,6 +3512,19 @@ version = "1.4.9" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/5c/3c/85844f1b0feb11ee581ac23fe5fce65cd049a200c1446708cc1b7f922875/kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d", size = 97564, upload-time = "2025-08-10T21:27:49.279Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/ab/c80b0d5a9d8a1a65f4f815f2afff9798b12c3b9f31f1d304dd233dd920e2/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eb14a5da6dc7642b0f3a18f13654847cd8b7a2550e2645a5bda677862b03ba16", size = 124167, upload-time = "2025-08-10T21:25:53.403Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c0/27fe1a68a39cf62472a300e2879ffc13c0538546c359b86f149cc19f6ac3/kiwisolver-1.4.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:39a219e1c81ae3b103643d2aedb90f1ef22650deb266ff12a19e7773f3e5f089", size = 66579, upload-time = "2025-08-10T21:25:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/31/a2/a12a503ac1fd4943c50f9822678e8015a790a13b5490354c68afb8489814/kiwisolver-1.4.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2405a7d98604b87f3fc28b1716783534b1b4b8510d8142adca34ee0bc3c87543", size = 65309, upload-time = "2025-08-10T21:25:55.76Z" }, + { url = "https://files.pythonhosted.org/packages/66/e1/e533435c0be77c3f64040d68d7a657771194a63c279f55573188161e81ca/kiwisolver-1.4.9-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dc1ae486f9abcef254b5618dfb4113dd49f94c68e3e027d03cf0143f3f772b61", size = 1435596, upload-time = "2025-08-10T21:25:56.861Z" }, + { url = "https://files.pythonhosted.org/packages/67/1e/51b73c7347f9aabdc7215aa79e8b15299097dc2f8e67dee2b095faca9cb0/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a1f570ce4d62d718dce3f179ee78dac3b545ac16c0c04bb363b7607a949c0d1", size = 1246548, upload-time = "2025-08-10T21:25:58.246Z" }, + { url = "https://files.pythonhosted.org/packages/21/aa/72a1c5d1e430294f2d32adb9542719cfb441b5da368d09d268c7757af46c/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb27e7b78d716c591e88e0a09a2139c6577865d7f2e152488c2cc6257f460872", size = 1263618, upload-time = "2025-08-10T21:25:59.857Z" }, + { url = "https://files.pythonhosted.org/packages/a3/af/db1509a9e79dbf4c260ce0cfa3903ea8945f6240e9e59d1e4deb731b1a40/kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:15163165efc2f627eb9687ea5f3a28137217d217ac4024893d753f46bce9de26", size = 1317437, upload-time = "2025-08-10T21:26:01.105Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f2/3ea5ee5d52abacdd12013a94130436e19969fa183faa1e7c7fbc89e9a42f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:bdee92c56a71d2b24c33a7d4c2856bd6419d017e08caa7802d2963870e315028", size = 2195742, upload-time = "2025-08-10T21:26:02.675Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9b/1efdd3013c2d9a2566aa6a337e9923a00590c516add9a1e89a768a3eb2fc/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:412f287c55a6f54b0650bd9b6dce5aceddb95864a1a90c87af16979d37c89771", size = 2290810, upload-time = "2025-08-10T21:26:04.009Z" }, + { url = "https://files.pythonhosted.org/packages/fb/e5/cfdc36109ae4e67361f9bc5b41323648cb24a01b9ade18784657e022e65f/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2c93f00dcba2eea70af2be5f11a830a742fe6b579a1d4e00f47760ef13be247a", size = 2461579, upload-time = "2025-08-10T21:26:05.317Z" }, + { url = "https://files.pythonhosted.org/packages/62/86/b589e5e86c7610842213994cdea5add00960076bef4ae290c5fa68589cac/kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f117e1a089d9411663a3207ba874f31be9ac8eaa5b533787024dc07aeb74f464", size = 2268071, upload-time = "2025-08-10T21:26:06.686Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c6/f8df8509fd1eee6c622febe54384a96cfaf4d43bf2ccec7a0cc17e4715c9/kiwisolver-1.4.9-cp311-cp311-win_amd64.whl", hash = "sha256:be6a04e6c79819c9a8c2373317d19a96048e5a3f90bec587787e86a1153883c2", size = 73840, upload-time = "2025-08-10T21:26:07.94Z" }, + { url = "https://files.pythonhosted.org/packages/e2/2d/16e0581daafd147bc11ac53f032a2b45eabac897f42a338d0a13c1e5c436/kiwisolver-1.4.9-cp311-cp311-win_arm64.whl", hash = "sha256:0ae37737256ba2de764ddc12aed4956460277f00c4996d51a197e72f62f5eec7", size = 65159, upload-time = "2025-08-10T21:26:09.048Z" }, { url = "https://files.pythonhosted.org/packages/86/c9/13573a747838aeb1c76e3267620daa054f4152444d1f3d1a2324b78255b5/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999", size = 123686, upload-time = 
"2025-08-10T21:26:10.034Z" }, { url = "https://files.pythonhosted.org/packages/51/ea/2ecf727927f103ffd1739271ca19c424d0e65ea473fbaeea1c014aea93f6/kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2", size = 66460, upload-time = "2025-08-10T21:26:11.083Z" }, { url = "https://files.pythonhosted.org/packages/5b/5a/51f5464373ce2aeb5194508298a508b6f21d3867f499556263c64c621914/kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14", size = 64952, upload-time = "2025-08-10T21:26:12.058Z" }, @@ -3243,6 +3589,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/99/dd/841e9a66c4715477ea0abc78da039832fbb09dac5c35c58dc4c41a407b8a/kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369", size = 2391835, upload-time = "2025-08-10T21:27:34.23Z" }, { url = "https://files.pythonhosted.org/packages/0c/28/4b2e5c47a0da96896fdfdb006340ade064afa1e63675d01ea5ac222b6d52/kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891", size = 79988, upload-time = "2025-08-10T21:27:35.587Z" }, { url = "https://files.pythonhosted.org/packages/80/be/3578e8afd18c88cdf9cb4cffde75a96d2be38c5a903f1ed0ceec061bd09e/kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32", size = 70260, upload-time = "2025-08-10T21:27:36.606Z" }, + { url = "https://files.pythonhosted.org/packages/a3/0f/36d89194b5a32c054ce93e586d4049b6c2c22887b0eb229c61c68afd3078/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:720e05574713db64c356e86732c0f3c5252818d05f9df320f0ad8380641acea5", size = 60104, upload-time = "2025-08-10T21:27:43.287Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/ba/4ed75f59e4658fd21fe7dde1fee0ac397c678ec3befba3fe6482d987af87/kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:17680d737d5335b552994a2008fab4c851bcd7de33094a82067ef3a576ff02fa", size = 58592, upload-time = "2025-08-10T21:27:44.314Z" }, + { url = "https://files.pythonhosted.org/packages/33/01/a8ea7c5ea32a9b45ceeaee051a04c8ed4320f5add3c51bfa20879b765b70/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85b5352f94e490c028926ea567fc569c52ec79ce131dadb968d3853e809518c2", size = 80281, upload-time = "2025-08-10T21:27:45.369Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/dbd2ecdce306f1d07a1aaf324817ee993aab7aee9db47ceac757deabafbe/kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:464415881e4801295659462c49461a24fb107c140de781d55518c4b80cb6790f", size = 78009, upload-time = "2025-08-10T21:27:46.376Z" }, + { url = "https://files.pythonhosted.org/packages/da/e9/0d4add7873a73e462aeb45c036a2dead2562b825aa46ba326727b3f31016/kiwisolver-1.4.9-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:fb940820c63a9590d31d88b815e7a3aa5915cad3ce735ab45f0c730b39547de1", size = 73929, upload-time = "2025-08-10T21:27:48.236Z" }, ] [[package]] @@ -3422,6 +3773,8 @@ version = "0.44.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/89/6a/95a3d3610d5c75293d5dbbb2a76480d5d4eeba641557b69fe90af6c5b84e/llvmlite-0.44.0.tar.gz", hash = "sha256:07667d66a5d150abed9157ab6c0b9393c9356f229784a4385c02f99e94fc94d4", size = 171880, upload-time = "2025-01-20T11:14:41.342Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/99/fe/d030f1849ebb1f394bb3f7adad5e729b634fb100515594aca25c354ffc62/llvmlite-0.44.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5d22c3bfc842668168a786af4205ec8e3ad29fb1bc03fd11fd48460d0df64c1", size = 
42361858, upload-time = "2025-01-20T11:13:07.623Z" }, + { url = "https://files.pythonhosted.org/packages/d7/7a/ce6174664b9077fc673d172e4c888cb0b128e707e306bc33fff8c2035f0d/llvmlite-0.44.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f01a394e9c9b7b1d4e63c327b096d10f6f0ed149ef53d38a09b3749dcf8c9610", size = 41184200, upload-time = "2025-01-20T11:13:20.058Z" }, { url = "https://files.pythonhosted.org/packages/cb/da/8341fd3056419441286c8e26bf436923021005ece0bff5f41906476ae514/llvmlite-0.44.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0143a5ef336da14deaa8ec26c5449ad5b6a2b564df82fcef4be040b9cacfea9", size = 42361901, upload-time = "2025-01-20T11:13:46.711Z" }, { url = "https://files.pythonhosted.org/packages/53/ad/d79349dc07b8a395a99153d7ce8b01d6fcdc9f8231355a5df55ded649b61/llvmlite-0.44.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d752f89e31b66db6f8da06df8b39f9b91e78c5feea1bf9e8c1fba1d1c24c065d", size = 41184247, upload-time = "2025-01-20T11:13:56.159Z" }, { url = "https://files.pythonhosted.org/packages/d2/1b/656f5a357de7135a3777bd735cc7c9b8f23b4d37465505bd0eaf4be9befe/llvmlite-0.44.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46224058b13c96af1365290bdfebe9a6264ae62fb79b2b55693deed11657a8bf", size = 42361904, upload-time = "2025-01-20T11:14:22.949Z" }, @@ -3509,6 +3862,17 @@ version = "3.0.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = 
"sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, { url = 
"https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, @@ -3583,6 +3947,13 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/8a/76/d3c6e3a13fe484ebe7718d14e269c9569c4eb0020a968a327acb3b9a8fe6/matplotlib-3.10.8.tar.gz", hash = "sha256:2299372c19d56bcd35cf05a2738308758d32b9eaed2371898d8f5bd33f084aa3", size = 34806269, upload-time = "2025-12-10T22:56:51.155Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/86/de7e3a1cdcfc941483af70609edc06b83e7c8a0e0dc9ac325200a3f4d220/matplotlib-3.10.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6be43b667360fef5c754dda5d25a32e6307a03c204f3c0fc5468b78fa87b4160", size = 8251215, upload-time = "2025-12-10T22:55:16.175Z" }, + { url = "https://files.pythonhosted.org/packages/fd/14/baad3222f424b19ce6ad243c71de1ad9ec6b2e4eb1e458a48fdc6d120401/matplotlib-3.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2b336e2d91a3d7006864e0990c83b216fcdca64b5a6484912902cef87313d78", size = 8139625, upload-time = "2025-12-10T22:55:17.712Z" }, + { url = "https://files.pythonhosted.org/packages/8f/a0/7024215e95d456de5883e6732e708d8187d9753a21d32f8ddb3befc0c445/matplotlib-3.10.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:efb30e3baaea72ce5928e32bab719ab4770099079d66726a62b11b1ef7273be4", size = 8712614, upload-time = "2025-12-10T22:55:20.8Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f4/b8347351da9a5b3f41e26cf547252d861f685c6867d179a7c9d60ad50189/matplotlib-3.10.8-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d56a1efd5bfd61486c8bc968fa18734464556f0fb8e51690f4ac25d85cbbbbc2", size = 9540997, upload-time = 
"2025-12-10T22:55:23.258Z" }, + { url = "https://files.pythonhosted.org/packages/9e/c0/c7b914e297efe0bc36917bf216b2acb91044b91e930e878ae12981e461e5/matplotlib-3.10.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:238b7ce5717600615c895050239ec955d91f321c209dd110db988500558e70d6", size = 9596825, upload-time = "2025-12-10T22:55:25.217Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d3/a4bbc01c237ab710a1f22b4da72f4ff6d77eb4c7735ea9811a94ae239067/matplotlib-3.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:18821ace09c763ec93aef5eeff087ee493a24051936d7b9ebcad9662f66501f9", size = 8135090, upload-time = "2025-12-10T22:55:27.162Z" }, + { url = "https://files.pythonhosted.org/packages/89/dd/a0b6588f102beab33ca6f5218b31725216577b2a24172f327eaf6417d5c9/matplotlib-3.10.8-cp311-cp311-win_arm64.whl", hash = "sha256:bab485bcf8b1c7d2060b4fcb6fc368a9e6f4cd754c9c2fea281f4be21df394a2", size = 8012377, upload-time = "2025-12-10T22:55:29.185Z" }, { url = "https://files.pythonhosted.org/packages/9e/67/f997cdcbb514012eb0d10cd2b4b332667997fb5ebe26b8d41d04962fa0e6/matplotlib-3.10.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:64fcc24778ca0404ce0cb7b6b77ae1f4c7231cdd60e6778f999ee05cbd581b9a", size = 8260453, upload-time = "2025-12-10T22:55:30.709Z" }, { url = "https://files.pythonhosted.org/packages/7e/65/07d5f5c7f7c994f12c768708bd2e17a4f01a2b0f44a1c9eccad872433e2e/matplotlib-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9a5ca4ac220a0cdd1ba6bcba3608547117d30468fefce49bb26f55c1a3d5c58", size = 8148321, upload-time = "2025-12-10T22:55:33.265Z" }, { url = "https://files.pythonhosted.org/packages/3e/f3/c5195b1ae57ef85339fd7285dfb603b22c8b4e79114bae5f4f0fcf688677/matplotlib-3.10.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ab4aabc72de4ff77b3ec33a6d78a68227bf1123465887f9905ba79184a1cc04", size = 8716944, upload-time = "2025-12-10T22:55:34.922Z" }, @@ -3618,6 +3989,9 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/4d/4b/e7beb6bbd49f6bae727a12b270a2654d13c397576d25bd6786e47033300f/matplotlib-3.10.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:595ba4d8fe983b88f0eec8c26a241e16d6376fe1979086232f481f8f3f67494c", size = 9614011, upload-time = "2025-12-10T22:56:33.85Z" }, { url = "https://files.pythonhosted.org/packages/7c/e6/76f2813d31f032e65f6f797e3f2f6e4aab95b65015924b1c51370395c28a/matplotlib-3.10.8-cp314-cp314t-win_amd64.whl", hash = "sha256:25d380fe8b1dc32cf8f0b1b448470a77afb195438bafdf1d858bfb876f3edf7b", size = 8362801, upload-time = "2025-12-10T22:56:36.107Z" }, { url = "https://files.pythonhosted.org/packages/5d/49/d651878698a0b67f23aa28e17f45a6d6dd3d3f933fa29087fa4ce5947b5a/matplotlib-3.10.8-cp314-cp314t-win_arm64.whl", hash = "sha256:113bb52413ea508ce954a02c10ffd0d565f9c3bc7f2eddc27dfe1731e71c7b5f", size = 8192560, upload-time = "2025-12-10T22:56:38.008Z" }, + { url = "https://files.pythonhosted.org/packages/04/30/3afaa31c757f34b7725ab9d2ba8b48b5e89c2019c003e7d0ead143aabc5a/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6da7c2ce169267d0d066adcf63758f0604aa6c3eebf67458930f9d9b79ad1db1", size = 8249198, upload-time = "2025-12-10T22:56:45.584Z" }, + { url = "https://files.pythonhosted.org/packages/48/2f/6334aec331f57485a642a7c8be03cb286f29111ae71c46c38b363230063c/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9153c3292705be9f9c64498a8872118540c3f4123d1a1c840172edf262c8be4a", size = 8136817, upload-time = "2025-12-10T22:56:47.339Z" }, + { url = "https://files.pythonhosted.org/packages/73/e4/6d6f14b2a759c622f191b2d67e9075a3f56aaccb3be4bb9bb6890030d0a0/matplotlib-3.10.8-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ae029229a57cd1e8fe542485f27e7ca7b23aa9e8944ddb4985d0bc444f1eca2", size = 8713867, upload-time = "2025-12-10T22:56:48.954Z" }, ] [[package]] @@ -3699,6 +4073,8 @@ dependencies = [ ] sdist = { url = 
"https://files.pythonhosted.org/packages/0a/e5/6698af70623ac6a4d6403f34520550ac4d9743913206f79c96c3497b87fc/megatron_core-0.15.2.tar.gz", hash = "sha256:8397898a0ade70c3ba96fc21335a7e022587bc139b91bf625ef76cd0e66d7b4f", size = 878187, upload-time = "2026-01-08T15:38:47.965Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/65/d994d36aeecbd236e24d8860c3347399c714c913c733c2717824ff10d1c8/megatron_core-0.15.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:015375a4632432ba8ff63a12f6ed89df3f525290b7192f3cde70d79c93ea8ecc", size = 2218355, upload-time = "2026-01-08T15:38:37.74Z" }, + { url = "https://files.pythonhosted.org/packages/3f/28/a6119ce2211268e2c9c1bbf31cfb560c4e03932bed3bcb4f03bdde329b54/megatron_core-0.15.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5220f1a9c19695c2b0c614f83e4575b2032d12ea7276f1e38bde4a2d90ee6ed3", size = 2250269, upload-time = "2026-01-08T15:38:40.207Z" }, { url = "https://files.pythonhosted.org/packages/32/61/96ae649e1f22e978db34006161b84e58b3dbc80587da096981f045ffa3ec/megatron_core-0.15.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:744fb4b788791342c53927df584d35d8cfb14330d3bdde95fc1555f6406bc50a", size = 2238920, upload-time = "2026-01-08T15:38:41.475Z" }, { url = "https://files.pythonhosted.org/packages/10/f0/42adab418f2850bcb56e7d9c03398cb85df98ff1ac996b225958207a5775/megatron_core-0.15.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2402411a4ec077b05fcc703d948d6527d29f11200e9ec0b9c505b5576e24f7f2", size = 2263262, upload-time = "2026-01-08T15:38:43.577Z" }, { url = "https://files.pythonhosted.org/packages/b9/94/8235ad4525e3941cf02b40febb81ae3c75c1212628ae7a12c095a82e75ca/megatron_core-0.15.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:35c6b5fc4b3e8d332c33143d1a85793451f2845cdb766b8b62fefffef6d3a327", size = 2238145, upload-time = "2026-01-08T15:38:44.94Z" 
}, @@ -3798,6 +4174,11 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/0e/4a/c27b42ed9b1c7d13d9ba8b6905dece787d6259152f2309338aed29b2447b/ml_dtypes-0.5.4.tar.gz", hash = "sha256:8ab06a50fb9bf9666dd0fe5dfb4676fa2b0ac0f31ecff72a6c3af8e22c063453", size = 692314, upload-time = "2025-11-17T22:32:31.031Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/5e/712092cfe7e5eb667b8ad9ca7c54442f21ed7ca8979745f1000e24cf8737/ml_dtypes-0.5.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6c7ecb74c4bd71db68a6bea1edf8da8c34f3d9fe218f038814fd1d310ac76c90", size = 679734, upload-time = "2025-11-17T22:31:39.223Z" }, + { url = "https://files.pythonhosted.org/packages/4f/cf/912146dfd4b5c0eea956836c01dcd2fce6c9c844b2691f5152aca196ce4f/ml_dtypes-0.5.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc11d7e8c44a65115d05e2ab9989d1e045125d7be8e05a071a48bc76eb6d6040", size = 5056165, upload-time = "2025-11-17T22:31:41.071Z" }, + { url = "https://files.pythonhosted.org/packages/a9/80/19189ea605017473660e43762dc853d2797984b3c7bf30ce656099add30c/ml_dtypes-0.5.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:19b9a53598f21e453ea2fbda8aa783c20faff8e1eeb0d7ab899309a0053f1483", size = 5034975, upload-time = "2025-11-17T22:31:42.758Z" }, + { url = "https://files.pythonhosted.org/packages/b4/24/70bd59276883fdd91600ca20040b41efd4902a923283c4d6edcb1de128d2/ml_dtypes-0.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:7c23c54a00ae43edf48d44066a7ec31e05fdc2eee0be2b8b50dd1903a1db94bb", size = 210742, upload-time = "2025-11-17T22:31:44.068Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c9/64230ef14e40aa3f1cb254ef623bf812735e6bec7772848d19131111ac0d/ml_dtypes-0.5.4-cp311-cp311-win_arm64.whl", hash = "sha256:557a31a390b7e9439056644cb80ed0735a6e3e3bb09d67fd5687e4b04238d1de", size = 160709, upload-time = "2025-11-17T22:31:46.557Z" }, { url = 
"https://files.pythonhosted.org/packages/a8/b8/3c70881695e056f8a32f8b941126cf78775d9a4d7feba8abcb52cb7b04f2/ml_dtypes-0.5.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a174837a64f5b16cab6f368171a1a03a27936b31699d167684073ff1c4237dac", size = 676927, upload-time = "2025-11-17T22:31:48.182Z" }, { url = "https://files.pythonhosted.org/packages/54/0f/428ef6881782e5ebb7eca459689448c0394fa0a80bea3aa9262cba5445ea/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7f7c643e8b1320fd958bf098aa7ecf70623a42ec5154e3be3be673f4c34d900", size = 5028464, upload-time = "2025-11-17T22:31:50.135Z" }, { url = "https://files.pythonhosted.org/packages/3a/cb/28ce52eb94390dda42599c98ea0204d74799e4d8047a0eb559b6fd648056/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ad459e99793fa6e13bd5b7e6792c8f9190b4e5a1b45c63aba14a4d0a7f1d5ff", size = 5009002, upload-time = "2025-11-17T22:31:52.001Z" }, @@ -3893,6 +4274,10 @@ version = "1.1.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/71/46/b817349db6886d79e57a966346cf0902a426375aadc1e8e7a86a75e22f19/msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296", size = 416962, upload-time = "2025-10-08T09:14:51.997Z" }, + { url = "https://files.pythonhosted.org/packages/da/e0/6cc2e852837cd6086fe7d8406af4294e66827a60a4cf60b86575a4a65ca8/msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef", size = 426183, upload-time = "2025-10-08T09:14:53.477Z" }, + { url = "https://files.pythonhosted.org/packages/25/98/6a19f030b3d2ea906696cedd1eb251708e50a5891d0978b012cb6107234c/msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c", size = 411454, upload-time = "2025-10-08T09:14:54.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/cd/9098fcb6adb32187a70b7ecaabf6339da50553351558f37600e53a4a2a23/msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e", size = 422341, upload-time = "2025-10-08T09:14:56.328Z" }, { url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" }, { url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" }, { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" }, @@ -3917,6 +4302,14 @@ version = "0.20.0" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/ea/9c/bfbd12955a49180cbd234c5d29ec6f74fe641698f0cd9df154a854fc8a15/msgspec-0.20.0.tar.gz", hash = "sha256:692349e588fde322875f8d3025ac01689fead5901e7fb18d6870a44519d62a29", size = 317862, upload-time = "2025-11-24T03:56:28.934Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/03/59/fdcb3af72f750a8de2bcf39d62ada70b5eb17b06d7f63860e0a679cb656b/msgspec-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:09e0efbf1ac641fedb1d5496c59507c2f0dc62a052189ee62c763e0aae217520", size = 193345, upload-time = "2025-11-24T03:55:20.613Z" }, + { url = "https://files.pythonhosted.org/packages/5a/15/3c225610da9f02505d37d69a77f4a2e7daae2a125f99d638df211ba84e59/msgspec-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23ee3787142e48f5ee746b2909ce1b76e2949fbe0f97f9f6e70879f06c218b54", size = 186867, upload-time = "2025-11-24T03:55:22.4Z" }, + { url = "https://files.pythonhosted.org/packages/81/36/13ab0c547e283bf172f45491edfdea0e2cecb26ae61e3a7b1ae6058b326d/msgspec-0.20.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:81f4ac6f0363407ac0465eff5c7d4d18f26870e00674f8fcb336d898a1e36854", size = 215351, upload-time = "2025-11-24T03:55:23.958Z" }, + { url = "https://files.pythonhosted.org/packages/6b/96/5c095b940de3aa6b43a71ec76275ac3537b21bd45c7499b5a17a429110fa/msgspec-0.20.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bb4d873f24ae18cd1334f4e37a178ed46c9d186437733351267e0a269bdf7e53", size = 219896, upload-time = "2025-11-24T03:55:25.356Z" }, + { url = "https://files.pythonhosted.org/packages/98/7a/81a7b5f01af300761087b114dafa20fb97aed7184d33aab64d48874eb187/msgspec-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b92b8334427b8393b520c24ff53b70f326f79acf5f74adb94fd361bcff8a1d4e", size = 220389, upload-time = "2025-11-24T03:55:26.99Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/c0/3d0cce27db9a9912421273d49eab79ce01ecd2fed1a2f1b74af9b445f33c/msgspec-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:562c44b047c05cc0384e006fae7a5e715740215c799429e0d7e3e5adf324285a", size = 223348, upload-time = "2025-11-24T03:55:28.311Z" }, + { url = "https://files.pythonhosted.org/packages/89/5e/406b7d578926b68790e390d83a1165a9bfc2d95612a1a9c1c4d5c72ea815/msgspec-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:d1dcc93a3ce3d3195985bfff18a48274d0b5ffbc96fa1c5b89da6f0d9af81b29", size = 188713, upload-time = "2025-11-24T03:55:29.553Z" }, + { url = "https://files.pythonhosted.org/packages/47/87/14fe2316624ceedf76a9e94d714d194cbcb699720b210ff189f89ca4efd7/msgspec-0.20.0-cp311-cp311-win_arm64.whl", hash = "sha256:aa387aa330d2e4bd69995f66ea8fdc87099ddeedf6fdb232993c6a67711e7520", size = 174229, upload-time = "2025-11-24T03:55:31.107Z" }, { url = "https://files.pythonhosted.org/packages/d9/6f/1e25eee957e58e3afb2a44b94fa95e06cebc4c236193ed0de3012fff1e19/msgspec-0.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2aba22e2e302e9231e85edc24f27ba1f524d43c223ef5765bd8624c7df9ec0a5", size = 196391, upload-time = "2025-11-24T03:55:32.677Z" }, { url = "https://files.pythonhosted.org/packages/7f/ee/af51d090ada641d4b264992a486435ba3ef5b5634bc27e6eb002f71cef7d/msgspec-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:716284f898ab2547fedd72a93bb940375de9fbfe77538f05779632dc34afdfde", size = 188644, upload-time = "2025-11-24T03:55:33.934Z" }, { url = "https://files.pythonhosted.org/packages/49/d6/9709ee093b7742362c2934bfb1bbe791a1e09bed3ea5d8a18ce552fbfd73/msgspec-0.20.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:558ed73315efa51b1538fa8f1d3b22c8c5ff6d9a2a62eff87d25829b94fc5054", size = 218852, upload-time = "2025-11-24T03:55:35.575Z" }, @@ -3987,6 +4380,9 @@ dependencies = [ { name = "xattr" }, ] wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e1/90/1e69cb6d71418b38a9409b0e2564efe1e7c12e18e63e478591ae0317dbcc/multi_storage_client-0.42.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3db30610d6bb15a5c211af9d7b11c8a1a13265893c1a625d5aaadacdb61a9a8e", size = 8805275, upload-time = "2026-02-06T20:58:10.943Z" }, + { url = "https://files.pythonhosted.org/packages/de/dd/a55dc9e60113f98af10075c3e33b97007bfbfd2e6f8bc6a1b2b1b43857c8/multi_storage_client-0.42.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8c2d491475eec5e80ad706eca7005d9bd17d30b29166e891c18695b42336493", size = 5155309, upload-time = "2026-02-06T20:56:22.528Z" }, + { url = "https://files.pythonhosted.org/packages/b9/b6/648a1d6b4482634fbb0d5bc0ba156b42fafd4f364227f9203bc4ac70dbac/multi_storage_client-0.42.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a87e05e0e09b8fbd6804bb1ac85a28213d4371e91d06d9c35ad12b247f28ec", size = 5422770, upload-time = "2026-02-06T21:01:41.97Z" }, { url = "https://files.pythonhosted.org/packages/d4/5a/6af92f30d09c97a314594029c115da0c44d5fa14e772983d88ad8023d355/multi_storage_client-0.42.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5c71c128b9f81cfbd59f1e2c2acfb2559658dfecde904496b7845901f0161430", size = 8798046, upload-time = "2026-02-06T21:02:32.674Z" }, { url = "https://files.pythonhosted.org/packages/c1/b2/e686bcbe754bfede1773153d928422b2c4b25453faf0e228cf9cadfa73e0/multi_storage_client-0.42.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afe72fcb3f44ddc23488ab65bbab8575181fe15f63d297074703a36f4d8f7cc9", size = 5155767, upload-time = "2026-02-06T21:01:02.151Z" }, { url = "https://files.pythonhosted.org/packages/05/44/2b7e0ec6fa68f208cb919c38df346cca37c910906f564a43f74731eb6cdb/multi_storage_client-0.42.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30410d59d1f93758640a15779af6379a961bfa0f9607809a2b869e8b750efac7", size = 5421800, upload-time 
= "2026-02-06T21:03:04.852Z" }, @@ -4001,6 +4397,24 @@ version = "6.7.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010, upload-time = "2026-01-26T02:46:45.979Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/f1/a90635c4f88fb913fbf4ce660b83b7445b7a02615bda034b2f8eb38fd597/multidict-6.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ff981b266af91d7b4b3793ca3382e53229088d193a85dfad6f5f4c27fc73e5d", size = 76626, upload-time = "2026-01-26T02:43:26.485Z" }, + { url = "https://files.pythonhosted.org/packages/a6/9b/267e64eaf6fc637a15b35f5de31a566634a2740f97d8d094a69d34f524a4/multidict-6.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:844c5bca0b5444adb44a623fb0a1310c2f4cd41f402126bb269cd44c9b3f3e1e", size = 44706, upload-time = "2026-01-26T02:43:27.607Z" }, + { url = "https://files.pythonhosted.org/packages/dd/a4/d45caf2b97b035c57267791ecfaafbd59c68212004b3842830954bb4b02e/multidict-6.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2a0a924d4c2e9afcd7ec64f9de35fcd96915149b2216e1cb2c10a56df483855", size = 44356, upload-time = "2026-01-26T02:43:28.661Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d2/0a36c8473f0cbaeadd5db6c8b72d15bbceeec275807772bfcd059bef487d/multidict-6.7.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8be1802715a8e892c784c0197c2ace276ea52702a0ede98b6310c8f255a5afb3", size = 244355, upload-time = "2026-01-26T02:43:31.165Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/8c65be997fd7dd311b7d39c7b6e71a0cb449bad093761481eccbbe4b42a2/multidict-6.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:2e2d2ed645ea29f31c4c7ea1552fcfd7cb7ba656e1eafd4134a6620c9f5fdd9e", size = 246433, upload-time = "2026-01-26T02:43:32.581Z" }, + { url = "https://files.pythonhosted.org/packages/01/fb/4dbd7e848d2799c6a026ec88ad39cf2b8416aa167fcc903baa55ecaa045c/multidict-6.7.1-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:95922cee9a778659e91db6497596435777bd25ed116701a4c034f8e46544955a", size = 225376, upload-time = "2026-01-26T02:43:34.417Z" }, + { url = "https://files.pythonhosted.org/packages/b6/8a/4a3a6341eac3830f6053062f8fbc9a9e54407c80755b3f05bc427295c2d0/multidict-6.7.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6b83cabdc375ffaaa15edd97eb7c0c672ad788e2687004990074d7d6c9b140c8", size = 257365, upload-time = "2026-01-26T02:43:35.741Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a2/dd575a69c1aa206e12d27d0770cdf9b92434b48a9ef0cd0d1afdecaa93c4/multidict-6.7.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:38fb49540705369bab8484db0689d86c0a33a0a9f2c1b197f506b71b4b6c19b0", size = 254747, upload-time = "2026-01-26T02:43:36.976Z" }, + { url = "https://files.pythonhosted.org/packages/5a/56/21b27c560c13822ed93133f08aa6372c53a8e067f11fbed37b4adcdac922/multidict-6.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:439cbebd499f92e9aa6793016a8acaa161dfa749ae86d20960189f5398a19144", size = 246293, upload-time = "2026-01-26T02:43:38.258Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a4/23466059dc3854763423d0ad6c0f3683a379d97673b1b89ec33826e46728/multidict-6.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6d3bc717b6fe763b8be3f2bee2701d3c8eb1b2a8ae9f60910f1b2860c82b6c49", size = 242962, upload-time = "2026-01-26T02:43:40.034Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/67/51dd754a3524d685958001e8fa20a0f5f90a6a856e0a9dcabff69be3dbb7/multidict-6.7.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:619e5a1ac57986dbfec9f0b301d865dddf763696435e2962f6d9cf2fdff2bb71", size = 237360, upload-time = "2026-01-26T02:43:41.752Z" }, + { url = "https://files.pythonhosted.org/packages/64/3f/036dfc8c174934d4b55d86ff4f978e558b0e585cef70cfc1ad01adc6bf18/multidict-6.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0b38ebffd9be37c1170d33bc0f36f4f262e0a09bc1aac1c34c7aa51a7293f0b3", size = 245940, upload-time = "2026-01-26T02:43:43.042Z" }, + { url = "https://files.pythonhosted.org/packages/3d/20/6214d3c105928ebc353a1c644a6ef1408bc5794fcb4f170bb524a3c16311/multidict-6.7.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:10ae39c9cfe6adedcdb764f5e8411d4a92b055e35573a2eaa88d3323289ef93c", size = 253502, upload-time = "2026-01-26T02:43:44.371Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e2/c653bc4ae1be70a0f836b82172d643fcf1dade042ba2676ab08ec08bff0f/multidict-6.7.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:25167cc263257660290fba06b9318d2026e3c910be240a146e1f66dd114af2b0", size = 247065, upload-time = "2026-01-26T02:43:45.745Z" }, + { url = "https://files.pythonhosted.org/packages/c8/11/a854b4154cd3bd8b1fd375e8a8ca9d73be37610c361543d56f764109509b/multidict-6.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:128441d052254f42989ef98b7b6a6ecb1e6f708aa962c7984235316db59f50fa", size = 241870, upload-time = "2026-01-26T02:43:47.054Z" }, + { url = "https://files.pythonhosted.org/packages/13/bf/9676c0392309b5fdae322333d22a829715b570edb9baa8016a517b55b558/multidict-6.7.1-cp311-cp311-win32.whl", hash = "sha256:d62b7f64ffde3b99d06b707a280db04fb3855b55f5a06df387236051d0668f4a", size = 41302, upload-time = "2026-01-26T02:43:48.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/68/f16a3a8ba6f7b6dc92a1f19669c0810bd2c43fc5a02da13b1cbf8e253845/multidict-6.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:bdbf9f3b332abd0cdb306e7c2113818ab1e922dc84b8f8fd06ec89ed2a19ab8b", size = 45981, upload-time = "2026-01-26T02:43:49.921Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ad/9dd5305253fa00cd3c7555dbef69d5bf4133debc53b87ab8d6a44d411665/multidict-6.7.1-cp311-cp311-win_arm64.whl", hash = "sha256:b8c990b037d2fff2f4e33d3f21b9b531c5745b33a49a7d6dbe7a177266af44f6", size = 43159, upload-time = "2026-01-26T02:43:51.635Z" }, { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893, upload-time = "2026-01-26T02:43:52.754Z" }, { url = "https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456, upload-time = "2026-01-26T02:43:53.893Z" }, { url = "https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872, upload-time = "2026-01-26T02:43:55.041Z" }, @@ -4235,6 +4649,8 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/1c/a0/e21f57604304aa03ebb8e098429222722ad99176a4f979d34af1d1ee80da/numba-0.61.2.tar.gz", hash = "sha256:8750ee147940a6637b80ecf7f95062185ad8726c8c28a2295b8ec1160a196f7d", size = 2820615, upload-time = "2025-04-09T02:58:07.659Z" } wheels = [ + { url = 
"https://files.pythonhosted.org/packages/97/c8/8740616c8436c86c1b9a62e72cb891177d2c34c2d24ddcde4c390371bf4c/numba-0.61.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3945615cd73c2c7eba2a85ccc9c1730c21cd3958bfcf5a44302abae0fb07bb60", size = 3829227, upload-time = "2025-04-09T02:57:46.63Z" }, + { url = "https://files.pythonhosted.org/packages/fc/06/66e99ae06507c31d15ff3ecd1f108f2f59e18b6e08662cd5f8a5853fbd18/numba-0.61.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbfdf4eca202cebade0b7d43896978e146f39398909a42941c9303f82f403a18", size = 3523422, upload-time = "2025-04-09T02:57:48.222Z" }, { url = "https://files.pythonhosted.org/packages/9a/2d/e518df036feab381c23a624dac47f8445ac55686ec7f11083655eb707da3/numba-0.61.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b1bb509d01f23d70325d3a5a0e237cbc9544dd50e50588bc581ba860c213546", size = 3885928, upload-time = "2025-04-09T02:57:55.206Z" }, { url = "https://files.pythonhosted.org/packages/10/0f/23cced68ead67b75d77cfcca3df4991d1855c897ee0ff3fe25a56ed82108/numba-0.61.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48a53a3de8f8793526cbe330f2a39fe9a6638efcbf11bd63f3d2f9757ae345cd", size = 3577115, upload-time = "2025-04-09T02:57:56.818Z" }, { url = "https://files.pythonhosted.org/packages/0d/e0/5ea04e7ad2c39288c0f0f9e8d47638ad70f28e275d092733b5817cf243c9/numba-0.61.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bdbca73ad81fa196bd53dc12e3aaf1564ae036e0c125f237c7644fe64a4928ab", size = 3893918, upload-time = "2025-04-09T02:58:02.933Z" }, @@ -4247,6 +4663,14 @@ version = "1.26.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z" } wheels 
= [ + { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554, upload-time = "2024-02-05T23:51:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127, upload-time = "2024-02-05T23:52:15.314Z" }, + { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994, upload-time = "2024-02-05T23:52:47.569Z" }, + { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005, upload-time = "2024-02-05T23:53:15.637Z" }, + { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297, upload-time = "2024-02-05T23:53:42.16Z" }, + { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567, upload-time = "2024-02-05T23:54:11.696Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812, upload-time = "2024-02-05T23:54:26.453Z" }, + { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913, upload-time = "2024-02-05T23:54:53.933Z" }, { url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901, upload-time = "2024-02-05T23:55:32.801Z" }, { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868, upload-time = "2024-02-05T23:55:56.28Z" }, { url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109, upload-time = "2024-02-05T23:56:20.368Z" }, @@ -4316,6 +4740,9 @@ name = "nvidia-cudnn-frontend" version = "1.18.0" source = { registry = "https://pypi.org/simple" } wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/9a/83d3d080118de4a7810fa019349edec634b8b37b9cafaacd05719de62dd6/nvidia_cudnn_frontend-1.18.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f6d4d0b88d617b233a503c84980b54d840b60b2734497d1a7a071ec5293daec2", size = 2023709, upload-time = 
"2026-01-27T23:32:10.912Z" }, + { url = "https://files.pythonhosted.org/packages/13/c7/c3624b3ed77b102618f26295e816b27f1c3ebb1143730237a9f51d403c3f/nvidia_cudnn_frontend-1.18.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:382ea063b92cbfd5b442cb75ff8422932d78276aecf139e46713ed1ad3d07af4", size = 2155568, upload-time = "2026-01-27T23:07:13.277Z" }, + { url = "https://files.pythonhosted.org/packages/52/dd/8613dfd029d076b86a8a87efe3f4bb4ab73cec15fa8fc27e665098f4d167/nvidia_cudnn_frontend-1.18.0-cp311-cp311-win_amd64.whl", hash = "sha256:baa509effc4d299d3f04e549d4188f88bca8a8b527f483cbd2f66bc18f13a8b1", size = 1591244, upload-time = "2026-01-27T23:08:44.691Z" }, { url = "https://files.pythonhosted.org/packages/e3/b4/604e230378680ee117849a4e1045baca092f93161a829291a84d5acce70c/nvidia_cudnn_frontend-1.18.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:310b417f2848a83d1437203fcaeea320a74fb7f28af20bf42bf5afc9c01f1c12", size = 2027408, upload-time = "2026-01-27T23:32:46.576Z" }, { url = "https://files.pythonhosted.org/packages/c6/52/08f98262e77b1cbcc834cc1a5db494d0661ea1dbdea58c2e2d51a57fdaca/nvidia_cudnn_frontend-1.18.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c023539ca6de99234cf5102c3ec0d6af817f5396fc93028a22ba5b834a35b8a", size = 2159245, upload-time = "2026-01-27T23:07:32.664Z" }, { url = "https://files.pythonhosted.org/packages/aa/1f/751a5a8cfdc95fb4dc556192d37369ae488c30c473fe9a3ec720b23d07ea/nvidia_cudnn_frontend-1.18.0-cp312-cp312-win_amd64.whl", hash = "sha256:e13f7dd46cdb4762dde87f181f06d1c5e15e9478bbdd547bfa74d9b11f415aae", size = 1591041, upload-time = "2026-01-27T23:09:04.118Z" }, @@ -4407,6 +4834,8 @@ dependencies = [ { name = "typing-extensions" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/08/1b1481b382f0bfddb91fe19c425dae7ffcb0dacb19a60d4fa490f19cabdf/nvidia_cutlass_dsl_libs_base-4.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = 
"sha256:18249a0c13a7b7fe08fbf600ce38a871538067cfe7b20ef2bc131a5902a67377", size = 75457259, upload-time = "2026-02-14T03:44:48.408Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2f/4d525af7805a7cf04f25efd9900d9acca1d6a8973f436b6058dfec5b545f/nvidia_cutlass_dsl_libs_base-4.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c09ee076f2b61ba26523686f550a2c642a35ec178861a5e0a38f2979ad515604", size = 74345003, upload-time = "2026-02-14T03:46:37.751Z" }, { url = "https://files.pythonhosted.org/packages/33/34/63a1dce4d65cd6fd29b9d50286abbfcdd965c3ca2156cf423eda2ab1fc5d/nvidia_cutlass_dsl_libs_base-4.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9cde72efb065d9bea29a92ca85835eaedec20bf89af22798d2d2a551ccd51731", size = 75458501, upload-time = "2026-02-14T03:45:15.866Z" }, { url = "https://files.pythonhosted.org/packages/cf/ae/5bbd3c9d7909d64a7f139b480c70ff3220554f64775e941c95438265ef1f/nvidia_cutlass_dsl_libs_base-4.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e31a2fcc9854417242ee072c9b8fd1257d5ee422166dfd85eb3f8784fee34dd8", size = 74345995, upload-time = "2026-02-14T03:45:42.9Z" }, { url = "https://files.pythonhosted.org/packages/48/5c/c76ec134e0fbd4ee2f31b32e1fbcb727e7f6323d136a3fc7a8ea3aa3e75d/nvidia_cutlass_dsl_libs_base-4.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ad63fe382b36f69f2a9b51d35e95cbcb240565d06a990e5a19a8eacae49c8b94", size = 75456473, upload-time = "2026-02-14T03:43:51.005Z" }, @@ -4490,6 +4919,8 @@ dependencies = [ { name = "torch" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/46/77/8cda264b262e2868a4e6ebcddaea112200b1e34b8d5a35a2fe3b4978d137/nvidia_resiliency_ext-0.4.1-cp311-cp311-manylinux_2_31_aarch64.whl", hash = "sha256:d8ca454a8b8abef72e0ff0e33914686c263414e8891471c02a9f6af9d2d6b925", size = 443649, upload-time = "2025-07-17T03:49:16.183Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/53/029cc7493b5833cb8dfa201f15a1e422e2e1cc6308d34c5b0a90028a73fd/nvidia_resiliency_ext-0.4.1-cp311-cp311-manylinux_2_31_x86_64.whl", hash = "sha256:dde6034f29350ac6326cdd861ceec641bdd93be0eddbf034739f4cd9452a4dd9", size = 449189, upload-time = "2025-07-17T03:52:15.24Z" }, { url = "https://files.pythonhosted.org/packages/70/05/38d491962273c7905708762279f440520eb79f3c00b67a023497215ad023/nvidia_resiliency_ext-0.4.1-cp312-cp312-manylinux_2_31_aarch64.whl", hash = "sha256:b3bd5f01535574b16d0f38bca6e39afe3806c4a2896eee1b321cd944e00025a7", size = 444570, upload-time = "2025-07-17T03:50:58.877Z" }, { url = "https://files.pythonhosted.org/packages/18/8b/4cb8aa2bbdf3705d3034c3f3dacdadb03b3b7dd3dc7f5200e64663fb477f/nvidia_resiliency_ext-0.4.1-cp312-cp312-manylinux_2_31_x86_64.whl", hash = "sha256:ca9f8de465af345952bedbea53c90c0e2323d88cfd830ded0e806fad91845c0e", size = 450280, upload-time = "2025-07-17T03:49:55.327Z" }, ] @@ -4500,6 +4931,9 @@ version = "0.2.14" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/0e/03/b8a4391523a92163167fd0fee6769c223e8612043cb07aebc1173ca83fc9/nvtx-0.2.14.tar.gz", hash = "sha256:12945242a31bde70b1f15cae867f8706bdff290e2f808a11738e03ebefdf847f", size = 119864, upload-time = "2025-12-01T18:06:16.674Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/87/a6/4d473abd7c07a6d1060c0f708e21ddf46a960258532ffc897681db5c0f46/nvtx-0.2.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:227f6406d2fe1a4b890be17eb1f4c1f5bd4df8f7032dd1cb8c7651d379f35541", size = 732764, upload-time = "2025-11-27T17:26:21.853Z" }, + { url = "https://files.pythonhosted.org/packages/94/06/3ab72e5a463af1b95934638cb8377e99f58e5ef21a47cbf69b92267d6602/nvtx-0.2.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0664aa75b24e2ad0abdd0fa52c49e9c8a120652f2194289c85dc2d93cbc6017f", size = 724555, upload-time = 
"2025-11-27T17:22:36.402Z" }, + { url = "https://files.pythonhosted.org/packages/18/1d/64f6078a5ab4134af91ba294035ee1ebb3512edaaa9d60d8f0f023178620/nvtx-0.2.14-cp311-cp311-win_amd64.whl", hash = "sha256:10f5971661d61c1a90cd36c3069240452c904ecec4b3a08d0d6fdba1e5398165", size = 119660, upload-time = "2025-11-27T17:32:30.406Z" }, { url = "https://files.pythonhosted.org/packages/8a/de/2cc15bb805b1b18317b60837b853ed023757730d0db82de291635fc88bc3/nvtx-0.2.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ece46f555e725db879df06549980744f89db5923a77e6f7a5aecda75292421a", size = 727708, upload-time = "2025-11-27T17:25:20.836Z" }, { url = "https://files.pythonhosted.org/packages/81/94/b37d634fef8677ce525b5bfd2886737ea2c064bc3576fc84423973ff5b97/nvtx-0.2.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17efe5d903996bceb0c8a12cae80fa9b66bee7ee895923bd9d8ec2a5af1aabd8", size = 737691, upload-time = "2025-11-27T17:21:27.87Z" }, { url = "https://files.pythonhosted.org/packages/ad/c1/f633aa32003050ff83626a19402f03c83990a15b4df658a7bf1b590ee83e/nvtx-0.2.14-cp312-cp312-win_amd64.whl", hash = "sha256:f40db4746714d525d3020c702a0df866c2335efd6a27c41e869e577402a53a4b", size = 119193, upload-time = "2025-11-27T17:31:42.943Z" }, @@ -4551,6 +4985,12 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/3b/8a/335c03a8683a88a32f9a6bb98899ea6df241a41df64b37b9696772414794/onnx-1.20.1.tar.gz", hash = "sha256:ded16de1df563d51fbc1ad885f2a426f814039d8b5f4feb77febe09c0295ad67", size = 12048980, upload-time = "2026-01-10T01:40:03.043Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/38/1a0e74d586c08833404100f5c052f92732fb5be417c0b2d7cb0838443bfe/onnx-1.20.1-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:53426e1b458641e7a537e9f176330012ff59d90206cac1c1a9d03cdd73ed3095", size = 17904965, upload-time = "2026-01-10T01:39:13.532Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/25/64b076e9684d17335f80b15b3bf502f7a8e1a89f08a6b208d4f2861b3011/onnx-1.20.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ca7281f8c576adf396c338cf43fff26faee8d4d2e2577b8e73738f37ceccf945", size = 17415179, upload-time = "2026-01-10T01:39:16.516Z" }, + { url = "https://files.pythonhosted.org/packages/ac/d5/6743b409421ced20ad5af1b3a7b4c4e568689ffaca86db431692fca409a6/onnx-1.20.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2297f428c51c7fc6d8fad0cf34384284dfeff3f86799f8e83ef905451348ade0", size = 17513672, upload-time = "2026-01-10T01:39:19.35Z" }, + { url = "https://files.pythonhosted.org/packages/9a/6b/dae82e6fdb2043302f29adca37522312ea2be55b75907b59be06fbdffe87/onnx-1.20.1-cp311-cp311-win32.whl", hash = "sha256:63d9cbcab8c96841eadeb7c930e07bfab4dde8081eb76fb68e0dfb222706b81e", size = 16239336, upload-time = "2026-01-10T01:39:22.506Z" }, + { url = "https://files.pythonhosted.org/packages/8e/17/a0d7863390c1f2067d7c02dcc1477034965c32aaa1407bfcf775305ffee4/onnx-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:d78cde72d7ca8356a2d99c5dc0dbf67264254828cae2c5780184486c0cd7b3bf", size = 16392120, upload-time = "2026-01-10T01:39:25.106Z" }, + { url = "https://files.pythonhosted.org/packages/aa/72/9b879a46eb7a3322223791f36bf9c25d95da9ed93779eabb75a560f22e5b/onnx-1.20.1-cp311-cp311-win_arm64.whl", hash = "sha256:0104bb2d4394c179bcea3df7599a45a2932b80f4633840896fcf0d7d8daecea2", size = 16346923, upload-time = "2026-01-10T01:39:27.782Z" }, { url = "https://files.pythonhosted.org/packages/7c/4c/4b17e82f91ab9aa07ff595771e935ca73547b035030dc5f5a76e63fbfea9/onnx-1.20.1-cp312-abi3-macosx_12_0_universal2.whl", hash = "sha256:1d923bb4f0ce1b24c6859222a7e6b2f123e7bfe7623683662805f2e7b9e95af2", size = 17903547, upload-time = "2026-01-10T01:39:31.015Z" }, { url = 
"https://files.pythonhosted.org/packages/64/5e/1bfa100a9cb3f2d3d5f2f05f52f7e60323b0e20bb0abace1ae64dbc88f25/onnx-1.20.1-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ddc0b7d8b5a94627dc86c533d5e415af94cbfd103019a582669dad1f56d30281", size = 17412021, upload-time = "2026-01-10T01:39:33.885Z" }, { url = "https://files.pythonhosted.org/packages/fb/71/d3fec0dcf9a7a99e7368112d9c765154e81da70fcba1e3121131a45c245b/onnx-1.20.1-cp312-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9336b6b8e6efcf5c490a845f6afd7e041c89a56199aeda384ed7d58fb953b080", size = 17510450, upload-time = "2026-01-10T01:39:36.589Z" }, @@ -4945,6 +5385,21 @@ version = "3.11.7" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/53/45/b268004f745ede84e5798b48ee12b05129d19235d0e15267aa57dcdb400b/orjson-3.11.7.tar.gz", hash = "sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49", size = 6144992, upload-time = "2026-02-02T15:38:49.29Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/37/02/da6cb01fc6087048d7f61522c327edf4250f1683a58a839fdcc435746dd5/orjson-3.11.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9487abc2c2086e7c8eb9a211d2ce8855bae0e92586279d0d27b341d5ad76c85c", size = 228664, upload-time = "2026-02-02T15:37:25.542Z" }, + { url = "https://files.pythonhosted.org/packages/c1/c2/5885e7a5881dba9a9af51bc564e8967225a642b3e03d089289a35054e749/orjson-3.11.7-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:79cacb0b52f6004caf92405a7e1f11e6e2de8bdf9019e4f76b44ba045125cd6b", size = 125344, upload-time = "2026-02-02T15:37:26.92Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1d/4e7688de0a92d1caf600dfd5fb70b4c5bfff51dfa61ac555072ef2d0d32a/orjson-3.11.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2e85fe4698b6a56d5e2ebf7ae87544d668eb6bde1ad1226c13f44663f20ec9e", size = 
128404, upload-time = "2026-02-02T15:37:28.108Z" }, + { url = "https://files.pythonhosted.org/packages/2f/b2/ec04b74ae03a125db7bd69cffd014b227b7f341e3261bf75b5eb88a1aa92/orjson-3.11.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8d14b71c0b12963fe8a62aac87119f1afdf4cb88a400f61ca5ae581449efcb5", size = 123677, upload-time = "2026-02-02T15:37:30.287Z" }, + { url = "https://files.pythonhosted.org/packages/4c/69/f95bdf960605f08f827f6e3291fe243d8aa9c5c9ff017a8d7232209184c3/orjson-3.11.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91c81ef070c8f3220054115e1ef468b1c9ce8497b4e526cb9f68ab4dc0a7ac62", size = 128950, upload-time = "2026-02-02T15:37:31.595Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1b/de59c57bae1d148ef298852abd31909ac3089cff370dfd4cd84cc99cbc42/orjson-3.11.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:411ebaf34d735e25e358a6d9e7978954a9c9d58cfb47bc6683cdc3964cd2f910", size = 141756, upload-time = "2026-02-02T15:37:32.985Z" }, + { url = "https://files.pythonhosted.org/packages/ee/9e/9decc59f4499f695f65c650f6cfa6cd4c37a3fbe8fa235a0a3614cb54386/orjson-3.11.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a16bcd08ab0bcdfc7e8801d9c4a9cc17e58418e4d48ddc6ded4e9e4b1a94062b", size = 130812, upload-time = "2026-02-02T15:37:34.204Z" }, + { url = "https://files.pythonhosted.org/packages/28/e6/59f932bcabd1eac44e334fe8e3281a92eacfcb450586e1f4bde0423728d8/orjson-3.11.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c0b51672e466fd7e56230ffbae7f1639e18d0ce023351fb75da21b71bc2c960", size = 133444, upload-time = "2026-02-02T15:37:35.446Z" }, + { url = "https://files.pythonhosted.org/packages/f1/36/b0f05c0eaa7ca30bc965e37e6a2956b0d67adb87a9872942d3568da846ae/orjson-3.11.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:136dcd6a2e796dfd9ffca9fc027d778567b0b7c9968d092842d3c323cef88aa8", size = 138609, upload-time 
= "2026-02-02T15:37:36.657Z" }, + { url = "https://files.pythonhosted.org/packages/b8/03/58ec7d302b8d86944c60c7b4b82975d5161fcce4c9bc8c6cb1d6741b6115/orjson-3.11.7-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:7ba61079379b0ae29e117db13bda5f28d939766e410d321ec1624afc6a0b0504", size = 408918, upload-time = "2026-02-02T15:37:38.076Z" }, + { url = "https://files.pythonhosted.org/packages/06/3a/868d65ef9a8b99be723bd510de491349618abd9f62c826cf206d962db295/orjson-3.11.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0527a4510c300e3b406591b0ba69b5dc50031895b0a93743526a3fc45f59d26e", size = 143998, upload-time = "2026-02-02T15:37:39.706Z" }, + { url = "https://files.pythonhosted.org/packages/5b/c7/1e18e1c83afe3349f4f6dc9e14910f0ae5f82eac756d1412ea4018938535/orjson-3.11.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a709e881723c9b18acddcfb8ba357322491ad553e277cf467e1e7e20e2d90561", size = 134802, upload-time = "2026-02-02T15:37:41.002Z" }, + { url = "https://files.pythonhosted.org/packages/d4/0b/ccb7ee1a65b37e8eeb8b267dc953561d72370e85185e459616d4345bab34/orjson-3.11.7-cp311-cp311-win32.whl", hash = "sha256:c43b8b5bab288b6b90dac410cca7e986a4fa747a2e8f94615aea407da706980d", size = 127828, upload-time = "2026-02-02T15:37:42.241Z" }, + { url = "https://files.pythonhosted.org/packages/af/9e/55c776dffda3f381e0f07d010a4f5f3902bf48eaba1bb7684d301acd4924/orjson-3.11.7-cp311-cp311-win_amd64.whl", hash = "sha256:6543001328aa857187f905308a028935864aefe9968af3848401b6fe80dbb471", size = 124941, upload-time = "2026-02-02T15:37:43.444Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8e/424a620fa7d263b880162505fb107ef5e0afaa765b5b06a88312ac291560/orjson-3.11.7-cp311-cp311-win_arm64.whl", hash = "sha256:1ee5cc7160a821dfe14f130bc8e63e7611051f964b463d9e2a3a573204446a4d", size = 126245, upload-time = "2026-02-02T15:37:45.18Z" }, { url = 
"https://files.pythonhosted.org/packages/80/bf/76f4f1665f6983385938f0e2a5d7efa12a58171b8456c252f3bae8a4cf75/orjson-3.11.7-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bd03ea7606833655048dab1a00734a2875e3e86c276e1d772b2a02556f0d895f", size = 228545, upload-time = "2026-02-02T15:37:46.376Z" }, { url = "https://files.pythonhosted.org/packages/79/53/6c72c002cb13b5a978a068add59b25a8bdf2800ac1c9c8ecdb26d6d97064/orjson-3.11.7-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:89e440ebc74ce8ab5c7bc4ce6757b4a6b1041becb127df818f6997b5c71aa60b", size = 125224, upload-time = "2026-02-02T15:37:47.697Z" }, { url = "https://files.pythonhosted.org/packages/2c/83/10e48852865e5dd151bdfe652c06f7da484578ed02c5fca938e3632cb0b8/orjson-3.11.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ede977b5fe5ac91b1dffc0a517ca4542d2ec8a6a4ff7b2652d94f640796342a", size = 128154, upload-time = "2026-02-02T15:37:48.954Z" }, @@ -4998,6 +5453,15 @@ version = "1.12.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/12/0c/f1761e21486942ab9bb6feaebc610fa074f7c5e496e6962dea5873348077/ormsgpack-1.12.2.tar.gz", hash = "sha256:944a2233640273bee67521795a73cf1e959538e0dfb7ac635505010455e53b33", size = 39031, upload-time = "2026-01-18T20:55:28.023Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/08/8b68f24b18e69d92238aa8f258218e6dfeacf4381d9d07ab8df303f524a9/ormsgpack-1.12.2-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:bd5f4bf04c37888e864f08e740c5a573c4017f6fd6e99fa944c5c935fabf2dd9", size = 378266, upload-time = "2026-01-18T20:55:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/29fc13044ecb7c153523ae0a1972269fcd613650d1fa1a9cec1044c6b666/ormsgpack-1.12.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:34d5b28b3570e9fed9a5a76528fc7230c3c76333bc214798958e58e9b79cc18a", size = 203035, upload-time = "2026-01-18T20:55:30.59Z" }, + { url = "https://files.pythonhosted.org/packages/ad/c2/00169fb25dd8f9213f5e8a549dfb73e4d592009ebc85fbbcd3e1dcac575b/ormsgpack-1.12.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3708693412c28f3538fb5a65da93787b6bbab3484f6bc6e935bfb77a62400ae5", size = 210539, upload-time = "2026-01-18T20:55:48.569Z" }, + { url = "https://files.pythonhosted.org/packages/1b/33/543627f323ff3c73091f51d6a20db28a1a33531af30873ea90c5ac95a9b5/ormsgpack-1.12.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43013a3f3e2e902e1d05e72c0f1aeb5bedbb8e09240b51e26792a3c89267e181", size = 212401, upload-time = "2026-01-18T20:56:10.101Z" }, + { url = "https://files.pythonhosted.org/packages/e8/5d/f70e2c3da414f46186659d24745483757bcc9adccb481a6eb93e2b729301/ormsgpack-1.12.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7c8b1667a72cbba74f0ae7ecf3105a5e01304620ed14528b2cb4320679d2869b", size = 387082, upload-time = "2026-01-18T20:56:12.047Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d6/06e8dc920c7903e051f30934d874d4afccc9bb1c09dcaf0bc03a7de4b343/ormsgpack-1.12.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:df6961442140193e517303d0b5d7bc2e20e69a879c2d774316125350c4a76b92", size = 482346, upload-time = "2026-01-18T20:56:05.152Z" }, + { url = "https://files.pythonhosted.org/packages/66/c4/f337ac0905eed9c393ef990c54565cd33644918e0a8031fe48c098c71dbf/ormsgpack-1.12.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c6a4c34ddef109647c769d69be65fa1de7a6022b02ad45546a69b3216573eb4a", size = 425181, upload-time = "2026-01-18T20:55:37.83Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/6d5758fabef3babdf4bbbc453738cc7de9cd3334e4c38dd5737e27b85653/ormsgpack-1.12.2-cp311-cp311-win_amd64.whl", hash = "sha256:73670ed0375ecc303858e3613f407628dd1fca18fe6ac57b7b7ce66cc7bb006c", 
size = 117182, upload-time = "2026-01-18T20:55:31.472Z" }, + { url = "https://files.pythonhosted.org/packages/c4/57/17a15549233c37e7fd054c48fe9207492e06b026dbd872b826a0b5f833b6/ormsgpack-1.12.2-cp311-cp311-win_arm64.whl", hash = "sha256:c2be829954434e33601ae5da328cccce3266b098927ca7a30246a0baec2ce7bd", size = 111464, upload-time = "2026-01-18T20:55:38.811Z" }, { url = "https://files.pythonhosted.org/packages/4c/36/16c4b1921c308a92cef3bf6663226ae283395aa0ff6e154f925c32e91ff5/ormsgpack-1.12.2-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7a29d09b64b9694b588ff2f80e9826bdceb3a2b91523c5beae1fab27d5c940e7", size = 378618, upload-time = "2026-01-18T20:55:50.835Z" }, { url = "https://files.pythonhosted.org/packages/c0/68/468de634079615abf66ed13bb5c34ff71da237213f29294363beeeca5306/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b39e629fd2e1c5b2f46f99778450b59454d1f901bc507963168985e79f09c5d", size = 203186, upload-time = "2026-01-18T20:56:11.163Z" }, { url = "https://files.pythonhosted.org/packages/73/a9/d756e01961442688b7939bacd87ce13bfad7d26ce24f910f6028178b2cc8/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:958dcb270d30a7cb633a45ee62b9444433fa571a752d2ca484efdac07480876e", size = 210738, upload-time = "2026-01-18T20:56:09.181Z" }, @@ -5037,6 +5501,8 @@ version = "0.2.11" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/1a/d3/e04e9145f8f806723dec9b9e5227ad695a3efcd3ced7794cf7c22b15df5e/outlines_core-0.2.11.tar.gz", hash = "sha256:dfce56f717ff5083e54cbcfdb66cad243365437fccbb5509adaa7e31e030f1d8", size = 197263, upload-time = "2025-05-19T10:12:51.719Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/db/32c6e1170f139420e948fdd18a09a6175244bc0760dcf4dc2470e18411b9/outlines_core-0.2.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:132605b8dd1e3d1369da6a851992dd357f6376068292f6bd47caa7a28b794d19", size = 2289078, upload-time = "2025-05-19T10:12:12.118Z" }, + { url = "https://files.pythonhosted.org/packages/25/c3/b6e6f4e08fa84d2424f82705a6dc47fee33cb91989010fa678736957dcf6/outlines_core-0.2.11-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:b31d5fc83b78aad282dd667b8d6e684614481fe08a7609ce0ce45dee64cd2991", size = 2115075, upload-time = "2025-05-19T10:12:13.761Z" }, { url = "https://files.pythonhosted.org/packages/92/c7/a65d1fddf49830ebc41422294eacde35286d9f68994a8aa905cb14f5aade/outlines_core-0.2.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86df9740368866295077346440d911df4972da2b3f1f54b8125e6f329e8a8891", size = 2287677, upload-time = "2025-05-19T10:12:24.24Z" }, { url = "https://files.pythonhosted.org/packages/23/79/8795aed8be9b77dd69d78e7cfbfcf28c179e6b08da6e56bbbf48a09fe55f/outlines_core-0.2.11-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:96ce4dd78f106799be4a0a5795cefd1352806162973756a4b6fce4bb6eddd7e4", size = 2113000, upload-time = "2025-05-19T10:12:25.446Z" }, { url = "https://files.pythonhosted.org/packages/87/96/7dcdc5198844145ab35528f9f93a58c3d47b87e54d0f79357c631d7b7a9a/outlines_core-0.2.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:daef6eaaf8c3403455ab5cbf265cb5c6838df571eb7c4b23cddac19cfc701726", size = 2287320, upload-time = "2025-05-19T10:12:35.515Z" }, @@ -5063,6 +5529,14 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/2e/0c/b28ed414f080ee0ad153f848586d61d1878f91689950f037f976ce15f6c8/pandas-3.0.1.tar.gz", hash = "sha256:4186a699674af418f655dbd420ed87f50d56b4cd6603784279d9eef6627823c8", size = 4641901, upload-time = "2026-02-17T22:20:16.434Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/07/c7087e003ceee9b9a82539b40414ec557aa795b584a1a346e89180853d79/pandas-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:de09668c1bf3b925c07e5762291602f0d789eca1b3a781f99c1c78f6cac0e7ea", size = 10323380, upload-time = "2026-02-17T22:18:16.133Z" }, + { url = "https://files.pythonhosted.org/packages/c1/27/90683c7122febeefe84a56f2cde86a9f05f68d53885cebcc473298dfc33e/pandas-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24ba315ba3d6e5806063ac6eb717504e499ce30bd8c236d8693a5fd3f084c796", size = 9923455, upload-time = "2026-02-17T22:18:19.13Z" }, + { url = "https://files.pythonhosted.org/packages/0e/f1/ed17d927f9950643bc7631aa4c99ff0cc83a37864470bc419345b656a41f/pandas-3.0.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:406ce835c55bac912f2a0dcfaf27c06d73c6b04a5dde45f1fd3169ce31337389", size = 10753464, upload-time = "2026-02-17T22:18:21.134Z" }, + { url = "https://files.pythonhosted.org/packages/2e/7c/870c7e7daec2a6c7ff2ac9e33b23317230d4e4e954b35112759ea4a924a7/pandas-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:830994d7e1f31dd7e790045235605ab61cff6c94defc774547e8b7fdfbff3dc7", size = 11255234, upload-time = "2026-02-17T22:18:24.175Z" }, + { url = "https://files.pythonhosted.org/packages/5c/39/3653fe59af68606282b989c23d1a543ceba6e8099cbcc5f1d506a7bae2aa/pandas-3.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a64ce8b0f2de1d2efd2ae40b0abe7f8ae6b29fbfb3812098ed5a6f8e235ad9bf", size = 11767299, upload-time = "2026-02-17T22:18:26.824Z" }, + { url = "https://files.pythonhosted.org/packages/9b/31/1daf3c0c94a849c7a8dab8a69697b36d313b229918002ba3e409265c7888/pandas-3.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9832c2c69da24b602c32e0c7b1b508a03949c18ba08d4d9f1c1033426685b447", size = 12333292, upload-time = "2026-02-17T22:18:28.996Z" }, + { url = "https://files.pythonhosted.org/packages/1f/67/af63f83cd6ca603a00fe8530c10a60f0879265b8be00b5930e8e78c5b30b/pandas-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:84f0904a69e7365f79a0c77d3cdfccbfb05bf87847e3a51a41e1426b0edb9c79", size = 
9892176, upload-time = "2026-02-17T22:18:31.79Z" }, + { url = "https://files.pythonhosted.org/packages/79/ab/9c776b14ac4b7b4140788eca18468ea39894bc7340a408f1d1e379856a6b/pandas-3.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:4a68773d5a778afb31d12e34f7dd4612ab90de8c6fb1d8ffe5d4a03b955082a1", size = 9151328, upload-time = "2026-02-17T22:18:35.721Z" }, { url = "https://files.pythonhosted.org/packages/37/51/b467209c08dae2c624873d7491ea47d2b47336e5403309d433ea79c38571/pandas-3.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:476f84f8c20c9f5bc47252b66b4bb25e1a9fc2fa98cead96744d8116cb85771d", size = 10344357, upload-time = "2026-02-17T22:18:38.262Z" }, { url = "https://files.pythonhosted.org/packages/7c/f1/e2567ffc8951ab371db2e40b2fe068e36b81d8cf3260f06ae508700e5504/pandas-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ab749dfba921edf641d4036c4c21c0b3ea70fea478165cb98a998fb2a261955", size = 9884543, upload-time = "2026-02-17T22:18:41.476Z" }, { url = "https://files.pythonhosted.org/packages/d7/39/327802e0b6d693182403c144edacbc27eb82907b57062f23ef5a4c4a5ea7/pandas-3.0.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8e36891080b87823aff3640c78649b91b8ff6eea3c0d70aeabd72ea43ab069b", size = 10396030, upload-time = "2026-02-17T22:18:43.822Z" }, @@ -5199,6 +5673,16 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/cb/72/9a51afa0a822b09e286c4cb827ed7b00bc818dac7bd11a5f161e493a217d/pendulum-3.2.0.tar.gz", hash = "sha256:e80feda2d10fa3ff8b1526715f7d33dcb7e08494b3088f2c8a3ac92d4a4331ce", size = 86912, upload-time = "2026-01-30T11:22:24.093Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/27/a4be6ec12161b503dd036f8d7cc57f8626170ae31bb298038be9af0001ce/pendulum-3.2.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5d775cc608c909ad415c8e789c84a9f120bb6a794c4215b2d8d910893cf0ec6a", size = 337923, upload-time = "2026-01-30T11:20:51.61Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/e1/2a214e18355ec2a6ce3f683a97eecdb6050866ff3a6cf165d411450aeb1b/pendulum-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8de794a7f665aebc8c1ba4dd4b05ab8fe1a36ce9c0498366adf1d1edd79b2686", size = 327379, upload-time = "2026-01-30T11:20:53.085Z" }, + { url = "https://files.pythonhosted.org/packages/9d/01/7392e58ebc1d9e70b987dc8bb0c89710b47ac8125067efe7aa4c420b616f/pendulum-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bac7df7696e1c942e17c0556b3a7bcdd1d7aa5b24faee7620cb071e754a0622", size = 340115, upload-time = "2026-01-30T11:20:54.635Z" }, + { url = "https://files.pythonhosted.org/packages/ef/33/80de84c5ca1a3e4f7f3b75090c9b61b6dbb6d095e302ee592cebbaf0bbfb/pendulum-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db0f6a8a04475d9cba26ce701e7d66d266fd97227f2f5f499270eba04be1c7e9", size = 373969, upload-time = "2026-01-30T11:20:56.209Z" }, + { url = "https://files.pythonhosted.org/packages/75/e4/f7b4c1818927ab394a2a0a9b7011f360a0a75839a22678833c5bc0a84183/pendulum-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c352c63c1ff05f2198409b28498d7158547a8be23e1fbd4aa2cf5402fb239b55", size = 379058, upload-time = "2026-01-30T11:20:57.618Z" }, + { url = "https://files.pythonhosted.org/packages/36/94/9947cf710620afcc68751683f2f8de88d902505e7c13c0349d7e9d362f97/pendulum-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de8c1ad1d1aa7d4ceae341528bab35a0f8c88a5aa63f2f5d84e16b517d1b32c2", size = 348403, upload-time = "2026-01-30T11:20:59.56Z" }, + { url = "https://files.pythonhosted.org/packages/6f/12/0e6ba0bb00fa57907af2a3fca8643bded5dba1e87072d50673776a0d6ed2/pendulum-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1ba955511c12fec2252038b0c866c25c0c30b720bf74d3023710f121e42b1498", size = 517457, upload-time = "2026-01-30T11:21:01.602Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/fe/dae5fbfe67bd41d943def0ad8f1e7f6988aa8e527255e433cd7c494f9ad5/pendulum-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4115bf364a2ec6d5ddc476751ceaa4164a04f2c15589f0d29aa210ddb784b15d", size = 561103, upload-time = "2026-01-30T11:21:03.924Z" }, + { url = "https://files.pythonhosted.org/packages/ce/a0/8f646160b98abfc19152505af19bd643a4279ec2bdbe0959f16b7025fc6b/pendulum-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:4151a903356413fdd9549de0997b708fb95a214ed97803ffb479ffd834088378", size = 260595, upload-time = "2026-01-30T11:21:05.495Z" }, + { url = "https://files.pythonhosted.org/packages/79/01/feead7af9ded7a13f2d798fb6573e70f469113eafcd8cc8f59671584ca3e/pendulum-3.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:acfdee9ddc56053cb7c8c075afbfde0857322d09e56a56195b9cd127fae87e4c", size = 255382, upload-time = "2026-01-30T11:21:06.847Z" }, { url = "https://files.pythonhosted.org/packages/41/56/dd0ea9f97d25a0763cda09e2217563b45714786118d8c68b0b745395d6eb/pendulum-3.2.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:bf0b489def51202a39a2a665dcc4162d5e46934a740fe4c4fe3068979610156c", size = 337830, upload-time = "2026-01-30T11:21:08.298Z" }, { url = "https://files.pythonhosted.org/packages/cf/98/83d62899bf7226fc12396de4bc1fb2b5da27e451c7c60790043aaf8b4731/pendulum-3.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:937a529aa302efa18dcf25e53834964a87ffb2df8f80e3669ab7757a6126beaf", size = 327574, upload-time = "2026-01-30T11:21:09.715Z" }, { url = "https://files.pythonhosted.org/packages/76/fa/ff2aa992b23f0543c709b1a3f3f9ed760ec71fd02c8bb01f93bf008b52e4/pendulum-3.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85c7689defc65c4dc29bf257f7cca55d210fabb455de9476e1748d2ab2ae80d7", size = 339891, upload-time = "2026-01-30T11:21:11.089Z" }, @@ -5229,6 +5713,13 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/c9/37/b4f2b5f1200351c4869b8b46ad5c21019e3dbe0417f5867ae969fad7b5fe/pendulum-3.2.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:a50d8cf42f06d3d8c3f8bb2a7ac47fa93b5145e69de6a7209be6a47afdd9cf76", size = 561926, upload-time = "2026-01-30T11:21:51.698Z" }, { url = "https://files.pythonhosted.org/packages/a0/9e/567376582da58f5fe8e4f579db2bcfbf243cf619a5825bdf1023ad1436b3/pendulum-3.2.0-cp314-cp314-win_amd64.whl", hash = "sha256:e5bbb92b155cd5018b3cf70ee49ed3b9c94398caaaa7ed97fe41e5bb5a968418", size = 258817, upload-time = "2026-01-30T11:21:53.074Z" }, { url = "https://files.pythonhosted.org/packages/95/67/dfffd7eb50d67fa821cd4d92cf71575ead6162930202bc40dfcedf78c38c/pendulum-3.2.0-cp314-cp314-win_arm64.whl", hash = "sha256:d53134418e04335c3029a32e9341cccc9b085a28744fb5ee4e6a8f5039363b1a", size = 253292, upload-time = "2026-01-30T11:21:54.484Z" }, + { url = "https://files.pythonhosted.org/packages/c9/0d/d5ac8468a1b40f09a62d6e91654088de432367907579dd161c0fb1bdf222/pendulum-3.2.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9585594d32faa71efa5a78f576f1ee4f79e9c5340d7c6f0cd6c5dfe725effaaa", size = 338760, upload-time = "2026-01-30T11:22:12.225Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e5/7fa8c8be6caac8e0be78fbe7668df571f44820ed779cb3736fab645fcba8/pendulum-3.2.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:26401e2de77c437e8f3b6160c08c6c5d45518d906f8f9b48fd7cb5aa0f4e2aff", size = 328333, upload-time = "2026-01-30T11:22:13.811Z" }, + { url = "https://files.pythonhosted.org/packages/ad/78/73a1031b7d1bf7986e8e655cea3f018164b3470aecfea25a4074e77dda73/pendulum-3.2.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:637e65af042f383a2764a886aa28ccc6f853bf7a142df18e41c720542934c13b", size = 340841, upload-time = "2026-01-30T11:22:15.278Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/40/4e36e9074e92b0164c088b9ada3c02bfea386d83e24fa98b30fe9b6e61a8/pendulum-3.2.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6e46c28f4d067233c4a4c42748f4ffa641d9289c09e0e81488beb6d4b3fab51", size = 348959, upload-time = "2026-01-30T11:22:16.718Z" }, + { url = "https://files.pythonhosted.org/packages/24/99/8bf7fcb91b526e1efe17d047faa845709b88800fff915ff848ff26054293/pendulum-3.2.0-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:71d46bcc86269f97bfd8c5f1475d55e717696a0a010b1871023605ca94624031", size = 518102, upload-time = "2026-01-30T11:22:18.2Z" }, + { url = "https://files.pythonhosted.org/packages/b8/b0/a36c468d2d0dec62ddea7c5e4177e93abb12f48ac90f09f24d0581c5189f/pendulum-3.2.0-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5cd956d4176afc7bfe8a91bf3f771b46ff8d326f6c5bf778eb5010eb742ebba6", size = 561884, upload-time = "2026-01-30T11:22:19.671Z" }, + { url = "https://files.pythonhosted.org/packages/c5/4d/dad105261898907bf806cabca53d3878529a9fa2c0d5d7f95f2035246fc2/pendulum-3.2.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:39ef129d7b90aab49708645867abdd207b714ba7bff12dae549975b0aca09716", size = 261236, upload-time = "2026-01-30T11:22:21.059Z" }, { url = "https://files.pythonhosted.org/packages/02/fb/d65db067a67df7252f18b0cb7420dda84078b9e8bfb375215469c14a50be/pendulum-3.2.0-py3-none-any.whl", hash = "sha256:f3a9c18a89b4d9ef39c5fa6a78722aaff8d5be2597c129a3b16b9f40a561acf3", size = 114111, upload-time = "2026-01-30T11:22:22.361Z" }, ] @@ -5259,6 +5750,17 @@ version = "12.1.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/1f/42/5c74462b4fd957fcd7b13b04fb3205ff8349236ea74c7c375766d6c82288/pillow-12.1.1.tar.gz", hash = "sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4", size = 46980264, upload-time = "2026-02-11T04:23:07.146Z" } wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2b/46/5da1ec4a5171ee7bf1a0efa064aba70ba3d6e0788ce3f5acd1375d23c8c0/pillow-12.1.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:e879bb6cd5c73848ef3b2b48b8af9ff08c5b71ecda8048b7dd22d8a33f60be32", size = 5304084, upload-time = "2026-02-11T04:20:27.501Z" }, + { url = "https://files.pythonhosted.org/packages/78/93/a29e9bc02d1cf557a834da780ceccd54e02421627200696fcf805ebdc3fb/pillow-12.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:365b10bb9417dd4498c0e3b128018c4a624dc11c7b97d8cc54effe3b096f4c38", size = 4657866, upload-time = "2026-02-11T04:20:29.827Z" }, + { url = "https://files.pythonhosted.org/packages/13/84/583a4558d492a179d31e4aae32eadce94b9acf49c0337c4ce0b70e0a01f2/pillow-12.1.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d4ce8e329c93845720cd2014659ca67eac35f6433fd3050393d85f3ecef0dad5", size = 6232148, upload-time = "2026-02-11T04:20:31.329Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e2/53c43334bbbb2d3b938978532fbda8e62bb6e0b23a26ce8592f36bcc4987/pillow-12.1.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc354a04072b765eccf2204f588a7a532c9511e8b9c7f900e1b64e3e33487090", size = 8038007, upload-time = "2026-02-11T04:20:34.225Z" }, + { url = "https://files.pythonhosted.org/packages/b8/a6/3d0e79c8a9d58150dd98e199d7c1c56861027f3829a3a60b3c2784190180/pillow-12.1.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7e7976bf1910a8116b523b9f9f58bf410f3e8aa330cd9a2bb2953f9266ab49af", size = 6345418, upload-time = "2026-02-11T04:20:35.858Z" }, + { url = "https://files.pythonhosted.org/packages/a2/c8/46dfeac5825e600579157eea177be43e2f7ff4a99da9d0d0a49533509ac5/pillow-12.1.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:597bd9c8419bc7c6af5604e55847789b69123bbe25d65cc6ad3012b4f3c98d8b", size = 7034590, upload-time = "2026-02-11T04:20:37.91Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/bf/e6f65d3db8a8bbfeaf9e13cc0417813f6319863a73de934f14b2229ada18/pillow-12.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2c1fc0f2ca5f96a3c8407e41cca26a16e46b21060fe6d5b099d2cb01412222f5", size = 6458655, upload-time = "2026-02-11T04:20:39.496Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c2/66091f3f34a25894ca129362e510b956ef26f8fb67a0e6417bc5744e56f1/pillow-12.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:578510d88c6229d735855e1f278aa305270438d36a05031dfaae5067cc8eb04d", size = 7159286, upload-time = "2026-02-11T04:20:41.139Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5a/24bc8eb526a22f957d0cec6243146744966d40857e3d8deb68f7902ca6c1/pillow-12.1.1-cp311-cp311-win32.whl", hash = "sha256:7311c0a0dcadb89b36b7025dfd8326ecfa36964e29913074d47382706e516a7c", size = 6328663, upload-time = "2026-02-11T04:20:43.184Z" }, + { url = "https://files.pythonhosted.org/packages/31/03/bef822e4f2d8f9d7448c133d0a18185d3cce3e70472774fffefe8b0ed562/pillow-12.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:fbfa2a7c10cc2623f412753cddf391c7f971c52ca40a3f65dc5039b2939e8563", size = 7031448, upload-time = "2026-02-11T04:20:44.696Z" }, + { url = "https://files.pythonhosted.org/packages/49/70/f76296f53610bd17b2e7d31728b8b7825e3ac3b5b3688b51f52eab7c0818/pillow-12.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:b81b5e3511211631b3f672a595e3221252c90af017e399056d0faabb9538aa80", size = 2453651, upload-time = "2026-02-11T04:20:46.243Z" }, { url = "https://files.pythonhosted.org/packages/07/d3/8df65da0d4df36b094351dce696f2989bec731d4f10e743b1c5f4da4d3bf/pillow-12.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab323b787d6e18b3d91a72fc99b1a2c28651e4358749842b8f8dfacd28ef2052", size = 5262803, upload-time = "2026-02-11T04:20:47.653Z" }, { url = "https://files.pythonhosted.org/packages/d6/71/5026395b290ff404b836e636f51d7297e6c83beceaa87c592718747e670f/pillow-12.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash 
= "sha256:adebb5bee0f0af4909c30db0d890c773d1a92ffe83da908e2e9e720f8edf3984", size = 4657601, upload-time = "2026-02-11T04:20:49.328Z" }, { url = "https://files.pythonhosted.org/packages/b1/2e/1001613d941c67442f745aff0f7cc66dd8df9a9c084eb497e6a543ee6f7e/pillow-12.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb66b7cc26f50977108790e2456b7921e773f23db5630261102233eb355a3b79", size = 6234995, upload-time = "2026-02-11T04:20:51.032Z" }, @@ -5320,6 +5822,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/01/4a/9202e8d11714c1fc5951f2e1ef362f2d7fbc595e1f6717971d5dd750e969/pillow-12.1.1-cp314-cp314t-win32.whl", hash = "sha256:d2912fd8114fc5545aa3a4b5576512f64c55a03f3ebcca4c10194d593d43ea36", size = 6438736, upload-time = "2026-02-11T04:22:46.347Z" }, { url = "https://files.pythonhosted.org/packages/f3/ca/cbce2327eb9885476b3957b2e82eb12c866a8b16ad77392864ad601022ce/pillow-12.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:4ceb838d4bd9dab43e06c363cab2eebf63846d6a4aeaea283bbdfd8f1a8ed58b", size = 7182894, upload-time = "2026-02-11T04:22:48.114Z" }, { url = "https://files.pythonhosted.org/packages/ec/d2/de599c95ba0a973b94410477f8bf0b6f0b5e67360eb89bcb1ad365258beb/pillow-12.1.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7b03048319bfc6170e93bd60728a1af51d3dd7704935feb228c4d4faab35d334", size = 2546446, upload-time = "2026-02-11T04:22:50.342Z" }, + { url = "https://files.pythonhosted.org/packages/56/11/5d43209aa4cb58e0cc80127956ff1796a68b928e6324bbf06ef4db34367b/pillow-12.1.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:600fd103672b925fe62ed08e0d874ea34d692474df6f4bf7ebe148b30f89f39f", size = 5228606, upload-time = "2026-02-11T04:22:52.106Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d5/3b005b4e4fda6698b371fa6c21b097d4707585d7db99e98d9b0b87ac612a/pillow-12.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:665e1b916b043cef294bc54d47bf02d87e13f769bc4bc5fa225a24b3a6c5aca9", size = 4622321, 
upload-time = "2026-02-11T04:22:53.827Z" }, + { url = "https://files.pythonhosted.org/packages/df/36/ed3ea2d594356fd8037e5a01f6156c74bc8d92dbb0fa60746cc96cabb6e8/pillow-12.1.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:495c302af3aad1ca67420ddd5c7bd480c8867ad173528767d906428057a11f0e", size = 5247579, upload-time = "2026-02-11T04:22:56.094Z" }, + { url = "https://files.pythonhosted.org/packages/54/9a/9cc3e029683cf6d20ae5085da0dafc63148e3252c2f13328e553aaa13cfb/pillow-12.1.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8fd420ef0c52c88b5a035a0886f367748c72147b2b8f384c9d12656678dfdfa9", size = 6989094, upload-time = "2026-02-11T04:22:58.288Z" }, + { url = "https://files.pythonhosted.org/packages/00/98/fc53ab36da80b88df0967896b6c4b4cd948a0dc5aa40a754266aa3ae48b3/pillow-12.1.1-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f975aa7ef9684ce7e2c18a3aa8f8e2106ce1e46b94ab713d156b2898811651d3", size = 5313850, upload-time = "2026-02-11T04:23:00.554Z" }, + { url = "https://files.pythonhosted.org/packages/30/02/00fa585abfd9fe9d73e5f6e554dc36cc2b842898cbfc46d70353dae227f8/pillow-12.1.1-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8089c852a56c2966cf18835db62d9b34fef7ba74c726ad943928d494fa7f4735", size = 5963343, upload-time = "2026-02-11T04:23:02.934Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/c56ce33ca856e358d27fda9676c055395abddb82c35ac0f593877ed4562e/pillow-12.1.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:cb9bb857b2d057c6dfc72ac5f3b44836924ba15721882ef103cecb40d002d80e", size = 7029880, upload-time = "2026-02-11T04:23:04.783Z" }, ] [[package]] @@ -5478,6 +5987,21 @@ version = "0.4.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = 
"sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, @@ -5616,6 +6140,17 @@ version = "2.9.11" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/ae/8d8266f6dd183ab4d48b95b9674034e1b482a3f8619b33a0d86438694577/psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10", size = 3756452, upload-time = "2025-10-10T11:11:11.583Z" }, + { url = "https://files.pythonhosted.org/packages/4b/34/aa03d327739c1be70e09d01182619aca8ebab5970cd0cfa50dd8b9cec2ac/psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a", size = 3863957, upload-time = "2025-10-10T11:11:16.932Z" }, + { url = "https://files.pythonhosted.org/packages/48/89/3fdb5902bdab8868bbedc1c6e6023a4e08112ceac5db97fc2012060e0c9a/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4", size = 4410955, upload-time = "2025-10-10T11:11:21.21Z" }, + { url = "https://files.pythonhosted.org/packages/ce/24/e18339c407a13c72b336e0d9013fbbbde77b6fd13e853979019a1269519c/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7", size = 4468007, upload-time = "2025-10-10T11:11:24.831Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/7e/b8441e831a0f16c159b5381698f9f7f7ed54b77d57bc9c5f99144cc78232/psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee", size = 4165012, upload-time = "2025-10-10T11:11:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/0d/61/4aa89eeb6d751f05178a13da95516c036e27468c5d4d2509bb1e15341c81/psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb", size = 3981881, upload-time = "2025-10-30T02:55:07.332Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/2f5841cae4c635a9459fe7aca8ed771336e9383b6429e05c01267b0774cf/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f", size = 3650985, upload-time = "2025-10-10T11:11:34.975Z" }, + { url = "https://files.pythonhosted.org/packages/84/74/4defcac9d002bca5709951b975173c8c2fa968e1a95dc713f61b3a8d3b6a/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94", size = 3296039, upload-time = "2025-10-10T11:11:40.432Z" }, + { url = "https://files.pythonhosted.org/packages/6d/c2/782a3c64403d8ce35b5c50e1b684412cf94f171dc18111be8c976abd2de1/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f", size = 3043477, upload-time = "2025-10-30T02:55:11.182Z" }, + { url = "https://files.pythonhosted.org/packages/c8/31/36a1d8e702aa35c38fc117c2b8be3f182613faa25d794b8aeaab948d4c03/psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908", size = 3345842, upload-time = "2025-10-10T11:11:45.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/b4/a5375cda5b54cb95ee9b836930fea30ae5a8f14aa97da7821722323d979b/psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03", size = 2713894, upload-time = "2025-10-10T11:11:48.775Z" }, { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, { url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, @@ -5693,6 +6228,13 @@ version = "23.0.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/88/22/134986a4cc224d593c1afde5494d18ff629393d74cc2eddb176669f234a4/pyarrow-23.0.1.tar.gz", hash = "sha256:b8c5873e33440b2bc2f4a79d2b47017a89c5a24116c055625e6f2ee50523f019", size = 1167336, upload-time = "2026-02-16T10:14:12.39Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/41/8e6b6ef7e225d4ceead8459427a52afdc23379768f54dd3566014d7618c1/pyarrow-23.0.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6f0147ee9e0386f519c952cc670eb4a8b05caa594eeffe01af0e25f699e4e9bb", size = 34302230, upload-time = "2026-02-16T10:09:03.859Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/4a/1472c00392f521fea03ae93408bf445cc7bfa1ab81683faf9bc188e36629/pyarrow-23.0.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:0ae6e17c828455b6265d590100c295193f93cc5675eb0af59e49dbd00d2de350", size = 35850050, upload-time = "2026-02-16T10:09:11.877Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b2/bd1f2f05ded56af7f54d702c8364c9c43cd6abb91b0e9933f3d77b4f4132/pyarrow-23.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:fed7020203e9ef273360b9e45be52a2a47d3103caf156a30ace5247ffb51bdbd", size = 44491918, upload-time = "2026-02-16T10:09:18.144Z" }, + { url = "https://files.pythonhosted.org/packages/0b/62/96459ef5b67957eac38a90f541d1c28833d1b367f014a482cb63f3b7cd2d/pyarrow-23.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:26d50dee49d741ac0e82185033488d28d35be4d763ae6f321f97d1140eb7a0e9", size = 47562811, upload-time = "2026-02-16T10:09:25.792Z" }, + { url = "https://files.pythonhosted.org/packages/7d/94/1170e235add1f5f45a954e26cd0e906e7e74e23392dcb560de471f7366ec/pyarrow-23.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3c30143b17161310f151f4a2bcfe41b5ff744238c1039338779424e38579d701", size = 48183766, upload-time = "2026-02-16T10:09:34.645Z" }, + { url = "https://files.pythonhosted.org/packages/0e/2d/39a42af4570377b99774cdb47f63ee6c7da7616bd55b3d5001aa18edfe4f/pyarrow-23.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db2190fa79c80a23fdd29fef4b8992893f024ae7c17d2f5f4db7171fa30c2c78", size = 50607669, upload-time = "2026-02-16T10:09:44.153Z" }, + { url = "https://files.pythonhosted.org/packages/00/ca/db94101c187f3df742133ac837e93b1f269ebdac49427f8310ee40b6a58f/pyarrow-23.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:f00f993a8179e0e1c9713bcc0baf6d6c01326a406a9c23495ec1ba9c9ebf2919", size = 27527698, upload-time = "2026-02-16T10:09:50.263Z" }, { url = 
"https://files.pythonhosted.org/packages/9a/4b/4166bb5abbfe6f750fc60ad337c43ecf61340fa52ab386da6e8dbf9e63c4/pyarrow-23.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f4b0dbfa124c0bb161f8b5ebb40f1a680b70279aa0c9901d44a2b5a20806039f", size = 34214575, upload-time = "2026-02-16T10:09:56.225Z" }, { url = "https://files.pythonhosted.org/packages/e1/da/3f941e3734ac8088ea588b53e860baeddac8323ea40ce22e3d0baa865cc9/pyarrow-23.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:7707d2b6673f7de054e2e83d59f9e805939038eebe1763fe811ee8fa5c0cd1a7", size = 35832540, upload-time = "2026-02-16T10:10:03.428Z" }, { url = "https://files.pythonhosted.org/packages/88/7c/3d841c366620e906d54430817531b877ba646310296df42ef697308c2705/pyarrow-23.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:86ff03fb9f1a320266e0de855dee4b17da6794c595d207f89bba40d16b5c78b9", size = 44470940, upload-time = "2026-02-16T10:10:10.704Z" }, @@ -5757,6 +6299,20 @@ version = "1.4.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/aa/b8/4ed5c7ad5ec15b08d35cc79ace6145d5c1ae426e46435f4987379439dfea/pybase64-1.4.3.tar.gz", hash = "sha256:c2ed274c9e0ba9c8f9c4083cfe265e66dd679126cd9c2027965d807352f3f053", size = 137272, upload-time = "2025-12-06T13:27:04.013Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/fb/bb06a5b9885e7d853ac1e801c4d8abfdb4c8506deee33e53d55aa6690e67/pybase64-1.4.3-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f9ef0388878bc15a084bd9bf73ec1b2b4ee513d11009b1506375e10a7aae5032", size = 68331, upload-time = "2025-12-06T13:22:54.197Z" }, + { url = "https://files.pythonhosted.org/packages/64/15/8d60b9ec5e658185fc2ee3333e01a6e30d717cf677b24f47cbb3a859d13c/pybase64-1.4.3-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95a57cccf106352a72ed8bc8198f6820b16cc7d55aa3867a16dea7011ae7c218", 
size = 71370, upload-time = "2025-12-06T13:22:55.517Z" }, + { url = "https://files.pythonhosted.org/packages/ac/29/a3e5c1667cc8c38d025a4636855de0fc117fc62e2afeb033a3c6f12c6a22/pybase64-1.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cd1c47dfceb9c7bd3de210fb4e65904053ed2d7c9dce6d107f041ff6fbd7e21", size = 59834, upload-time = "2025-12-06T13:22:56.682Z" }, + { url = "https://files.pythonhosted.org/packages/a9/00/8ffcf9810bd23f3984698be161cf7edba656fd639b818039a7be1d6405d4/pybase64-1.4.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.whl", hash = "sha256:9fe9922698f3e2f72874b26890d53a051c431d942701bb3a37aae94da0b12107", size = 56652, upload-time = "2025-12-06T13:22:57.724Z" }, + { url = "https://files.pythonhosted.org/packages/81/62/379e347797cdea4ab686375945bc77ad8d039c688c0d4d0cfb09d247beb9/pybase64-1.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:af5f4bd29c86b59bb4375e0491d16ec8a67548fa99c54763aaedaf0b4b5a6632", size = 59382, upload-time = "2025-12-06T13:22:58.758Z" }, + { url = "https://files.pythonhosted.org/packages/c6/f2/9338ffe2f487086f26a2c8ca175acb3baa86fce0a756ff5670a0822bb877/pybase64-1.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c302f6ca7465262908131411226e02100f488f531bb5e64cb901aa3f439bccd9", size = 59990, upload-time = "2025-12-06T13:23:01.007Z" }, + { url = "https://files.pythonhosted.org/packages/f9/a4/85a6142b65b4df8625b337727aa81dc199642de3d09677804141df6ee312/pybase64-1.4.3-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:2f3f439fa4d7fde164ebbbb41968db7d66b064450ab6017c6c95cef0afa2b349", size = 54923, upload-time = "2025-12-06T13:23:02.369Z" }, + { url = "https://files.pythonhosted.org/packages/ac/00/e40215d25624012bf5b7416ca37f168cb75f6dd15acdb91ea1f2ea4dc4e7/pybase64-1.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a23c6866551043f8b681a5e1e0d59469148b2920a3b4fc42b1275f25ea4217a", size = 58664, upload-time = 
"2025-12-06T13:23:03.378Z" }, + { url = "https://files.pythonhosted.org/packages/b0/73/d7e19a63e795c13837f2356268d95dc79d1180e756f57ced742a1e52fdeb/pybase64-1.4.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:56e6526f8565642abc5f84338cc131ce298a8ccab696b19bdf76fa6d7dc592ef", size = 52338, upload-time = "2025-12-06T13:23:04.458Z" }, + { url = "https://files.pythonhosted.org/packages/f2/32/3c746d7a310b69bdd9df77ffc85c41b80bce00a774717596f869b0d4a20e/pybase64-1.4.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6a792a8b9d866ffa413c9687d9b611553203753987a3a582d68cbc51cf23da45", size = 68993, upload-time = "2025-12-06T13:23:05.526Z" }, + { url = "https://files.pythonhosted.org/packages/5d/b3/63cec68f9d6f6e4c0b438d14e5f1ef536a5fe63ce14b70733ac5e31d7ab8/pybase64-1.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:62ad29a5026bb22cfcd1ca484ec34b0a5ced56ddba38ceecd9359b2818c9c4f9", size = 58055, upload-time = "2025-12-06T13:23:06.931Z" }, + { url = "https://files.pythonhosted.org/packages/d5/cb/7acf7c3c06f9692093c07f109668725dc37fb9a3df0fa912b50add645195/pybase64-1.4.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:11b9d1d2d32ec358c02214363b8fc3651f6be7dd84d880ecd597a6206a80e121", size = 54430, upload-time = "2025-12-06T13:23:07.936Z" }, + { url = "https://files.pythonhosted.org/packages/33/39/4eb33ff35d173bfff4002e184ce8907f5d0a42d958d61cd9058ef3570179/pybase64-1.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0aebaa7f238caa0a0d373616016e2040c6c879ebce3ba7ab3c59029920f13640", size = 56272, upload-time = "2025-12-06T13:23:09.253Z" }, + { url = "https://files.pythonhosted.org/packages/19/97/a76d65c375a254e65b730c6f56bf528feca91305da32eceab8bcc08591e6/pybase64-1.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e504682b20c63c2b0c000e5f98a80ea867f8d97642e042a5a39818e44ba4d599", size = 70904, upload-time = "2025-12-06T13:23:10.336Z" }, { url = 
"https://files.pythonhosted.org/packages/43/1b/9a8cab0042b464e9a876d5c65fe5127445a2436da36fda64899b119b1a1b/pybase64-1.4.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f0b3f200c3e06316f6bebabd458b4e4bcd4c2ca26af7c0c766614d91968dee27", size = 68210, upload-time = "2025-12-06T13:23:18.813Z" }, { url = "https://files.pythonhosted.org/packages/62/f7/965b79ff391ad208b50e412b5d3205ccce372a2d27b7218ae86d5295b105/pybase64-1.4.3-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb632edfd132b3eaf90c39c89aa314beec4e946e210099b57d40311f704e11d4", size = 71599, upload-time = "2025-12-06T13:23:20.195Z" }, { url = "https://files.pythonhosted.org/packages/03/4b/a3b5175130b3810bbb8ccfa1edaadbd3afddb9992d877c8a1e2f274b476e/pybase64-1.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:356ef1d74648ce997f5a777cf8f1aefecc1c0b4fe6201e0ef3ec8a08170e1b54", size = 59922, upload-time = "2025-12-06T13:23:21.487Z" }, @@ -5833,8 +6389,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bd/b8/f544a2e37c778d59208966d4ef19742a0be37c12fc8149ff34483c176616/pybase64-1.4.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d94020ef09f624d841aa9a3a6029df8cf65d60d7a6d5c8687579fa68bd679b65", size = 58295, upload-time = "2025-12-06T13:25:20.822Z" }, { url = "https://files.pythonhosted.org/packages/03/99/1fae8a3b7ac181e36f6e7864a62d42d5b1f4fa7edf408c6711e28fba6b4d/pybase64-1.4.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:f64ce70d89942a23602dee910dec9b48e5edf94351e1b378186b74fcc00d7f66", size = 60960, upload-time = "2025-12-06T13:25:22.099Z" }, { url = "https://files.pythonhosted.org/packages/9d/9e/cd4c727742345ad8384569a4466f1a1428f4e5cc94d9c2ab2f53d30be3fe/pybase64-1.4.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8ea99f56e45c469818b9781903be86ba4153769f007ba0655fa3b46dc332803d", size = 74863, upload-time = 
"2025-12-06T13:25:23.442Z" }, + { url = "https://files.pythonhosted.org/packages/bf/44/d4b7adc7bf4fd5b52d8d099121760c450a52c390223806b873f0b6a2d551/pybase64-1.4.3-graalpy311-graalpy242_311_native-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a492518f3078a4e3faaef310697d21df9c6bc71908cebc8c2f6fbfa16d7d6b1f", size = 43227, upload-time = "2025-12-06T13:26:21.845Z" }, + { url = "https://files.pythonhosted.org/packages/08/86/2ba2d8734ef7939debeb52cf9952e457ba7aa226cae5c0e6dd631f9b851f/pybase64-1.4.3-graalpy311-graalpy242_311_native-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cae1a0f47784fd16df90d8acc32011c8d5fcdd9ab392c9ec49543e5f6a9c43a4", size = 35804, upload-time = "2025-12-06T13:26:23.149Z" }, { url = "https://files.pythonhosted.org/packages/fa/8f/43c3bb11ca9bacf81cb0b7a71500bb65b2eda6d5fe07433c09b543de97f3/pybase64-1.4.3-graalpy312-graalpy250_312_native-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5c29a582b0ea3936d02bd6fe9bf674ab6059e6e45ab71c78404ab2c913224414", size = 43461, upload-time = "2025-12-06T13:26:28.906Z" }, { url = "https://files.pythonhosted.org/packages/2d/4c/2a5258329200be57497d3972b5308558c6de42e3749c6cc2aa1cbe34b25a/pybase64-1.4.3-graalpy312-graalpy250_312_native-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b6b664758c804fa919b4f1257aa8cf68e95db76fc331de5f70bfc3a34655afe1", size = 36058, upload-time = "2025-12-06T13:26:30.092Z" }, + { url = "https://files.pythonhosted.org/packages/d3/22/832a2f9e76cdf39b52e01e40d8feeb6a04cf105494f2c3e3126d0149717f/pybase64-1.4.3-pp311-pypy311_pp73-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:bd4d2293de9fd212e294c136cec85892460b17d24e8c18a6ba18750928037750", size = 40681, upload-time = "2025-12-06T13:26:43.782Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/d7/6610f34a8972415fab3bb4704c174a1cc477bffbc3c36e526428d0f3957d/pybase64-1.4.3-pp311-pypy311_pp73-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af6d0d3a691911cc4c9a625f3ddcd3af720738c21be3d5c72de05629139d393", size = 41294, upload-time = "2025-12-06T13:26:44.936Z" }, + { url = "https://files.pythonhosted.org/packages/64/25/ed24400948a6c974ab1374a233cb7e8af0a5373cea0dd8a944627d17c34a/pybase64-1.4.3-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5cfc8c49a28322d82242088378f8542ce97459866ba73150b062a7073e82629d", size = 35447, upload-time = "2025-12-06T13:26:46.098Z" }, ] [[package]] @@ -5846,6 +6407,18 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/df/a0/9c823651872e6a0face3f0311de2a40c8bbcb9c8dcb15680bd019ac56ac7/pycares-5.0.1.tar.gz", hash = "sha256:5a3c249c830432631439815f9a818463416f2a8cbdb1e988e78757de9ae75081", size = 652222, upload-time = "2026-01-01T12:37:00.604Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/87/78/43b09f4b8e5fb8a6024661b458b48987abdb39304c78117b106b10a029f1/pycares-5.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c29ca77ff9712e20787201ca8e76ad89384771c0e058a0a4f3dc05afbc4b32de", size = 136177, upload-time = "2026-01-01T12:35:11.567Z" }, + { url = "https://files.pythonhosted.org/packages/19/05/194c0e039ff52b166b50e79ff166c61f931fbca2bf94fc0dbaaf39041518/pycares-5.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f11424bf5cf6226d0b136ed47daa58434e377c61b62d0100d1de7793f8e34a72", size = 130960, upload-time = "2026-01-01T12:35:12.828Z" }, + { url = "https://files.pythonhosted.org/packages/0d/84/5fce65cc058c5ab619c0dd1370d539667235a5565da72ca77f3f741cdc70/pycares-5.0.1-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d765afb52d579879f5c4f005763827d3b1eb86b23139e9614e6089c9f98db017", size = 220584, upload-time = 
"2026-01-01T12:35:14.005Z" }, + { url = "https://files.pythonhosted.org/packages/f6/74/d82304297308f6c24a17961bf589b53eefa5f7f2724158c842c67fa0b302/pycares-5.0.1-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ea0d57ba5add4bfbcc40cbdfa92bbb8a5ef0c4c21881e26c7229d9bdc92a4533", size = 252166, upload-time = "2026-01-01T12:35:15.293Z" }, + { url = "https://files.pythonhosted.org/packages/39/a2/0ead3ba4228a490b52eb44d43514dae172c90421bb30a3659516e5b251a2/pycares-5.0.1-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae9ec2aa3553d33e6220aeb1a05f4853fb83fce4cec3e0dea2dc970338ea47dc", size = 239085, upload-time = "2026-01-01T12:35:16.594Z" }, + { url = "https://files.pythonhosted.org/packages/26/ad/e59f173933f0e696a6afbbd63935114d1400524a72da4f2cbafc6002a398/pycares-5.0.1-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5c63fb2498b05e9f5670a1bf3b900c5d09343b3b6d5001a9714d593f9eb54de1", size = 222936, upload-time = "2026-01-01T12:35:17.521Z" }, + { url = "https://files.pythonhosted.org/packages/98/fa/d85bfe663a9c292efd8e699779027612c0c65ff50dc4cc9eb7a143613460/pycares-5.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:71316f7a87c15a8d32127ff01374dc2c969c37410693cc0cf6532590b7f18e7a", size = 223506, upload-time = "2026-01-01T12:35:18.535Z" }, + { url = "https://files.pythonhosted.org/packages/2a/6b/4c225a5b10a4c9f88891a20bfe363eca1b1ce7d5244b396e5683c6070998/pycares-5.0.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a2117dffbb78615bfdb41ad77b17038689e4e01c66f153649e80d268c6228b4f", size = 251633, upload-time = "2026-01-01T12:35:19.819Z" }, + { url = "https://files.pythonhosted.org/packages/26/ce/ba2349413b5197b72ec19c46e07f6be3a324f80a7b1579c7cbb1b82d6dc2/pycares-5.0.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7d7c4f5d8b88b586ef2288142b806250020e6490b9f2bd8fd5f634a78fd20fcf", size = 237703, upload-time = "2026-01-01T12:35:20.827Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/2f/1fd794e6fca10d9e20569113d10a4f92cc2b4242d3eb45524419a37cca6b/pycares-5.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:433b9a4b5a7e10ef8aef0b957e6cd0bfc1bb5bc730d2729f04e93c91c25979c0", size = 222622, upload-time = "2026-01-01T12:35:22.518Z" }, + { url = "https://files.pythonhosted.org/packages/c9/07/7db7977649b210092a7e02d550fcebdfa69bc995c684a3b960c88a5dc4ce/pycares-5.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:cf2699883b88713670d3f9c0a1e44ac24c70aeace9f8c6aa7f0b9f222d5b08a5", size = 117438, upload-time = "2026-01-01T12:35:23.402Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ca/f322ddaa8b3414667de8faeea944ce9d3ddfaf1455839f499a21fcea4cec/pycares-5.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:9528dc11749e5e098c996475b60f879e1db5a6cb3dd0cdc747530620bb1a8941", size = 108920, upload-time = "2026-01-01T12:35:24.599Z" }, { url = "https://files.pythonhosted.org/packages/75/67/e84ba11d3fec3bf1322c3b302c4df13c85e0a1bc48f16d65cd0f59ad9853/pycares-5.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2ee551be4f3f3ac814ac8547586c464c9035e914f5122a534d25de147fa745e1", size = 136241, upload-time = "2026-01-01T12:35:25.439Z" }, { url = "https://files.pythonhosted.org/packages/ce/ae/50fbb3b4e52b9f1d16a36ffabd051ef8b2106b3f0a0d1c1113904d187a9d/pycares-5.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:252d4e5a52a68f825eaa90e16b595f9baee22c760f51e286ab612c6829b96de3", size = 131069, upload-time = "2026-01-01T12:35:26.293Z" }, { url = "https://files.pythonhosted.org/packages/0e/ea/f431599f1ac42149ea4768e516db7cdae3a503a6646319ae63ab66da1486/pycares-5.0.1-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c1aa549b8c2f2e224215c793d660270778dcba9abc3b85abbc7c41eabe4f1e5", size = 221120, upload-time = "2026-01-01T12:35:27.143Z" }, @@ -5956,6 +6529,20 @@ dependencies = [ ] sdist = { url = 
"https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, @@ -6012,10 +6599,22 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, ] [[package]] @@ -6262,6 +6861,11 @@ version = "0.4.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b6/34/b4e015b99031667a7b960f888889c5bd34ef585c85e1cb56a594b92836ac/pytokens-0.4.1.tar.gz", hash = "sha256:292052fe80923aae2260c073f822ceba21f3872ced9a68bb7953b348e561179a", size = 23015, upload-time = "2026-01-30T01:03:45.924Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/92/790ebe03f07b57e53b10884c329b9a1a308648fc083a6d4a39a10a28c8fc/pytokens-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d70e77c55ae8380c91c0c18dea05951482e263982911fc7410b1ffd1dadd3440", size = 160864, upload-time = "2026-01-30T01:02:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/13/25/a4f555281d975bfdd1eba731450e2fe3a95870274da73fb12c40aeae7625/pytokens-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a58d057208cb9075c144950d789511220b07636dd2e4708d5645d24de666bdc", size = 248565, upload-time = "2026-01-30T01:02:59.912Z" }, + { url = "https://files.pythonhosted.org/packages/17/50/bc0394b4ad5b1601be22fa43652173d47e4c9efbf0044c62e9a59b747c56/pytokens-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b49750419d300e2b5a3813cf229d4e5a4c728dae470bcc89867a9ad6f25a722d", size = 260824, upload-time = "2026-01-30T01:03:01.471Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/54/3e04f9d92a4be4fc6c80016bc396b923d2a6933ae94b5f557c939c460ee0/pytokens-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9907d61f15bf7261d7e775bd5d7ee4d2930e04424bab1972591918497623a16", size = 264075, upload-time = "2026-01-30T01:03:04.143Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1b/44b0326cb5470a4375f37988aea5d61b5cc52407143303015ebee94abfd6/pytokens-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:ee44d0f85b803321710f9239f335aafe16553b39106384cef8e6de40cb4ef2f6", size = 103323, upload-time = "2026-01-30T01:03:05.412Z" }, { url = "https://files.pythonhosted.org/packages/41/5d/e44573011401fb82e9d51e97f1290ceb377800fb4eed650b96f4753b499c/pytokens-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:140709331e846b728475786df8aeb27d24f48cbcf7bcd449f8de75cae7a45083", size = 160663, upload-time = "2026-01-30T01:03:06.473Z" }, { url = "https://files.pythonhosted.org/packages/f0/e6/5bbc3019f8e6f21d09c41f8b8654536117e5e211a85d89212d59cbdab381/pytokens-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d6c4268598f762bc8e91f5dbf2ab2f61f7b95bdc07953b602db879b3c8c18e1", size = 255626, upload-time = "2026-01-30T01:03:08.177Z" }, { url = "https://files.pythonhosted.org/packages/bf/3c/2d5297d82286f6f3d92770289fd439956b201c0a4fc7e72efb9b2293758e/pytokens-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24afde1f53d95348b5a0eb19488661147285ca4dd7ed752bbc3e1c6242a304d1", size = 269779, upload-time = "2026-01-30T01:03:09.756Z" }, @@ -6309,6 +6913,15 @@ version = "6.0.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = 
"2025-09-25T21:33:16.546Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, 
upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, @@ -6358,6 +6971,16 @@ dependencies = [ ] sdist = { url = 
"https://files.pythonhosted.org/packages/04/0b/3c9baedbdf613ecaa7aa07027780b8867f57b6293b6ee50de316c9f3222b/pyzmq-27.1.0.tar.gz", hash = "sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540", size = 281750, upload-time = "2025-09-08T23:10:18.157Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/06/5d/305323ba86b284e6fcb0d842d6adaa2999035f70f8c38a9b6d21ad28c3d4/pyzmq-27.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:226b091818d461a3bef763805e75685e478ac17e9008f49fce2d3e52b3d58b86", size = 1333328, upload-time = "2025-09-08T23:07:45.946Z" }, + { url = "https://files.pythonhosted.org/packages/bd/a0/fc7e78a23748ad5443ac3275943457e8452da67fda347e05260261108cbc/pyzmq-27.1.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0790a0161c281ca9723f804871b4027f2e8b5a528d357c8952d08cd1a9c15581", size = 908803, upload-time = "2025-09-08T23:07:47.551Z" }, + { url = "https://files.pythonhosted.org/packages/7e/22/37d15eb05f3bdfa4abea6f6d96eb3bb58585fbd3e4e0ded4e743bc650c97/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c895a6f35476b0c3a54e3eb6ccf41bf3018de937016e6e18748317f25d4e925f", size = 668836, upload-time = "2025-09-08T23:07:49.436Z" }, + { url = "https://files.pythonhosted.org/packages/b1/c4/2a6fe5111a01005fc7af3878259ce17684fabb8852815eda6225620f3c59/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bbf8d3630bf96550b3be8e1fc0fea5cbdc8d5466c1192887bd94869da17a63e", size = 857038, upload-time = "2025-09-08T23:07:51.234Z" }, + { url = "https://files.pythonhosted.org/packages/cb/eb/bfdcb41d0db9cd233d6fb22dc131583774135505ada800ebf14dfb0a7c40/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:15c8bd0fe0dabf808e2d7a681398c4e5ded70a551ab47482067a572c054c8e2e", size = 1657531, upload-time = "2025-09-08T23:07:52.795Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/21/e3180ca269ed4a0de5c34417dfe71a8ae80421198be83ee619a8a485b0c7/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bafcb3dd171b4ae9f19ee6380dfc71ce0390fefaf26b504c0e5f628d7c8c54f2", size = 2034786, upload-time = "2025-09-08T23:07:55.047Z" }, + { url = "https://files.pythonhosted.org/packages/3b/b1/5e21d0b517434b7f33588ff76c177c5a167858cc38ef740608898cd329f2/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e829529fcaa09937189178115c49c504e69289abd39967cd8a4c215761373394", size = 1894220, upload-time = "2025-09-08T23:07:57.172Z" }, + { url = "https://files.pythonhosted.org/packages/03/f2/44913a6ff6941905efc24a1acf3d3cb6146b636c546c7406c38c49c403d4/pyzmq-27.1.0-cp311-cp311-win32.whl", hash = "sha256:6df079c47d5902af6db298ec92151db82ecb557af663098b92f2508c398bb54f", size = 567155, upload-time = "2025-09-08T23:07:59.05Z" }, + { url = "https://files.pythonhosted.org/packages/23/6d/d8d92a0eb270a925c9b4dd039c0b4dc10abc2fcbc48331788824ef113935/pyzmq-27.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:190cbf120fbc0fc4957b56866830def56628934a9d112aec0e2507aa6a032b97", size = 633428, upload-time = "2025-09-08T23:08:00.663Z" }, + { url = "https://files.pythonhosted.org/packages/ae/14/01afebc96c5abbbd713ecfc7469cfb1bc801c819a74ed5c9fad9a48801cb/pyzmq-27.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:eca6b47df11a132d1745eb3b5b5e557a7dae2c303277aa0e69c6ba91b8736e07", size = 559497, upload-time = "2025-09-08T23:08:02.15Z" }, { url = "https://files.pythonhosted.org/packages/92/e7/038aab64a946d535901103da16b953c8c9cc9c961dadcbf3609ed6428d23/pyzmq-27.1.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc", size = 1306279, upload-time = "2025-09-08T23:08:03.807Z" }, { url = 
"https://files.pythonhosted.org/packages/e8/5e/c3c49fdd0f535ef45eefcc16934648e9e59dace4a37ee88fc53f6cd8e641/pyzmq-27.1.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113", size = 895645, upload-time = "2025-09-08T23:08:05.301Z" }, { url = "https://files.pythonhosted.org/packages/f8/e5/b0b2504cb4e903a74dcf1ebae157f9e20ebb6ea76095f6cfffea28c42ecd/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233", size = 652574, upload-time = "2025-09-08T23:08:06.828Z" }, @@ -6390,6 +7013,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c4/59/a5f38970f9bf07cee96128de79590bb354917914a9be11272cfc7ff26af0/pyzmq-27.1.0-cp314-cp314t-win32.whl", hash = "sha256:1f0b2a577fd770aa6f053211a55d1c47901f4d537389a034c690291485e5fe92", size = 587472, upload-time = "2025-09-08T23:08:58.18Z" }, { url = "https://files.pythonhosted.org/packages/70/d8/78b1bad170f93fcf5e3536e70e8fadac55030002275c9a29e8f5719185de/pyzmq-27.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:19c9468ae0437f8074af379e986c5d3d7d7bfe033506af442e8c879732bedbe0", size = 661401, upload-time = "2025-09-08T23:08:59.802Z" }, { url = "https://files.pythonhosted.org/packages/81/d6/4bfbb40c9a0b42fc53c7cf442f6385db70b40f74a783130c5d0a5aa62228/pyzmq-27.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dc5dbf68a7857b59473f7df42650c621d7e8923fb03fa74a526890f4d33cc4d7", size = 575170, upload-time = "2025-09-08T23:09:01.418Z" }, + { url = "https://files.pythonhosted.org/packages/4c/c6/c4dcdecdbaa70969ee1fdced6d7b8f60cfabe64d25361f27ac4665a70620/pyzmq-27.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:18770c8d3563715387139060d37859c02ce40718d1faf299abddcdcc6a649066", size = 836265, upload-time = "2025-09-08T23:09:49.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/79/f38c92eeaeb03a2ccc2ba9866f0439593bb08c5e3b714ac1d553e5c96e25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ac25465d42f92e990f8d8b0546b01c391ad431c3bf447683fdc40565941d0604", size = 800208, upload-time = "2025-09-08T23:09:51.073Z" }, + { url = "https://files.pythonhosted.org/packages/49/0e/3f0d0d335c6b3abb9b7b723776d0b21fa7f3a6c819a0db6097059aada160/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53b40f8ae006f2734ee7608d59ed661419f087521edbfc2149c3932e9c14808c", size = 567747, upload-time = "2025-09-08T23:09:52.698Z" }, + { url = "https://files.pythonhosted.org/packages/a1/cf/f2b3784d536250ffd4be70e049f3b60981235d70c6e8ce7e3ef21e1adb25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f605d884e7c8be8fe1aa94e0a783bf3f591b84c24e4bc4f3e7564c82ac25e271", size = 747371, upload-time = "2025-09-08T23:09:54.563Z" }, + { url = "https://files.pythonhosted.org/packages/01/1b/5dbe84eefc86f48473947e2f41711aded97eecef1231f4558f1f02713c12/pyzmq-27.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c9f7f6e13dff2e44a6afeaf2cf54cee5929ad64afaf4d40b50f93c58fc687355", size = 544862, upload-time = "2025-09-08T23:09:56.509Z" }, ] [[package]] @@ -6422,6 +7050,8 @@ dependencies = [ { name = "requests" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/29/7871f4206e6b00a9bb784c16dad32ccd01e9df5a93545db92de220eb2871/ray-2.54.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:491ae56ab80d8822c4eaf4d5bb96dcf32a6231d8d7b76eb8034400eb9be1bb18", size = 72066630, upload-time = "2026-02-18T04:05:04.957Z" }, + { url = "https://files.pythonhosted.org/packages/1d/e8/d2c8ebd9cd945abc817b01ad02a29df78cdb86cd07d764587e16977389d0/ray-2.54.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:928bb09245a3c6f7c3c113ba8eafc69f948da9602d7f33e8251ecdf97c157615", size = 72895723, upload-time = 
"2026-02-18T04:05:10.686Z" }, { url = "https://files.pythonhosted.org/packages/60/ad/e07aca3637e9c3ec4857ec4366208099cf8488ece8061a9925ba29b66382/ray-2.54.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:795ae21d6b764245d3f521bc5833446d58569e7dfde9c5777417eb285d87450f", size = 72107346, upload-time = "2026-02-18T04:05:27.999Z" }, { url = "https://files.pythonhosted.org/packages/9e/b9/cc5ea8460c3dc602e6b7198277a7c59ba2b8929374ab22efa8df9f3deac8/ray-2.54.0-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:a972afd5aa3dda99d0b2f369b5f62e5dd95865ab7d37bf2e0a0e0d2cfbd9b325", size = 72967230, upload-time = "2026-02-18T04:05:33.771Z" }, { url = "https://files.pythonhosted.org/packages/fd/8c/4a4a38eaec6e9614076a96967f58540f4f8d4aa0c793f43150c5df23cb9a/ray-2.54.0-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:8952c23a8aa94f10728c2d16e0dc3732d09aa0e6254801757ff494984a214f45", size = 72013826, upload-time = "2026-02-18T04:05:49.866Z" }, @@ -6453,6 +7083,22 @@ version = "2026.1.15" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/0b/86/07d5056945f9ec4590b518171c4254a5925832eb727b56d3c38a7476f316/regex-2026.1.15.tar.gz", hash = "sha256:164759aa25575cbc0651bef59a0b18353e54300d79ace8084c818ad8ac72b7d5", size = 414811, upload-time = "2026-01-14T23:18:02.775Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/c9/0c80c96eab96948363d270143138d671d5731c3a692b417629bf3492a9d6/regex-2026.1.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ae6020fb311f68d753b7efa9d4b9a5d47a5d6466ea0d5e3b5a471a960ea6e4a", size = 488168, upload-time = "2026-01-14T23:14:16.129Z" }, + { url = "https://files.pythonhosted.org/packages/17/f0/271c92f5389a552494c429e5cc38d76d1322eb142fb5db3c8ccc47751468/regex-2026.1.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eddf73f41225942c1f994914742afa53dc0d01a6e20fe14b878a1b1edc74151f", size = 290636, upload-time = "2026-01-14T23:14:17.715Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/f9/5f1fd077d106ca5655a0f9ff8f25a1ab55b92128b5713a91ed7134ff688e/regex-2026.1.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e8cd52557603f5c66a548f69421310886b28b7066853089e1a71ee710e1cdc1", size = 288496, upload-time = "2026-01-14T23:14:19.326Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e1/8f43b03a4968c748858ec77f746c286d81f896c2e437ccf050ebc5d3128c/regex-2026.1.15-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5170907244b14303edc5978f522f16c974f32d3aa92109fabc2af52411c9433b", size = 793503, upload-time = "2026-01-14T23:14:20.922Z" }, + { url = "https://files.pythonhosted.org/packages/8d/4e/a39a5e8edc5377a46a7c875c2f9a626ed3338cb3bb06931be461c3e1a34a/regex-2026.1.15-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2748c1ec0663580b4510bd89941a31560b4b439a0b428b49472a3d9944d11cd8", size = 860535, upload-time = "2026-01-14T23:14:22.405Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1c/9dce667a32a9477f7a2869c1c767dc00727284a9fa3ff5c09a5c6c03575e/regex-2026.1.15-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2f2775843ca49360508d080eaa87f94fa248e2c946bbcd963bb3aae14f333413", size = 907225, upload-time = "2026-01-14T23:14:23.897Z" }, + { url = "https://files.pythonhosted.org/packages/a4/3c/87ca0a02736d16b6262921425e84b48984e77d8e4e572c9072ce96e66c30/regex-2026.1.15-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9ea2604370efc9a174c1b5dcc81784fb040044232150f7f33756049edfc9026", size = 800526, upload-time = "2026-01-14T23:14:26.039Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ff/647d5715aeea7c87bdcbd2f578f47b415f55c24e361e639fe8c0cc88878f/regex-2026.1.15-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:0dcd31594264029b57bf16f37fd7248a70b3b764ed9e0839a8f271b2d22c0785", size = 773446, upload-time = "2026-01-14T23:14:28.109Z" }, + { url = "https://files.pythonhosted.org/packages/af/89/bf22cac25cb4ba0fe6bff52ebedbb65b77a179052a9d6037136ae93f42f4/regex-2026.1.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c08c1f3e34338256732bd6938747daa3c0d5b251e04b6e43b5813e94d503076e", size = 783051, upload-time = "2026-01-14T23:14:29.929Z" }, + { url = "https://files.pythonhosted.org/packages/1e/f4/6ed03e71dca6348a5188363a34f5e26ffd5db1404780288ff0d79513bce4/regex-2026.1.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e43a55f378df1e7a4fa3547c88d9a5a9b7113f653a66821bcea4718fe6c58763", size = 854485, upload-time = "2026-01-14T23:14:31.366Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9a/8e8560bd78caded8eb137e3e47612430a05b9a772caf60876435192d670a/regex-2026.1.15-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:f82110ab962a541737bd0ce87978d4c658f06e7591ba899192e2712a517badbb", size = 762195, upload-time = "2026-01-14T23:14:32.802Z" }, + { url = "https://files.pythonhosted.org/packages/38/6b/61fc710f9aa8dfcd764fe27d37edfaa023b1a23305a0d84fccd5adb346ea/regex-2026.1.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:27618391db7bdaf87ac6c92b31e8f0dfb83a9de0075855152b720140bda177a2", size = 845986, upload-time = "2026-01-14T23:14:34.898Z" }, + { url = "https://files.pythonhosted.org/packages/fd/2e/fbee4cb93f9d686901a7ca8d94285b80405e8c34fe4107f63ffcbfb56379/regex-2026.1.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bfb0d6be01fbae8d6655c8ca21b3b72458606c4aec9bbc932db758d47aba6db1", size = 788992, upload-time = "2026-01-14T23:14:37.116Z" }, + { url = "https://files.pythonhosted.org/packages/ed/14/3076348f3f586de64b1ab75a3fbabdaab7684af7f308ad43be7ef1849e55/regex-2026.1.15-cp311-cp311-win32.whl", hash = "sha256:b10e42a6de0e32559a92f2f8dc908478cc0fa02838d7dbe764c44dca3fa13569", size = 265893, upload-time = 
"2026-01-14T23:14:38.426Z" }, + { url = "https://files.pythonhosted.org/packages/0f/19/772cf8b5fc803f5c89ba85d8b1870a1ca580dc482aa030383a9289c82e44/regex-2026.1.15-cp311-cp311-win_amd64.whl", hash = "sha256:e9bf3f0bbdb56633c07d7116ae60a576f846efdd86a8848f8d62b749e1209ca7", size = 277840, upload-time = "2026-01-14T23:14:39.785Z" }, + { url = "https://files.pythonhosted.org/packages/78/84/d05f61142709474da3c0853222d91086d3e1372bcdab516c6fd8d80f3297/regex-2026.1.15-cp311-cp311-win_arm64.whl", hash = "sha256:41aef6f953283291c4e4e6850607bd71502be67779586a61472beacb315c97ec", size = 270374, upload-time = "2026-01-14T23:14:41.592Z" }, { url = "https://files.pythonhosted.org/packages/92/81/10d8cf43c807d0326efe874c1b79f22bfb0fb226027b0b19ebc26d301408/regex-2026.1.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4c8fcc5793dde01641a35905d6731ee1548f02b956815f8f1cab89e515a5bdf1", size = 489398, upload-time = "2026-01-14T23:14:43.741Z" }, { url = "https://files.pythonhosted.org/packages/90/b0/7c2a74e74ef2a7c32de724658a69a862880e3e4155cba992ba04d1c70400/regex-2026.1.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bfd876041a956e6a90ad7cdb3f6a630c07d491280bfeed4544053cd434901681", size = 291339, upload-time = "2026-01-14T23:14:45.183Z" }, { url = "https://files.pythonhosted.org/packages/19/4d/16d0773d0c818417f4cc20aa0da90064b966d22cd62a8c46765b5bd2d643/regex-2026.1.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9250d087bc92b7d4899ccd5539a1b2334e44eee85d848c4c1aef8e221d3f8c8f", size = 289003, upload-time = "2026-01-14T23:14:47.25Z" }, @@ -6608,6 +7254,21 @@ version = "0.7.6" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/e5/f5/8bed2310abe4ae04b67a38374a4d311dd85220f5d8da56f47ae9361be0b0/rignore-0.7.6.tar.gz", hash = "sha256:00d3546cd793c30cb17921ce674d2c8f3a4b00501cb0e3dd0e82217dbeba2671", size = 57140, upload-time = "2025-11-05T21:41:21.968Z" } wheels = [ + { url = 
"https://files.pythonhosted.org/packages/25/41/b6e2be3069ef3b7f24e35d2911bd6deb83d20ed5642ad81d5a6d1c015473/rignore-0.7.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:40be8226e12d6653abbebaffaea2885f80374c1c8f76fe5ca9e0cadd120a272c", size = 885285, upload-time = "2025-11-05T20:42:39.763Z" }, + { url = "https://files.pythonhosted.org/packages/52/66/ba7f561b6062402022887706a7f2b2c2e2e2a28f1e3839202b0a2f77e36d/rignore-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182f4e5e4064d947c756819446a7d4cdede8e756b8c81cf9e509683fe38778d7", size = 823882, upload-time = "2025-11-05T20:42:23.488Z" }, + { url = "https://files.pythonhosted.org/packages/f5/81/4087453df35a90b07370647b19017029324950c1b9137d54bf1f33843f17/rignore-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16b63047648a916a87be1e51bb5c009063f1b8b6f5afe4f04f875525507e63dc", size = 899362, upload-time = "2025-11-05T20:40:51.111Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c9/390a8fdfabb76d71416be773bd9f162977bd483084f68daf19da1dec88a6/rignore-0.7.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ba5524f5178deca4d7695e936604ebc742acb8958f9395776e1fcb8133f8257a", size = 873633, upload-time = "2025-11-05T20:41:06.193Z" }, + { url = "https://files.pythonhosted.org/packages/df/c9/79404fcb0faa76edfbc9df0901f8ef18568d1104919ebbbad6d608c888d1/rignore-0.7.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:62020dbb89a1dd4b84ab3d60547b3b2eb2723641d5fb198463643f71eaaed57d", size = 1167633, upload-time = "2025-11-05T20:41:22.491Z" }, + { url = "https://files.pythonhosted.org/packages/6e/8d/b3466d32d445d158a0aceb80919085baaae495b1f540fb942f91d93b5e5b/rignore-0.7.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b34acd532769d5a6f153a52a98dcb81615c949ab11697ce26b2eb776af2e174d", size = 941434, upload-time = "2025-11-05T20:41:38.151Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/40/9cd949761a7af5bc27022a939c91ff622d29c7a0b66d0c13a863097dde2d/rignore-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c5e53b752f9de44dff7b3be3c98455ce3bf88e69d6dc0cf4f213346c5e3416c", size = 959461, upload-time = "2025-11-05T20:42:08.476Z" }, + { url = "https://files.pythonhosted.org/packages/b5/87/1e1a145731f73bdb7835e11f80da06f79a00d68b370d9a847de979575e6d/rignore-0.7.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25b3536d13a5d6409ce85f23936f044576eeebf7b6db1d078051b288410fc049", size = 985323, upload-time = "2025-11-05T20:41:52.735Z" }, + { url = "https://files.pythonhosted.org/packages/6c/31/1ecff992fc3f59c4fcdcb6c07d5f6c1e6dfb55ccda19c083aca9d86fa1c6/rignore-0.7.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e01cad2b0b92f6b1993f29fc01f23f2d78caf4bf93b11096d28e9d578eb08ce", size = 1079173, upload-time = "2025-11-05T21:40:12.007Z" }, + { url = "https://files.pythonhosted.org/packages/17/18/162eedadb4c2282fa4c521700dbf93c9b14b8842e8354f7d72b445b8d593/rignore-0.7.6-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5991e46ab9b4868334c9e372ab0892b0150f3f586ff2b1e314272caeb38aaedb", size = 1139012, upload-time = "2025-11-05T21:40:29.399Z" }, + { url = "https://files.pythonhosted.org/packages/78/96/a9ca398a8af74bb143ad66c2a31303c894111977e28b0d0eab03867f1b43/rignore-0.7.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6c8ae562e5d1246cba5eaeb92a47b2a279e7637102828dde41dcbe291f529a3e", size = 1118827, upload-time = "2025-11-05T21:40:46.6Z" }, + { url = "https://files.pythonhosted.org/packages/9f/22/1c1a65047df864def9a047dbb40bc0b580b8289a4280e62779cd61ae21f2/rignore-0.7.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aaf938530dcc0b47c4cfa52807aa2e5bfd5ca6d57a621125fe293098692f6345", size = 1128182, upload-time = "2025-11-05T21:41:04.239Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/f4/1526eb01fdc2235aca1fd9d0189bee4021d009a8dcb0161540238c24166e/rignore-0.7.6-cp311-cp311-win32.whl", hash = "sha256:166ebce373105dd485ec213a6a2695986346e60c94ff3d84eb532a237b24a4d5", size = 646547, upload-time = "2025-11-05T21:41:49.439Z" }, + { url = "https://files.pythonhosted.org/packages/7c/c8/dda0983e1845706beb5826459781549a840fe5a7eb934abc523e8cd17814/rignore-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:44f35ee844b1a8cea50d056e6a595190ce9d42d3cccf9f19d280ae5f3058973a", size = 727139, upload-time = "2025-11-05T21:41:34.367Z" }, + { url = "https://files.pythonhosted.org/packages/e3/47/eb1206b7bf65970d41190b879e1723fc6bbdb2d45e53565f28991a8d9d96/rignore-0.7.6-cp311-cp311-win_arm64.whl", hash = "sha256:14b58f3da4fa3d5c3fa865cab49821675371f5e979281c683e131ae29159a581", size = 657598, upload-time = "2025-11-05T21:41:23.758Z" }, { url = "https://files.pythonhosted.org/packages/0b/0e/012556ef3047a2628842b44e753bb15f4dc46806780ff090f1e8fe4bf1eb/rignore-0.7.6-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:03e82348cb7234f8d9b2834f854400ddbbd04c0f8f35495119e66adbd37827a8", size = 883488, upload-time = "2025-11-05T20:42:41.359Z" }, { url = "https://files.pythonhosted.org/packages/93/b0/d4f1f3fe9eb3f8e382d45ce5b0547ea01c4b7e0b4b4eb87bcd66a1d2b888/rignore-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9e624f6be6116ea682e76c5feb71ea91255c67c86cb75befe774365b2931961", size = 820411, upload-time = "2025-11-05T20:42:24.782Z" }, { url = "https://files.pythonhosted.org/packages/4a/c8/dea564b36dedac8de21c18e1851789545bc52a0c22ece9843444d5608a6a/rignore-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bda49950d405aa8d0ebe26af807c4e662dd281d926530f03f29690a2e07d649a", size = 897821, upload-time = "2025-11-05T20:40:52.613Z" }, @@ -6668,6 +7329,18 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/fc/d3/18210222b37e87e36357f7b300b7d98c6dd62b133771e71ae27acba83a4f/rignore-0.7.6-cp314-cp314t-win32.whl", hash = "sha256:c1d8f117f7da0a4a96a8daef3da75bc090e3792d30b8b12cfadc240c631353f9", size = 647033, upload-time = "2025-11-05T21:42:00.095Z" }, { url = "https://files.pythonhosted.org/packages/3e/87/033eebfbee3ec7d92b3bb1717d8f68c88e6fc7de54537040f3b3a405726f/rignore-0.7.6-cp314-cp314t-win_amd64.whl", hash = "sha256:ca36e59408bec81de75d307c568c2d0d410fb880b1769be43611472c61e85c96", size = 725647, upload-time = "2025-11-05T21:41:44.449Z" }, { url = "https://files.pythonhosted.org/packages/79/62/b88e5879512c55b8ee979c666ee6902adc4ed05007226de266410ae27965/rignore-0.7.6-cp314-cp314t-win_arm64.whl", hash = "sha256:b83adabeb3e8cf662cabe1931b83e165b88c526fa6af6b3aa90429686e474896", size = 656035, upload-time = "2025-11-05T21:41:31.13Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/a6250ff0c49a3cdb943910ada4116e708118e9b901c878cfae616c80a904/rignore-0.7.6-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a20b6fb61bcced9a83dfcca6599ad45182b06ba720cff7c8d891e5b78db5b65f", size = 886470, upload-time = "2025-11-05T20:42:52.314Z" }, + { url = "https://files.pythonhosted.org/packages/35/af/c69c0c51b8f9f7914d95c4ea91c29a2ac067572048cae95dd6d2efdbe05d/rignore-0.7.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:392dcabfecbe176c9ebbcb40d85a5e86a5989559c4f988c2741da7daf1b5be25", size = 825976, upload-time = "2025-11-05T20:42:35.118Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d2/1b264f56132264ea609d3213ab603d6a27016b19559a1a1ede1a66a03dcd/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22baa462abdc36fdd5a5e2dae423107723351b85ff093762f9261148b9d0a04a", size = 899739, upload-time = "2025-11-05T20:41:01.518Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/e4/b3c5dfdd8d8a10741dfe7199ef45d19a0e42d0c13aa377c83bd6caf65d90/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53fb28882d2538cb2d231972146c4927a9d9455e62b209f85d634408c4103538", size = 874843, upload-time = "2025-11-05T20:41:17.687Z" }, + { url = "https://files.pythonhosted.org/packages/cc/10/d6f3750233881a2a154cefc9a6a0a9b19da526b19f7f08221b552c6f827d/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87409f7eeb1103d6b77f3472a3a0d9a5953e3ae804a55080bdcb0120ee43995b", size = 1170348, upload-time = "2025-11-05T20:41:34.21Z" }, + { url = "https://files.pythonhosted.org/packages/6e/10/ad98ca05c9771c15af734cee18114a3c280914b6e34fde9ffea2e61e88aa/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:684014e42e4341ab3ea23a203551857fcc03a7f8ae96ca3aefb824663f55db32", size = 942315, upload-time = "2025-11-05T20:41:48.508Z" }, + { url = "https://files.pythonhosted.org/packages/de/00/ab5c0f872acb60d534e687e629c17e0896c62da9b389c66d3aa16b817aa8/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77356ebb01ba13f8a425c3d30fcad40e57719c0e37670d022d560884a30e4767", size = 961047, upload-time = "2025-11-05T20:42:19.403Z" }, + { url = "https://files.pythonhosted.org/packages/b8/86/3030fdc363a8f0d1cd155b4c453d6db9bab47a24fcc64d03f61d9d78fe6a/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6cbd8a48abbd3747a6c830393cd578782fab5d43f4deea48c5f5e344b8fed2b0", size = 986090, upload-time = "2025-11-05T20:42:03.581Z" }, + { url = "https://files.pythonhosted.org/packages/33/b8/133aa4002cee0ebbb39362f94e4898eec7fbd09cec9fcbce1cd65b355b7f/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2673225dcec7f90497e79438c35e34638d0d0391ccea3cbb79bfb9adc0dc5bd7", size = 1079656, upload-time = 
"2025-11-05T21:40:24.89Z" }, + { url = "https://files.pythonhosted.org/packages/67/56/36d5d34210e5e7dfcd134eed8335b19e80ae940ee758f493e4f2b344dd70/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:c081f17290d8a2b96052b79207622aa635686ea39d502b976836384ede3d303c", size = 1139789, upload-time = "2025-11-05T21:40:42.119Z" }, + { url = "https://files.pythonhosted.org/packages/6b/5b/bb4f9420802bf73678033a4a55ab1bede36ce2e9b41fec5f966d83d932b3/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:57e8327aacc27f921968cb2a174f9e47b084ce9a7dd0122c8132d22358f6bd79", size = 1120308, upload-time = "2025-11-05T21:40:59.402Z" }, + { url = "https://files.pythonhosted.org/packages/ce/8b/a1299085b28a2f6135e30370b126e3c5055b61908622f2488ade67641479/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d8955b57e42f2a5434670d5aa7b75eaf6e74602ccd8955dddf7045379cd762fb", size = 1129444, upload-time = "2025-11-05T21:41:17.906Z" }, ] [[package]] @@ -6676,6 +7349,21 @@ version = "0.30.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/6e/f964e88b3d2abee2a82c1ac8366da848fce1c6d834dc2132c3fda3970290/rpds_py-0.30.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a2bffea6a4ca9f01b3f8e548302470306689684e61602aa3d141e34da06cf425", size = 370157, upload-time = "2025-11-30T20:21:53.789Z" }, + { url = "https://files.pythonhosted.org/packages/94/ba/24e5ebb7c1c82e74c4e4f33b2112a5573ddc703915b13a073737b59b86e0/rpds_py-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc4f992dfe1e2bc3ebc7444f6c7051b4bc13cd8e33e43511e8ffd13bf407010d", size = 359676, upload-time = 
"2025-11-30T20:21:55.475Z" }, + { url = "https://files.pythonhosted.org/packages/84/86/04dbba1b087227747d64d80c3b74df946b986c57af0a9f0c98726d4d7a3b/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:422c3cb9856d80b09d30d2eb255d0754b23e090034e1deb4083f8004bd0761e4", size = 389938, upload-time = "2025-11-30T20:21:57.079Z" }, + { url = "https://files.pythonhosted.org/packages/42/bb/1463f0b1722b7f45431bdd468301991d1328b16cffe0b1c2918eba2c4eee/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07ae8a593e1c3c6b82ca3292efbe73c30b61332fd612e05abee07c79359f292f", size = 402932, upload-time = "2025-11-30T20:21:58.47Z" }, + { url = "https://files.pythonhosted.org/packages/99/ee/2520700a5c1f2d76631f948b0736cdf9b0acb25abd0ca8e889b5c62ac2e3/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f90dd7557b6bd57f40abe7747e81e0c0b119bef015ea7726e69fe550e394a4", size = 525830, upload-time = "2025-11-30T20:21:59.699Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ad/bd0331f740f5705cc555a5e17fdf334671262160270962e69a2bdef3bf76/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99b47d6ad9a6da00bec6aabe5a6279ecd3c06a329d4aa4771034a21e335c3a97", size = 412033, upload-time = "2025-11-30T20:22:00.991Z" }, + { url = "https://files.pythonhosted.org/packages/f8/1e/372195d326549bb51f0ba0f2ecb9874579906b97e08880e7a65c3bef1a99/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33f559f3104504506a44bb666b93a33f5d33133765b0c216a5bf2f1e1503af89", size = 390828, upload-time = "2025-11-30T20:22:02.723Z" }, + { url = "https://files.pythonhosted.org/packages/ab/2b/d88bb33294e3e0c76bc8f351a3721212713629ffca1700fa94979cb3eae8/rpds_py-0.30.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:946fe926af6e44f3697abbc305ea168c2c31d3e3ef1058cf68f379bf0335a78d", size = 404683, upload-time = 
"2025-11-30T20:22:04.367Z" }, + { url = "https://files.pythonhosted.org/packages/50/32/c759a8d42bcb5289c1fac697cd92f6fe01a018dd937e62ae77e0e7f15702/rpds_py-0.30.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:495aeca4b93d465efde585977365187149e75383ad2684f81519f504f5c13038", size = 421583, upload-time = "2025-11-30T20:22:05.814Z" }, + { url = "https://files.pythonhosted.org/packages/2b/81/e729761dbd55ddf5d84ec4ff1f47857f4374b0f19bdabfcf929164da3e24/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9a0ca5da0386dee0655b4ccdf46119df60e0f10da268d04fe7cc87886872ba7", size = 572496, upload-time = "2025-11-30T20:22:07.713Z" }, + { url = "https://files.pythonhosted.org/packages/14/f6/69066a924c3557c9c30baa6ec3a0aa07526305684c6f86c696b08860726c/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d6d1cc13664ec13c1b84241204ff3b12f9bb82464b8ad6e7a5d3486975c2eed", size = 598669, upload-time = "2025-11-30T20:22:09.312Z" }, + { url = "https://files.pythonhosted.org/packages/5f/48/905896b1eb8a05630d20333d1d8ffd162394127b74ce0b0784ae04498d32/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3896fa1be39912cf0757753826bc8bdc8ca331a28a7c4ae46b7a21280b06bb85", size = 561011, upload-time = "2025-11-30T20:22:11.309Z" }, + { url = "https://files.pythonhosted.org/packages/22/16/cd3027c7e279d22e5eb431dd3c0fbc677bed58797fe7581e148f3f68818b/rpds_py-0.30.0-cp311-cp311-win32.whl", hash = "sha256:55f66022632205940f1827effeff17c4fa7ae1953d2b74a8581baaefb7d16f8c", size = 221406, upload-time = "2025-11-30T20:22:13.101Z" }, + { url = "https://files.pythonhosted.org/packages/fa/5b/e7b7aa136f28462b344e652ee010d4de26ee9fd16f1bfd5811f5153ccf89/rpds_py-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:a51033ff701fca756439d641c0ad09a41d9242fa69121c7d8769604a0a629825", size = 236024, upload-time = "2025-11-30T20:22:14.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/a6/364bba985e4c13658edb156640608f2c9e1d3ea3c81b27aa9d889fff0e31/rpds_py-0.30.0-cp311-cp311-win_arm64.whl", hash = "sha256:47b0ef6231c58f506ef0b74d44e330405caa8428e770fec25329ed2cb971a229", size = 229069, upload-time = "2025-11-30T20:22:16.577Z" }, { url = "https://files.pythonhosted.org/packages/03/e7/98a2f4ac921d82f33e03f3835f5bf3a4a40aa1bfdc57975e74a97b2b4bdd/rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad", size = 375086, upload-time = "2025-11-30T20:22:17.93Z" }, { url = "https://files.pythonhosted.org/packages/4d/a1/bca7fd3d452b272e13335db8d6b0b3ecde0f90ad6f16f3328c6fb150c889/rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05", size = 359053, upload-time = "2025-11-30T20:22:19.297Z" }, { url = "https://files.pythonhosted.org/packages/65/1c/ae157e83a6357eceff62ba7e52113e3ec4834a84cfe07fa4b0757a7d105f/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28", size = 390763, upload-time = "2025-11-30T20:22:21.661Z" }, @@ -6749,6 +7437,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030, upload-time = "2025-11-30T20:24:10.956Z" }, { url = "https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570, upload-time = "2025-11-30T20:24:12.735Z" }, { url = 
"https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/69/71/3f34339ee70521864411f8b6992e7ab13ac30d8e4e3309e07c7361767d91/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c2262bdba0ad4fc6fb5545660673925c2d2a5d9e2e0fb603aad545427be0fc58", size = 372292, upload-time = "2025-11-30T20:24:16.537Z" }, + { url = "https://files.pythonhosted.org/packages/57/09/f183df9b8f2d66720d2ef71075c59f7e1b336bec7ee4c48f0a2b06857653/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ee6af14263f25eedc3bb918a3c04245106a42dfd4f5c2285ea6f997b1fc3f89a", size = 362128, upload-time = "2025-11-30T20:24:18.086Z" }, + { url = "https://files.pythonhosted.org/packages/7a/68/5c2594e937253457342e078f0cc1ded3dd7b2ad59afdbf2d354869110a02/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3adbb8179ce342d235c31ab8ec511e66c73faa27a47e076ccc92421add53e2bb", size = 391542, upload-time = "2025-11-30T20:24:20.092Z" }, + { url = "https://files.pythonhosted.org/packages/49/5c/31ef1afd70b4b4fbdb2800249f34c57c64beb687495b10aec0365f53dfc4/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:250fa00e9543ac9b97ac258bd37367ff5256666122c2d0f2bc97577c60a1818c", size = 404004, upload-time = "2025-11-30T20:24:22.231Z" }, + { url = "https://files.pythonhosted.org/packages/e3/63/0cfbea38d05756f3440ce6534d51a491d26176ac045e2707adc99bb6e60a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9854cf4f488b3d57b9aaeb105f06d78e5529d3145b1e4a41750167e8c213c6d3", size = 527063, upload-time = "2025-11-30T20:24:24.302Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/e6/01e1f72a2456678b0f618fc9a1a13f882061690893c192fcad9f2926553a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:993914b8e560023bc0a8bf742c5f303551992dcb85e247b1e5c7f4a7d145bda5", size = 413099, upload-time = "2025-11-30T20:24:25.916Z" }, + { url = "https://files.pythonhosted.org/packages/b8/25/8df56677f209003dcbb180765520c544525e3ef21ea72279c98b9aa7c7fb/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58edca431fb9b29950807e301826586e5bbf24163677732429770a697ffe6738", size = 392177, upload-time = "2025-11-30T20:24:27.834Z" }, + { url = "https://files.pythonhosted.org/packages/4a/b4/0a771378c5f16f8115f796d1f437950158679bcd2a7c68cf251cfb00ed5b/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:dea5b552272a944763b34394d04577cf0f9bd013207bc32323b5a89a53cf9c2f", size = 406015, upload-time = "2025-11-30T20:24:29.457Z" }, + { url = "https://files.pythonhosted.org/packages/36/d8/456dbba0af75049dc6f63ff295a2f92766b9d521fa00de67a2bd6427d57a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ba3af48635eb83d03f6c9735dfb21785303e73d22ad03d489e88adae6eab8877", size = 423736, upload-time = "2025-11-30T20:24:31.22Z" }, + { url = "https://files.pythonhosted.org/packages/13/64/b4d76f227d5c45a7e0b796c674fd81b0a6c4fbd48dc29271857d8219571c/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:dff13836529b921e22f15cb099751209a60009731a68519630a24d61f0b1b30a", size = 573981, upload-time = "2025-11-30T20:24:32.934Z" }, + { url = "https://files.pythonhosted.org/packages/20/91/092bacadeda3edf92bf743cc96a7be133e13a39cdbfd7b5082e7ab638406/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b151685b23929ab7beec71080a8889d4d6d9fa9a983d213f07121205d48e2c4", size = 599782, upload-time = "2025-11-30T20:24:35.169Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/b7/b95708304cd49b7b6f82fdd039f1748b66ec2b21d6a45180910802f1abf1/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e", size = 562191, upload-time = "2025-11-30T20:24:36.853Z" }, ] [[package]] @@ -6883,6 +7583,12 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/56/3e/9cca699f3486ce6bc12ff46dc2031f1ec8eb9ccc9a320fdaf925f1417426/scipy-1.17.0.tar.gz", hash = "sha256:2591060c8e648d8b96439e111ac41fd8342fdeff1876be2e19dea3fe8930454e", size = 30396830, upload-time = "2026-01-10T21:34:23.009Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/85/bd/f5af70c28c6da2227e510875cadf64879855193a687fb19951f0f44cfd6b/scipy-1.17.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc02c37a5639ee67d8fb646ffded6d793c06c5622d36b35cfa8fe5ececb8f042", size = 32862414, upload-time = "2026-01-10T21:24:52.566Z" }, + { url = "https://files.pythonhosted.org/packages/ef/df/df1457c4df3826e908879fe3d76bc5b6e60aae45f4ee42539512438cfd5d/scipy-1.17.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dac97a27520d66c12a34fd90a4fe65f43766c18c0d6e1c0a80f114d2260080e4", size = 35112380, upload-time = "2026-01-10T21:24:58.433Z" }, + { url = "https://files.pythonhosted.org/packages/5f/bb/88e2c16bd1dd4de19d80d7c5e238387182993c2fb13b4b8111e3927ad422/scipy-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb7446a39b3ae0fe8f416a9a3fdc6fba3f11c634f680f16a239c5187bc487c0", size = 34922676, upload-time = "2026-01-10T21:25:04.287Z" }, + { url = "https://files.pythonhosted.org/packages/02/ba/5120242cc735f71fc002cff0303d536af4405eb265f7c60742851e7ccfe9/scipy-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:474da16199f6af66601a01546144922ce402cb17362e07d82f5a6cf8f963e449", size = 37507599, upload-time = "2026-01-10T21:25:09.851Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/c8/08629657ac6c0da198487ce8cd3de78e02cfde42b7f34117d56a3fe249dc/scipy-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:255c0da161bd7b32a6c898e7891509e8a9289f0b1c6c7d96142ee0d2b114c2ea", size = 36380284, upload-time = "2026-01-10T21:25:15.632Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4a/465f96d42c6f33ad324a40049dfd63269891db9324aa66c4a1c108c6f994/scipy-1.17.0-cp311-cp311-win_arm64.whl", hash = "sha256:85b0ac3ad17fa3be50abd7e69d583d98792d7edc08367e01445a1e2076005379", size = 24370427, upload-time = "2026-01-10T21:25:20.514Z" }, { url = "https://files.pythonhosted.org/packages/4a/69/7c347e857224fcaf32a34a05183b9d8a7aca25f8f2d10b8a698b8388561a/scipy-1.17.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5194c445d0a1c7a6c1a4a4681b6b7c71baad98ff66d96b949097e7513c9d6742", size = 32724197, upload-time = "2026-01-10T21:25:44.084Z" }, { url = "https://files.pythonhosted.org/packages/d1/fe/66d73b76d378ba8cc2fe605920c0c75092e3a65ae746e1e767d9d020a75a/scipy-1.17.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9eeb9b5f5997f75507814ed9d298ab23f62cf79f5a3ef90031b1ee2506abdb5b", size = 35009148, upload-time = "2026-01-10T21:25:50.591Z" }, { url = "https://files.pythonhosted.org/packages/af/07/07dec27d9dc41c18d8c43c69e9e413431d20c53a0339c388bcf72f353c4b/scipy-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:40052543f7bbe921df4408f46003d6f01c6af109b9e2c8a66dd1cf6cf57f7d5d", size = 34798766, upload-time = "2026-01-10T21:25:59.41Z" }, @@ -6948,6 +7654,14 @@ version = "0.2.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/15/15/2e7a025fc62d764b151ae6d0f2a92f8081755ebe8d4a64099accc6f77ba6/sentencepiece-0.2.1.tar.gz", hash = "sha256:8138cec27c2f2282f4a34d9a016e3374cd40e5c6e9cb335063db66a0a3b71fad", size = 3228515, upload-time = "2025-08-12T07:00:51.718Z" } wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d8/15/46afbab00733d81788b64be430ca1b93011bb9388527958e26cc31832de5/sentencepiece-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6356d0986b8b8dc351b943150fcd81a1c6e6e4d439772e8584c64230e58ca987", size = 1942560, upload-time = "2025-08-12T06:59:25.82Z" }, + { url = "https://files.pythonhosted.org/packages/fa/79/7c01b8ef98a0567e9d84a4e7a910f8e7074fcbf398a5cd76f93f4b9316f9/sentencepiece-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f8ba89a3acb3dc1ae90f65ec1894b0b9596fdb98ab003ff38e058f898b39bc7", size = 1325385, upload-time = "2025-08-12T06:59:27.722Z" }, + { url = "https://files.pythonhosted.org/packages/bb/88/2b41e07bd24f33dcf2f18ec3b74247aa4af3526bad8907b8727ea3caba03/sentencepiece-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:02593eca45440ef39247cee8c47322a34bdcc1d8ae83ad28ba5a899a2cf8d79a", size = 1253319, upload-time = "2025-08-12T06:59:29.306Z" }, + { url = "https://files.pythonhosted.org/packages/a0/54/38a1af0c6210a3c6f95aa46d23d6640636d020fba7135cd0d9a84ada05a7/sentencepiece-0.2.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a0d15781a171d188b661ae4bde1d998c303f6bd8621498c50c671bd45a4798e", size = 1316162, upload-time = "2025-08-12T06:59:30.914Z" }, + { url = "https://files.pythonhosted.org/packages/ef/66/fb191403ade791ad2c3c1e72fe8413e63781b08cfa3aa4c9dfc536d6e795/sentencepiece-0.2.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f5a3e0d9f445ed9d66c0fec47d4b23d12cfc858b407a03c194c1b26c2ac2a63", size = 1387785, upload-time = "2025-08-12T06:59:32.491Z" }, + { url = "https://files.pythonhosted.org/packages/a9/2d/3bd9b08e70067b2124518b308db6a84a4f8901cc8a4317e2e4288cdd9b4d/sentencepiece-0.2.1-cp311-cp311-win32.whl", hash = "sha256:6d297a1748d429ba8534eebe5535448d78b8acc32d00a29b49acf28102eeb094", size = 999555, upload-time = "2025-08-12T06:59:34.475Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/b8/f709977f5fda195ae1ea24f24e7c581163b6f142b1005bc3d0bbfe4d7082/sentencepiece-0.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:82d9ead6591015f009cb1be1cb1c015d5e6f04046dbb8c9588b931e869a29728", size = 1054617, upload-time = "2025-08-12T06:59:36.461Z" }, + { url = "https://files.pythonhosted.org/packages/7a/40/a1fc23be23067da0f703709797b464e8a30a1c78cc8a687120cd58d4d509/sentencepiece-0.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:39f8651bd10974eafb9834ce30d9bcf5b73e1fc798a7f7d2528f9820ca86e119", size = 1033877, upload-time = "2025-08-12T06:59:38.391Z" }, { url = "https://files.pythonhosted.org/packages/4a/be/32ce495aa1d0e0c323dcb1ba87096037358edee539cac5baf8755a6bd396/sentencepiece-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:57cae326c8727de58c85977b175af132a7138d84c764635d7e71bbee7e774133", size = 1943152, upload-time = "2025-08-12T06:59:40.048Z" }, { url = "https://files.pythonhosted.org/packages/88/7e/ff23008899a58678e98c6ff592bf4d368eee5a71af96d0df6b38a039dd4f/sentencepiece-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:56dd39a3c4d6493db3cdca7e8cc68c6b633f0d4195495cbadfcf5af8a22d05a6", size = 1325651, upload-time = "2025-08-12T06:59:41.536Z" }, { url = "https://files.pythonhosted.org/packages/19/84/42eb3ce4796777a1b5d3699dfd4dca85113e68b637f194a6c8d786f16a04/sentencepiece-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9381351182ff9888cc80e41c632e7e274b106f450de33d67a9e8f6043da6f76", size = 1253645, upload-time = "2025-08-12T06:59:42.903Z" }, @@ -7009,6 +7723,16 @@ version = "1.3.7" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/8d/48/49393a96a2eef1ab418b17475fb92b8fcfad83d099e678751b05472e69de/setproctitle-1.3.7.tar.gz", hash = "sha256:bc2bc917691c1537d5b9bca1468437176809c7e11e5694ca79a9ca12345dcb9e", size = 27002, upload-time = "2025-09-05T12:51:25.278Z" } wheels = [ + { url = 
"https://files.pythonhosted.org/packages/04/cd/1b7ba5cad635510720ce19d7122154df96a2387d2a74217be552887c93e5/setproctitle-1.3.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a600eeb4145fb0ee6c287cb82a2884bd4ec5bbb076921e287039dcc7b7cc6dd0", size = 18085, upload-time = "2025-09-05T12:49:22.183Z" }, + { url = "https://files.pythonhosted.org/packages/8f/1a/b2da0a620490aae355f9d72072ac13e901a9fec809a6a24fc6493a8f3c35/setproctitle-1.3.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97a090fed480471bb175689859532709e28c085087e344bca45cf318034f70c4", size = 13097, upload-time = "2025-09-05T12:49:23.322Z" }, + { url = "https://files.pythonhosted.org/packages/18/2e/bd03ff02432a181c1787f6fc2a678f53b7dacdd5ded69c318fe1619556e8/setproctitle-1.3.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1607b963e7b53e24ec8a2cb4e0ab3ae591d7c6bf0a160feef0551da63452b37f", size = 32191, upload-time = "2025-09-05T12:49:24.567Z" }, + { url = "https://files.pythonhosted.org/packages/28/78/1e62fc0937a8549f2220445ed2175daacee9b6764c7963b16148119b016d/setproctitle-1.3.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a20fb1a3974e2dab857870cf874b325b8705605cb7e7e8bcbb915bca896f52a9", size = 33203, upload-time = "2025-09-05T12:49:25.871Z" }, + { url = "https://files.pythonhosted.org/packages/a0/3c/65edc65db3fa3df400cf13b05e9d41a3c77517b4839ce873aa6b4043184f/setproctitle-1.3.7-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f8d961bba676e07d77665204f36cffaa260f526e7b32d07ab3df6a2c1dfb44ba", size = 34963, upload-time = "2025-09-05T12:49:27.044Z" }, + { url = "https://files.pythonhosted.org/packages/a1/32/89157e3de997973e306e44152522385f428e16f92f3cf113461489e1e2ee/setproctitle-1.3.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:db0fd964fbd3a9f8999b502f65bd2e20883fdb5b1fae3a424e66db9a793ed307", size = 32398, upload-time = 
"2025-09-05T12:49:28.909Z" }, + { url = "https://files.pythonhosted.org/packages/4a/18/77a765a339ddf046844cb4513353d8e9dcd8183da9cdba6e078713e6b0b2/setproctitle-1.3.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:db116850fcf7cca19492030f8d3b4b6e231278e8fe097a043957d22ce1bdf3ee", size = 33657, upload-time = "2025-09-05T12:49:30.323Z" }, + { url = "https://files.pythonhosted.org/packages/6b/63/f0b6205c64d74d2a24a58644a38ec77bdbaa6afc13747e75973bf8904932/setproctitle-1.3.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:316664d8b24a5c91ee244460bdaf7a74a707adaa9e14fbe0dc0a53168bb9aba1", size = 31836, upload-time = "2025-09-05T12:49:32.309Z" }, + { url = "https://files.pythonhosted.org/packages/ba/51/e1277f9ba302f1a250bbd3eedbbee747a244b3cc682eb58fb9733968f6d8/setproctitle-1.3.7-cp311-cp311-win32.whl", hash = "sha256:b74774ca471c86c09b9d5037c8451fff06bb82cd320d26ae5a01c758088c0d5d", size = 12556, upload-time = "2025-09-05T12:49:33.529Z" }, + { url = "https://files.pythonhosted.org/packages/b6/7b/822a23f17e9003dfdee92cd72758441ca2a3680388da813a371b716fb07f/setproctitle-1.3.7-cp311-cp311-win_amd64.whl", hash = "sha256:acb9097213a8dd3410ed9f0dc147840e45ca9797785272928d4be3f0e69e3be4", size = 13243, upload-time = "2025-09-05T12:49:34.553Z" }, { url = "https://files.pythonhosted.org/packages/fb/f0/2dc88e842077719d7384d86cc47403e5102810492b33680e7dadcee64cd8/setproctitle-1.3.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2dc99aec591ab6126e636b11035a70991bc1ab7a261da428491a40b84376654e", size = 18049, upload-time = "2025-09-05T12:49:36.241Z" }, { url = "https://files.pythonhosted.org/packages/f0/b4/50940504466689cda65680c9e9a1e518e5750c10490639fa687489ac7013/setproctitle-1.3.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdd8aa571b7aa39840fdbea620e308a19691ff595c3a10231e9ee830339dd798", size = 13079, upload-time = "2025-09-05T12:49:38.088Z" }, { url = 
"https://files.pythonhosted.org/packages/d0/99/71630546b9395b095f4082be41165d1078204d1696c2d9baade3de3202d0/setproctitle-1.3.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2906b6c7959cdb75f46159bf0acd8cc9906cf1361c9e1ded0d065fe8f9039629", size = 32932, upload-time = "2025-09-05T12:49:39.271Z" }, @@ -7059,6 +7783,9 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e7/e3/54b496ac724e60e61cc3447f02690105901ca6d90da0377dffe49ff99fc7/setproctitle-1.3.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:1fae595d032b30dab4d659bece20debd202229fce12b55abab978b7f30783d73", size = 33958, upload-time = "2025-09-05T12:50:39.841Z" }, { url = "https://files.pythonhosted.org/packages/ea/a8/c84bb045ebf8c6fdc7f7532319e86f8380d14bbd3084e6348df56bdfe6fd/setproctitle-1.3.7-cp314-cp314t-win32.whl", hash = "sha256:02432f26f5d1329ab22279ff863c83589894977063f59e6c4b4845804a08f8c2", size = 12745, upload-time = "2025-09-05T12:50:41.377Z" }, { url = "https://files.pythonhosted.org/packages/08/b6/3a5a4f9952972791a9114ac01dfc123f0df79903577a3e0a7a404a695586/setproctitle-1.3.7-cp314-cp314t-win_amd64.whl", hash = "sha256:cbc388e3d86da1f766d8fc2e12682e446064c01cea9f88a88647cfe7c011de6a", size = 13469, upload-time = "2025-09-05T12:50:42.67Z" }, + { url = "https://files.pythonhosted.org/packages/c3/5b/5e1c117ac84e3cefcf8d7a7f6b2461795a87e20869da065a5c087149060b/setproctitle-1.3.7-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:b1cac6a4b0252b8811d60b6d8d0f157c0fdfed379ac89c25a914e6346cf355a1", size = 12587, upload-time = "2025-09-05T12:51:21.195Z" }, + { url = "https://files.pythonhosted.org/packages/73/02/b9eadc226195dcfa90eed37afe56b5dd6fa2f0e5220ab8b7867b8862b926/setproctitle-1.3.7-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f1704c9e041f2b1dc38f5be4552e141e1432fba3dd52c72eeffd5bc2db04dc65", size = 14286, upload-time = "2025-09-05T12:51:22.61Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/26/1be1d2a53c2a91ec48fa2ff4a409b395f836798adf194d99de9c059419ea/setproctitle-1.3.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:b08b61976ffa548bd5349ce54404bf6b2d51bd74d4f1b241ed1b0f25bce09c3a", size = 13282, upload-time = "2025-09-05T12:51:24.094Z" }, ] [[package]] @@ -7321,6 +8048,13 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/06/aa/9ce0f3e7a9829ead5c8ce549392f33a12c4555a6c0609bb27d882e9c7ddf/sqlalchemy-2.0.46.tar.gz", hash = "sha256:cf36851ee7219c170bb0793dbc3da3e80c582e04a5437bc601bfe8c85c9216d7", size = 9865393, upload-time = "2026-01-21T18:03:45.119Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/69/ac/b42ad16800d0885105b59380ad69aad0cce5a65276e269ce2729a2343b6a/sqlalchemy-2.0.46-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:261c4b1f101b4a411154f1da2b76497d73abbfc42740029205d4d01fa1052684", size = 2154851, upload-time = "2026-01-21T18:27:30.54Z" }, + { url = "https://files.pythonhosted.org/packages/a0/60/d8710068cb79f64d002ebed62a7263c00c8fd95f4ebd4b5be8f7ca93f2bc/sqlalchemy-2.0.46-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:181903fe8c1b9082995325f1b2e84ac078b1189e2819380c2303a5f90e114a62", size = 3311241, upload-time = "2026-01-21T18:32:33.45Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/20c71487c7219ab3aa7421c7c62d93824c97c1460f2e8bb72404b0192d13/sqlalchemy-2.0.46-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:590be24e20e2424a4c3c1b0835e9405fa3d0af5823a1a9fc02e5dff56471515f", size = 3310741, upload-time = "2026-01-21T18:44:57.887Z" }, + { url = "https://files.pythonhosted.org/packages/65/80/d26d00b3b249ae000eee4db206fcfc564bf6ca5030e4747adf451f4b5108/sqlalchemy-2.0.46-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7568fe771f974abadce52669ef3a03150ff03186d8eb82613bc8adc435a03f01", size = 3263116, upload-time = 
"2026-01-21T18:32:35.044Z" }, + { url = "https://files.pythonhosted.org/packages/da/ee/74dda7506640923821340541e8e45bd3edd8df78664f1f2e0aae8077192b/sqlalchemy-2.0.46-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf7e1e78af38047e08836d33502c7a278915698b7c2145d045f780201679999", size = 3285327, upload-time = "2026-01-21T18:44:59.254Z" }, + { url = "https://files.pythonhosted.org/packages/9f/25/6dcf8abafff1389a21c7185364de145107b7394ecdcb05233815b236330d/sqlalchemy-2.0.46-cp311-cp311-win32.whl", hash = "sha256:9d80ea2ac519c364a7286e8d765d6cd08648f5b21ca855a8017d9871f075542d", size = 2114564, upload-time = "2026-01-21T18:33:15.85Z" }, + { url = "https://files.pythonhosted.org/packages/93/5f/e081490f8523adc0088f777e4ebad3cac21e498ec8a3d4067074e21447a1/sqlalchemy-2.0.46-cp311-cp311-win_amd64.whl", hash = "sha256:585af6afe518732d9ccd3aea33af2edaae4a7aa881af5d8f6f4fe3a368699597", size = 2139233, upload-time = "2026-01-21T18:33:17.528Z" }, { url = "https://files.pythonhosted.org/packages/b6/35/d16bfa235c8b7caba3730bba43e20b1e376d2224f407c178fbf59559f23e/sqlalchemy-2.0.46-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a9a72b0da8387f15d5810f1facca8f879de9b85af8c645138cba61ea147968c", size = 2153405, upload-time = "2026-01-21T19:05:54.143Z" }, { url = "https://files.pythonhosted.org/packages/06/6c/3192e24486749862f495ddc6584ed730c0c994a67550ec395d872a2ad650/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2347c3f0efc4de367ba00218e0ae5c4ba2306e47216ef80d6e31761ac97cb0b9", size = 3334702, upload-time = "2026-01-21T18:46:45.384Z" }, { url = "https://files.pythonhosted.org/packages/ea/a2/b9f33c8d68a3747d972a0bb758c6b63691f8fb8a49014bc3379ba15d4274/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9094c8b3197db12aa6f05c51c05daaad0a92b8c9af5388569847b03b1007fb1b", size = 3347664, upload-time = "2026-01-21T18:40:09.979Z" }, @@ -7503,6 
+8237,11 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/43/f6/e2403fc05b97ba74ad408a98a42c288e6e1b8eacc23780c153b0e5166179/tensorstore-0.1.81.tar.gz", hash = "sha256:687546192ea6f6c8ae28d18f13103336f68017d928b9f5a00325e9b0548d9c25", size = 7120819, upload-time = "2026-02-06T18:56:12.535Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/df/f472bd0dee801d7e33c53335ad0fcde9c71e5f9324241faa0a6b4be4270a/tensorstore-0.1.81-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:f64fb510f293079f9e5c63cb227e8a76904655a32912fc107c1e63bd8dc3e187", size = 16501390, upload-time = "2026-02-06T18:55:13.678Z" }, + { url = "https://files.pythonhosted.org/packages/5a/93/5f40c51d7b15d3574b1788a251dd4e3abd0415dab71811e126d2da5e826b/tensorstore-0.1.81-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4282587598885ff447f08369ac9bb681a65e224888cfa8ef8f3dd63544759e6c", size = 14535592, upload-time = "2026-02-06T18:55:16.44Z" }, + { url = "https://files.pythonhosted.org/packages/76/48/b7adcc8eca502ce8050c18cea066ca0c0122df7a686e10da6470e55456b4/tensorstore-0.1.81-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9b4ea06038f6912bb6ed8a89db0c31e4e3d1b2404f3365dc756e4bc42bd6a89c", size = 19038732, upload-time = "2026-02-06T18:55:18.924Z" }, + { url = "https://files.pythonhosted.org/packages/40/b0/99294895b030bd7d9ebc06e7ed523d0c09ab65667e031f8a67923f398f86/tensorstore-0.1.81-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51d59f7db9cdae02fce9d347300c0ccfb8265052945757e95592a265eb620b15", size = 21038447, upload-time = "2026-02-06T18:55:21.085Z" }, + { url = "https://files.pythonhosted.org/packages/32/e6/1ce977baf09aa3889f10f04460b588a6c8876ea441e51090c671f0400a6f/tensorstore-0.1.81-cp311-cp311-win_amd64.whl", hash = "sha256:fdb9579a729cccc02127cab5abf26f57a0e27968ba65c9c548ad058f5a45417f", size = 13221673, upload-time = "2026-02-06T18:55:23.195Z" }, { url = 
"https://files.pythonhosted.org/packages/85/82/00037db699f74d792efe2696305ddd6932e04306899e3701824a7f7de961/tensorstore-0.1.81-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:7aefa1e3eadca804bce05215184c9cde29205ac2f3b443ca15a4e1846d31af4e", size = 16521245, upload-time = "2026-02-06T18:55:25.559Z" }, { url = "https://files.pythonhosted.org/packages/86/2e/1deca1b955cb959eec13fd342ffaa2fd84e4770b4e2bcb95a2f541875a52/tensorstore-0.1.81-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7e001d3edc6758eb5dc80556da9e945c1381f0529102fcc0301358ba6b9b70ed", size = 14543561, upload-time = "2026-02-06T18:55:27.624Z" }, { url = "https://files.pythonhosted.org/packages/6c/e4/b4343eae773f72a8777f82c5328191a06d8a5195e62105c14b7dcc49823f/tensorstore-0.1.81-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c27e07f4e91e6dc6a0878e13e2c5931d1716196b67b0df927f2f571de2576e9", size = 19043982, upload-time = "2026-02-06T18:55:30.076Z" }, @@ -7534,6 +8273,13 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" }, + { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" }, + { url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" }, { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, { url = 
"https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, @@ -7624,6 +8370,15 @@ version = "2.4.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, + { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" }, + { url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, { url = 
"https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, { url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, @@ -7706,19 +8461,25 @@ dependencies = [ { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nvshmem-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "setuptools" }, + { name = "setuptools", marker = "python_full_version >= '3.12'" }, { name = "sympy" }, { name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "typing-extensions" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/8b/4b61d6e13f7108f36910df9ab4b58fd389cc2520d54d81b88660804aad99/torch-2.10.0-2-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:418997cb02d0a0f1497cf6a09f63166f9f5df9f3e16c8a716ab76a72127c714f", size = 79423467, upload-time = "2026-02-10T21:44:48.711Z" }, { url = 
"https://files.pythonhosted.org/packages/d3/54/a2ba279afcca44bbd320d4e73675b282fcee3d81400ea1b53934efca6462/torch-2.10.0-2-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:13ec4add8c3faaed8d13e0574f5cd4a323c11655546f91fbe6afa77b57423574", size = 79498202, upload-time = "2026-02-10T21:44:52.603Z" }, { url = "https://files.pythonhosted.org/packages/ec/23/2c9fe0c9c27f7f6cb865abcea8a4568f29f00acaeadfc6a37f6801f84cb4/torch-2.10.0-2-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:e521c9f030a3774ed770a9c011751fb47c4d12029a3d6522116e48431f2ff89e", size = 79498254, upload-time = "2026-02-10T21:44:44.095Z" }, + { url = "https://files.pythonhosted.org/packages/36/ab/7b562f1808d3f65414cd80a4f7d4bb00979d9355616c034c171249e1a303/torch-2.10.0-3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac5bdcbb074384c66fa160c15b1ead77839e3fe7ed117d667249afce0acabfac", size = 915518691, upload-time = "2026-03-11T14:15:43.147Z" }, { url = "https://files.pythonhosted.org/packages/b3/7a/abada41517ce0011775f0f4eacc79659bc9bc6c361e6bfe6f7052a6b9363/torch-2.10.0-3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:98c01b8bb5e3240426dcde1446eed6f40c778091c8544767ef1168fc663a05a6", size = 915622781, upload-time = "2026-03-11T14:17:11.354Z" }, { url = "https://files.pythonhosted.org/packages/ab/c6/4dfe238342ffdcec5aef1c96c457548762d33c40b45a1ab7033bb26d2ff2/torch-2.10.0-3-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:80b1b5bfe38eb0e9f5ff09f206dcac0a87aadd084230d4a36eea5ec5232c115b", size = 915627275, upload-time = "2026-03-11T14:16:11.325Z" }, { url = "https://files.pythonhosted.org/packages/d8/f0/72bf18847f58f877a6a8acf60614b14935e2f156d942483af1ffc081aea0/torch-2.10.0-3-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:46b3574d93a2a8134b3f5475cfb98e2eb46771794c57015f6ad1fb795ec25e49", size = 915523474, upload-time = "2026-03-11T14:17:44.422Z" }, { url = 
"https://files.pythonhosted.org/packages/f4/39/590742415c3030551944edc2ddc273ea1fdfe8ffb2780992e824f1ebee98/torch-2.10.0-3-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:b1d5e2aba4eb7f8e87fbe04f86442887f9167a35f092afe4c237dfcaaef6e328", size = 915632474, upload-time = "2026-03-11T14:15:13.666Z" }, { url = "https://files.pythonhosted.org/packages/b6/8e/34949484f764dde5b222b7fe3fede43e4a6f0da9d7f8c370bb617d629ee2/torch-2.10.0-3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:0228d20b06701c05a8f978357f657817a4a63984b0c90745def81c18aedfa591", size = 915523882, upload-time = "2026-03-11T14:14:46.311Z" }, + { url = "https://files.pythonhosted.org/packages/78/89/f5554b13ebd71e05c0b002f95148033e730d3f7067f67423026cc9c69410/torch-2.10.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3282d9febd1e4e476630a099692b44fdc214ee9bf8ee5377732d9d9dfe5712e4", size = 145992610, upload-time = "2026-01-21T16:25:26.327Z" }, + { url = "https://files.pythonhosted.org/packages/ae/30/a3a2120621bf9c17779b169fc17e3dc29b230c29d0f8222f499f5e159aa8/torch-2.10.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a2f9edd8dbc99f62bc4dfb78af7bf89499bca3d753423ac1b4e06592e467b763", size = 915607863, upload-time = "2026-01-21T16:25:06.696Z" }, + { url = "https://files.pythonhosted.org/packages/6f/3d/c87b33c5f260a2a8ad68da7147e105f05868c281c63d65ed85aa4da98c66/torch-2.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:29b7009dba4b7a1c960260fc8ac85022c784250af43af9fb0ebafc9883782ebd", size = 113723116, upload-time = "2026-01-21T16:25:21.916Z" }, + { url = "https://files.pythonhosted.org/packages/61/d8/15b9d9d3a6b0c01b883787bd056acbe5cc321090d4b216d3ea89a8fcfdf3/torch-2.10.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:b7bd80f3477b830dd166c707c5b0b82a898e7b16f59a7d9d42778dd058272e8b", size = 79423461, upload-time = "2026-01-21T16:24:50.266Z" }, { url = 
"https://files.pythonhosted.org/packages/cc/af/758e242e9102e9988969b5e621d41f36b8f258bb4a099109b7a4b4b50ea4/torch-2.10.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:5fd4117d89ffd47e3dcc71e71a22efac24828ad781c7e46aaaf56bf7f2796acf", size = 145996088, upload-time = "2026-01-21T16:24:44.171Z" }, { url = "https://files.pythonhosted.org/packages/23/8e/3c74db5e53bff7ed9e34c8123e6a8bfef718b2450c35eefab85bb4a7e270/torch-2.10.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:787124e7db3b379d4f1ed54dd12ae7c741c16a4d29b49c0226a89bea50923ffb", size = 915711952, upload-time = "2026-01-21T16:23:53.503Z" }, { url = "https://files.pythonhosted.org/packages/6e/01/624c4324ca01f66ae4c7cd1b74eb16fb52596dce66dbe51eff95ef9e7a4c/torch-2.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:2c66c61f44c5f903046cc696d088e21062644cbe541c7f1c4eaae88b2ad23547", size = 113757972, upload-time = "2026-01-21T16:24:39.516Z" }, @@ -7750,6 +8511,10 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/37/de/921b6491efce5c389a5ef9bbed3d2d6660005840dae488124173180859ab/torch_c_dlpack_ext-0.1.5.tar.gz", hash = "sha256:d06f0357d575d22a168cc77acb9020fc4bae30968ceb6718a055dcbe92bacabe", size = 12913, upload-time = "2026-01-12T11:25:08.484Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/65/66/c12a9bb3a5ddc0962c00467891bf1ffdda39a4d4780bf0fbbf54523ff34e/torch_c_dlpack_ext-0.1.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:56bd25a2af19280bf8a06aa62cff5510106f43235b9327d8561b3e9a659c4d84", size = 5076782, upload-time = "2026-01-12T11:24:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/64e1e579d107064785549e70758e38a42376ab7e73d86897ed4beab10e74/torch_c_dlpack_ext-0.1.5-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fba674110e1fab0b176bb5a28223e157db65c90767d4ba74abdbee9f537b0e9d", size = 440949, upload-time = "2026-01-12T11:24:39.716Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/5c/3e1382a620824f92920ab3fae132d8fb4e85898284c99e0c6a7764e452ce/torch_c_dlpack_ext-0.1.5-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3448c4f0d64104d0b2e58080a7efa72304a04960c18f338024b80b13cd3eca26", size = 897768, upload-time = "2026-01-12T11:24:41.209Z" }, + { url = "https://files.pythonhosted.org/packages/54/4f/76ea1006b9038b496d01e916c91efd17cb782abde2491a261cf203f57e30/torch_c_dlpack_ext-0.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:74676474e0afa9a4216c4755ea7cf05e8158be1d168f6bda669ba91097c263f2", size = 1479088, upload-time = "2026-01-12T11:24:42.436Z" }, { url = "https://files.pythonhosted.org/packages/b1/67/10d236698525d7b7db4d74ec0a4b01f5b2db33968995fdd9ac6b4635e327/torch_c_dlpack_ext-0.1.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:c0f2bd51fcd99c0e5b50314e1985f2728c4941bfa821f065e6c30951d1f995ca", size = 5291237, upload-time = "2026-01-12T11:24:44.011Z" }, { url = "https://files.pythonhosted.org/packages/87/06/8d760997307a5c3be4384424667bf31aae0a42060838c532c7d846516175/torch_c_dlpack_ext-0.1.5-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3562ee411258676f9c38b8ad39306d1c8d027b6a86f6a87c920d2d009a9d1510", size = 443069, upload-time = "2026-01-12T11:24:45.451Z" }, { url = "https://files.pythonhosted.org/packages/e2/79/a914539b4785f3e44f891aa012a886edb8bc10fe081c440981c57543ce21/torch_c_dlpack_ext-0.1.5-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e6f9da4bb9af70e27facc777458be62e10dbbbddda7672d16138db0553c5a524", size = 897846, upload-time = "2026-01-12T11:24:48.168Z" }, @@ -7781,6 +8546,8 @@ dependencies = [ { name = "torch" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/b7/c66dc34a27441d78997e20d0ffe2f5ad73db9f7b1267511be255bb94ac9b/torchaudio-2.10.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:87c841a21e82703ebd4a29170c4e60c25a2b47312dc212930087ad58965ac0c8", size = 
391843, upload-time = "2026-01-21T16:28:43.093Z" }, + { url = "https://files.pythonhosted.org/packages/13/ae/a2a34a64947c4fa4a61b4c86d8f36fbcb4ebfec30fdde140267db260f96c/torchaudio-2.10.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:b2c77fb9114dd463dc805560bf55a1ac2a52e219794cc32b7b32cf2aeffd2826", size = 1894140, upload-time = "2026-01-21T16:28:35.892Z" }, { url = "https://files.pythonhosted.org/packages/ea/3f/df620439a76ece170472d41438d11a1545d5db5dc9f1eaeab8c6e055a328/torchaudio-2.10.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:42b148a0921a3721abd1f6ae098b1ec9f89703e555c4f7a0d44da87b8decbcb9", size = 391973, upload-time = "2026-01-21T16:28:39.732Z" }, { url = "https://files.pythonhosted.org/packages/98/25/e55a30d7138f8fe56ed006df25b0a3c27681f0ec7bc9989e1778e6d559c3/torchaudio-2.10.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:0e77b2956448d63790a99beed0b74ac8b8cd3a94dcdd9ad01974411078f46278", size = 1895234, upload-time = "2026-01-21T16:28:37.034Z" }, { url = "https://files.pythonhosted.org/packages/49/fd/831c2595c81b17141180ca11ab3c0836cc544ef13e15aa0e7b2cb619e582/torchaudio-2.10.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:5bc39ff3ea341097ce1ab023dd88c9dd8ca5f96ebf48821e7d23766137bb55d7", size = 392757, upload-time = "2026-01-21T16:28:33.631Z" }, @@ -7803,6 +8570,10 @@ dependencies = [ { name = "torch" }, ] wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/be/c704bceaf11c4f6b19d64337a34a877fcdfe3bd68160a8c9ae9bea4a35a3/torchvision-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:db74a551946b75d19f9996c419a799ffdf6a223ecf17c656f90da011f1d75b20", size = 1874923, upload-time = "2026-01-21T16:27:46.574Z" }, + { url = "https://files.pythonhosted.org/packages/ae/e9/f143cd71232430de1f547ceab840f68c55e127d72558b1061a71d0b193cd/torchvision-0.25.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f49964f96644dbac2506dffe1a0a7ec0f2bf8cf7a588c3319fed26e6329ffdf3", size = 2344808, upload-time = 
"2026-01-21T16:27:43.191Z" }, + { url = "https://files.pythonhosted.org/packages/43/ae/ad5d6165797de234c9658752acb4fce65b78a6a18d82efdf8367c940d8da/torchvision-0.25.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:153c0d2cbc34b7cf2da19d73450f24ba36d2b75ec9211b9962b5022fb9e4ecee", size = 8070752, upload-time = "2026-01-21T16:27:33.748Z" }, + { url = "https://files.pythonhosted.org/packages/23/19/55b28aecdc7f38df57b8eb55eb0b14a62b470ed8efeb22cdc74224df1d6a/torchvision-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:ea580ffd6094cc01914ad32f8c8118174f18974629af905cea08cb6d5d48c7b7", size = 4038722, upload-time = "2026-01-21T16:27:41.355Z" }, { url = "https://files.pythonhosted.org/packages/56/3a/6ea0d73f49a9bef38a1b3a92e8dd455cea58470985d25635beab93841748/torchvision-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2abe430c90b1d5e552680037d68da4eb80a5852ebb1c811b2b89d299b10573b", size = 1874920, upload-time = "2026-01-21T16:27:45.348Z" }, { url = "https://files.pythonhosted.org/packages/51/f8/c0e1ef27c66e15406fece94930e7d6feee4cb6374bbc02d945a630d6426e/torchvision-0.25.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:b75deafa2dfea3e2c2a525559b04783515e3463f6e830cb71de0fb7ea36fe233", size = 2344556, upload-time = "2026-01-21T16:27:40.125Z" }, { url = "https://files.pythonhosted.org/packages/68/2f/f24b039169db474e8688f649377de082a965fbf85daf4e46c44412f1d15a/torchvision-0.25.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f25aa9e380865b11ea6e9d99d84df86b9cc959f1a007cd966fc6f1ab2ed0e248", size = 8072351, upload-time = "2026-01-21T16:27:21.074Z" }, @@ -7939,6 +8710,8 @@ name = "triton" version = "3.6.0" source = { registry = "https://pypi.org/simple" } wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/2c/96f92f3c60387e14cc45aed49487f3486f89ea27106c1b1376913c62abe4/triton-3.6.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49df5ef37379c0c2b5c0012286f80174fcf0e073e5ade1ca9a86c36814553651", 
size = 176081190, upload-time = "2026-01-20T16:16:00.523Z" }, + { url = "https://files.pythonhosted.org/packages/e0/12/b05ba554d2c623bffa59922b94b0775673de251f468a9609bc9e45de95e9/triton-3.6.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8e323d608e3a9bfcc2d9efcc90ceefb764a82b99dea12a86d643c72539ad5d3", size = 188214640, upload-time = "2026-01-20T16:00:35.869Z" }, { url = "https://files.pythonhosted.org/packages/17/5d/08201db32823bdf77a0e2b9039540080b2e5c23a20706ddba942924ebcd6/triton-3.6.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:374f52c11a711fd062b4bfbb201fd9ac0a5febd28a96fb41b4a0f51dde3157f4", size = 176128243, upload-time = "2026-01-20T16:16:07.857Z" }, { url = "https://files.pythonhosted.org/packages/ab/a8/cdf8b3e4c98132f965f88c2313a4b493266832ad47fb52f23d14d4f86bb5/triton-3.6.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74caf5e34b66d9f3a429af689c1c7128daba1d8208df60e81106b115c00d6fca", size = 188266850, upload-time = "2026-01-20T16:00:43.041Z" }, { url = "https://files.pythonhosted.org/packages/3c/12/34d71b350e89a204c2c7777a9bba0dcf2f19a5bfdd70b57c4dbc5ffd7154/triton-3.6.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448e02fe6dc898e9e5aa89cf0ee5c371e99df5aa5e8ad976a80b93334f3494fd", size = 176133521, upload-time = "2026-01-20T16:16:13.321Z" }, @@ -7956,6 +8729,7 @@ name = "triton-windows" version = "3.6.0.post25" source = { registry = "https://pypi.org/simple" } wheels = [ + { url = "https://files.pythonhosted.org/packages/49/b8/2ce283452b0b9e0d239c7833626750befe94d5bbed18fb9449dcc5fa494e/triton_windows-3.6.0.post25-cp311-cp311-win_amd64.whl", hash = "sha256:5dabf103499825379c9ba877da46a4c34296466a628b539249482ab6d970708e", size = 47381466, upload-time = "2026-01-26T03:21:21.541Z" }, { url = 
"https://files.pythonhosted.org/packages/66/b1/9744fc17eded50644ffb95f3f4b1ffd1f42d646d6e0a811d92e43834865e/triton_windows-3.6.0.post25-cp312-cp312-win_amd64.whl", hash = "sha256:8361375ee4b5e0a4fe7a3c7fc2fde368ce74237396d8ff95c2e26983dd32e342", size = 47382693, upload-time = "2026-01-26T03:21:28.157Z" }, { url = "https://files.pythonhosted.org/packages/e5/cb/1f5f738cf8f6b8c6d475a92422251228a16ca2ee6f872d0f63c761f02896/triton_windows-3.6.0.post25-cp313-cp313-win_amd64.whl", hash = "sha256:d22e5f6f4896b43037d811910e2fcc5ff5f057b78f6094ab28999e4a21997b76", size = 47383937, upload-time = "2026-01-26T03:21:35.071Z" }, { url = "https://files.pythonhosted.org/packages/c7/d3/58ad68518e04a97ce0549cad98eccbafac01ddba640379776a58b513020b/triton_windows-3.6.0.post25-cp314-cp314-win_amd64.whl", hash = "sha256:6f4c4775b22cfb18e9c60aead83deb7b9b970624ae3c13cd26b9be80b5cb8cd8", size = 48566374, upload-time = "2026-01-26T03:21:41.743Z" }, @@ -8097,6 +8871,17 @@ version = "5.11.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/43/d9/3f17e3c5773fb4941c68d9a37a47b1a79c9649d6c56aefbed87cc409d18a/ujson-5.11.0.tar.gz", hash = "sha256:e204ae6f909f099ba6b6b942131cee359ddda2b6e4ea39c12eb8b991fe2010e0", size = 7156583, upload-time = "2025-08-20T11:57:02.452Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/da/ea/80346b826349d60ca4d612a47cdf3533694e49b45e9d1c07071bb867a184/ujson-5.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d7c46cb0fe5e7056b9acb748a4c35aa1b428025853032540bb7e41f46767321f", size = 55248, upload-time = "2025-08-20T11:55:19.033Z" }, + { url = "https://files.pythonhosted.org/packages/57/df/b53e747562c89515e18156513cc7c8ced2e5e3fd6c654acaa8752ffd7cd9/ujson-5.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8951bb7a505ab2a700e26f691bdfacf395bc7e3111e3416d325b513eea03a58", size = 53156, upload-time = "2025-08-20T11:55:20.174Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/b8/ab67ec8c01b8a3721fd13e5cb9d85ab2a6066a3a5e9148d661a6870d6293/ujson-5.11.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952c0be400229940248c0f5356514123d428cba1946af6fa2bbd7503395fef26", size = 57657, upload-time = "2025-08-20T11:55:21.296Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c7/fb84f27cd80a2c7e2d3c6012367aecade0da936790429801803fa8d4bffc/ujson-5.11.0-cp311-cp311-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:94fcae844f1e302f6f8095c5d1c45a2f0bfb928cccf9f1b99e3ace634b980a2a", size = 59779, upload-time = "2025-08-20T11:55:22.772Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7c/48706f7c1e917ecb97ddcfb7b1d756040b86ed38290e28579d63bd3fcc48/ujson-5.11.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7e0ec1646db172beb8d3df4c32a9d78015e671d2000af548252769e33079d9a6", size = 57284, upload-time = "2025-08-20T11:55:24.01Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ce/48877c6eb4afddfd6bd1db6be34456538c07ca2d6ed233d3f6c6efc2efe8/ujson-5.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:da473b23e3a54448b008d33f742bcd6d5fb2a897e42d1fc6e7bf306ea5d18b1b", size = 1036395, upload-time = "2025-08-20T11:55:25.725Z" }, + { url = "https://files.pythonhosted.org/packages/8b/7a/2c20dc97ad70cd7c31ad0596ba8e2cf8794d77191ba4d1e0bded69865477/ujson-5.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:aa6b3d4f1c0d3f82930f4cbd7fe46d905a4a9205a7c13279789c1263faf06dba", size = 1195731, upload-time = "2025-08-20T11:55:27.915Z" }, + { url = "https://files.pythonhosted.org/packages/15/f5/ca454f2f6a2c840394b6f162fff2801450803f4ff56c7af8ce37640b8a2a/ujson-5.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4843f3ab4fe1cc596bb7e02228ef4c25d35b4bb0809d6a260852a4bfcab37ba3", size = 1088710, upload-time = "2025-08-20T11:55:29.426Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/d3/9ba310e07969bc9906eb7548731e33a0f448b122ad9705fed699c9b29345/ujson-5.11.0-cp311-cp311-win32.whl", hash = "sha256:e979fbc469a7f77f04ec2f4e853ba00c441bf2b06720aa259f0f720561335e34", size = 39648, upload-time = "2025-08-20T11:55:31.194Z" }, + { url = "https://files.pythonhosted.org/packages/57/f7/da05b4a8819f1360be9e71fb20182f0bb3ec611a36c3f213f4d20709e099/ujson-5.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:683f57f0dd3acdd7d9aff1de0528d603aafcb0e6d126e3dc7ce8b020a28f5d01", size = 43717, upload-time = "2025-08-20T11:55:32.241Z" }, + { url = "https://files.pythonhosted.org/packages/9a/cc/f3f9ac0f24f00a623a48d97dc3814df5c2dc368cfb00031aa4141527a24b/ujson-5.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:7855ccea3f8dad5e66d8445d754fc1cf80265a4272b5f8059ebc7ec29b8d0835", size = 38402, upload-time = "2025-08-20T11:55:33.641Z" }, { url = "https://files.pythonhosted.org/packages/b9/ef/a9cb1fce38f699123ff012161599fb9f2ff3f8d482b4b18c43a2dc35073f/ujson-5.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7895f0d2d53bd6aea11743bd56e3cb82d729980636cd0ed9b89418bf66591702", size = 55434, upload-time = "2025-08-20T11:55:34.987Z" }, { url = "https://files.pythonhosted.org/packages/b1/05/dba51a00eb30bd947791b173766cbed3492269c150a7771d2750000c965f/ujson-5.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12b5e7e22a1fe01058000d1b317d3b65cc3daf61bd2ea7a2b76721fe160fa74d", size = 53190, upload-time = "2025-08-20T11:55:36.384Z" }, { url = "https://files.pythonhosted.org/packages/03/3c/fd11a224f73fbffa299fb9644e425f38b38b30231f7923a088dd513aabb4/ujson-5.11.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0180a480a7d099082501cad1fe85252e4d4bf926b40960fb3d9e87a3a6fbbc80", size = 57600, upload-time = "2025-08-20T11:55:37.692Z" }, @@ -8141,6 +8926,12 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/30/ed/5a057199fb0a5deabe0957073a1c1c1c02a3e99476cd03daee98ea21fa57/ujson-5.11.0-cp314-cp314t-win32.whl", hash = "sha256:aa6d7a5e09217ff93234e050e3e380da62b084e26b9f2e277d2606406a2fc2e5", size = 41859, upload-time = "2025-08-20T11:56:30.495Z" }, { url = "https://files.pythonhosted.org/packages/aa/03/b19c6176bdf1dc13ed84b886e99677a52764861b6cc023d5e7b6ebda249d/ujson-5.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:48055e1061c1bb1f79e75b4ac39e821f3f35a9b82de17fce92c3140149009bec", size = 46183, upload-time = "2025-08-20T11:56:31.574Z" }, { url = "https://files.pythonhosted.org/packages/5d/ca/a0413a3874b2dc1708b8796ca895bf363292f9c70b2e8ca482b7dbc0259d/ujson-5.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:1194b943e951092db611011cb8dbdb6cf94a3b816ed07906e14d3bc6ce0e90ab", size = 40264, upload-time = "2025-08-20T11:56:32.773Z" }, + { url = "https://files.pythonhosted.org/packages/50/17/30275aa2933430d8c0c4ead951cc4fdb922f575a349aa0b48a6f35449e97/ujson-5.11.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:abae0fb58cc820092a0e9e8ba0051ac4583958495bfa5262a12f628249e3b362", size = 51206, upload-time = "2025-08-20T11:56:48.797Z" }, + { url = "https://files.pythonhosted.org/packages/c3/15/42b3924258eac2551f8f33fa4e35da20a06a53857ccf3d4deb5e5d7c0b6c/ujson-5.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fac6c0649d6b7c3682a0a6e18d3de6857977378dce8d419f57a0b20e3d775b39", size = 48907, upload-time = "2025-08-20T11:56:50.136Z" }, + { url = "https://files.pythonhosted.org/packages/94/7e/0519ff7955aba581d1fe1fb1ca0e452471250455d182f686db5ac9e46119/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b42c115c7c6012506e8168315150d1e3f76e7ba0f4f95616f4ee599a1372bbc", size = 50319, upload-time = "2025-08-20T11:56:51.63Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/cf/209d90506b7d6c5873f82c5a226d7aad1a1da153364e9ebf61eff0740c33/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:86baf341d90b566d61a394869ce77188cc8668f76d7bb2c311d77a00f4bdf844", size = 56584, upload-time = "2025-08-20T11:56:52.89Z" }, + { url = "https://files.pythonhosted.org/packages/e9/97/bd939bb76943cb0e1d2b692d7e68629f51c711ef60425fa5bb6968037ecd/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4598bf3965fc1a936bd84034312bcbe00ba87880ef1ee33e33c1e88f2c398b49", size = 51588, upload-time = "2025-08-20T11:56:54.054Z" }, + { url = "https://files.pythonhosted.org/packages/52/5b/8c5e33228f7f83f05719964db59f3f9f276d272dc43752fa3bbf0df53e7b/ujson-5.11.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:416389ec19ef5f2013592f791486bef712ebce0cd59299bf9df1ba40bb2f6e04", size = 43835, upload-time = "2025-08-20T11:56:55.237Z" }, ] [[package]] @@ -8170,8 +8961,8 @@ dependencies = [ { name = "tyro" }, { name = "unsloth-zoo" }, { name = "wheel" }, - { name = "xformers", version = "0.0.33.post2", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'AMD64' and sys_platform == 'win32') or (platform_machine == 'x86_64' and sys_platform == 'win32') or (platform_machine == 'AMD64' and sys_platform != 'linux' and 'linux' in sys_platform) or (platform_machine == 'x86_64' and sys_platform != 'linux' and 'linux' in sys_platform)" }, - { name = "xformers", version = "0.0.35", source = { registry = "https://pypi.org/simple" }, marker = "(platform_machine == 'AMD64' and sys_platform == 'linux' and 'linux' in sys_platform) or (platform_machine == 'x86_64' and sys_platform == 'linux' and 'linux' in sys_platform)" }, + { name = "xformers", version = "0.0.33.post2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.12' and platform_machine == 'AMD64' and sys_platform == 'win32') or 
(python_full_version >= '3.12' and platform_machine == 'x86_64' and sys_platform == 'win32') or (python_full_version >= '3.12' and platform_machine == 'AMD64' and sys_platform != 'linux' and 'linux' in sys_platform) or (python_full_version >= '3.12' and platform_machine == 'x86_64' and sys_platform != 'linux' and 'linux' in sys_platform)" }, + { name = "xformers", version = "0.0.35", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.12' and platform_machine == 'AMD64' and 'linux' in sys_platform) or (python_full_version < '3.12' and platform_machine == 'x86_64' and 'linux' in sys_platform) or (python_full_version < '3.12' and platform_machine == 'AMD64' and sys_platform == 'win32') or (python_full_version < '3.12' and platform_machine == 'x86_64' and sys_platform == 'win32') or (platform_machine == 'AMD64' and sys_platform == 'linux' and 'linux' in sys_platform) or (platform_machine == 'x86_64' and sys_platform == 'linux' and 'linux' in sys_platform)" }, ] sdist = { url = "https://files.pythonhosted.org/packages/04/78/26b0d5299d9ccbc8ce72933729ef309f57c2991edbb6d70c41a93cb6438c/unsloth-2026.3.3.tar.gz", hash = "sha256:80cb3dd56381117175888cc7caa662ff160704a5cc39b44eee54f8d15ad8522a", size = 4855357, upload-time = "2026-03-03T16:31:25.518Z" } wheels = [ @@ -8263,6 +9054,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e7/9b/e5e99b324b1b5f0c62882230455786df0bc66f67eff3b452447e703f45d2/uuid_utils-0.14.0-cp39-abi3-win32.whl", hash = "sha256:ec2fd80adf8e0e6589d40699e6f6df94c93edcc16dd999be0438dd007c77b151", size = 177319, upload-time = "2026-01-20T20:37:04.208Z" }, { url = "https://files.pythonhosted.org/packages/d3/28/2c7d417ea483b6ff7820c948678fdf2ac98899dc7e43bb15852faa95acaf/uuid_utils-0.14.0-cp39-abi3-win_amd64.whl", hash = "sha256:efe881eb43a5504fad922644cb93d725fd8a6a6d949bd5a4b4b7d1a1587c7fd1", size = 182566, upload-time = "2026-01-20T20:37:16.868Z" }, { url = 
"https://files.pythonhosted.org/packages/b8/86/49e4bdda28e962fbd7266684171ee29b3d92019116971d58783e51770745/uuid_utils-0.14.0-cp39-abi3-win_arm64.whl", hash = "sha256:32b372b8fd4ebd44d3a219e093fe981af4afdeda2994ee7db208ab065cfcd080", size = 182809, upload-time = "2026-01-20T20:37:05.139Z" }, + { url = "https://files.pythonhosted.org/packages/f1/03/1f1146e32e94d1f260dfabc81e1649102083303fb4ad549775c943425d9a/uuid_utils-0.14.0-pp311-pypy311_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:762e8d67992ac4d2454e24a141a1c82142b5bde10409818c62adbe9924ebc86d", size = 587430, upload-time = "2026-01-20T20:37:24.998Z" }, + { url = "https://files.pythonhosted.org/packages/87/ba/d5a7469362594d885fd9219fe9e851efbe65101d3ef1ef25ea321d7ce841/uuid_utils-0.14.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:40be5bf0b13aa849d9062abc86c198be6a25ff35316ce0b89fc25f3bac6d525e", size = 298106, upload-time = "2026-01-20T20:37:23.896Z" }, + { url = "https://files.pythonhosted.org/packages/8a/11/3dafb2a5502586f59fd49e93f5802cd5face82921b3a0f3abb5f357cb879/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:191a90a6f3940d1b7322b6e6cceff4dd533c943659e0a15f788674407856a515", size = 333423, upload-time = "2026-01-20T20:37:17.828Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f2/c8987663f0cdcf4d717a36d85b5db2a5589df0a4e129aa10f16f4380ef48/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4aa4525f4ad82f9d9c842f9a3703f1539c1808affbaec07bb1b842f6b8b96aa5", size = 338659, upload-time = "2026-01-20T20:37:14.286Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c8/929d81665d83f0b2ffaecb8e66c3091a50f62c7cb5b65e678bd75a96684e/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdbd82ff20147461caefc375551595ecf77ebb384e46267f128aca45a0f2cdfc", size = 467029, upload-time = 
"2026-01-20T20:37:08.277Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a0/27d7daa1bfed7163f4ccaf52d7d2f4ad7bb1002a85b45077938b91ee584f/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff57e8a5d540006ce73cf0841a643d445afe78ba12e75ac53a95ca2924a56be", size = 333298, upload-time = "2026-01-20T20:37:07.271Z" }, + { url = "https://files.pythonhosted.org/packages/63/d4/acad86ce012b42ce18a12f31ee2aa3cbeeb98664f865f05f68c882945913/uuid_utils-0.14.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fd9112ca96978361201e669729784f26c71fecc9c13a7f8a07162c31bd4d1e2", size = 359217, upload-time = "2026-01-20T20:36:59.687Z" }, ] [[package]] @@ -8320,6 +9118,12 @@ version = "0.22.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/d5/69900f7883235562f1f50d8184bb7dd84a2fb61e9ec63f3782546fdbd057/uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9", size = 1352420, upload-time = "2025-10-16T22:16:21.187Z" }, + { url = "https://files.pythonhosted.org/packages/a8/73/c4e271b3bce59724e291465cc936c37758886a4868787da0278b3b56b905/uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77", size = 748677, upload-time = "2025-10-16T22:16:22.558Z" }, + { url = "https://files.pythonhosted.org/packages/86/94/9fb7fad2f824d25f8ecac0d70b94d0d48107ad5ece03769a9c543444f78a/uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21", size = 3753819, upload-time = "2025-10-16T22:16:23.903Z" }, + { url = "https://files.pythonhosted.org/packages/74/4f/256aca690709e9b008b7108bc85fba619a2bc37c6d80743d18abad16ee09/uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702", size = 3804529, upload-time = "2025-10-16T22:16:25.246Z" }, + { url = "https://files.pythonhosted.org/packages/7f/74/03c05ae4737e871923d21a76fe28b6aad57f5c03b6e6bfcfa5ad616013e4/uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733", size = 3621267, upload-time = "2025-10-16T22:16:26.819Z" }, + { url = "https://files.pythonhosted.org/packages/75/be/f8e590fe61d18b4a92070905497aec4c0e64ae1761498cad09023f3f4b3e/uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473", size = 3723105, upload-time = "2025-10-16T22:16:28.252Z" }, { url = "https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" }, { url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" }, { url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413, upload-time = "2025-10-16T22:16:31.644Z" }, @@ -8420,8 +9224,8 @@ dependencies = [ { name = "requests" }, { name = "sentencepiece" }, { name = "setproctitle" }, - { name = "setuptools" }, - { name = "six" }, + { name = "setuptools", marker = "python_full_version >= '3.12'" }, + { name = "six", marker = "python_full_version >= '3.12'" }, { name = "tiktoken" }, { name = "tokenizers" }, { name = "torch" }, @@ -8474,6 +9278,9 @@ version = "6.0.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/24/d9be5cd6642a6aa68352ded4b4b10fb0d7889cb7f45814fb92cecd35f101/watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c", size = 96393, upload-time = "2024-11-01T14:06:31.756Z" }, + { url = "https://files.pythonhosted.org/packages/63/7a/6013b0d8dbc56adca7fdd4f0beed381c59f6752341b12fa0886fa7afc78b/watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2", size = 88392, upload-time = "2024-11-01T14:06:32.99Z" }, + { url = "https://files.pythonhosted.org/packages/d1/40/b75381494851556de56281e053700e46bff5b37bf4c7267e858640af5a7f/watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c", size = 89019, upload-time = "2024-11-01T14:06:34.963Z" }, { url = "https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" }, { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" }, { url = "https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" }, @@ -8501,6 +9308,19 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", 
size = 448789, upload-time = "2025-10-14T15:04:34.679Z" }, + { url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" }, + { url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" }, + { url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" }, + { url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", 
size = 629432, upload-time = "2025-10-14T15:04:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473, upload-time = "2025-10-14T15:04:43.624Z" }, + { url = "https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598, upload-time = "2025-10-14T15:04:44.516Z" }, + { url = "https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210, upload-time = "2025-10-14T15:04:45.883Z" }, { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, { url = 
"https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, @@ -8560,6 +9380,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" }, { url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" }, { url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" }, + { url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" }, + { url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" }, ] [[package]] @@ -8635,6 +9459,15 @@ version = "16.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/db/de907251b4ff46ae804ad0409809504153b3f30984daf82a1d84a9875830/websockets-16.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:31a52addea25187bde0797a97d6fc3d2f92b6f72a9370792d65a6e84615ac8a8", size = 177340, upload-time = "2026-01-10T09:22:34.539Z" }, + { url = "https://files.pythonhosted.org/packages/f3/fa/abe89019d8d8815c8781e90d697dec52523fb8ebe308bf11664e8de1877e/websockets-16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:417b28978cdccab24f46400586d128366313e8a96312e4b9362a4af504f3bbad", size = 175022, upload-time = "2026-01-10T09:22:36.332Z" }, + { url = "https://files.pythonhosted.org/packages/58/5d/88ea17ed1ded2079358b40d31d48abe90a73c9e5819dbcde1606e991e2ad/websockets-16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af80d74d4edfa3cb9ed973a0a5ba2b2a549371f8a741e0800cb07becdd20f23d", size = 175319, upload-time = 
"2026-01-10T09:22:37.602Z" }, + { url = "https://files.pythonhosted.org/packages/d2/ae/0ee92b33087a33632f37a635e11e1d99d429d3d323329675a6022312aac2/websockets-16.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:08d7af67b64d29823fed316505a89b86705f2b7981c07848fb5e3ea3020c1abe", size = 184631, upload-time = "2026-01-10T09:22:38.789Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c5/27178df583b6c5b31b29f526ba2da5e2f864ecc79c99dae630a85d68c304/websockets-16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7be95cfb0a4dae143eaed2bcba8ac23f4892d8971311f1b06f3c6b78952ee70b", size = 185870, upload-time = "2026-01-10T09:22:39.893Z" }, + { url = "https://files.pythonhosted.org/packages/87/05/536652aa84ddc1c018dbb7e2c4cbcd0db884580bf8e95aece7593fde526f/websockets-16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d6297ce39ce5c2e6feb13c1a996a2ded3b6832155fcfc920265c76f24c7cceb5", size = 185361, upload-time = "2026-01-10T09:22:41.016Z" }, + { url = "https://files.pythonhosted.org/packages/6d/e2/d5332c90da12b1e01f06fb1b85c50cfc489783076547415bf9f0a659ec19/websockets-16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c1b30e4f497b0b354057f3467f56244c603a79c0d1dafce1d16c283c25f6e64", size = 184615, upload-time = "2026-01-10T09:22:42.442Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/d3f9576691cae9253b51555f841bc6600bf0a983a461c79500ace5a5b364/websockets-16.0-cp311-cp311-win32.whl", hash = "sha256:5f451484aeb5cafee1ccf789b1b66f535409d038c56966d6101740c1614b86c6", size = 178246, upload-time = "2026-01-10T09:22:43.654Z" }, + { url = "https://files.pythonhosted.org/packages/54/67/eaff76b3dbaf18dcddabc3b8c1dba50b483761cccff67793897945b37408/websockets-16.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7f0659570eefb578dacde98e24fb60af35350193e4f56e11190787bee77dac", size = 178684, upload-time = "2026-01-10T09:22:44.941Z" }, { url = 
"https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, @@ -8671,6 +9504,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/07/c98a68571dcf256e74f1f816b8cc5eae6eb2d3d5cfa44d37f801619d9166/websockets-16.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:349f83cd6c9a415428ee1005cadb5c2c56f4389bc06a9af16103c3bc3dcc8b7d", size = 174947, upload-time = "2026-01-10T09:23:36.166Z" }, + { url = "https://files.pythonhosted.org/packages/7e/52/93e166a81e0305b33fe416338be92ae863563fe7bce446b0f687b9df5aea/websockets-16.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:4a1aba3340a8dca8db6eb5a7986157f52eb9e436b74813764241981ca4888f03", size = 175260, upload-time = "2026-01-10T09:23:37.409Z" }, + { url = "https://files.pythonhosted.org/packages/56/0c/2dbf513bafd24889d33de2ff0368190a0e69f37bcfa19009ef819fe4d507/websockets-16.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f4a32d1bd841d4bcbffdcb3d2ce50c09c3909fbead375ab28d0181af89fd04da", size = 176071, upload-time = "2026-01-10T09:23:39.158Z" }, + { url = "https://files.pythonhosted.org/packages/a5/8f/aea9c71cc92bf9b6cc0f7f70df8f0b420636b6c96ef4feee1e16f80f75dd/websockets-16.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0298d07ee155e2e9fda5be8a9042200dd2e3bb0b8a38482156576f863a9d457c", size = 176968, upload-time = "2026-01-10T09:23:41.031Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3f/f70e03f40ffc9a30d817eef7da1be72ee4956ba8d7255c399a01b135902a/websockets-16.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767", size = 178735, upload-time = "2026-01-10T09:23:42.259Z" }, { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, ] @@ -8716,6 +9554,16 @@ version = 
"1.17.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7", size = 53482, upload-time = "2025-08-12T05:51:45.79Z" }, + { url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85", size = 38674, upload-time = "2025-08-12T05:51:34.629Z" }, + { url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f", size = 38959, upload-time = "2025-08-12T05:51:56.074Z" }, + { url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311", size = 82376, upload-time = "2025-08-12T05:52:32.134Z" }, + { url = "https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1", size = 83604, upload-time = "2025-08-12T05:52:11.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5", size = 82782, upload-time = "2025-08-12T05:52:12.626Z" }, + { url = "https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2", size = 82076, upload-time = "2025-08-12T05:52:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89", size = 36457, upload-time = "2025-08-12T05:53:03.936Z" }, + { url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77", size = 38745, upload-time = "2025-08-12T05:53:02.885Z" }, + { url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a", size = 36806, upload-time = "2025-08-12T05:52:53.368Z" }, { url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" }, { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" }, { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" }, @@ -8768,6 +9616,13 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/08/d5/25f7b19af3a2cb4000cac4f9e5525a40bec79f4f5d0ac9b517c0544586a0/xattr-1.3.0.tar.gz", hash = "sha256:30439fabd7de0787b27e9a6e1d569c5959854cb322f64ce7380fedbfa5035036", size = 17148, upload-time = "2025-10-13T22:16:47.353Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/64/292426ad5653e72c6e1325bbff22868a20077290d967cebb9c0624ad08b6/xattr-1.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:331a51bf8f20c27822f44054b0d760588462d3ed472d5e52ba135cf0bea510e8", size = 23448, upload-time = "2025-10-13T22:15:59.229Z" }, + { url = "https://files.pythonhosted.org/packages/63/84/6539fbe620da8e5927406e76b9c8abad8953025d5f578d792747c38a8c0e/xattr-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:196360f068b74fa0132a8c6001ce1333f095364b8f43b6fd8cdaf2f18741ef89", size = 18553, upload-time = "2025-10-13T22:16:00.151Z" }, + { url = "https://files.pythonhosted.org/packages/cc/bb/c1c2e24a49f8d13ff878fb85aabc42ea1b2f98ce08d8205b9661d517a9cc/xattr-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:405d2e4911d37f2b9400fa501acd920fe0c97fe2b2ec252cb23df4b59c000811", size = 18848, upload-time = "2025-10-13T22:16:01.046Z" }, + { url = "https://files.pythonhosted.org/packages/02/c2/a60aad150322b217dfe33695d8d9f32bc01e8f300641b6ba4b73f4b3c03f/xattr-1.3.0-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:4ae3a66ae1effd40994f64defeeaa97da369406485e60bfb421f2d781be3b75d", size = 38547, upload-time = "2025-10-13T22:16:01.973Z" }, + { url = "https://files.pythonhosted.org/packages/c6/58/2eca142bad4ea0a2be6b58d3122d0acce310c4e53fa7defd168202772178/xattr-1.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:69cd3bfe779f7ba87abe6473fdfa428460cf9e78aeb7e390cfd737b784edf1b5", size = 38753, upload-time = "2025-10-13T22:16:03.244Z" }, + { url = "https://files.pythonhosted.org/packages/2b/50/d032e5254c2c27d36bdb02abdf2735db6768a441f0e3d0f139e0f9f56638/xattr-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c5742ca61761a99ae0c522f90a39d5fb8139280f27b254e3128482296d1df2db", size = 38054, upload-time = "2025-10-13T22:16:04.656Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/458a306439aabe0083ca0a7b14c3e6a800ab9782b5ec0bdcec4ec9f3dc6c/xattr-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a04ada131e9bdfd32db3ab1efa9f852646f4f7c9d6fde0596c3825c67161be3", size = 37562, upload-time = "2025-10-13T22:16:05.97Z" }, { url = "https://files.pythonhosted.org/packages/bf/78/00bdc9290066173e53e1e734d8d8e1a84a6faa9c66aee9df81e4d9aeec1c/xattr-1.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:dd4e63614722d183e81842cb237fd1cc978d43384166f9fe22368bfcb187ebe5", size = 23476, upload-time = "2025-10-13T22:16:06.942Z" }, { url = "https://files.pythonhosted.org/packages/53/16/5243722294eb982514fa7b6b87a29dfb7b29b8e5e1486500c5babaf6e4b3/xattr-1.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:995843ef374af73e3370b0c107319611f3cdcdb6d151d629449efecad36be4c4", size = 18556, upload-time = "2025-10-13T22:16:08.209Z" }, { url = "https://files.pythonhosted.org/packages/d6/5c/d7ab0e547bea885b55f097206459bd612cefb652c5fc1f747130cbc0d42c/xattr-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fa23a25220e29d956cedf75746e3df6cc824cc1553326d6516479967c540e386", size = 18869, upload-time = 
"2025-10-13T22:16:10.319Z" }, @@ -8804,18 +9659,18 @@ version = "0.0.33.post2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'win32'", - "python_full_version >= '3.14' and sys_platform == 'emscripten'", - "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", "python_full_version == '3.13.*' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", "python_full_version == '3.13.*' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", - "python_full_version < '3.13' and sys_platform == 'win32'", - "python_full_version < '3.13' and sys_platform == 'emscripten'", - "python_full_version < '3.13' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'emscripten'", + "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", ] dependencies = [ - { name = "numpy", marker = "sys_platform != 'linux'" }, - { name = "torch", marker = "sys_platform != 'linux'" }, + { name = "numpy", marker = "python_full_version >= '3.12' and sys_platform != 'linux'" }, + { name = "torch", marker = "python_full_version >= '3.12' and sys_platform != 'linux'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0b/69/403e963d35f1b0c52a1b3127e0bc4e94e7e50ecee8c6684a8abe40e6638e/xformers-0.0.33.post2.tar.gz", hash = "sha256:647ddf26578d2b8643230467ef1f0fbfef0bbe556a546bd27a70d4855d3433e1", size = 14783914, upload-time = "2025-12-04T18:52:42.572Z" } wheels = [ @@ 
-8829,15 +9684,20 @@ source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'linux'", "python_full_version == '3.13.*' and sys_platform == 'linux'", - "python_full_version < '3.13' and sys_platform == 'linux'", + "python_full_version == '3.12.*' and sys_platform == 'linux'", + "python_full_version < '3.12' and sys_platform == 'linux'", + "python_full_version < '3.12' and sys_platform == 'win32'", + "python_full_version < '3.12' and sys_platform == 'emscripten'", + "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'linux' and sys_platform != 'win32'", ] dependencies = [ - { name = "numpy", marker = "sys_platform == 'linux'" }, - { name = "torch", marker = "sys_platform == 'linux'" }, + { name = "numpy", marker = "python_full_version < '3.12' or sys_platform == 'linux'" }, + { name = "torch", marker = "python_full_version < '3.12' or sys_platform == 'linux'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/de/5a/6e27734bd793adc44d0b8d294e67cfacf4ec590572c1aef51d683fc7a791/xformers-0.0.35.tar.gz", hash = "sha256:f7fc183a58e4bf0e2ae339a18fb1b1d4a37854c0f2545b4f360fef001646ab76", size = 4258182, upload-time = "2026-02-20T20:33:05.417Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/a4/85/6d71f9b16f2ac647877e66ed4af723b3fbd477806ab8b8a89d39a362b85f/xformers-0.0.35-py39-none-manylinux_2_28_x86_64.whl", hash = "sha256:ccc73c7db9890224ab05f5fb60e2034f9e6c8672a10be0cf00e95cbbae3eda7c", size = 3264751, upload-time = "2026-02-20T20:33:02.444Z" }, + { url = "https://files.pythonhosted.org/packages/49/0b/88c39c128a05d5b553a67cb9c4c3fc32eefb91f836f838befab9e78f8364/xformers-0.0.35-py39-none-win_amd64.whl", hash = "sha256:57381ce3cbb79b593e6b62cb20a937885345fad2796de2aa6fbb66c033601179", size = 2638618, upload-time = "2026-02-20T20:33:04.104Z" }, ] [[package]] @@ -8854,6 +9714,8 @@ dependencies = [ ] sdist = { url = 
"https://files.pythonhosted.org/packages/02/a3/70dbe3ffd331a1e7e1ad5a95690a4086e6c7cdb8089f5c7eda712219ccec/xgrammar-0.1.29.tar.gz", hash = "sha256:cf195afa81b489eebf35d4c6f37f27136d05420739ab4a6f7f065c938d7e4baa", size = 2321317, upload-time = "2025-12-19T08:23:54.53Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/87/0b/b5e5c99ce13a9d378a940cda07c5a08b50cc7efb66936c6ac8fa8232a0d5/xgrammar-0.1.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51bcfd63bd48a0b26209ffd2143a42067518559355ec9e4e574cef2ae74fac7c", size = 34699408, upload-time = "2025-12-19T08:23:16.906Z" }, + { url = "https://files.pythonhosted.org/packages/a3/a0/4ebc1b3f5af79a3f73d0566034758f3fbcd9c64174646314a9a6f7cc1d27/xgrammar-0.1.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e27b50cf8c565845295a8263a4a0790c00a7c1fd783e76222fc0f575654d6f56", size = 34903461, upload-time = "2025-12-19T08:23:19.556Z" }, { url = "https://files.pythonhosted.org/packages/57/94/18793c64bf0368075a34c06e196bf002f1e6ab0aee332268f44e8d356d5a/xgrammar-0.1.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eb370a16b27a683e5f2b9e429ab41440c69977d4a504849ed61831b94cc704c", size = 34705239, upload-time = "2025-12-19T08:23:28.369Z" }, { url = "https://files.pythonhosted.org/packages/3e/da/4c14e3e00be698009b52700f15326a23272b4b00475939b6acc86b151188/xgrammar-0.1.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79e6e4f5cd33be77418cf91efc482f2b3d773d309891224383bc8a4948ad7b07", size = 34906135, upload-time = "2025-12-19T08:23:30.838Z" }, { url = "https://files.pythonhosted.org/packages/e9/c5/e4965c9921e7bb6061f246ae7f8c7b9b1dfc21262248100c2f9b398b361e/xgrammar-0.1.29-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb22aea775971f7d8c4d0e193257ebeb71b68acd9d36af3331ca5fd4d9a46991", size = 34904126, upload-time = "2025-12-19T08:23:38.335Z" }, @@ -8865,6 +9727,21 @@ 
version = "3.6.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a", size = 32844, upload-time = "2025-10-02T14:34:14.037Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa", size = 30809, upload-time = "2025-10-02T14:34:15.484Z" }, + { url = "https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248", size = 194665, upload-time = "2025-10-02T14:34:16.541Z" }, + { url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62", size = 213550, upload-time = "2025-10-02T14:34:17.878Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f", size = 212384, 
upload-time = "2025-10-02T14:34:19.182Z" }, + { url = "https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e", size = 445749, upload-time = "2025-10-02T14:34:20.659Z" }, + { url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8", size = 193880, upload-time = "2025-10-02T14:34:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0", size = 210912, upload-time = "2025-10-02T14:34:23.937Z" }, + { url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77", size = 198654, upload-time = "2025-10-02T14:34:25.644Z" }, + { url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c", size = 210867, upload-time = "2025-10-02T14:34:27.203Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b", size = 414012, upload-time = "2025-10-02T14:34:28.409Z" }, + { 
url = "https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3", size = 191409, upload-time = "2025-10-02T14:34:29.696Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl", hash = "sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd", size = 30574, upload-time = "2025-10-02T14:34:31.028Z" }, + { url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef", size = 31481, upload-time = "2025-10-02T14:34:32.062Z" }, + { url = "https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7", size = 27861, upload-time = "2025-10-02T14:34:33.555Z" }, { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, { url = 
"https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, @@ -8940,6 +9817,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" }, { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" }, { url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, + { url = "https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0", size = 30662, upload-time = "2025-10-02T14:37:01.743Z" }, + { url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296", size = 41056, upload-time = "2025-10-02T14:37:02.879Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13", size = 36251, upload-time = "2025-10-02T14:37:04.44Z" }, + { url = "https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd", size = 32481, upload-time = "2025-10-02T14:37:05.869Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d", size = 31565, upload-time = "2025-10-02T14:37:06.966Z" }, ] [[package]] @@ -8953,6 +9835,22 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511", size = 141607, upload-time = "2025-10-06T14:09:16.298Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6", size = 94027, upload-time = "2025-10-06T14:09:17.786Z" }, + { url 
= "https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028", size = 94963, upload-time = "2025-10-06T14:09:19.662Z" }, + { url = "https://files.pythonhosted.org/packages/68/fe/2c1f674960c376e29cb0bec1249b117d11738db92a6ccc4a530b972648db/yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d", size = 368406, upload-time = "2025-10-06T14:09:21.402Z" }, + { url = "https://files.pythonhosted.org/packages/95/26/812a540e1c3c6418fec60e9bbd38e871eaba9545e94fa5eff8f4a8e28e1e/yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503", size = 336581, upload-time = "2025-10-06T14:09:22.98Z" }, + { url = "https://files.pythonhosted.org/packages/0b/f5/5777b19e26fdf98563985e481f8be3d8a39f8734147a6ebf459d0dab5a6b/yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65", size = 388924, upload-time = "2025-10-06T14:09:24.655Z" }, + { url = "https://files.pythonhosted.org/packages/86/08/24bd2477bd59c0bbd994fe1d93b126e0472e4e3df5a96a277b0a55309e89/yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e", size = 392890, upload-time = "2025-10-06T14:09:26.617Z" }, + { url = "https://files.pythonhosted.org/packages/46/00/71b90ed48e895667ecfb1eaab27c1523ee2fa217433ed77a73b13205ca4b/yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d", size = 365819, upload-time = "2025-10-06T14:09:28.544Z" }, + { url = "https://files.pythonhosted.org/packages/30/2d/f715501cae832651d3282387c6a9236cd26bd00d0ff1e404b3dc52447884/yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7", size = 363601, upload-time = "2025-10-06T14:09:30.568Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f9/a678c992d78e394e7126ee0b0e4e71bd2775e4334d00a9278c06a6cce96a/yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967", size = 358072, upload-time = "2025-10-06T14:09:32.528Z" }, + { url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed", size = 385311, upload-time = "2025-10-06T14:09:34.634Z" }, + { url = "https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6", size = 381094, upload-time = "2025-10-06T14:09:36.268Z" }, + { url = "https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e", size = 370944, upload-time = "2025-10-06T14:09:37.872Z" }, + { url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca", size = 81804, upload-time = "2025-10-06T14:09:39.359Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b", size = 86858, upload-time = "2025-10-06T14:09:41.068Z" }, + { url = "https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376", size = 81637, upload-time = "2025-10-06T14:09:42.712Z" }, { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, @@ -9051,6 +9949,23 @@ version = "0.25.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254, upload-time = "2025-09-14T22:16:26.137Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559, upload-time = "2025-09-14T22:16:27.973Z" }, + { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020, upload-time = "2025-09-14T22:16:29.523Z" }, + { url = "https://files.pythonhosted.org/packages/3b/13/2b7ed68bd85e69a2069bcc72141d378f22cae5a0f3b353a2c8f50ef30c1b/zstandard-0.25.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a", size = 5058126, upload-time = "2025-09-14T22:16:31.811Z" }, + { url = "https://files.pythonhosted.org/packages/c9/dd/fdaf0674f4b10d92cb120ccff58bbb6626bf8368f00ebfd2a41ba4a0dc99/zstandard-0.25.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc", size = 5405390, upload-time = "2025-09-14T22:16:33.486Z" }, + { url = "https://files.pythonhosted.org/packages/0f/67/354d1555575bc2490435f90d67ca4dd65238ff2f119f30f72d5cde09c2ad/zstandard-0.25.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6", size = 5452914, upload-time = 
"2025-09-14T22:16:35.277Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/e9cfd801a3f9190bf3e759c422bbfd2247db9d7f3d54a56ecde70137791a/zstandard-0.25.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072", size = 5559635, upload-time = "2025-09-14T22:16:37.141Z" }, + { url = "https://files.pythonhosted.org/packages/21/88/5ba550f797ca953a52d708c8e4f380959e7e3280af029e38fbf47b55916e/zstandard-0.25.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277", size = 5048277, upload-time = "2025-09-14T22:16:38.807Z" }, + { url = "https://files.pythonhosted.org/packages/46/c0/ca3e533b4fa03112facbe7fbe7779cb1ebec215688e5df576fe5429172e0/zstandard-0.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313", size = 5574377, upload-time = "2025-09-14T22:16:40.523Z" }, + { url = "https://files.pythonhosted.org/packages/12/9b/3fb626390113f272abd0799fd677ea33d5fc3ec185e62e6be534493c4b60/zstandard-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097", size = 4961493, upload-time = "2025-09-14T22:16:43.3Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d3/23094a6b6a4b1343b27ae68249daa17ae0651fcfec9ed4de09d14b940285/zstandard-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778", size = 5269018, upload-time = "2025-09-14T22:16:45.292Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a7/bb5a0c1c0f3f4b5e9d5b55198e39de91e04ba7c205cc46fcb0f95f0383c1/zstandard-0.25.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065", size = 5443672, upload-time = "2025-09-14T22:16:47.076Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/22/503347aa08d073993f25109c36c8d9f029c7d5949198050962cb568dfa5e/zstandard-0.25.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa", size = 5822753, upload-time = "2025-09-14T22:16:49.316Z" }, + { url = "https://files.pythonhosted.org/packages/e2/be/94267dc6ee64f0f8ba2b2ae7c7a2df934a816baaa7291db9e1aa77394c3c/zstandard-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7", size = 5366047, upload-time = "2025-09-14T22:16:51.328Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a3/732893eab0a3a7aecff8b99052fecf9f605cf0fb5fb6d0290e36beee47a4/zstandard-0.25.0-cp311-cp311-win32.whl", hash = "sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4", size = 436484, upload-time = "2025-09-14T22:16:55.005Z" }, + { url = "https://files.pythonhosted.org/packages/43/a3/c6155f5c1cce691cb80dfd38627046e50af3ee9ddc5d0b45b9b063bfb8c9/zstandard-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2", size = 506183, upload-time = "2025-09-14T22:16:52.753Z" }, + { url = "https://files.pythonhosted.org/packages/8c/3e/8945ab86a0820cc0e0cdbf38086a92868a9172020fdab8a03ac19662b0e5/zstandard-0.25.0-cp311-cp311-win_arm64.whl", hash = "sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137", size = 462533, upload-time = "2025-09-14T22:16:53.878Z" }, { url = "https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" }, { url = 
"https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" }, { url = "https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" },