diff --git a/Dockerfile b/Dockerfile
index 820468c94..03840b971 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -165,8 +165,9 @@ RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-ins
     git \
     && rm -rf /var/lib/apt/lists/*
 
-RUN curl -LsSf https://astral.sh/uv/install.sh | sh
-ENV PATH="$PATH:/root/.local/bin"
+# RUN curl -LsSf https://astral.sh/uv/install.sh | sh
+# ENV PATH="$PATH:/root/.local/bin"
+COPY --from=ghcr.io/astral-sh/uv:0.5.31 /uv /uvx /bin/
 
 # Install flash-attention dependencies
 # RUN pip install einops --no-cache-dir
@@ -183,19 +184,16 @@ COPY server server
 COPY server/Makefile server/Makefile
 ENV HF_KERNELS_CACHE=/kernels
 RUN cd server && \
-    uv sync --frozen --extra gen --extra bnb --extra accelerate --extra compressed-tensors --extra quantize --extra peft --extra outlines --no-install-project --active && \
+    uv sync --frozen --extra gen --extra bnb --extra accelerate --extra compressed-tensors --extra quantize --extra peft --extra outlines --extra torch --no-install-project --active && \
     make gen-server-raw && \
     kernels download .
 
 RUN cd server && \
-    uv sync --frozen --extra gen --extra bnb --extra accelerate --extra compressed-tensors --extra quantize --extra peft --extra outlines --active --python=${PYTHON_VERSION} && \
+    uv sync --frozen --extra gen --extra bnb --extra accelerate --extra compressed-tensors --extra quantize --extra peft --extra outlines --extra torch --active --python=${PYTHON_VERSION} && \
     uv pip install nvidia-nccl-cu12==2.25.1 && \
     pwd && \
     text-generation-server --help
 
-# This shouldn't be necessary.
-# RUN uv pip install torchvision --no-deps
-
 # Copy build artifacts from flash attention builder
 COPY --from=flash-att-builder /usr/src/flash-attention/build/lib.linux-x86_64-cpython-311 /usr/src/.venv/lib/python3.11/site-packages
 COPY --from=flash-att-builder /usr/src/flash-attention/csrc/layer_norm/build/lib.linux-x86_64-cpython-311 /usr/src/.venv/lib/python3.11/site-packages
diff --git a/flake.lock b/flake.lock
index 77593fd18..6e1875100 100644
--- a/flake.lock
+++ b/flake.lock
@@ -853,11 +853,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1742783666,
-        "narHash": "sha256-IwdSl51NL6V0f+mYXZR0UTKaGleOsk9zV3l6kt5SUWw=",
+        "lastModified": 1743993291,
+        "narHash": "sha256-u8GHvduU1gCtoFXvTS/wGjH1ouv5S/GRGq6MAT+sG/k=",
         "owner": "oxalica",
         "repo": "rust-overlay",
-        "rev": "60766d63c227d576510ecfb5edd3a687d56f6bc7",
+        "rev": "0cb3c8979c65dc6a5812dfe67499a8c7b8b4325b",
         "type": "github"
       },
       "original": {
diff --git a/integration-tests/models/test_transformers_llama4.py b/integration-tests/models/test_transformers_llama4.py
index a20d32847..029a9fb16 100644
--- a/integration-tests/models/test_transformers_llama4.py
+++ b/integration-tests/models/test_transformers_llama4.py
@@ -1,155 +1,155 @@
-import base64
-from io import BytesIO
-from PIL import Image
-
-import pytest
-
-
-@pytest.fixture(scope="module")
-def flash_llama4_handle(launcher):
-    with launcher("ll-re/Llama-4-Scout-17B-16E-Instruct", num_shard=8) as handle:
-        yield handle
-
-
-@pytest.fixture(scope="module")
-async def flash_llama4(flash_llama4_handle):
-    await flash_llama4_handle.health(300)
-    return flash_llama4_handle.client
-
-
-async def test_flash_llama4(flash_llama4, response_snapshot):
-    response = await flash_llama4.generate(
-        "Hello I am doing a project on the 1918 flu pandemic and I am trying to find out how many",
-        seed=42,
-        max_new_tokens=100,
-    )
-
-    assert (
-        response.generated_text
-        == " people died in the 1918 flu pandemic. Estimating the death toll of the 1918 flu pandemic is difficult because of incomplete records and because of the fact that many of the extra deaths were not attributed to the flu. Many experts believe that the 1918 flu pandemic killed between 50 and 100 million people. Iassistant\n\nThe 1918 flu pandemic, also known as the Spanish flu, is indeed one of the most devastating public health crises in human history. Estimating the exact"
-    )
-    assert response.details.generated_tokens == 100
-    assert response == response_snapshot
-
-
-async def test_flash_llama4_image_cow_dog(flash_llama4, response_snapshot):
-    image_url = "https://huggingface.co/datasets/hf-internal-testing/fixtures-captioning/resolve/main/cow_beach_1.png"
-    response = await flash_llama4.chat(
-        seed=42,
-        messages=[
-            {
-                "role": "user",
-                "content": [
-                    {"type": "image_url", "image_url": {"url": image_url}},
-                    {
-                        "type": "text",
-                        "text": "What is the breed of the dog in the image?",
-                    },
-                ],
-            },
-        ],
-        max_tokens=100,
-    )
-
-    assert (
-        response.choices[0].message.content
-        == "The image does not depict a dog; it shows a cow standing on a beach. Therefore, there is no breed of a dog to identify."
-    )
-    assert response.usage["completion_tokens"] == 30
-    assert response == response_snapshot
-
-
-async def test_flash_llama4_image_cow(flash_llama4, response_snapshot):
-    image_url = "https://huggingface.co/datasets/hf-internal-testing/fixtures-captioning/resolve/main/cow_beach_1.png"
-    response = await flash_llama4.chat(
-        seed=42,
-        messages=[
-            {
-                "role": "user",
-                "content": [
-                    {"type": "image_url", "image_url": {"url": image_url}},
-                    {"type": "text", "text": "What is shown in this image?"},
-                ],
-            },
-        ],
-        max_tokens=100,
-    )
-    assert (
-        response.choices[0].message.content
-        == "The image shows a brown cow standing on the beach with a white face and black and white marking on its ears. The cow has a white patch around its nose and mouth. The ocean and blue sky are in the background."
- ) - assert response.usage["completion_tokens"] == 46 - assert response == response_snapshot - - -# Helper function to convert a Pillow image to a base64 data URL -def image_to_data_url(img: Image.Image, fmt: str) -> str: - buffer = BytesIO() - img.save(buffer, format=fmt) - img_data = buffer.getvalue() - b64_str = base64.b64encode(img_data).decode("utf-8") - mime_type = "image/png" if fmt.upper() == "PNG" else "image/jpeg" - return f"data:{mime_type};base64,{b64_str}" - - -async def test_flash_llama4_image_base64_rgba(flash_llama4, response_snapshot): - # Create an empty 100x100 PNG image with alpha (transparent background) - img = Image.new("RGBA", (100, 100), (0, 0, 0, 0)) - data_url = image_to_data_url(img, "PNG") - response = await flash_llama4.chat( - seed=42, - messages=[ - { - "role": "user", - "content": [ - {"type": "image_url", "image_url": {"url": data_url}}, - { - "type": "text", - "text": "What do you see in this transparent image?", - }, - ], - }, - ], - max_tokens=100, - ) - assert response == response_snapshot - - -async def test_flash_llama4_image_base64_rgb_png(flash_llama4, response_snapshot): - # Create an empty 100x100 PNG image without alpha (white background) - img = Image.new("RGB", (100, 100), (255, 255, 255)) - data_url = image_to_data_url(img, "PNG") - response = await flash_llama4.chat( - seed=42, - messages=[ - { - "role": "user", - "content": [ - {"type": "image_url", "image_url": {"url": data_url}}, - {"type": "text", "text": "What do you see in this plain image?"}, - ], - }, - ], - max_tokens=100, - ) - assert response == response_snapshot - - -async def test_flash_llama4_image_base64_rgb_jpg(flash_llama4, response_snapshot): - # Create an empty 100x100 JPEG image (white background) - img = Image.new("RGB", (100, 100), (255, 255, 255)) - data_url = image_to_data_url(img, "JPEG") - response = await flash_llama4.chat( - seed=42, - messages=[ - { - "role": "user", - "content": [ - {"type": "image_url", "image_url": {"url": data_url}}, - {"type": "text", "text": "What do you see in this JPEG image?"}, - ], - }, - ], - max_tokens=100, - ) - assert response == response_snapshot +# import base64 +# from io import BytesIO +# from PIL import Image +# +# import pytest +# +# +# @pytest.fixture(scope="module") +# def flash_llama4_handle(launcher): +# with launcher("ll-re/Llama-4-Scout-17B-16E-Instruct", num_shard=8) as handle: +# yield handle +# +# +# @pytest.fixture(scope="module") +# async def flash_llama4(flash_llama4_handle): +# await flash_llama4_handle.health(300) +# return flash_llama4_handle.client +# +# +# async def test_flash_llama4(flash_llama4, response_snapshot): +# response = await flash_llama4.generate( +# "Hello I am doing a project on the 1918 flu pandemic and I am trying to find out how many", +# seed=42, +# max_new_tokens=100, +# ) +# +# assert ( +# response.generated_text +# == " people died in the 1918 flu pandemic. Estimating the death toll of the 1918 flu pandemic is difficult because of incomplete records and because of the fact that many of the extra deaths were not attributed to the flu. Many experts believe that the 1918 flu pandemic killed between 50 and 100 million people. Iassistant\n\nThe 1918 flu pandemic, also known as the Spanish flu, is indeed one of the most devastating public health crises in human history. 
Estimating the exact" +# ) +# assert response.details.generated_tokens == 100 +# assert response == response_snapshot +# +# +# async def test_flash_llama4_image_cow_dog(flash_llama4, response_snapshot): +# image_url = "https://huggingface.co/datasets/hf-internal-testing/fixtures-captioning/resolve/main/cow_beach_1.png" +# response = await flash_llama4.chat( +# seed=42, +# messages=[ +# { +# "role": "user", +# "content": [ +# {"type": "image_url", "image_url": {"url": image_url}}, +# { +# "type": "text", +# "text": "What is the breed of the dog in the image?", +# }, +# ], +# }, +# ], +# max_tokens=100, +# ) +# +# assert ( +# response.choices[0].message.content +# == "The image does not depict a dog; it shows a cow standing on a beach. Therefore, there is no breed of a dog to identify." +# ) +# assert response.usage["completion_tokens"] == 30 +# assert response == response_snapshot +# +# +# async def test_flash_llama4_image_cow(flash_llama4, response_snapshot): +# image_url = "https://huggingface.co/datasets/hf-internal-testing/fixtures-captioning/resolve/main/cow_beach_1.png" +# response = await flash_llama4.chat( +# seed=42, +# messages=[ +# { +# "role": "user", +# "content": [ +# {"type": "image_url", "image_url": {"url": image_url}}, +# {"type": "text", "text": "What is shown in this image?"}, +# ], +# }, +# ], +# max_tokens=100, +# ) +# assert ( +# response.choices[0].message.content +# == "The image shows a brown cow standing on the beach with a white face and black and white marking on its ears. The cow has a white patch around its nose and mouth. The ocean and blue sky are in the background." +# ) +# assert response.usage["completion_tokens"] == 46 +# assert response == response_snapshot +# +# +# # Helper function to convert a Pillow image to a base64 data URL +# def image_to_data_url(img: Image.Image, fmt: str) -> str: +# buffer = BytesIO() +# img.save(buffer, format=fmt) +# img_data = buffer.getvalue() +# b64_str = base64.b64encode(img_data).decode("utf-8") +# mime_type = "image/png" if fmt.upper() == "PNG" else "image/jpeg" +# return f"data:{mime_type};base64,{b64_str}" +# +# +# async def test_flash_llama4_image_base64_rgba(flash_llama4, response_snapshot): +# # Create an empty 100x100 PNG image with alpha (transparent background) +# img = Image.new("RGBA", (100, 100), (0, 0, 0, 0)) +# data_url = image_to_data_url(img, "PNG") +# response = await flash_llama4.chat( +# seed=42, +# messages=[ +# { +# "role": "user", +# "content": [ +# {"type": "image_url", "image_url": {"url": data_url}}, +# { +# "type": "text", +# "text": "What do you see in this transparent image?", +# }, +# ], +# }, +# ], +# max_tokens=100, +# ) +# assert response == response_snapshot +# +# +# async def test_flash_llama4_image_base64_rgb_png(flash_llama4, response_snapshot): +# # Create an empty 100x100 PNG image without alpha (white background) +# img = Image.new("RGB", (100, 100), (255, 255, 255)) +# data_url = image_to_data_url(img, "PNG") +# response = await flash_llama4.chat( +# seed=42, +# messages=[ +# { +# "role": "user", +# "content": [ +# {"type": "image_url", "image_url": {"url": data_url}}, +# {"type": "text", "text": "What do you see in this plain image?"}, +# ], +# }, +# ], +# max_tokens=100, +# ) +# assert response == response_snapshot +# +# +# async def test_flash_llama4_image_base64_rgb_jpg(flash_llama4, response_snapshot): +# # Create an empty 100x100 JPEG image (white background) +# img = Image.new("RGB", (100, 100), (255, 255, 255)) +# data_url = image_to_data_url(img, "JPEG") +# response = 
await flash_llama4.chat(
+#         seed=42,
+#         messages=[
+#             {
+#                 "role": "user",
+#                 "content": [
+#                     {"type": "image_url", "image_url": {"url": data_url}},
+#                     {"type": "text", "text": "What do you see in this JPEG image?"},
+#                 ],
+#             },
+#         ],
+#         max_tokens=100,
+#     )
+#     assert response == response_snapshot
diff --git a/nix/overlay.nix b/nix/overlay.nix
index 7274d903c..069fdd80d 100644
--- a/nix/overlay.nix
+++ b/nix/overlay.nix
@@ -18,8 +18,18 @@ final: prev: {
             src = final.fetchFromGitHub {
               owner = "huggingface";
               repo = "transformers";
-              rev = "v4.50.0";
-              hash = "sha256-/scrMPUY43n+XAMbwWCtmiJKXscXGLrklyDg9XZTaqw=";
+              rev = "v4.51.0";
+              hash = "sha256-dnVpc6fm1SYGcx7FegpwVVxUY6XRlsxLs5WOxYv11y8=";
+            };
+          }
+        );
+        huggingface-hub = python-super.huggingface-hub.overrideAttrs (
+          _: _: {
+            src = final.fetchFromGitHub {
+              owner = "huggingface";
+              repo = "huggingface_hub";
+              rev = "v0.30.0";
+              hash = "sha256-sz+n1uoWrSQPqJFiG/qCT6b4r08kD9MsoPZXbfWNB2o=";
             };
           }
         );
diff --git a/router/src/validation.rs b/router/src/validation.rs
index 2d1d9a3d8..dfe9dd4d2 100644
--- a/router/src/validation.rs
+++ b/router/src/validation.rs
@@ -566,7 +566,7 @@ fn fetch_image(input: &str) -> Result<(Vec<u8>, String, usize, usize), Validatio
             return Err(ValidationError::InvalidImageContent(content.to_string()));
         }
 
-        let data = STANDARD.decode(content["base64,".len()..].as_bytes())?;
+        let data = STANDARD.decode(&content["base64,".len()..])?;
         let img = if let Some(format) = format_from_mimetype(mimetype) {
             ImageReader::with_format(Cursor::new(&data), format).decode()?
         } else {
@@ -603,7 +603,7 @@ fn image_tokens(
             let mut image_string = String::with_capacity(2 * FAKE.len() + slots * IMAGE.len());
 
             image_string.push_str(FAKE);
-            image_string.extend(iter::repeat(IMAGE).take(slots));
+            image_string.extend(iter::repeat_n(IMAGE, slots));
             image_string.push_str(FAKE);
 
             if matches!(
diff --git a/server/pyproject.toml b/server/pyproject.toml
index 86cb49223..53347f521 100644
--- a/server/pyproject.toml
+++ b/server/pyproject.toml
@@ -31,11 +31,24 @@ dependencies = [
     "sentencepiece>=0.2.0",
     "tokenizers>=0.20.3",
     "typer>=0.15.1",
-    "transformers>=4.49.0",
+    "transformers>=4.51.0",
     "huggingface-hub>=0.30.1",
     "hf-xet>=1.0.0",
 ]
 
+[[tool.uv.index]]
+name = "pytorch-cu124"
+url = "https://download.pytorch.org/whl/cu124"
+explicit = true
+
+[tool.uv.sources]
+torch = [
+    { index = "pytorch-cu124", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
+]
+torchvision = [
+    { index = "pytorch-cu124", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
+]
+
 [build-system]
 requires = ["kernels>=0.1.7", "setuptools"]
 build-backend = "setuptools.build_meta"
@@ -78,6 +91,10 @@ gen = [
     "grpcio-tools>=1.69.0",
     "mypy-protobuf>=3.6.0",
 ]
+torch = [
+    "torch==2.6.0",
+    "torchvision==0.21.0",
+]
 
 [tool.pytest.ini_options]
 markers = ["private: marks tests as requiring an admin hf token (deselect with '-m \"not private\"')"]
diff --git a/server/text_generation_server/models/mllama_causal_lm.py b/server/text_generation_server/models/mllama_causal_lm.py
index 28e7489ea..c268ff9a8 100644
--- a/server/text_generation_server/models/mllama_causal_lm.py
+++ b/server/text_generation_server/models/mllama_causal_lm.py
@@ -256,12 +256,6 @@ class MllamaCausalLM(VlmCausalLM):
         max_s = batch.max_current_length
         lm_head_indices = batch.prefill_head_indices
 
-        if cu_seqlen_prefill is None and self.max_past() is not None:
-            # In decode, not prefill, we're actually overwriting the KV-cache
-            # in a circular buffer mode.
- # This makes sure the max_s for the decode pass is correct. - max_s = min(self.max_past(), max_s) - # Try to find an associated cuda graph bs = input_ids.shape[0] sorted_padded_bs = sorted([k for k in self.cuda_graphs.keys() if k >= bs]) diff --git a/server/uv.lock b/server/uv.lock index c3f3f0895..b4a95c133 100644 --- a/server/uv.lock +++ b/server/uv.lock @@ -2,10 +2,14 @@ version = 1 revision = 1 requires-python = ">=3.9" resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", - "python_full_version < '3.10'", + "(python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'win32')", + "python_full_version >= '3.12' and sys_platform != 'linux' and sys_platform != 'win32'", + "(python_full_version == '3.11.*' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform == 'win32')", + "python_full_version == '3.11.*' and sys_platform != 'linux' and sys_platform != 'win32'", + "(python_full_version == '3.10.*' and sys_platform == 'linux') or (python_full_version == '3.10.*' and sys_platform == 'win32')", + "python_full_version == '3.10.*' and sys_platform != 'linux' and sys_platform != 'win32'", + "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'win32')", + "python_full_version < '3.10' and sys_platform != 'linux' and sys_platform != 'win32'", ] [[package]] @@ -20,7 +24,8 @@ dependencies = [ { name = "psutil" }, { name = "pyyaml" }, { name = "safetensors" }, - { name = "torch" }, + { name = "torch", version = "2.6.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/85/15/0fab0260ab4069e5224e637d2e400538bb27b0dfc36f17daf68db9770d78/accelerate-1.3.0.tar.gz", hash = "sha256:518631c0adb80bd3d42fb29e7e2dc2256bcd7c786b0ba9119bbaa08611b36d9c", size = 342758 } wheels = [ @@ -189,7 +194,8 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "numpy", version = "2.2.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "torch" }, + { name = "torch", version = "2.6.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/db/9d/9382259196d7ad7f3550702390081224e673a705e75b5660ee377b592fc0/bitsandbytes-0.45.2-py3-none-manylinux_2_24_x86_64.whl", hash = "sha256:ba3a720187f518b172ebce4081049c682ae3fd8284947e22499b256ff99a2bc3", size = 69680042 }, @@ -315,7 +321,8 @@ version = "0.9.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, - { name = "torch" }, + { name = "torch", version = "2.6.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "torch", version = "2.6.0+cu124", 
source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "transformers" }, ] sdist = { url = "https://files.pythonhosted.org/packages/40/e0/d9529aae2d2425d214e5a50497df4532d3f9e21c8d2023037c701f8a37d3/compressed-tensors-0.9.1.tar.gz", hash = "sha256:3cf5cd637f0186c184dd5bbbbf941356b1225199b49c6a45bf0909d65907f686", size = 63060 } @@ -826,7 +833,8 @@ dependencies = [ { name = "huggingface-hub" }, { name = "packaging" }, { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "torch" }, + { name = "torch", version = "2.6.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/26/99/41af9dce502bb1682977fee1bc487a73fa8418cebbce16b8d27733947375/kernels-0.2.1.tar.gz", hash = "sha256:918942332819b28377b9d07070daddecfd8a5e7bab574dd3dc64a209ca6008b2", size = 9395 } @@ -1084,7 +1092,8 @@ name = "networkx" version = "3.2.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version < '3.10'", + "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'win32')", + "python_full_version < '3.10' and sys_platform != 'linux' and sys_platform != 'win32'", ] sdist = { url = "https://files.pythonhosted.org/packages/c4/80/a84676339aaae2f1cfdf9f418701dd634aef9cc76f708ef55c36ff39c3ca/networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6", size = 2073928 } wheels = [ @@ -1096,9 +1105,12 @@ name = "networkx" version = "3.4.2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", + "(python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'win32')", + "python_full_version >= '3.12' and sys_platform != 'linux' and sys_platform != 'win32'", + "(python_full_version == '3.11.*' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform == 'win32')", + "python_full_version == '3.11.*' and sys_platform != 'linux' and sys_platform != 'win32'", + "(python_full_version == '3.10.*' and sys_platform == 'linux') or (python_full_version == '3.10.*' and sys_platform == 'win32')", + "python_full_version == '3.10.*' and sys_platform != 'linux' and sys_platform != 'win32'", ] sdist = { url = "https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368 } wheels = [ @@ -1110,7 +1122,8 @@ name = "numpy" version = "2.0.2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version < '3.10'", + "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'win32')", + "python_full_version < '3.10' and sys_platform != 'linux' and sys_platform != 'win32'", ] sdist = { url = "https://files.pythonhosted.org/packages/a9/75/10dd1f8116a8b796cb2c737b674e02d02e80454bda953fa7e65d8c12b016/numpy-2.0.2.tar.gz", hash = 
"sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78", size = 18902015 } wheels = [ @@ -1165,9 +1178,12 @@ name = "numpy" version = "2.2.2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", + "(python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'win32')", + "python_full_version >= '3.12' and sys_platform != 'linux' and sys_platform != 'win32'", + "(python_full_version == '3.11.*' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform == 'win32')", + "python_full_version == '3.11.*' and sys_platform != 'linux' and sys_platform != 'win32'", + "(python_full_version == '3.10.*' and sys_platform == 'linux') or (python_full_version == '3.10.*' and sys_platform == 'win32')", + "python_full_version == '3.10.*' and sys_platform != 'linux' and sys_platform != 'win32'", ] sdist = { url = "https://files.pythonhosted.org/packages/ec/d0/c12ddfd3a02274be06ffc71f3efc6d0e457b0409c4481596881e748cb264/numpy-2.2.2.tar.gz", hash = "sha256:ed6906f61834d687738d25988ae117683705636936cc605be0bb208b23df4d8f", size = 20233295 } wheels = [ @@ -1264,7 +1280,7 @@ name = "nvidia-cudnn-cu12" version = "9.1.0.70" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu12" }, + { name = "nvidia-cublas-cu12", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/9f/fd/713452cd72343f682b1c7b9321e23829f00b842ceaedcda96e742ea0b0b3/nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f", size = 664752741 }, @@ -1275,7 +1291,7 @@ name = "nvidia-cufft-cu12" version = "11.2.1.3" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-nvjitlink-cu12" }, + { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/27/94/3266821f65b92b3138631e9c8e7fe1fb513804ac934485a8d05776e1dd43/nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f083fc24912aa410be21fa16d157fed2055dab1cc4b6934a0e03cba69eb242b9", size = 211459117 }, @@ -1294,9 +1310,9 @@ name = "nvidia-cusolver-cu12" version = "11.6.1.9" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu12" }, - { name = "nvidia-cusparse-cu12" }, - { name = "nvidia-nvjitlink-cu12" }, + { name = "nvidia-cublas-cu12", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nvidia-cusparse-cu12", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/3a/e1/5b9089a4b2a4790dfdea8b3a006052cfecff58139d5a4e34cb1a51df8d6f/nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl", hash = "sha256:19e33fa442bcfd085b3086c4ebf7e8debc07cfe01e11513cc6d332fd918ac260", size = 127936057 }, @@ -1307,7 +1323,7 @@ name = "nvidia-cusparse-cu12" version = "12.3.1.170" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-nvjitlink-cu12" }, + { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, ] wheels = [ { url = 
"https://files.pythonhosted.org/packages/db/f7/97a9ea26ed4bbbfc2d470994b8b4f338ef663be97b8f677519ac195e113d/nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ea4f11a2904e2a8dc4b1833cc1b5181cde564edd0d5cd33e3c168eff2d1863f1", size = 207454763 }, @@ -1509,7 +1525,8 @@ dependencies = [ { name = "pydantic" }, { name = "referencing" }, { name = "requests" }, - { name = "torch" }, + { name = "torch", version = "2.6.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tqdm" }, { name = "typing-extensions" }, ] @@ -1632,7 +1649,8 @@ dependencies = [ { name = "psutil" }, { name = "pyyaml" }, { name = "safetensors" }, - { name = "torch" }, + { name = "torch", version = "2.6.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tqdm" }, { name = "transformers" }, ] @@ -2400,7 +2418,8 @@ name = "scipy" version = "1.13.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version < '3.10'", + "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'win32')", + "python_full_version < '3.10' and sys_platform != 'linux' and sys_platform != 'win32'", ] dependencies = [ { name = "numpy", version = "2.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, @@ -2438,9 +2457,12 @@ name = "scipy" version = "1.15.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.12'", - "python_full_version == '3.11.*'", - "python_full_version == '3.10.*'", + "(python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'win32')", + "python_full_version >= '3.12' and sys_platform != 'linux' and sys_platform != 'win32'", + "(python_full_version == '3.11.*' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform == 'win32')", + "python_full_version == '3.11.*' and sys_platform != 'linux' and sys_platform != 'win32'", + "(python_full_version == '3.10.*' and sys_platform == 'linux') or (python_full_version == '3.10.*' and sys_platform == 'win32')", + "python_full_version == '3.10.*' and sys_platform != 'linux' and sys_platform != 'win32'", ] dependencies = [ { name = "numpy", version = "2.2.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, @@ -2629,6 +2651,12 @@ quantize = [ { name = "datasets" }, { name = "texttable" }, ] +torch = [ + { name = "torch", version = "2.6.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "torchvision", version = "0.21.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "torchvision", version = "0.21.0+cu124", source = { 
registry = "https://download.pytorch.org/whl/cu124" }, marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, +] [package.metadata] requires-dist = [ @@ -2666,10 +2694,14 @@ requires-dist = [ { name = "sentencepiece", specifier = ">=0.2.0" }, { name = "texttable", marker = "extra == 'quantize'", specifier = ">=1.6.7,<2" }, { name = "tokenizers", specifier = ">=0.20.3" }, - { name = "transformers", specifier = ">=4.49.0" }, + { name = "torch", marker = "(sys_platform == 'linux' and extra == 'torch') or (sys_platform == 'win32' and extra == 'torch')", specifier = "==2.6.0", index = "https://download.pytorch.org/whl/cu124" }, + { name = "torch", marker = "sys_platform != 'linux' and sys_platform != 'win32' and extra == 'torch'", specifier = "==2.6.0" }, + { name = "torchvision", marker = "(sys_platform == 'linux' and extra == 'torch') or (sys_platform == 'win32' and extra == 'torch')", specifier = "==0.21.0", index = "https://download.pytorch.org/whl/cu124" }, + { name = "torchvision", marker = "sys_platform != 'linux' and sys_platform != 'win32' and extra == 'torch'", specifier = "==0.21.0" }, + { name = "transformers", specifier = ">=4.51.0" }, { name = "typer", specifier = ">=0.15.1" }, ] -provides-extras = ["accelerate", "bnb", "compressed-tensors", "peft", "outlines", "dev", "quantize", "gen"] +provides-extras = ["accelerate", "bnb", "compressed-tensors", "peft", "outlines", "dev", "quantize", "gen", "torch"] [[package]] name = "texttable" @@ -2748,12 +2780,46 @@ wheels = [ name = "torch" version = "2.6.0" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version == '3.11.*' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version == '3.10.*' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version < '3.10' and sys_platform != 'linux' and sys_platform != 'win32'", +] dependencies = [ - { name = "filelock" }, - { name = "fsspec" }, - { name = "jinja2" }, - { name = "networkx", version = "3.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "filelock", marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "fsspec", marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "jinja2", marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "networkx", version = "3.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10' and sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10' and sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "setuptools", marker = "python_full_version >= '3.12' and sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "sympy", marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "typing-extensions", marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/16/ea1b7842413a7b8a5aaa5e99e8eaf3da3183cc3ab345ad025a07ff636301/torch-2.6.0-cp310-none-macosx_11_0_arm64.whl", hash = 
"sha256:09e06f9949e1a0518c5b09fe95295bc9661f219d9ecb6f9893e5123e10696628", size = 66520221 }, + { url = "https://files.pythonhosted.org/packages/0b/fa/f33a4148c6fb46ca2a3f8de39c24d473822d5774d652b66ed9b1214da5f7/torch-2.6.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:94fc63b3b4bedd327af588696559f68c264440e2503cc9e6954019473d74ae21", size = 66530713 }, + { url = "https://files.pythonhosted.org/packages/81/b4/605ae4173aa37fb5aa14605d100ff31f4f5d49f617928c9f486bb3aaec08/torch-2.6.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:9a610afe216a85a8b9bc9f8365ed561535c93e804c2a317ef7fabcc5deda0989", size = 66532538 }, + { url = "https://files.pythonhosted.org/packages/88/8b/d60c0491ab63634763be1537ad488694d316ddc4a20eaadd639cedc53971/torch-2.6.0-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:ff96f4038f8af9f7ec4231710ed4549da1bdebad95923953a25045dcf6fd87e2", size = 66536783 }, + { url = "https://files.pythonhosted.org/packages/b3/17/41f681b87290a1d2f1394f943e470f8b0b3c2987b7df8dc078d8831fce5b/torch-2.6.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:265f70de5fd45b864d924b64be1797f86e76c8e48a02c2a3a6fc7ec247d2226c", size = 66520446 }, +] + +[[package]] +name = "torch" +version = "2.6.0+cu124" +source = { registry = "https://download.pytorch.org/whl/cu124" } +resolution-markers = [ + "(python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'win32')", + "(python_full_version == '3.11.*' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform == 'win32')", + "(python_full_version == '3.10.*' and sys_platform == 'linux') or (python_full_version == '3.10.*' and sys_platform == 'win32')", + "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'win32')", +] +dependencies = [ + { name = "filelock", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "fsspec", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "jinja2", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "networkx", version = "3.2.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'win32')" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and sys_platform == 'linux') or (python_full_version >= '3.10' and sys_platform == 'win32')" }, { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, @@ -2767,32 +2833,76 @@ dependencies = [ { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "setuptools", marker = "python_full_version >= '3.12'" }, - { name = "sympy" }, + { name = "setuptools", marker = "(python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'win32')" }, + { name = "sympy", marker = "sys_platform == 'linux' or sys_platform == 
'win32'" }, { name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "typing-extensions" }, + { name = "typing-extensions", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/37/81/aa9ab58ec10264c1abe62c8b73f5086c3c558885d6beecebf699f0dbeaeb/torch-2.6.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:6860df13d9911ac158f4c44031609700e1eba07916fff62e21e6ffa0a9e01961", size = 766685561 }, - { url = "https://files.pythonhosted.org/packages/86/86/e661e229df2f5bfc6eab4c97deb1286d598bbeff31ab0cdb99b3c0d53c6f/torch-2.6.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c4f103a49830ce4c7561ef4434cc7926e5a5fe4e5eb100c19ab36ea1e2b634ab", size = 95751887 }, - { url = "https://files.pythonhosted.org/packages/20/e0/5cb2f8493571f0a5a7273cd7078f191ac252a402b5fb9cb6091f14879109/torch-2.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:56eeaf2ecac90da5d9e35f7f35eb286da82673ec3c582e310a8d1631a1c02341", size = 204165139 }, - { url = "https://files.pythonhosted.org/packages/e5/16/ea1b7842413a7b8a5aaa5e99e8eaf3da3183cc3ab345ad025a07ff636301/torch-2.6.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:09e06f9949e1a0518c5b09fe95295bc9661f219d9ecb6f9893e5123e10696628", size = 66520221 }, - { url = "https://files.pythonhosted.org/packages/78/a9/97cbbc97002fff0de394a2da2cdfa859481fdca36996d7bd845d50aa9d8d/torch-2.6.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:7979834102cd5b7a43cc64e87f2f3b14bd0e1458f06e9f88ffa386d07c7446e1", size = 766715424 }, - { url = "https://files.pythonhosted.org/packages/6d/fa/134ce8f8a7ea07f09588c9cc2cea0d69249efab977707cf67669431dcf5c/torch-2.6.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:ccbd0320411fe1a3b3fec7b4d3185aa7d0c52adac94480ab024b5c8f74a0bf1d", size = 95759416 }, - { url = "https://files.pythonhosted.org/packages/11/c5/2370d96b31eb1841c3a0883a492c15278a6718ccad61bb6a649c80d1d9eb/torch-2.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:46763dcb051180ce1ed23d1891d9b1598e07d051ce4c9d14307029809c4d64f7", size = 204164970 }, - { url = "https://files.pythonhosted.org/packages/0b/fa/f33a4148c6fb46ca2a3f8de39c24d473822d5774d652b66ed9b1214da5f7/torch-2.6.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:94fc63b3b4bedd327af588696559f68c264440e2503cc9e6954019473d74ae21", size = 66530713 }, - { url = "https://files.pythonhosted.org/packages/e5/35/0c52d708144c2deb595cd22819a609f78fdd699b95ff6f0ebcd456e3c7c1/torch-2.6.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:2bb8987f3bb1ef2675897034402373ddfc8f5ef0e156e2d8cfc47cacafdda4a9", size = 766624563 }, - { url = "https://files.pythonhosted.org/packages/01/d6/455ab3fbb2c61c71c8842753b566012e1ed111e7a4c82e0e1c20d0c76b62/torch-2.6.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:b789069020c5588c70d5c2158ac0aa23fd24a028f34a8b4fcb8fcb4d7efcf5fb", size = 95607867 }, - { url = "https://files.pythonhosted.org/packages/18/cf/ae99bd066571656185be0d88ee70abc58467b76f2f7c8bfeb48735a71fe6/torch-2.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:7e1448426d0ba3620408218b50aa6ada88aeae34f7a239ba5431f6c8774b1239", size = 204120469 }, - { url = "https://files.pythonhosted.org/packages/81/b4/605ae4173aa37fb5aa14605d100ff31f4f5d49f617928c9f486bb3aaec08/torch-2.6.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:9a610afe216a85a8b9bc9f8365ed561535c93e804c2a317ef7fabcc5deda0989", size = 66532538 }, - { url = 
"https://files.pythonhosted.org/packages/24/85/ead1349fc30fe5a32cadd947c91bda4a62fbfd7f8c34ee61f6398d38fb48/torch-2.6.0-cp313-cp313-manylinux1_x86_64.whl", hash = "sha256:4874a73507a300a5d089ceaff616a569e7bb7c613c56f37f63ec3ffac65259cf", size = 766626191 }, - { url = "https://files.pythonhosted.org/packages/dd/b0/26f06f9428b250d856f6d512413e9e800b78625f63801cbba13957432036/torch-2.6.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:a0d5e1b9874c1a6c25556840ab8920569a7a4137afa8a63a32cee0bc7d89bd4b", size = 95611439 }, - { url = "https://files.pythonhosted.org/packages/c2/9c/fc5224e9770c83faed3a087112d73147cd7c7bfb7557dcf9ad87e1dda163/torch-2.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:510c73251bee9ba02ae1cb6c9d4ee0907b3ce6020e62784e2d7598e0cfa4d6cc", size = 204126475 }, - { url = "https://files.pythonhosted.org/packages/88/8b/d60c0491ab63634763be1537ad488694d316ddc4a20eaadd639cedc53971/torch-2.6.0-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:ff96f4038f8af9f7ec4231710ed4549da1bdebad95923953a25045dcf6fd87e2", size = 66536783 }, - { url = "https://files.pythonhosted.org/packages/40/bb/feb5644baa621fd8e1e88bf51f6fa38ab3f985d472a764144ff4867ac1d6/torch-2.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:9ea955317cfcd3852b1402b62af258ce735c2edeee42ca9419b6bc889e5ae053", size = 766680961 }, - { url = "https://files.pythonhosted.org/packages/ee/11/08774a8198a33263947c59e04b8a0bf85a61a44e82100c46cf833bbce35e/torch-2.6.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:bb2c6c3e65049f081940f5ab15c9136c7de40d3f01192541c920a07c7c585b7e", size = 95782656 }, - { url = "https://files.pythonhosted.org/packages/c1/0d/56fb07032accbfebb4555638b6002ec5678d0942da85497e40f9405ab756/torch-2.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:683410f97984103148e31b38a8631acf31c3034c020c0f4d26171e7626d8317a", size = 204061417 }, - { url = "https://files.pythonhosted.org/packages/b3/17/41f681b87290a1d2f1394f943e470f8b0b3c2987b7df8dc078d8831fce5b/torch-2.6.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:265f70de5fd45b864d924b64be1797f86e76c8e48a02c2a3a6fc7ec247d2226c", size = 66520446 }, + { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp310-cp310-linux_x86_64.whl", hash = "sha256:7f2ba7f7c0459320a521696f6b5bccc187f59890b23c9dfb6c49b0b87c6bfc97" }, + { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp310-cp310-win_amd64.whl", hash = "sha256:7cc45c5b39d74875cfafe908b7f55c544147cc16b01e795feb2fe766583efe78" }, + { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp311-cp311-linux_x86_64.whl", hash = "sha256:d4c3e9a8d31a7c0fcbb9da17c31a1917e1fac26c566a4cfbd8c9568ad7cade79" }, + { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp311-cp311-win_amd64.whl", hash = "sha256:6a1fb2714e9323f11edb6e8abf7aad5f79e45ad25c081cde87681a18d99c29eb" }, + { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp312-cp312-linux_x86_64.whl", hash = "sha256:a393b506844035c0dac2f30ea8478c343b8e95a429f06f3b3cadfc7f53adb597" }, + { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp312-cp312-win_amd64.whl", hash = "sha256:3313061c1fec4c7310cf47944e84513dcd27b6173b72a349bb7ca68d0ee6e9c0" }, + { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp313-cp313-linux_x86_64.whl", hash = "sha256:0f3bc53c988ce9568cd876a2a5316761e84a8704135ec8068f5f81b4417979cb" }, + { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp313-cp313-win_amd64.whl", hash = 
"sha256:519330eef09534acad8110b6f423d2fe58c1d8e9ada999ed077a637a0021f908" }, + { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp313-cp313t-linux_x86_64.whl", hash = "sha256:35cba404c0d742406cdcba1609085874bc60facdfbc50e910c47a92405fef44c" }, + { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp39-cp39-linux_x86_64.whl", hash = "sha256:e661267cd0242462ab100bdd67f651988aa9f67eb31609d6909afcac891df612" }, + { url = "https://download.pytorch.org/whl/cu124/torch-2.6.0%2Bcu124-cp39-cp39-win_amd64.whl", hash = "sha256:c2eb62b99161d87be486c88fd82441274cc892bce8c48dbc28c055cb147732ce" }, +] + +[[package]] +name = "torchvision" +version = "0.21.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version == '3.11.*' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version == '3.10.*' and sys_platform != 'linux' and sys_platform != 'win32'", + "python_full_version < '3.10' and sys_platform != 'linux' and sys_platform != 'win32'", +] +dependencies = [ + { name = "numpy", version = "2.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10' and sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "numpy", version = "2.2.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10' and sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "pillow", marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, + { name = "torch", version = "2.6.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform != 'linux' and sys_platform != 'win32'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/0d/143bd264876fad17c82096b6c2d433f1ac9b29cdc69ee45023096976ee3d/torchvision-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:044ea420b8c6c3162a234cada8e2025b9076fa82504758cd11ec5d0f8cd9fa37", size = 1784140 }, + { url = "https://files.pythonhosted.org/packages/29/88/00c69db213ee2443ada8886ec60789b227e06bb869d85ee324578221a7f7/torchvision-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:110d115333524d60e9e474d53c7d20f096dbd8a080232f88dddb90566f90064c", size = 1784141 }, + { url = "https://files.pythonhosted.org/packages/6e/1b/28f527b22d5e8800184d0bc847f801ae92c7573a8c15979d92b7091c0751/torchvision-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:97a5814a93c793aaf0179cfc7f916024f4b63218929aee977b645633d074a49f", size = 1784140 }, + { url = "https://files.pythonhosted.org/packages/f9/56/47d456b61c3bbce7bed4af3925c83d405bb87468e659fd3cf3d9840c3b51/torchvision-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:659b76c86757cb2ee4ca2db245e0740cfc3081fef46f0f1064d11adb4a8cee31", size = 1784141 }, + { url = "https://files.pythonhosted.org/packages/49/d5/d18c5d89cbe32015b033f1fa06918c7cdd5c0af0c03e55d72a3cc2d768f8/torchvision-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c22caeaae8b3c36d93459f1a5294e6f43306cff856ed243189a229331a404b4", size = 1784154 }, +] + +[[package]] +name = "torchvision" +version = "0.21.0+cu124" +source = { registry = "https://download.pytorch.org/whl/cu124" } +resolution-markers = [ + "(python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'win32')", + "(python_full_version == '3.11.*' and sys_platform == 'linux') or (python_full_version == '3.11.*' and sys_platform == 
'win32')", + "(python_full_version == '3.10.*' and sys_platform == 'linux') or (python_full_version == '3.10.*' and sys_platform == 'win32')", + "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'win32')", +] +dependencies = [ + { name = "numpy", version = "2.0.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.10' and sys_platform == 'linux') or (python_full_version < '3.10' and sys_platform == 'win32')" }, + { name = "numpy", version = "2.2.2", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.10' and sys_platform == 'linux') or (python_full_version >= '3.10' and sys_platform == 'win32')" }, + { name = "pillow", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "torch", version = "2.6.0+cu124", source = { registry = "https://download.pytorch.org/whl/cu124" }, marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, +] +wheels = [ + { url = "https://download.pytorch.org/whl/cu124/torchvision-0.21.0%2Bcu124-cp310-cp310-linux_x86_64.whl", hash = "sha256:3d3e74018eaa7837c73e3764dad3b7792b7544401c25a42977e9744303731bd3" }, + { url = "https://download.pytorch.org/whl/cu124/torchvision-0.21.0%2Bcu124-cp310-cp310-win_amd64.whl", hash = "sha256:0c6aefb70ab2b312065240c804e459ac7b0e449867afd469b38d2fd47f9391a7" }, + { url = "https://download.pytorch.org/whl/cu124/torchvision-0.21.0%2Bcu124-cp311-cp311-linux_x86_64.whl", hash = "sha256:137376805aca5ba57bd2c7a3ecb8569df961dbe82b128aac9b3b0a7125ef9385" }, + { url = "https://download.pytorch.org/whl/cu124/torchvision-0.21.0%2Bcu124-cp311-cp311-win_amd64.whl", hash = "sha256:000a013584ad2304ab30496318145f284ac364622addb5ee3a5abd2769ba146f" }, + { url = "https://download.pytorch.org/whl/cu124/torchvision-0.21.0%2Bcu124-cp312-cp312-linux_x86_64.whl", hash = "sha256:efb53ea0af7bf09b7b53e2a18b9be6d245f7d46a90b51d5cf97f37e9b929a991" }, + { url = "https://download.pytorch.org/whl/cu124/torchvision-0.21.0%2Bcu124-cp312-cp312-win_amd64.whl", hash = "sha256:ec63c2ee792757492da40590e34b14f2fceda29050558c215f0c1f3b08149c0f" }, + { url = "https://download.pytorch.org/whl/cu124/torchvision-0.21.0%2Bcu124-cp313-cp313-linux_x86_64.whl", hash = "sha256:4b70acf3b4b96a0ceb1374116626c9bef9e8be016b57b1284e482260ca1896d6" }, + { url = "https://download.pytorch.org/whl/cu124/torchvision-0.21.0%2Bcu124-cp313-cp313-win_amd64.whl", hash = "sha256:8fcf55321b206de70ff8e01c884fa42e57a60b1cb749341b96e0f22c8a7c9ec7" }, + { url = "https://download.pytorch.org/whl/cu124/torchvision-0.21.0%2Bcu124-cp39-cp39-linux_x86_64.whl", hash = "sha256:6afb21a22f5497e08ea4dbd4544472330d8249bf09dafd239302552cad6906b2" }, + { url = "https://download.pytorch.org/whl/cu124/torchvision-0.21.0%2Bcu124-cp39-cp39-win_amd64.whl", hash = "sha256:579b6a7fffc34a860c57a7131221ef125831f5961431f8da15760ab1ef752d44" }, ] [[package]] @@ -2809,7 +2919,7 @@ wheels = [ [[package]] name = "transformers" -version = "4.49.0" +version = "4.51.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -2824,9 +2934,9 @@ dependencies = [ { name = "tokenizers" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/79/50/46573150944f46df8ec968eda854023165a84470b42f69f67c7d475dabc5/transformers-4.49.0.tar.gz", hash = "sha256:7e40e640b5b8dc3f48743f5f5adbdce3660c82baafbd3afdfc04143cdbd2089e", size = 8610952 } +sdist = { url = 
"https://files.pythonhosted.org/packages/38/75/6ebdae4d6f4574f47139a070445245537e43482d006f615af8e23d5bf05e/transformers-4.51.0.tar.gz", hash = "sha256:2d302563ff6c2cc2d0e88ef352cf059f9a21ce18102fd43662bb1246f70b8a84", size = 8925571 } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/37/1f29af63e9c30156a3ed6ebc2754077016577c094f31de7b2631e5d379eb/transformers-4.49.0-py3-none-any.whl", hash = "sha256:6b4fded1c5fee04d384b1014495b4235a2b53c87503d7d592423c06128cbbe03", size = 9970275 }, + { url = "https://files.pythonhosted.org/packages/6f/db/7ee15028d5130929aa0b1b85bab6d8bafe806254d3b5c56c42a0066cceb8/transformers-4.51.0-py3-none-any.whl", hash = "sha256:2e6baa476735ab8adccbaee6961525a0d1ce8c21d49293af30ef5ee4b082f64d", size = 10362017 }, ] [[package]]