# This file was autogenerated by uv via the following command:
#    uv pip compile pyproject.toml --extra accelerate --extra compressed-tensors --extra quantize --extra peft --extra outlines -o requirements_intel.txt --python-version 3.11
accelerate==1.3.0
    # via
    #   text-generation-server (pyproject.toml)
    #   peft
aiohappyeyeballs==2.4.4
    # via aiohttp
aiohttp==3.11.11
    # via
    #   datasets
    #   fsspec
aiosignal==1.3.2
    # via aiohttp
airportsdata==20241001
    # via outlines
annotated-types==0.7.0
    # via pydantic
attrs==25.1.0
    # via
    #   aiohttp
    #   jsonschema
    #   referencing
certifi==2024.8.30
    # via requests
charset-normalizer==3.4.0
    # via requests
click==8.1.7
    # via typer
cloudpickle==3.1.1
    # via outlines
compressed-tensors==0.9.1
    # via text-generation-server (pyproject.toml)
datasets==2.21.0
    # via text-generation-server (pyproject.toml)
deprecated==1.2.14
    # via
    #   opentelemetry-api
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
    #   opentelemetry-semantic-conventions
dill==0.3.8
    # via
    #   datasets
    #   multiprocess
diskcache==5.6.3
    # via outlines
einops==0.8.0
    # via text-generation-server (pyproject.toml)
filelock==3.16.1
    # via
    #   datasets
    #   huggingface-hub
    #   torch
    #   transformers
frozenlist==1.5.0
    # via
    #   aiohttp
    #   aiosignal
fsspec==2024.6.1
    # via
    #   datasets
    #   huggingface-hub
    #   torch
genson==1.3.0
    # via outlines
googleapis-common-protos==1.65.0
    # via
    #   grpcio-status
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
grpc-interceptor==0.15.4
    # via text-generation-server (pyproject.toml)
grpcio==1.68.0
    # via
    #   text-generation-server (pyproject.toml)
    #   grpc-interceptor
    #   grpcio-reflection
    #   grpcio-status
    #   opentelemetry-exporter-otlp-proto-grpc
grpcio-reflection==1.68.0
    # via text-generation-server (pyproject.toml)
grpcio-status==1.68.0
    # via text-generation-server (pyproject.toml)
hf-transfer==0.1.8
    # via text-generation-server (pyproject.toml)
huggingface-hub==0.28.1
    # via
    #   accelerate
    #   datasets
    #   peft
    #   tokenizers
    #   transformers
idna==3.10
    # via
    #   requests
    #   yarl
importlib-metadata==7.1.0
    # via opentelemetry-api
interegular==0.3.3
    # via
    #   outlines
    #   outlines-core
jinja2==3.1.5
    # via
    #   outlines
    #   torch
jsonschema==4.23.0
    # via
    #   outlines
    #   outlines-core
jsonschema-specifications==2024.10.1
    # via jsonschema
lark==1.2.2
    # via outlines
loguru==0.7.3
    # via text-generation-server (pyproject.toml)
markdown-it-py==3.0.0
    # via rich
markupsafe==3.0.2
    # via jinja2
mdurl==0.1.2
    # via markdown-it-py
mpmath==1.3.0
    # via sympy
multidict==6.1.0
    # via
    #   aiohttp
    #   yarl
multiprocess==0.70.16
    # via datasets
nest-asyncio==1.6.0
    # via outlines
networkx==3.4.2
    # via torch
numpy==1.26.4
    # via
    #   text-generation-server (pyproject.toml)
    #   accelerate
    #   datasets
    #   outlines
    #   pandas
    #   peft
    #   scipy
    #   transformers
nvidia-cublas-cu12==12.4.5.8
    # via
    #   nvidia-cudnn-cu12
    #   nvidia-cusolver-cu12
    #   torch
nvidia-cuda-cupti-cu12==12.4.127
    # via torch
nvidia-cuda-nvrtc-cu12==12.4.127
    # via torch
nvidia-cuda-runtime-cu12==12.4.127
    # via torch
nvidia-cudnn-cu12==9.1.0.70
    # via torch
nvidia-cufft-cu12==11.2.1.3
    # via torch
nvidia-curand-cu12==10.3.5.147
    # via torch
nvidia-cusolver-cu12==11.6.1.9
    # via torch
nvidia-cusparse-cu12==12.3.1.170
    # via
    #   nvidia-cusolver-cu12
    #   torch
nvidia-cusparselt-cu12==0.6.2
    # via torch
nvidia-nccl-cu12==2.21.5
    # via torch
nvidia-nvjitlink-cu12==12.4.127
    # via
    #   nvidia-cusolver-cu12
    #   nvidia-cusparse-cu12
    #   torch
nvidia-nvtx-cu12==12.4.127
    # via torch
opentelemetry-api==1.30.0
    # via
    #   text-generation-server (pyproject.toml)
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
    #   opentelemetry-instrumentation
    #   opentelemetry-instrumentation-grpc
    #   opentelemetry-sdk
    #   opentelemetry-semantic-conventions
opentelemetry-exporter-otlp==1.30.0
    # via text-generation-server (pyproject.toml)
opentelemetry-exporter-otlp-proto-common==1.30.0
    # via
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
opentelemetry-exporter-otlp-proto-grpc==1.30.0
    # via opentelemetry-exporter-otlp
opentelemetry-exporter-otlp-proto-http==1.30.0
    # via opentelemetry-exporter-otlp
opentelemetry-instrumentation==0.51b0
    # via opentelemetry-instrumentation-grpc
opentelemetry-instrumentation-grpc==0.51b0
    # via text-generation-server (pyproject.toml)
opentelemetry-proto==1.30.0
    # via
    #   opentelemetry-exporter-otlp-proto-common
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
opentelemetry-sdk==1.30.0
    # via
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
opentelemetry-semantic-conventions==0.51b0
    # via
    #   opentelemetry-instrumentation
    #   opentelemetry-instrumentation-grpc
    #   opentelemetry-sdk
outlines==0.1.14
    # via text-generation-server (pyproject.toml)
outlines-core==0.1.26
    # via outlines
packaging==24.1
    # via
    #   accelerate
    #   datasets
    #   huggingface-hub
    #   opentelemetry-instrumentation
    #   peft
    #   transformers
pandas==2.2.3
    # via datasets
peft==0.14.0
    # via text-generation-server (pyproject.toml)
pillow==11.1.0
    # via text-generation-server (pyproject.toml)
prometheus-client==0.21.1
    # via text-generation-server (pyproject.toml)
propcache==0.2.1
    # via
    #   aiohttp
    #   yarl
protobuf==5.29.3
    # via
    #   text-generation-server (pyproject.toml)
    #   googleapis-common-protos
    #   grpcio-reflection
    #   grpcio-status
    #   opentelemetry-proto
psutil==6.1.1
    # via
    #   accelerate
    #   peft
py-cpuinfo==9.0.0
    # via text-generation-server (pyproject.toml)
pyarrow==19.0.0
    # via datasets
pycountry==24.6.1
    # via outlines
pydantic==2.10.6
    # via
    #   compressed-tensors
    #   outlines
pydantic-core==2.27.2
    # via pydantic
pygments==2.18.0
    # via rich
python-dateutil==2.9.0.post0
    # via pandas
pytz==2025.1
    # via pandas
pyyaml==6.0.2
    # via
    #   accelerate
    #   datasets
    #   huggingface-hub
    #   peft
    #   transformers
referencing==0.36.2
    # via
    #   jsonschema
    #   jsonschema-specifications
    #   outlines
regex==2024.9.11
    # via transformers
requests==2.32.3
    # via
    #   datasets
    #   huggingface-hub
    #   opentelemetry-exporter-otlp-proto-http
    #   outlines
    #   transformers
rich==13.9.4
    # via
    #   text-generation-server (pyproject.toml)
    #   typer
rpds-py==0.22.3
    # via
    #   jsonschema
    #   referencing
safetensors==0.4.5
    # via
    #   text-generation-server (pyproject.toml)
    #   accelerate
    #   peft
    #   transformers
scipy==1.13.1
    # via text-generation-server (pyproject.toml)
sentencepiece==0.2.0
    # via text-generation-server (pyproject.toml)
shellingham==1.5.4
    # via typer
six==1.17.0
    # via python-dateutil
sympy==1.13.1
    # via torch
texttable==1.7.0
    # via text-generation-server (pyproject.toml)
tokenizers==0.21.0
    # via
    #   text-generation-server (pyproject.toml)
    #   transformers
torch==2.6.0
    # via
    #   accelerate
    #   compressed-tensors
    #   outlines
    #   peft
tqdm==4.66.5
    # via
    #   datasets
    #   huggingface-hub
    #   outlines
    #   peft
    #   transformers
transformers==4.49
    # via
    #   text-generation-server (pyproject.toml)
    #   compressed-tensors
    #   peft
triton==3.2.0
    # via torch
typer==0.15.1
    # via text-generation-server (pyproject.toml)
typing-extensions==4.12.2
    # via
    #   huggingface-hub
    #   opentelemetry-sdk
    #   outlines
    #   pydantic
    #   pydantic-core
    #   referencing
    #   torch
    #   typer
tzdata==2025.1
    # via pandas
urllib3==2.2.3
    # via requests
wrapt==1.16.0
    # via
    #   deprecated
    #   opentelemetry-instrumentation
    #   opentelemetry-instrumentation-grpc
xxhash==3.5.0
    # via datasets
yarl==1.18.3
    # via aiohttp
zipp==3.20.2
    # via importlib-metadata