Mirror of https://github.com/huggingface/text-generation-inference.git
Synced 2025-04-19 13:52:07 +00:00
* feat(gaudi): release ready (docs, docker image and vlm ready)
* fix(gaudi): add default argument for the dockerfile
* fix(gaudi): remove use of latest for gaudi docker image + redid gaudi benchmarking section to include best practices
32 lines · 554 B · Plaintext
.idea
target
router/tokenizer.json
*__pycache__*

backends/v2/src/client/pb
backends/v3/src/client/pb
backends/client/src/v2/pb
backends/client/src/v3/pb

# ROCm auto-generated files
*.hip
server/exllamav2
server/exllama_kernels/exllama_kernels/hip/
server/exllama_kernels/exllama_kernels/hip_func/
*_hip.cuh
server/exllama_kernels/exllama_kernels/hip_buffers.cuh
server/exllama_kernels/exllama_kernels/exllama_ext_hip.cpp

data/
load_tests/*.json
server/fbgemmm

.direnv/
.venv/

# Gaudi auto-generated files
hl-smi_log*.txt
.graph_dumps
out
hqt_output