Mirror of https://github.com/huggingface/text-generation-inference.git (synced 2025-04-24 08:22:07 +00:00)
Make the Nix-based Docker container work on non-NixOS
On NixOS, the CUDA driver shim gets mounted at /run/opengl-driver, which is where Nix packages expect to find it. On other distributions, however, the container toolkit mounts the driver libraries at FHS paths such as /usr/lib64 instead. This is a small change that lets the dynamic loader find the shim there as well.
parent 8b91f92978
commit 2158aaa3d9
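For context on why /run/opengl-driver is where "Nix packages expect the shim to be": binaries from nixpkgs that need the GPU userspace driver typically get that path appended to their runpath, for example via nixpkgs' addDriverRunpath setup hook. A minimal, hypothetical sketch (the package and binary names are invented for illustration and are not part of this commit):

{ stdenv, addDriverRunpath }:

stdenv.mkDerivation {
  # Hypothetical derivation, for illustration only.
  pname = "example-cuda-app";
  version = "0.1";
  src = ./.;

  nativeBuildInputs = [ addDriverRunpath ];

  # Append /run/opengl-driver/lib to the binary's runpath so that, on NixOS,
  # the dynamic loader can resolve the CUDA/GL driver shim mounted at
  # /run/opengl-driver.
  postFixup = ''
    addDriverRunpath $out/bin/example-cuda-app
  '';
}

On a non-NixOS host no such mount exists at that path, which is what the change below addresses.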
@@ -176,11 +176,15 @@
           '';
         };

-        dockerImage = pkgs.callPackage nix/docker.nix {
+        # Use plain nixpkgs without overlays for dockerTools. dockerTools
+        # uses a Python package for computing the layers from the transitive
+        # closure. However, this needs a lot of rebuilds due to our overlay.
+
+        dockerImage = nixpkgs.legacyPackages.${system}.callPackage nix/docker.nix {
           text-generation-inference = default;
         };

-        dockerImageStreamed = pkgs.callPackage nix/docker.nix {
+        dockerImageStreamed = nixpkgs.legacyPackages.${system}.callPackage nix/docker.nix {
           text-generation-inference = default;
           stream = true;
         };

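The hunk above (presumably from flake.nix) switches both image outputs to an overlay-free nixpkgs so that dockerTools' layer computation does not trigger rebuilds caused by the project overlay. The nix/docker.nix it calls is not shown in full in this commit; as a rough, hypothetical sketch of a callPackage-style file with the interface used above (only the Env entries are confirmed by the hunk below, everything else is an assumption):

# Hypothetical sketch of nix/docker.nix, not the actual file.
{ dockerTools, text-generation-inference, stream ? false }:

let
  # streamLayeredImage produces a script that streams the image to stdout;
  # buildLayeredImage produces the image tarball in the Nix store.
  build = if stream then dockerTools.streamLayeredImage else dockerTools.buildLayeredImage;
in
build {
  name = "text-generation-inference";

  config = {
    # Assumed entry point; the launcher binary name is not part of this diff.
    Entrypoint = [ "${text-generation-inference}/bin/text-generation-launcher" ];
    Env = [
      "HF_HOME=/data"
      "PORT=80"
    ];
  };
}
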
@@ -18,6 +18,10 @@ build {
     Env = [
       "HF_HOME=/data"
       "PORT=80"
+      # The CUDA container toolkit will mount the driver shim into the
+      # container. We just have to ensure that the dynamic loader finds
+      # the libraries.
+      "LD_LIBRARY_PATH=/usr/lib64"
     ];

   };