2023-11-27 13:08:12 +00:00
|
|
|
import torch
|
2024-05-31 15:57:01 +00:00
|
|
|
from loguru import logger
|
2024-07-01 12:32:54 +00:00
|
|
|
import os
|
2023-11-27 13:08:12 +00:00
|
|
|
|
2024-04-26 17:19:55 +00:00
|
|
|
|
2024-07-26 14:29:09 +00:00
|
|
|
import importlib.util
|
|
|
|
|
|
|
|
|
2024-06-25 10:21:29 +00:00
|
|
|
def is_ipex_available():
    """Return True if Intel Extension for PyTorch (IPEX) is installed.

    Only checks for the package's presence via the import machinery; the
    module itself is not imported here.
    """
    spec = importlib.util.find_spec("intel_extension_for_pytorch")
    return spec is not None
|
2024-04-26 13:48:58 +00:00
|
|
|
|
2024-04-26 17:19:55 +00:00
|
|
|
|
2024-05-13 10:44:30 +00:00
|
|
|
def get_cuda_free_memory(device, memory_fraction):
    """Return the CUDA memory (bytes) this process may still allocate.

    Starts from the device's currently free memory and subtracts the share
    of total memory reserved for other users, i.e. ``(1 - memory_fraction)``
    of the card. Clamped at zero so callers never see a negative budget.

    Args:
        device: torch device (or index) to query.
        memory_fraction: fraction of total GPU memory this process may use.
    """
    currently_free, _ = torch.cuda.mem_get_info(device)
    total = torch.cuda.get_device_properties(device).total_memory
    reserved_for_others = (1 - memory_fraction) * total
    return max(0, currently_free - reserved_for_others)
|
|
|
|
|
|
|
|
|
2024-05-23 12:11:08 +00:00
|
|
|
def get_xpu_free_memory(device, memory_fraction):
    """Return the XPU memory (bytes) this process may still allocate.

    Budget is 90% of total device memory scaled by the effective fraction,
    minus what the allocator has already reserved; clamped at zero.

    NOTE(review): the ``memory_fraction`` argument is ignored — the effective
    fraction is always read from the ``XPU_MEMORY_FRACTION`` env var
    (default "1.0"). Kept as-is to preserve existing behavior.

    Args:
        device: torch device whose ``.index`` identifies the XPU card.
        memory_fraction: unused (see note above).
    """
    total = torch.xpu.get_device_properties(device).total_memory
    idx = device.index
    fraction = float(os.getenv("XPU_MEMORY_FRACTION", "1.0"))
    budget = int(total * 0.9 * fraction - torch.xpu.memory_reserved(idx))
    return max(0, budget)
|
|
|
|
|
|
|
|
|
2024-06-25 10:21:29 +00:00
|
|
|
def get_cpu_free_memory(device, memory_fraction):
    """Return the host RAM (bytes) this rank may use.

    Takes 95% of currently-available system memory and splits it evenly
    across all ranks (``WORLD_SIZE``).

    NOTE(review): ``device`` and ``memory_fraction`` are unused here —
    signature is kept parallel to the CUDA/XPU variants.
    """
    # Imported lazily so CPU-only helpers don't cost anything on GPU setups.
    import psutil
    from text_generation_server.utils.dist import WORLD_SIZE

    available = psutil.virtual_memory().available
    return int(available * 0.95 / WORLD_SIZE)
|
|
|
|
|
|
|
|
|
2024-06-25 11:20:57 +00:00
|
|
|
def noop(*args, **kwargs):
    """Do nothing; accepts any arguments.

    Stand-in for device-specific cache/sync operations when no accelerator
    backend is available.
    """
    return None
|
|
|
|
|
|
|
|
|
2024-05-13 10:44:30 +00:00
|
|
|
# Detect the accelerator backend once at import time and bind the matching
# cache/synchronize/free-memory helpers for the rest of the package to use.
SYSTEM = None
if torch.version.hip is not None:
    # ROCm builds of torch expose the accelerator through the torch.cuda
    # namespace, so the HIP branch reuses the CUDA helpers.
    SYSTEM = "rocm"
    empty_cache = torch.cuda.empty_cache
    synchronize = torch.cuda.synchronize
    get_free_memory = get_cuda_free_memory
elif torch.version.cuda is not None and torch.cuda.is_available():
    SYSTEM = "cuda"
    empty_cache = torch.cuda.empty_cache
    synchronize = torch.cuda.synchronize
    get_free_memory = get_cuda_free_memory
elif is_ipex_available():
    SYSTEM = "ipex"
    # Importing IPEX for its side effects (presumably it registers the
    # torch.xpu backend checked below — confirm against IPEX docs).
    import intel_extension_for_pytorch  # noqa: F401

    if hasattr(torch, "xpu") and torch.xpu.is_available():
        empty_cache = torch.xpu.empty_cache
        synchronize = torch.xpu.synchronize
        get_free_memory = get_xpu_free_memory
    else:
        # IPEX is installed but no XPU device is usable: keep SYSTEM as
        # "ipex" but fall back to the CPU no-op/memory helpers.
        empty_cache = noop
        synchronize = noop
        get_free_memory = get_cpu_free_memory
else:
    SYSTEM = "cpu"

    empty_cache = noop
    synchronize = noop
    get_free_memory = get_cpu_free_memory

logger.info(f"Detected system {SYSTEM}")
|