import os
from dataclasses import dataclass

import tensorflow as tf


@dataclass
class GPULimiter:
    """
    Limits the GPU resources visible to and usable by TensorFlow. Currently, the limiter creates one
    logical device per physical device; this may change in the future.

    Attributes:
        _gpu_ids: A string listing the physical GPUs visible to the process. Identifiers should
            be separated by commas with no spaces, e.g. "0,1".
        _max_gpu_memory_allocation: An integer limit, in gigabytes, on the memory allocated per logical device.

    """
    _gpu_ids: str
    _max_gpu_memory_allocation: int

    def __call__(self):
        # Restrict the process to the requested physical GPUs.
        os.environ["CUDA_VISIBLE_DEVICES"] = self._gpu_ids
        gpus = tf.config.list_physical_devices('GPU')
        if gpus:
            try:
                # Create one memory-capped logical device per visible physical GPU.
                for gpu in gpus:
                    tf.config.set_logical_device_configuration(
                        gpu,
                        [tf.config.LogicalDeviceConfiguration(memory_limit=1024 * self._max_gpu_memory_allocation)])
                logical_gpus = tf.config.list_logical_devices('GPU')
                print(len(gpus), "Physical GPUs,", len(logical_gpus), "Logical GPUs")
            except RuntimeError as e:
                # Logical devices must be configured before the GPUs are initialized by the runtime.
                print(e)
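

# Usage sketch (an assumption, not part of the original module): restrict TensorFlow to
# GPU "0" with a 4 GB logical device. The limiter must be invoked before any other
# TensorFlow GPU work, since logical devices cannot be reconfigured once initialized.
if __name__ == "__main__":
    limiter = GPULimiter(_gpu_ids="0", _max_gpu_memory_allocation=4)
    limiter()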