diff --git a/i3pystatus/gpu_mem.py b/i3pystatus/gpu_mem.py
new file mode 100644
index 0000000..c8118c1
--- /dev/null
+++ b/i3pystatus/gpu_mem.py
@@ -0,0 +1,71 @@
+from i3pystatus import IntervalModule
+from .utils import gpu
+
+
+class GPUMemory(IntervalModule):
+    """
+    Shows GPU memory usage
+
+    Currently Nvidia only; requires nvidia-smi
+
+    .. rubric:: Available formatters
+
+    * {avail_mem}
+    * {percent_used_mem}
+    * {used_mem}
+    * {total_mem}
+    """
+
+    settings = (
+        ("format", "format string used for output."),
+        ("divisor", "divide all megabyte values by this value, default is 1 (megabytes)"),
+        ("warn_percentage", "minimum percentage for the warn state"),
+        ("alert_percentage", "minimum percentage for the alert state"),
+        ("color", "standard color"),
+        ("warn_color", "defines the color used when the warn percentage is exceeded"),
+        ("alert_color", "defines the color used when the alert percentage is exceeded"),
+        ("round_size", "defines number of digits in round"),
+    )
+
+    format = "{avail_mem} MiB"
+    divisor = 1
+    color = "#00FF00"
+    warn_color = "#FFFF00"
+    alert_color = "#FF0000"
+    warn_percentage = 50
+    alert_percentage = 80
+    round_size = 1
+
+    def run(self):
+        info = gpu.query_nvidia_smi()
+
+        if info.used_mem is not None and info.total_mem is not None:
+            mem_percent = 100 * info.used_mem / info.total_mem
+        else:
+            mem_percent = None
+
+        # mem_percent is None when nvidia-smi does not report memory
+        # values for this GPU; fall back to the standard color then.
+        if mem_percent is not None and mem_percent >= self.alert_percentage:
+            color = self.alert_color
+        elif mem_percent is not None and mem_percent >= self.warn_percentage:
+            color = self.warn_color
+        else:
+            color = self.color
+
+        cdict = {
+            "used_mem": info.used_mem,
+            "avail_mem": info.avail_mem,
+            "total_mem": info.total_mem,
+            "percent_used_mem": mem_percent,
+        }
+        # Scale the megabyte fields (but not the percentage) and round
+        # every value that is actually available.
+        for key, value in cdict.items():
+            if value is not None:
+                if key != "percent_used_mem":
+                    value /= self.divisor
+                cdict[key] = round(value, self.round_size)
+
+        self.output = {
+            "full_text": self.format.format(**cdict),
+            "color": color
+        }
diff --git a/i3pystatus/gpu_temp.py b/i3pystatus/gpu_temp.py
new file mode 100644
index 0000000..2f0aacf
--- /dev/null
+++ b/i3pystatus/gpu_temp.py
@@ -0,0 +1,35 @@
+from i3pystatus import IntervalModule
+from .utils import gpu
+
+
+class GPUTemperature(IntervalModule):
+    """
+    Shows GPU temperature
+
+    Currently Nvidia only; requires nvidia-smi
+
+    .. rubric:: Available formatters
+
+    * {temp}
+    """
+
+    settings = (
+        ("format", "format string used for output. {temp} is the temperature in integer degrees Celsius"),
+        "color",
+        "alert_temp",
+        "alert_color",
+    )
+    format = "{temp} °C"
+    color = "#FFFFFF"
+    alert_temp = 90
+    alert_color = "#FF0000"
+
+    def run(self):
+        temp = gpu.query_nvidia_smi().temp
+        # Show the alert color when the temperature is unavailable or too high
+        temp_alert = temp is None or temp >= self.alert_temp
+
+        self.output = {
+            "full_text": self.format.format(temp=temp),
+            "color": self.color if not temp_alert else self.alert_color,
+        }
diff --git a/i3pystatus/utils/__init__.py b/i3pystatus/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/i3pystatus/utils/gpu.py b/i3pystatus/utils/gpu.py
new file mode 100644
index 0000000..3311fb4
--- /dev/null
+++ b/i3pystatus/utils/gpu.py
@@ -0,0 +1,45 @@
+import subprocess
+from collections import namedtuple
+
+GPUUsageInfo = namedtuple('GPUUsageInfo', ['total_mem', 'avail_mem', 'used_mem',
+                                           'temp', 'percent_fan',
+                                           'usage_gpu', 'usage_mem'])
+
+
+def query_nvidia_smi() -> GPUUsageInfo:
+    """
+    :return:
+        all memory fields are in megabytes,
+        temperature is in degrees Celsius,
+        fan speed is an integer percent from 0 to 100 inclusive,
+        usage_gpu and usage_mem are integer percents from 0 to 100 inclusive
+        (usage_mem != used_mem; usage_mem is about read/write access load),
+        read more in 'nvidia-smi --help-query-gpu'.
+
+        Any field can be None if the information is not supported by
+        nvidia-smi for the current GPU.
+
+        Raises an exception with a readable message if the call fails
+        (nvidia-smi is missing or exited with an error).
+    """
+    params = ["memory.total", "memory.free", "memory.used",
+              "temperature.gpu", "fan.speed",
+              "utilization.gpu", "utilization.memory"]
+    try:
+        output = subprocess.check_output(["nvidia-smi",
+                                          "--query-gpu={}".format(','.join(params)),
+                                          "--format=csv,noheader,nounits"])
+    except FileNotFoundError:
+        raise Exception("nvidia-smi not found")
+    except subprocess.CalledProcessError:
+        raise Exception("nvidia-smi call failed")
+
+    # nvidia-smi prints one CSV line per GPU; only the first GPU is reported
+    output = output.decode('utf-8').strip().splitlines()[0]
+    values = output.split(", ")
+
+    # Fields the GPU does not provide are printed as '[Not Supported]';
+    # map those to None and parse everything else as an integer
+    values = [None if ("not" in value.lower()) else int(value) for value in values]
+
+    return GPUUsageInfo(*values)
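
Example configuration (an untested sketch: it assumes the standard i3pystatus
Status/register pattern, and the setting values shown are purely illustrative)::

    from i3pystatus import Status

    status = Status()

    # Modules are registered by module name; keyword arguments
    # override the defaults defined in each module above.
    status.register("gpu_temp", format="{temp} °C", alert_temp=85)
    status.register("gpu_mem", format="{used_mem}/{total_mem} MiB",
                    warn_percentage=60, alert_percentage=85)

    status.run()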