gpu_mem: GPU memory module (nvidia-smi only)
This commit is contained in:
parent 9a96b92f68
commit 6f492ff406

i3pystatus/gpu_mem.py (new file, 68 lines)
@@ -0,0 +1,68 @@
from i3pystatus import IntervalModule
from .utils import gpu


class GPUMemory(IntervalModule):
    """
    Shows GPU memory load.

    Currently Nvidia only; nvidia-smi is required.

    .. rubric:: Available formatters

    * {avail_mem}
    * {percent_used_mem}
    * {used_mem}
    * {total_mem}
    """

    settings = (
        ("format", "format string used for output"),
        ("divisor", "divide all megabyte values by this value, default is 1 (megabytes)"),
        ("warn_percentage", "minimal percentage for warn state"),
        ("alert_percentage", "minimal percentage for alert state"),
        ("color", "standard color"),
        ("warn_color", "defines the color used when the warn percentage is exceeded"),
        ("alert_color", "defines the color used when the alert percentage is exceeded"),
        ("round_size", "defines the number of digits used for rounding"),
    )

    format = "{avail_mem} MiB"
    divisor = 1
    color = "#00FF00"
    warn_color = "#FFFF00"
    alert_color = "#FF0000"
    warn_percentage = 50
    alert_percentage = 80
    round_size = 1

    def run(self):
        info = gpu.query_nvidia_smi()

        # Memory fields may be None when nvidia-smi reports them as unsupported.
        if info.used_mem is not None and info.total_mem is not None:
            mem_percent = 100 * info.used_mem / info.total_mem
        else:
            mem_percent = None

        # Pick the color from the usage percentage; fall back to the standard
        # color when the percentage is unknown.
        if mem_percent is not None and mem_percent >= self.alert_percentage:
            color = self.alert_color
        elif mem_percent is not None and mem_percent >= self.warn_percentage:
            color = self.warn_color
        else:
            color = self.color

        cdict = {
            "used_mem": info.used_mem / self.divisor if info.used_mem is not None else None,
            "avail_mem": info.avail_mem / self.divisor if info.avail_mem is not None else None,
            "total_mem": info.total_mem / self.divisor if info.total_mem is not None else None,
            "percent_used_mem": mem_percent,
        }
        # Round all known values to the configured number of digits.
        for key, value in cdict.items():
            if value is not None:
                cdict[key] = round(value, self.round_size)

        self.output = {
            "full_text": self.format.format(**cdict),
            "color": color
        }
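
For context, a minimal sketch of how the new module could be registered from an i3pystatus configuration. The module name "gpu_mem" follows the file name added above; the format string and thresholds are illustrative, not part of this commit:

from i3pystatus import Status

status = Status()

# Illustrative settings only; any of the documented formatters may be used.
status.register("gpu_mem",
                format="GPU {used_mem}/{total_mem} MiB ({percent_used_mem} %)",
                warn_percentage=70,
                alert_percentage=90)

status.run()
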
i3pystatus/utils/__init__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
#
# Copyright (c) 2015, Nikolay Polyarnyi
# All rights reserved.
#

i3pystatus/utils/gpu.py (new file, 43 lines)
@@ -0,0 +1,43 @@
import subprocess
from collections import namedtuple

GPUUsageInfo = namedtuple('GPUUsageInfo', ['total_mem', 'avail_mem', 'used_mem',
                                           'temp', 'percent_fan',
                                           'usage_gpu', 'usage_mem'])


def query_nvidia_smi() -> GPUUsageInfo:
    """
    :return:
        all memory fields are in megabytes,
        temperature is in degrees Celsius,
        fan speed is an integer percent from 0 to 100 inclusive,
        usage_gpu and usage_mem are integer percents from 0 to 100 inclusive
        (usage_mem != used_mem; usage_mem reflects read/write access load),
        read more in 'nvidia-smi --help-query-gpu'.

        Any field can be None if that information is not supported by
        nvidia-smi for the current GPU.

    Raises an exception with a readable message if the call fails
    (nvidia-smi is not installed or the query format has changed).
    """
    params = ["memory.total", "memory.free", "memory.used",
              "temperature.gpu", "fan.speed",
              "utilization.gpu", "utilization.memory"]
    try:
        output = subprocess.check_output(["nvidia-smi",
                                          "--query-gpu={}".format(','.join(params)),
                                          "--format=csv,noheader,nounits"])
    except FileNotFoundError:
        raise Exception("No nvidia-smi")
    except subprocess.CalledProcessError:
        raise Exception("nvidia-smi call failed")

    output = output.decode('utf-8').strip()
    values = output.split(", ")

    # A value containing 'not' means the field is not supported for this GPU
    # (nvidia-smi currently reports '[Not Supported]' for such fields).
    values = [None if "not" in value.lower() else int(value) for value in values]

    return GPUUsageInfo(*values)
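
A small usage sketch of the helper for reference; the numbers shown are hypothetical, and any field that nvidia-smi reports as '[Not Supported]' comes back as None:

from i3pystatus.utils import gpu

# Query the GPU via nvidia-smi; actual values depend on the installed hardware.
info = gpu.query_nvidia_smi()
print(info)
# GPUUsageInfo(total_mem=4096, avail_mem=2963, used_mem=1133,
#              temp=54, percent_fan=32, usage_gpu=17, usage_mem=6)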