- Implemented Tautulli information retrieval in `tautulli_informations.py` to fetch movie, anime, TV show, music amounts, and more. - Created a weather forecast tool in `weather_forecast.py` that retrieves and formats a 7-day weather forecast in German. - Developed a YouTube transcript provider in `youtube_summarizer.py` to fetch video transcripts and titles using Langchain Community's YoutubeLoader.
157 lines
5.2 KiB
Python
157 lines
5.2 KiB
Python
"""
|
|
title: Nvidia GPU Information
|
|
description: Gets multiple information about nvidia GPU
|
|
author: Pakobbix
|
|
author_url: zephyre.one
|
|
github:
|
|
funding_url:
|
|
version: 0.0.3
|
|
license: MIT
|
|
"""
|
|
|
|
import subprocess
|
|
from pydantic import BaseModel, Field
|
|
from typing import Callable, Any
|
|
import asyncio
|
|
|
|
|
|
class EventEmitter:
|
|
def __init__(self, event_emitter: Callable[[dict], Any] = None):
|
|
self.event_emitter = event_emitter
|
|
|
|
async def progress_update(self, description):
|
|
await self.emit(description)
|
|
|
|
async def error_update(self, description):
|
|
await self.emit(description, "error", True)
|
|
|
|
async def success_update(self, description):
|
|
await self.emit(description, "success", True)
|
|
|
|
async def emit(self, description="Unknown State", status="in_progress", done=False):
|
|
if self.event_emitter:
|
|
await self.event_emitter(
|
|
{
|
|
"type": "status",
|
|
"data": {
|
|
"status": status,
|
|
"description": description,
|
|
"done": done,
|
|
},
|
|
}
|
|
)
|
|
|
|
|
|
class Tools:
|
|
async def get_gpu_information(
|
|
self, __event_emitter__: Callable[[dict], Any] = None
|
|
):
|
|
event_emitter = EventEmitter(__event_emitter__)
|
|
await event_emitter.progress_update("Getting general GPU information")
|
|
try:
|
|
await event_emitter.progress_update("Getting GPU temperature")
|
|
Temperature = (
|
|
subprocess.check_output(
|
|
"nvidia-smi --query-gpu=temperature.gpu --format=csv,noheader",
|
|
shell=True,
|
|
)
|
|
.decode("utf-8")
|
|
.strip()
|
|
)
|
|
except Exception as e:
|
|
await event_emitter.error_update("Error getting GPU temperature" + str(e))
|
|
Temperature = "Unknown"
|
|
try:
|
|
await event_emitter.progress_update("Getting GPU VRAM Utilization")
|
|
max_vram_availabe = (
|
|
subprocess.check_output(
|
|
"nvidia-smi --query-gpu=memory.total --format=csv,noheader",
|
|
shell=True,
|
|
)
|
|
.decode("utf-8")
|
|
.replace(" MiB", "")
|
|
.strip("\n")
|
|
)
|
|
current_vram_used = (
|
|
subprocess.check_output(
|
|
"nvidia-smi --query-gpu=memory.used --format=csv,noheader",
|
|
shell=True,
|
|
)
|
|
.decode("utf-8")
|
|
.replace(" MiB", "")
|
|
.strip("\n")
|
|
)
|
|
VRAM_Utilization = str(
|
|
round(int(current_vram_used) / int(max_vram_availabe) * 100, 2)
|
|
)
|
|
await event_emitter.progress_update("Got GPU VRAM Utilization")
|
|
except Exception as e:
|
|
await event_emitter.error_update(
|
|
"Error getting GPU VRAM Utilization" + str(e)
|
|
)
|
|
VRAM_Utilization = "Unknown"
|
|
try:
|
|
await event_emitter.progress_update("Getting GPU Power Usage")
|
|
Power_Usage = (
|
|
subprocess.check_output(
|
|
"nvidia-smi --query-gpu=power.draw --format=csv,noheader",
|
|
shell=True,
|
|
)
|
|
.decode("utf-8")
|
|
.strip()
|
|
)
|
|
await event_emitter.progress_update("Got GPU Power Usage")
|
|
except Exception as e:
|
|
await event_emitter.error_update("Error getting GPU Power Usage" + str(e))
|
|
Power_Usage = "Unknown"
|
|
try:
|
|
await event_emitter.progress_update("Getting GPU Model")
|
|
GPU_Model = (
|
|
subprocess.check_output(
|
|
"nvidia-smi --query-gpu=name --format=csv,noheader",
|
|
shell=True,
|
|
)
|
|
.decode("utf-8")
|
|
.strip()
|
|
)
|
|
await event_emitter.progress_update("Got GPU Model")
|
|
except Exception as e:
|
|
await event_emitter.error_update("Error getting GPU Model" + str(e))
|
|
GPU_Model = "Unknown"
|
|
try:
|
|
Max_Power = (
|
|
subprocess.check_output(
|
|
"nvidia-smi --query-gpu=power.limit --format=csv,noheader",
|
|
shell=True,
|
|
)
|
|
.decode("utf-8")
|
|
.strip()
|
|
)
|
|
await event_emitter.progress_update("Got Max Power")
|
|
except Exception as e:
|
|
await event_emitter.error_update("Error getting Max Power" + str(e))
|
|
Max_Power = "Unknown"
|
|
|
|
await event_emitter.success_update("Got general GPU information")
|
|
return (
|
|
"Temperature: "
|
|
+ Temperature
|
|
+ "°C\nVRAM Utilization: "
|
|
+ VRAM_Utilization
|
|
+ "%\nVRAM TOTAL:"
|
|
+ str(int(max_vram_availabe))
|
|
+ " MiB\nVRAM USED:"
|
|
+ str(int(current_vram_used))
|
|
+ " MiB\nPower Usage: "
|
|
+ Power_Usage
|
|
+ "W\nModel: "
|
|
+ GPU_Model
|
|
+ "\nMax allowed Power draw: "
|
|
+ Max_Power
|
|
)
|
|
|
|
|
|
if __name__ == "__main__":
|
|
tools = Tools()
|
|
print(asyncio.run(tools.get_gpu_information()))
|