feat: execution vram size
This commit is contained in:
parent
ec12f679bf
commit
0631396986
BIN
facefusion/__pycache__/__init__.cpython-312.pyc
Normal file
Binary file not shown.
BIN
facefusion/__pycache__/common_helper.cpython-312.pyc
Normal file
Binary file not shown.
BIN
facefusion/__pycache__/content_analyser.cpython-312.pyc
Normal file
Binary file not shown.
BIN
facefusion/__pycache__/core.cpython-312.pyc
Normal file
Binary file not shown.
BIN
facefusion/__pycache__/installer.cpython-312.pyc
Normal file
Binary file not shown.
BIN
facefusion/__pycache__/metadata.cpython-312.pyc
Normal file
Binary file not shown.
BIN
facefusion/__pycache__/wording.cpython-312.pyc
Normal file
Binary file not shown.
140
facefusion/uis/components/execution_vram_size.py
Normal file
@@ -0,0 +1,140 @@
from typing import List, Optional, Tuple

from facefusion import process_manager, state_manager, wording
import gradio as gr
import psutil
import subprocess
import platform
import pandas as pd
import matplotlib.pyplot as plt

TOTAL_RAM_SLIDER: Optional[gr.Slider] = None
CHARTIFACE: Optional[gr.Blocks] = None
CHART_CLEAR_BUTTON: Optional[gr.Button] = None
CHART_REFRESH_BUTTON: Optional[gr.Button] = None


def get_ram_info() -> Tuple[float, float]:
	ram = psutil.virtual_memory()
	total_ram = round(ram.total / (1024 ** 3), 2) # Convert bytes to GB
	available_ram = round(ram.available / (1024 ** 3), 2) # Convert bytes to GB
	return total_ram, available_ram


def get_vram_info() -> List[Tuple[str, float, float]]:
	vram_info = []

	try:
		if platform.system() == "Windows":
			# Get GPU details (wmic prints columns alphabetically, so AdapterRAM comes before Name)
			command = "wmic path win32_VideoController get name, adapterram"
			output = subprocess.check_output(command, shell=True).decode().strip().split("\n")[1:]
			for line in output:
				if line.strip():
					parts = line.split()
					total_vram = int(parts[0]) / (1024 ** 3) # Convert bytes to GB (AdapterRAM is a 32-bit value, so it caps at 4 GB)
					gpu_name = " ".join(parts[1:])
					# Get current VRAM usage using nvidia-smi
					usage_command = "nvidia-smi --query-gpu=memory.used --format=csv,noheader,nounits"
					current_usage = subprocess.check_output(usage_command, shell=True).decode().strip().split("\n")[0]
					used_vram = int(current_usage) # Used VRAM in MB
					vram_info.append((gpu_name, total_vram, used_vram / 1024)) # Convert MB to GB

		elif platform.system() == "Linux":
			# Get GPU names and total VRAM using nvidia-smi
			command = "nvidia-smi --query-gpu=name,memory.total --format=csv,noheader,nounits"
			output = subprocess.check_output(command, shell=True).decode().strip().split("\n")
			for line in output:
				if line.strip():
					parts = line.split(", ")
					gpu_name = parts[0]
					total_vram = int(parts[1]) # Total VRAM in MB
					# Get current VRAM usage
					usage_command = "nvidia-smi --query-gpu=memory.used --format=csv,noheader,nounits"
					current_usage = subprocess.check_output(usage_command, shell=True).decode().strip().split("\n")[0]
					used_vram = int(current_usage) # Used VRAM in MB
					vram_info.append((gpu_name, total_vram / 1024, used_vram / 1024)) # Convert MB to GB

		else:
			print("Unsupported OS")
			return []

		return vram_info

	except Exception as e:
		print(f"An error occurred: {e}")
		return []


def plot_vram_chart() -> str:
	vram_info = get_vram_info()
	total_ram, available_ram = get_ram_info()
	vram_info.append(("RAM", total_ram, round(total_ram - available_ram, 2))) # Third value is used RAM, matching the (name, total, used) GPU tuples
	print(vram_info)

	gpu_names = [gpu[0] for gpu in vram_info]
	used_vram = [gpu[2] for gpu in vram_info]
	total_vram = [gpu[1] for gpu in vram_info]

	# Create a stacked bar chart using matplotlib
	plt.figure(figsize=(10, 5))
	plt.bar(gpu_names, used_vram, label='Used VRAM', color='blue')
	plt.bar(gpu_names, [total - used for total, used in zip(total_vram, used_vram)],
			bottom=used_vram, label='Free VRAM', color='orange')

	plt.ylabel('VRAM (GB)')
	plt.title('VRAM Usage')
	plt.legend()
	plt.xticks(rotation=45)
	plt.tight_layout()

	# Save the plot to a file
	plt.savefig("vram_usage.png")
	plt.close()

	return "vram_usage.png"


def render() -> None:
	global TOTAL_RAM_SLIDER
	global CHARTIFACE
	global CHART_REFRESH_BUTTON
	global CHART_CLEAR_BUTTON

	total_ram, available_ram = get_ram_info()
	TOTAL_RAM_SLIDER = gr.Slider(
		label="USED RAM SIZE",
		info=f"TOTAL RAM SIZE {total_ram} GB",
		value=round(total_ram - available_ram, 2),
		step=0.1,
		minimum=0,
		maximum=total_ram
	)

	vram_info = get_vram_info()
	if not vram_info:
		print("No VRAM information available.")
	vram_info.append(("RAM", total_ram, round(total_ram - available_ram, 2)))

	# Create the Gradio Blocks interface
	with gr.Blocks() as CHARTIFACE:
		# Custom CSS to hide buttons
		gr.Markdown(
			"""
			<style>
			.gr-button { display: none; }
			</style>
			"""
		)

		# Button to refresh the plot and an image preloaded with the initial chart
		btn = gr.Button("RAM, VRAM USAGE", variant='primary', size='sm')
		img = gr.Image(type="filepath", value=plot_vram_chart())

		# Set the button action
		btn.click(fn=plot_vram_chart, outputs=img)


def listen() -> None:
	pass
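For a quick sanity check of the new helpers outside the Gradio UI, something along these lines should work. This is a minimal sketch and not part of the commit; it assumes facefusion is importable from the working environment and that nvidia-smi is on the PATH, and the example values in the comments are illustrative only.

# smoke_test_vram.py -- hypothetical helper, not included in this commit
from facefusion.uis.components.execution_vram_size import get_ram_info, get_vram_info, plot_vram_chart

print(get_ram_info())     # e.g. (31.9, 24.5) -> (total, available) system RAM in GB
print(get_vram_info())    # e.g. [('NVIDIA GeForce RTX 3090', 24.0, 1.2)] -> (name, total GB, used GB)
print(plot_vram_chart())  # writes vram_usage.png to the working directory and returns its path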
@@ -1,7 +1,7 @@
 import gradio
 
 from facefusion import state_manager
-from facefusion.uis.components import about, age_modifier_options, common_options, execution, execution_queue_count, execution_thread_count, expression_restorer_options, face_debugger_options, face_detector, face_editor_options, face_enhancer_options, face_landmarker, face_masker, face_selector, face_swapper_options, frame_colorizer_options, frame_enhancer_options, instant_runner, job_manager, job_runner, lip_syncer_options, memory, output, output_options, preview, processors, source, target, temp_frame, terminal, trim_frame, ui_workflow
+from facefusion.uis.components import about, age_modifier_options, common_options, execution, execution_queue_count, execution_thread_count, execution_vram_size, expression_restorer_options, face_debugger_options, face_detector, face_editor_options, face_enhancer_options, face_landmarker, face_masker, face_selector, face_swapper_options, frame_colorizer_options, frame_enhancer_options, instant_runner, job_manager, job_runner, lip_syncer_options, memory, output, output_options, preview, processors, source, target, temp_frame, terminal, trim_frame, ui_workflow
 
 
 def pre_check() -> bool:
@@ -38,6 +38,7 @@ def render() -> gradio.Blocks:
 					execution.render()
 					execution_thread_count.render()
 					execution_queue_count.render()
+					execution_vram_size.render()
 				with gradio.Blocks():
 					memory.render()
 				with gradio.Blocks():
@@ -8,3 +8,6 @@ opencv-python==4.10.0.84
 psutil==6.0.0
 tqdm==4.66.5
 scipy==1.14.1
+GPUtil==1.4.0
+pandas==2.2.3
+matplotlib==3.9.2