Remove code comments, add backend logic for the perf monitor, and rename tkinter_installer to dependency_installer for reusability

This commit is contained in:
ChrisColeTech 2024-08-24 16:38:36 -04:00
parent 672baf084b
commit 8921ac8d3d
7 changed files with 260 additions and 43 deletions

1
.gitignore vendored
View File

@@ -54,3 +54,4 @@ user_path_config-deprecated.txt
/auth.json
.DS_Store
/.venv
web/

View File

@@ -0,0 +1,23 @@
import os
import importlib
from flask import Blueprint
from flask_restx import Namespace
def register_blueprints(app, api):
"""Register all Blueprints to the Flask app automatically."""
controllers_dir = os.path.dirname(__file__)
for filename in os.listdir(controllers_dir):
if filename.endswith('_controller.py') and filename != '__init__.py':
module_name = filename[:-3] # Remove ".py"
module = importlib.import_module(
f'.{module_name}', package=__package__)
for attribute_name in dir(module):
attribute = getattr(module, attribute_name)
if isinstance(attribute, Namespace):
api.add_namespace(attribute)
if isinstance(attribute, Blueprint):
app.register_blueprint(
attribute)
print(f"Registered blueprint: {attribute_name}")

View File

@@ -0,0 +1,95 @@
from flask_restx import Api, Resource, fields, Namespace
from flask import Flask, jsonify, render_template, send_from_directory, Blueprint, request, make_response
import psutil
import GPUtil
import time
# Create a Blueprint for the gpu_usage controller
gpu_usage_bp = Blueprint('gpu_usage', __name__)
gpu_usage_api = Api(gpu_usage_bp, version='1.0', title='gpu_usage API',
description='API for managing gpu_usage')
# Define a namespace for gpu_usage
gpu_usage_ns = Namespace('gpu_usage', description='gpu usage operations')
# Define the model for a gpu
gpu_model = gpu_usage_ns.model('gpu_usage', {
'id': fields.Integer(required=True, description='The unique identifier of the gpu'),
'description': fields.String(required=True, description='Description of the gpu'),
'status': fields.String(description='Status of the gpu')
})
# Cache for system usage data
cache = {
'timestamp': 0,
'data': {
'cpu': 0,
'memory': 0,
'gpu': 0,
'vram': 0,
'hdd': 0
}
}
CACHE_DURATION = 1 # Cache duration in seconds
@gpu_usage_ns.route('/')
class GPUInfo(Resource):
def get(self):
if request.method == "OPTIONS": # CORS preflight
return _build_cors_preflight_response()
current_time = time.time()
# Check if the cache is still valid
if current_time - cache['timestamp'] < CACHE_DURATION:
return _corsify_actual_response(jsonify(cache['data']))
try:
# Get CPU utilization
cpu_percent = psutil.cpu_percent(interval=0)
# Get Memory utilization
mem = psutil.virtual_memory()
mem_percent = mem.percent
# Get GPU utilization (considering only the first GPU)
gpus = GPUtil.getGPUs()
gpu_percent = gpus[0].load * 100 if gpus else 0
# Get VRAM usage (considering only the first GPU)
vram_usage = 0
if gpus:
used = gpus[0].memoryUsed
total = gpus[0].memoryTotal
vram_usage = (used / total) * 100
# Get HDD usage (assuming usage of the primary disk)
hdd = psutil.disk_usage('/')
hdd_percent = hdd.percent
# Update the cache
cache['data'] = {
'cpu': cpu_percent,
'memory': mem_percent,
'gpu': gpu_percent,
'vram': vram_usage,  # Percentage of VRAM in use on the first GPU
'hdd': hdd_percent
}
cache['timestamp'] = current_time
return _corsify_actual_response(jsonify(cache['data']))
except Exception as e:
return _corsify_actual_response(make_response(jsonify({'error': str(e)}), 500))
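# CORS helpers: the perf monitor page is embedded in the Gradio UI, so responses need permissive CORS headers to be readable cross-origin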
def _build_cors_preflight_response():
response = make_response()
response.headers.add("Access-Control-Allow-Origin", "*")
response.headers.add("Access-Control-Allow-Headers", "*")
response.headers.add("Access-Control-Allow-Methods", "*")
return response
def _corsify_actual_response(response):
response.headers.add("Access-Control-Allow-Origin", "*")
return response
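For reference, a minimal sketch of polling this endpoint from a client, assuming the API is reachable at 127.0.0.1:5000 (the port configured in api/http_server.py), that flask_restx mounts the namespace at /gpu_usage, and that the requests package is available:

import requests

# Fetch the cached usage snapshot; all values are percentages
stats = requests.get("http://127.0.0.1:5000/gpu_usage/", timeout=5).json()
print(f"CPU {stats['cpu']}%  GPU {stats['gpu']}%  VRAM {stats['vram']:.1f}%")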

69
api/http_server.py Normal file
View File

@@ -0,0 +1,69 @@
from flask import Flask, send_from_directory, jsonify
from flask_restx import Api
import threading
import logging
from flask_cors import CORS
# Adjusted import for fooocus_version and shared
from api.controllers import register_blueprints
import fooocus_version
import shared
import args_manager
# Cache for system usage data
cache = {
'timestamp': 0,
'data': {
'cpu': 0,
'memory': 0,
'gpu': 0,
'vram': 0,
'hdd': 0
}
}
CACHE_DURATION = 1 # Cache duration in seconds
# Suppress the Flask development server warning
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR) # Set level to ERROR to suppress warnings
title = f"Fooocus version: {fooocus_version.version}"
app = Flask(title, static_folder='web', template_folder='web')
app.config['CORS_HEADERS'] = 'Content-Type'
api = Api(app, version='1.0', title=title, description='Fooocus REST API')
# Register blueprints (API endpoints)
register_blueprints(app, api)
# Enable CORS for all origins
CORS(app, resources={r"/*": {"origins": "*"}})
gradio_app = shared.gradio_root
# Serve static files from the 'web' folder
@app.route('/<path:filename>')
def serve_static(filename):
return send_from_directory('web', filename)
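# Tell the web frontend where to reach the REST API; the listen address comes from args_manager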
@app.route('/config')
def config():
return jsonify({
'base_url': f"http://{str(args_manager.args.listen)}:5000"
})
def run_app():
app.run(port=5000)
# Start Flask app in a separate thread
thread = threading.Thread(target=run_app)
thread.start()
print(f" * REST API Server Running at http://{str(args_manager.args.listen)}:5000")
print(f" * Open http://{str(args_manager.args.listen)}:5000 in a browser to view REST endpoints")

View File

@@ -4,7 +4,8 @@ import shutil
import zipfile
import importlib
import urllib.request
from modules.launch_util import run_pip
import torch
def detect_python_version():
version = sys.version_info
@@ -14,12 +15,40 @@ def detect_python_version():
def check_tkinter_installed():
version_str, is_embedded = detect_python_version()
print(f"Detected Python version: {version_str}")
print(f"Is Embedded Python: {is_embedded}")
try:
import tkinter
print("Success - Tkinter is installed.")
tkinter_installed = True
except ImportError:
tkinter_installed = False
if not tkinter_installed or (is_embedded and not tkinter_installed):
install_tkinter(version_str)
def check_GPUtil_installed():
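# GPU monitoring only applies when a CUDA device is available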
if not torch.cuda.is_available():
return False
try:
import GPUtil
return True
except ImportError:
print("Tkinter is not installed.")
import_GPUtil()
return False
def check_flask_installed():
if not torch.cuda.is_available():
return False
try:
import flask
return True
except ImportError:
import_flask()
return False
@@ -58,27 +87,41 @@ def copy_tkinter_files(version_str):
shutil.rmtree("tkinter-standalone", ignore_errors=True)
def install_tkinter():
version_str, is_embedded = detect_python_version()
print(f"Detected Python version: {version_str}")
print(f"Is Embedded Python: {is_embedded}")
tkinter_installed = check_tkinter_installed()
if tkinter_installed:
return
if not tkinter_installed or is_embedded:
download_and_unzip_tkinter()
copy_tkinter_files(version_str)
def install_tkinter(version_str):
download_and_unzip_tkinter()
copy_tkinter_files(version_str)
import_tkinter()
def import_tkinter():
try:
tkinter = importlib.import_module("tkinter")
print("Success - Tkinter is installed.")
return tkinter
except ImportError:
print("Failed to import Tkinter after installation.")
return None
return None
def import_GPUtil():
run_pip(f"install GPUtil")
try:
GPUtil = importlib.import_module("GPUtil")
return GPUtil
except ImportError:
print("Failed to import GPUtil after installation.")
return None
def import_flask():
run_pip(f"install flask flask-restx flask-cors", desc="Flask Rest API")
try:
flask = importlib.import_module("flask")
restx = importlib.import_module("flask_restx")
return restx
except ImportError:
print("Failed to import flask after installation.")
return None
check_tkinter_installed()
check_GPUtil_installed()
check_flask_installed()

View File

@@ -1137,13 +1137,13 @@ def worker():
progressbar(async_task, 0,
'Image skipped')
print(
"\n\n⚠️ Image skipped ... ⚠️\n\n")
"Image skipped ...")
def print_user_stopped(async_task):
print('User stopped')
print(
"\n\n 💥 Processing was interrupted by the user. Please try again. 💥\n\n ")
"Processing was interrupted by the user. Please try again.")
def enhance_upscale(all_steps, async_task, base_progress, callback, controlnet_canny_path, controlnet_cpds_path,
current_task_id, denoising_strength, done_steps_inpainting, done_steps_upscaling, enhance_steps,
@@ -1622,7 +1622,7 @@ def worker():
if image_enhance and len(async_task.enhance_ctrls) == 0 and async_task.enhance_uov_method == flags.disabled.casefold():
print(
f"\n\n⚠️ Warning - Enhancements will be skipped. ⚠️ \n\nNo Enhancements were selected. \n\n Please check the input values. \n\n")
f"Warning - Enhancements will be skipped.\nNo Enhancements were selected. \n Please check the input values. \n\n")
all_steps = max(all_steps, 1)
@@ -1650,7 +1650,7 @@ def worker():
preparation_steps = current_progress
total_count = async_task.image_number
async_task.current_task_id = 0
# BULK ENHANCEMENTS #
def bulk_enhance_callback(step, x0, x, total_steps, y):
if step == 0:
@@ -1731,18 +1731,13 @@ def worker():
)
if 'bulk_enhance_folder' in goals:
# Walk through the directory tree
valid_extensions = (".jpg", ".jpeg", ".png",
".bmp", ".tiff", ".webp")
files = []
# Walk through the directory tree
for root, dirs, files_in_dir in os.walk(async_task.bulk_enhance_input_path):
for file_name in files_in_dir:
# Build full path to the file
full_file_path = os.path.join(root, file_name)
# Check if the file has a valid extension
if file_name.lower().endswith(valid_extensions):
files.append(full_file_path)
total_count = len(files)

View File

@@ -6,6 +6,7 @@ import time
import shared
import modules.config
import fooocus_version
import api.http_server
import modules.html
import modules.async_worker as worker
import modules.constants as constants
@@ -199,8 +200,6 @@ with shared.gradio_root:
def stop_clicked(currentTask):
import ldm_patched.modules.model_management as model_management
currentTask.last_stop = 'stop'
print(
"\n\n⚠️ Stopping. Please wait ... ⚠️\n\n")
if (currentTask.processing):
model_management.interrupt_current_processing()
return currentTask
@@ -208,8 +207,6 @@ with shared.gradio_root:
def skip_clicked(currentTask):
import ldm_patched.modules.model_management as model_management
currentTask.last_stop = 'skip'
print(
"\n\n⚠️ Skipping. Please wait ... ⚠️\n\n")
if (currentTask.processing):
model_management.interrupt_current_processing()
return currentTask
@@ -225,8 +222,6 @@ with shared.gradio_root:
label='Enhance', value=modules.config.default_enhance_checkbox, container=False, elem_classes='min_check')
advanced_checkbox = gr.Checkbox(
label='Advanced', value=modules.config.default_advanced_checkbox, container=False, elem_classes='min_check')
# TABS
with gr.Row(visible=modules.config.default_image_prompt_checkbox) as image_input_panel:
with gr.Tabs(selected=modules.config.default_selected_image_input_tab_id):
with gr.Tab(label='Upscale or Variation', id='uov_tab') as uov_tab:
@@ -440,15 +435,12 @@ with shared.gradio_root:
metadata_input_image.upload(trigger_metadata_preview, inputs=metadata_input_image,
outputs=metadata_json, queue=False, show_progress=True)
# BULK ENHANCE #
bulk_enhance_ctrls = []
with gr.Row(visible=modules.config.default_enhance_checkbox) as enhance_input_panel:
with gr.Tabs():
with gr.Tab(label='Bulk Enhance'):
bulk_enhance_enabled = gr.Checkbox(label='Enable', value=False, elem_classes='min_check',
container=False, visible=False)
# Create a FileExplorer component
with gr.Row():
bulk_enhance_data_type = gr.Radio(
choices=["Files", "Folder"], value="Files", label="Select Files or Folder:")
@@ -456,8 +448,8 @@ with shared.gradio_root:
with gr.Row(elem_id="file_row", visible=False) as bulk_enhance_file_row:
bulk_enhance_file_explorer = gr.File(
label="Selected Files",
file_count="multiple", # or "single" for single file selection
root_dir=".", # Specify root directory if needed
file_count="multiple",
root_dir=".",
show_label=True,
elem_id="file_explorer",
name="file_explorer"
@@ -665,9 +657,6 @@ with shared.gradio_root:
switch_js = "(x) => {if(x){viewer_to_bottom(100);viewer_to_bottom(500);}else{viewer_to_top();} return x;}"
down_js = "() => {viewer_to_bottom();}"
# EVENT HANDLERS
input_image_checkbox.change(lambda x: gr.update(visible=x), inputs=input_image_checkbox,
outputs=image_input_panel, queue=False, show_progress=False, _js=switch_js)
@@ -1310,6 +1299,8 @@ with shared.gradio_root:
outputs=[prompt, style_selections], show_progress=True, queue=True) \
.then(fn=style_sorter.sort_styles, inputs=style_selections, outputs=style_selections, queue=False, show_progress=False) \
.then(lambda: None, _js='()=>{refresh_style_localization();}')
with gr.Row():
perf_monitor = gr.HTML(load_page('templates/perf-monitor/index.html'))
def dump_default_english_config():