codex_truenas_helper/llamaCpp.Wrapper.app/run.py

import signal
import subprocess
import sys

from app.config import load_config


def main() -> None:
    cfg = load_config()
    python = sys.executable

    # Launch the API and UI as separate uvicorn processes via their app factories.
    api_cmd = [
        python, "-m", "uvicorn", "app.api_app:create_api_app",
        "--factory", "--host", "0.0.0.0", "--port", str(cfg.api_port),
    ]
    ui_cmd = [
        python, "-m", "uvicorn", "app.ui_app:create_ui_app",
        "--factory", "--host", "0.0.0.0", "--port", str(cfg.ui_port),
    ]

    procs = [subprocess.Popen(api_cmd)]
    # Only start a separate UI server when it is configured on its own port.
    if cfg.ui_port != cfg.api_port:
        procs.append(subprocess.Popen(ui_cmd))

    def shutdown(_sig, _frame):
        # Ask both children to exit, give each up to 10 seconds, then force-kill.
        for proc in procs:
            proc.terminate()
        for proc in procs:
            try:
                proc.wait(timeout=10)
            except subprocess.TimeoutExpired:
                proc.kill()
        sys.exit(0)

    signal.signal(signal.SIGTERM, shutdown)
    signal.signal(signal.SIGINT, shutdown)

    # Block until both child processes exit.
    for proc in procs:
        proc.wait()


if __name__ == "__main__":
    main()
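
For context, run.py only assumes that load_config() returns an object exposing api_port and ui_port. The actual app/config.py is not shown here; the following is a minimal hypothetical sketch of a config object that would satisfy this script, with environment-variable names and default ports chosen purely for illustration.

# Hypothetical sketch of app/config.py -- the real module is not shown above.
# Field names match what run.py accesses (cfg.api_port, cfg.ui_port); the
# environment variable names and defaults are illustrative assumptions.
import os
from dataclasses import dataclass


@dataclass(frozen=True)
class AppConfig:
    api_port: int = 8000
    ui_port: int = 8080


def load_config() -> AppConfig:
    # Read ports from the environment, falling back to the defaults above.
    return AppConfig(
        api_port=int(os.environ.get("API_PORT", 8000)),
        ui_port=int(os.environ.get("UI_PORT", 8080)),
    )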