init
This commit is contained in:
commit
56b128f5c4
|
@ -0,0 +1,4 @@
|
||||||
|
{
|
||||||
|
"image": "quay.io/jupyter/scipy-notebook:lab-4.0.12",
|
||||||
|
"postCreateCommand": "pip install 'copier>=9.2,<10' jinja2-time "
|
||||||
|
}
|
|
@ -0,0 +1,3 @@
|
||||||
|
# Ollama Launcher
|
||||||
|
|
||||||
|
A simple launcher that executes multiple shell commands to make Ollama accessible via a web browser.
|
|
@ -0,0 +1,168 @@
|
||||||
|
import subprocess
|
||||||
|
import signal
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import threading
|
||||||
|
import itertools
|
||||||
|
import os
|
||||||
|
import argparse
|
||||||
|
print("##### Init Ollama #####")

# --- Global list to store running processes ---
# Populated by the run_* helpers; terminate_processes() (defined later in
# this file) walks it to shut everything down.
running_processes = []


# --- Function to run a command and wait for a specific string in its output ---
def run_and_wait_for_string(command, target_string, printstr, debug=False):
    """Start *command* in a shell and block until *target_string* appears in its output.

    The child process keeps running after this function returns; it is
    appended to the global ``running_processes`` list so it can be shut
    down later.  While waiting, a spinner is animated on stdout.

    Args:
        command: Shell command line to execute (runs with ``shell=True`` —
            pass trusted strings only).
        target_string: Substring to look for in combined stdout/stderr.
        printstr: Human-readable label printed as a banner.
        debug: When True, echo the command being executed.

    Returns:
        True once *target_string* is seen; False if the process exits
        without ever printing it.
    """
    if debug:
        print(f"DEBUG: Executing command: {command}")

    print("##### " + printstr + " #####")

    # itertools.cycle is already an endless iterator, so no generator
    # wrapper or extra `while True` is needed around it (the original
    # wrapped it in a dead outer loop).
    spinner = itertools.cycle(['-', '/', '|', '\\'])
    stop_event = threading.Event()

    def print_spinner():
        # Animate a single rotating character in place until told to stop.
        while not stop_event.is_set():
            sys.stdout.write(next(spinner))
            sys.stdout.flush()
            sys.stdout.write('\b')
            time.sleep(0.1)

    spinner_thread = threading.Thread(target=print_spinner)
    spinner_thread.start()

    process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT, text=True)
    running_processes.append(process)  # Track it for later termination

    try:
        while True:
            output = process.stdout.readline()
            if output == '' and process.poll() is not None:
                break
            if target_string in output:
                print("done")
                return True
        return False
    finally:
        # Always stop the spinner thread, even if readline() raises —
        # the original leaked the thread on any exception.
        stop_event.set()
        spinner_thread.join()
|
||||||
|
|
||||||
|
|
||||||
|
# --- Function to run a command until it succeeds ---
|
||||||
|
def run_until_success(command, printstr, debug=False, retry_delay=1.0):
    """Run *command* repeatedly until it exits with return code 0.

    Output is discarded.  Each attempt is appended to the global
    ``running_processes`` list.

    Args:
        command: Shell command line to execute (``shell=True`` —
            pass trusted strings only).
        printstr: Human-readable label printed as a banner.
        debug: When True, echo the command being executed.
        retry_delay: Seconds to pause between failed attempts.  New,
            backward-compatible parameter — the original retried
            immediately, busy-spinning if the command kept failing.
    """
    if debug:
        print(f"DEBUG: Executing command until successful: {command}")

    print("##### " + printstr + " #####")
    while True:
        process = subprocess.Popen(command, shell=True, stdout=subprocess.DEVNULL,
                                   stderr=subprocess.STDOUT)
        running_processes.append(process)  # Track it for later termination
        process.communicate()
        if process.returncode == 0:
            print("done")
            break
        # Give the failing command a breather before the next attempt.
        time.sleep(retry_delay)
|
||||||
|
|
||||||
|
|
||||||
|
# --- Function to gracefully terminate running processes ---
|
||||||
|
# --- Function to gracefully terminate running processes ---
def terminate_processes():
    """Send SIGINT to every tracked process and wait for it to exit.

    Processes that have already exited are skipped.  A process that
    ignores SIGINT is force-killed after a grace period so this function
    cannot hang forever (the original's bare ``process.wait()`` could).
    The global ``running_processes`` list is cleared afterwards.
    """
    for process in running_processes:
        if process.poll() is None:  # Still running
            try:
                process.send_signal(signal.SIGINT)
                try:
                    process.wait(timeout=10)
                except subprocess.TimeoutExpired:
                    # Child ignored SIGINT; escalate so shutdown completes.
                    process.kill()
                    process.wait()
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt /
                # SystemExit are no longer swallowed here.
                print(f"Error terminating process: {process}")
    running_processes.clear()
|
||||||
|
|
||||||
|
|
||||||
|
# --- Parse command line arguments ---
parser = argparse.ArgumentParser(description="Start Ollama and related services.")
parser.add_argument("--debug", action="store_true", help="Enable debug mode (print executed commands)")
args = parser.parse_args()

# --- Main script ---
# Services are started strictly in order; every failure branch tears down
# whatever has already been started and exits with status 1.

# --- Subprocess 1: ollama serve ---
command1 = "ollama serve"
# Substring of ollama's startup log used as the readiness signal.
target_string1 = "inference compute"

# --- Subprocess 2: webui serve ---
command2 = "open-webui serve"
target_string2 = "Uvicorn running"

# --- Subprocess 3: ollama pull ---
# Default model pulled at startup; retried until the pull succeeds.
command3 = "ollama pull qwen2:0.5b"

# --- Get token from user ---
token = input("Please visit https://tun.iuk.hdm-stuttgart.de to obtain a token and paste it here: ")
# JUPYTERHUB_USER determines the per-user tunnel subdomain below.
jupyterhub_user = os.environ.get("JUPYTERHUB_USER")
if not jupyterhub_user:
    print("Error: Environment variable 'JUPYTERHUB_USER' not found.")
    sys.exit(1)

# --- Subprocess 4: pgrok init ---
# NOTE(review): the token is interpolated into the command line and is
# therefore visible in the process list (`ps`) — consider passing it via
# an environment variable or config file instead.
command4 = f"pgrok init --remote-addr tun.iuk.hdm-stuttgart.de:80 --forward-addr https://{jupyterhub_user}.tun.iuk.hdm-stuttgart.de --token {token}"
url = f"https://{jupyterhub_user}.tun.iuk.hdm-stuttgart.de"
# --- Subprocess 5: pgrok http ---
command5 = "pgrok http 8080"  # presumably 8080 is the open-webui port — TODO confirm
target_string5 = "You're ready to go live"

# --- Run subprocesses sequentially ---
if run_and_wait_for_string(command1, target_string1, "Starting Ollama Server", debug=args.debug):
    if run_and_wait_for_string(command2, target_string2, "Starting WebUI", debug=args.debug):
        # Blocks until the default model has been pulled successfully.
        run_until_success(command3,"Loading default model", debug=args.debug)

        # pgrok init writes local config; it must succeed before the tunnel starts.
        if subprocess.call(command4, shell=True, stdout=subprocess.DEVNULL) == 0:
            print("##### Init Tunnel #####")
            if run_and_wait_for_string(command5, target_string5,"Create Tunnel", debug=args.debug):

                # --- Ollama shell ---
                # Interactive REPL: each line typed is prefixed with
                # "ollama " and run as a shell command until the user
                # types "exit".

                print("##### All services started, Please visit " + url + " for the WebUI! #####")
                print("##### Launching Ollama shell! #####")
                while True:
                    command = input("ollama> ")
                    if command.lower() == "exit":
                        break

                    try:
                        full_command = f"ollama {command}"
                        if args.debug:
                            print(f"DEBUG: Executing command: {full_command}")
                        process = subprocess.Popen(full_command, shell=True)
                        process.communicate()

                    except FileNotFoundError:
                        print("Command not found. Please make sure 'ollama' is installed and in your PATH.")
                    except Exception as e:
                        print(f"An error occurred: {e}")

            else:
                print("Error: 'pgrok http' failed to start.")
                terminate_processes()
                sys.exit(1)

        else:
            print("Error: 'pgrok init' failed.")
            terminate_processes()
            sys.exit(1)
    else:
        print("Error: 'webui serve' failed to start.")
        terminate_processes()
        sys.exit(1)
else:
    print("Error: 'ollama serve' failed to start.")
    terminate_processes()
    sys.exit(1)

# Gracefully terminate running processes on script exit or error
# (reached only after the user leaves the interactive shell with "exit").
terminate_processes()
|
|
@ -0,0 +1,16 @@
|
||||||
|
from setuptools import setup, find_packages

# Packaging metadata for the ollama-launcher project.
setup(
    name='ollama-launcher',
    version='0.1.0',
    packages=find_packages(),
    install_requires=[
        # List any external dependencies your script needs
        # e.g., 'requests', 'beautifulsoup4'
    ],
    entry_points={
        'console_scripts': [
            # NOTE(review): console_scripts entries normally use the form
            # 'name=package.module:function'.  This value names only a
            # module with no ':callable', so script generation will likely
            # fail at install time — confirm and point it at a main()
            # function in ollama_launcher.launcher.
            'ollama_start=ollama_launcher.launcher'
        ],
    },
)
|
Loading…
Reference in New Issue