#!/bin/bash
# Nubby CLI Installer — https://caellum.tech
# Usage: curl -fsSL https://caellum.tech/nubby-install.sh | bash
set -e

# User-overridable install locations (finalized after target-user detection).
INSTALL_DIR="${INSTALL_DIR:-}"
CAELUM_DIR="${CAELUM_DIR:-}"
GATEWAY_HOST="caellum.tech"
GATEWAY_PORT="443"
MIN_PYTHON="3.8"
LOCAL_OLLAMA_URL="${CAELUM_LOCAL_OLLAMA_URL:-}"
# Nubby installer bootstrap uses 11435 only.
DEFAULT_OLLAMA_URLS="http://localhost:11435 http://127.0.0.1:11435"
SELF_UPGRADE="${NUBBY_SELF_UPGRADE:-false}"

# Auto-detect existing install — treat reinstall as upgrade
if [ -f "${INSTALL_DIR:-$HOME/.nubby}/nubby_cli.py" ] || [ -f "${INSTALL_DIR:-$HOME/.nubby}/config.json" ]; then
  SELF_UPGRADE="true"
fi

# Optional root-mode target user (fresh isolated installs).
# When run as root, install on behalf of NUBBY_INSTALL_USER (default: caelum),
# creating that account if it does not exist yet.
TARGET_USER="$(id -un)"
TARGET_HOME="$HOME"
if [ "${EUID:-$(id -u)}" -eq 0 ]; then
  TARGET_USER="${NUBBY_INSTALL_USER:-caelum}"
  if ! id -u "$TARGET_USER" >/dev/null 2>&1; then
    useradd -m -d "/home/$TARGET_USER" -s /bin/bash "$TARGET_USER"
  fi
  TARGET_HOME="$(getent passwd "$TARGET_USER" | cut -d: -f6)"
  [ -z "$TARGET_HOME" ] && TARGET_HOME="/home/$TARGET_USER"
fi
INSTALL_DIR="${INSTALL_DIR:-$TARGET_HOME/.nubby}"
CAELUM_DIR="${CAELUM_DIR:-$TARGET_HOME/.caelum}"

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
BOLD='\033[1m'
RESET='\033[0m'
info() { echo -e "${GREEN}[+]${RESET} $1"; }
warn() { echo -e "${YELLOW}[!]${RESET} $1"; }
error() { echo -e "${RED}[x]${RESET} $1"; exit 1; }
cmd() { echo -e "\033[96m$1${RESET}"; }

echo
echo -e "${BOLD} Nubby CLI Installer${RESET}"
echo -e " https://caellum.tech"
echo -e " Install user: ${TARGET_USER}"
echo
echo -e " Required install order:"
echo -e " 1. Install prerequisites"
echo -e " 2. Install Docker / Docker Compose"
echo -e " 3. Install/prepare local Ollama"
echo -e " 4. Install Nubby CLI"
echo
echo -e " Workspace guidance:"
echo -e " Terminal backend — pick one pair:"
echo -e " Option A (default): Ghostty + Zellij"
echo -e " Option B: WezTerm + tmux"
echo -e " Nubby auto-detects whichever is installed (NUBBY_TERMINAL_BACKEND=ghostty|wezterm to override)."
echo -e " 1. Install Zellij or tmux for the standard /workspace (detached operator mode)."
echo -e " 2. Install Ghostty or WezTerm for multi-window layouts (/workspace tiled, split, etc.)."
echo -e " 3. Keep using nubby utility terminal, nubby utility files, and nubby utility nubby."
echo -e " 4. The file utility uses your desktop file manager / explorer."
echo
echo -e " Local Ollama is mandatory for the Worker runtime."
echo

# ── 1. Check Python ──────────────────────────────────────────────────────────
PYTHON=""
# Derive the required version from MIN_PYTHON instead of hard-coding 3/8.
REQ_MAJOR="${MIN_PYTHON%%.*}"
REQ_MINOR="${MIN_PYTHON#*.}"
# NOTE: loop variable renamed from 'cmd' — it shadowed the cmd() helper above.
for py in python3 python; do
  if command -v "$py" &>/dev/null; then
    ver=$("$py" -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')" 2>/dev/null || echo "0.0")
    major=$(echo "$ver" | cut -d. -f1)
    minor=$(echo "$ver" | cut -d. -f2)
    # Accept any interpreter >= MIN_PYTHON. The old check compared major and
    # minor independently ('major >= 3 && minor >= 8'), which wrongly rejected
    # any hypothetical 4.0-4.7 interpreter.
    if [ "$major" -gt "$REQ_MAJOR" ] || { [ "$major" -eq "$REQ_MAJOR" ] && [ "$minor" -ge "$REQ_MINOR" ]; }; then
      PYTHON="$py"
      break
    fi
  fi
done
[ -z "$PYTHON" ] && error "Python ${MIN_PYTHON}+ is required. Install it and try again."
info "Python: $($PYTHON --version 2>&1)"

# ── 2. Check pip ─────────────────────────────────────────────────────────────
if ! "$PYTHON" -m pip --version &>/dev/null; then
  error "pip is not installed. Run: $PYTHON -m ensurepip --upgrade"
fi

# ── 3. Check curl ────────────────────────────────────────────────────────────
command -v curl &>/dev/null || error "curl is required. Install it and try again."

# ── 4. Check git ─────────────────────────────────────────────────────────────
command -v git &>/dev/null || error "git is required. Install it and try again."

# ── 5. Check zstd ────────────────────────────────────────────────────────────
# Only needed for the Ollama bootstrap on fresh installs; upgrades skip it.
if [ "$SELF_UPGRADE" != "true" ]; then
  command -v zstd &>/dev/null || error "zstd is required before installing Ollama. Debian/Ubuntu: sudo apt-get install -y zstd"
fi

# ── 6. Check Docker / Compose ────────────────────────────────────────────────
if [ "$SELF_UPGRADE" != "true" ]; then
  command -v docker &>/dev/null || error "Docker is required before installing Nubby CLI. Install it first with: curl -fsSL https://get.docker.com | sh"
  # Accept either the compose plugin ('docker compose') or legacy docker-compose.
  if docker compose version >/dev/null 2>&1; then
    :
  elif command -v docker-compose >/dev/null 2>&1; then
    :
  else
    error "Docker Compose is required before installing Nubby CLI. Install Docker, verify it with 'docker compose version', then rerun this installer."
  fi
fi

# ── 6b. Check terminal emulator (Ghostty or WezTerm required for /workspace) ─
if command -v ghostty &>/dev/null; then
  info "Terminal: Ghostty detected (backend A — Ghostty + Zellij)"
elif command -v wezterm &>/dev/null; then
  info "Terminal: WezTerm detected (backend B — WezTerm + tmux)"
else
  warn "No supported terminal emulator found. Install one of:"
  warn " Option A — Ghostty: https://ghostty.org/docs/install"
  warn " Ubuntu: sudo snap install ghostty --edge"
  warn " Option B — WezTerm: https://wezfurlong.org/wezterm/installation"
  warn "Required for /workspace and /utility commands."
fi

# ── 6c. Check workspace multiplexer (Zellij or tmux) ─────────────────────────
if command -v zellij &>/dev/null; then
  info "Workspace multiplexer: Zellij detected"
elif command -v tmux &>/dev/null; then
  info "Workspace multiplexer: tmux detected"
else
  warn "No supported workspace multiplexer found. Install zellij (preferred) or tmux."
  warn "Without one of them, the standard detached /workspace cannot start."
fi

# ── 7. Check local Ollama readiness ──────────────────────────────────────────
# Probe either the user-supplied URL or the default bootstrap candidates.
if [ -n "${LOCAL_OLLAMA_URL}" ]; then
  OLLAMA_CANDIDATES="${LOCAL_OLLAMA_URL}"
else
  OLLAMA_CANDIDATES="${DEFAULT_OLLAMA_URLS}"
fi
OLLAMA_FOUND=""
# Intentionally unquoted: OLLAMA_CANDIDATES is a space-separated URL list.
for candidate in ${OLLAMA_CANDIDATES}; do
  if curl -fsS "${candidate}/api/tags" >/dev/null 2>&1; then
    OLLAMA_FOUND="${candidate}"
    break
  fi
done
if [ -z "${OLLAMA_FOUND}" ]; then
  if [ "$SELF_UPGRADE" = "true" ]; then
    warn "Local Ollama was not reachable during self-upgrade. Preserving the existing configuration and skipping runtime reprovisioning."
  else
    warn "Local Ollama was not reachable on port 11435 (caelum-axon-ollama). Step 3 will be bootstrapped after the package is downloaded."
  fi
else
  LOCAL_OLLAMA_URL="${OLLAMA_FOUND}"
  info "Local Ollama: ${LOCAL_OLLAMA_URL}"
fi

# ── 8. Create directories ────────────────────────────────────────────────────
info "Creating directories..."
mkdir -p "$INSTALL_DIR"
mkdir -p "$CAELUM_DIR"

# ── 9. Download release tarball ──────────────────────────────────────────────
RELEASE_URL="https://caellum.tech/static/nubby-latest.tar.gz"
info "Downloading Nubby CLI..."
curl -fsSL "$RELEASE_URL" -o "/tmp/nubby-latest.tar.gz" || error "Failed to download from $RELEASE_URL"

# ── 10. Extract ──────────────────────────────────────────────────────────────
info "Extracting..."
# Remove stale runtime files from older releases before unpacking.
# Fresh installs and upgrades must replace the shipped support modules as a unit;
# mixing old utils/core files with a newer nubby_cli.py can break startup imports.
rm -rf "$INSTALL_DIR/client" "$INSTALL_DIR/core" "$INSTALL_DIR/utils" "$INSTALL_DIR/config"
rm -f "$INSTALL_DIR/requirements_cli.txt" "$INSTALL_DIR/nubby_cli.py" "$INSTALL_DIR/nubby_command_router.py" "$INSTALL_DIR/nubby_cli_enhanced.py" "$INSTALL_DIR/.nubby_cli_version"
# Tarballs may or may not have a single top-level directory; try stripped first.
tar -xzf "/tmp/nubby-latest.tar.gz" -C "$INSTALL_DIR" --strip-components=1 2>/dev/null || \
  tar -xzf "/tmp/nubby-latest.tar.gz" -C "$INSTALL_DIR"
rm -f "/tmp/nubby-latest.tar.gz"
if [ ! -f "$INSTALL_DIR/requirements_cli.txt" ]; then
  error "Downloaded Nubby package is incomplete or stale (missing requirements_cli.txt). Rebuild/publish the latest nubby-latest.tar.gz and rerun this installer."
fi
if [ ! -f "$INSTALL_DIR/utils/context_manager.py" ] || [ ! -f "$INSTALL_DIR/core/project_profile.py" ]; then
  error "Downloaded Nubby package is incomplete or stale. Rebuild/publish the latest nubby-latest.tar.gz and rerun this installer."
fi

# Normalize canonical responses from the downloaded bundle so stale markdown
# cannot survive a fresh install or upgrade.
if [ -f "$INSTALL_DIR/config/canonical_responses.json" ]; then
  info "Normalizing canonical responses..."
  CANONICAL_SEED="$INSTALL_DIR/config/canonical_responses.json" \
  CANONICAL_GOLDEN="$CAELUM_DIR/golden/canonical_responses.json" \
  AUTH_PACK="$CAELUM_DIR/authority_pack.json" \
  "$PYTHON" - <<'PY'
import json, os, re
from pathlib import Path

BOLD_RE = re.compile(r"\*\*([^*\n]+)\*\*")
UNDER_RE = re.compile(r"__([^_\n]+)__")

def strip_emphasis(text):
    text = BOLD_RE.sub(r"\1", text)
    text = UNDER_RE.sub(r"\1", text)
    return text

def is_table_row(s):
    return "|" in s and not s.strip().startswith("```")

def is_table_sep(s):
    c = s.strip()
    return "|" in c and bool(re.fullmatch(r"[:\-|\s]+", c)) and c.count("-") >= 3

def format_terminal_text(text):
    if not text:
        return text
    lines = text.splitlines()
    out = []
    in_code = False
    i = 0
    while i < len(lines):
        line = lines[i]
        if line.lstrip().startswith("```"):
            in_code = not in_code
            out.append(line)
            i += 1
            continue
        if in_code:
            out.append(line)
            i += 1
            continue
        if is_table_row(line) and i + 1 < len(lines) and is_table_sep(lines[i + 1]):
            block = []
            while i < len(lines) and lines[i].strip() and (is_table_row(lines[i]) or is_table_sep(lines[i])):
                if not is_table_sep(lines[i]):
                    block.append(strip_emphasis(lines[i]))
                i += 1
            out.append("```text")
            out.extend(block)
            out.append("```")
            continue
        out.append(strip_emphasis(line))
        i += 1
    return "\n".join(out)

seed_path = Path(os.environ["CANONICAL_SEED"])
golden_path = Path(os.environ["CANONICAL_GOLDEN"])
try:
    data = json.loads(seed_path.read_text(encoding="utf-8"))
    for topic in data.get("topics", {}).values():
        responses = topic.get("responses", {})
        for lang, response in list(responses.items()):
            if isinstance(response, str):
                responses[lang] = format_terminal_text(response)
    golden_path.parent.mkdir(parents=True, exist_ok=True)
    golden_path.write_text(json.dumps(data, indent=2, ensure_ascii=False), encoding="utf-8")
    seed_path.write_text(json.dumps(data, indent=2, ensure_ascii=False), encoding="utf-8")
    auth_pack = Path(os.environ["AUTH_PACK"])
    if auth_pack.exists():
        auth_pack.unlink()
except Exception as exc:
    print(f"[!] Canonical normalization skipped: {exc}")
PY
fi

# Bootstrap a local Ollama from the packaged step-3 assets if none was found.
if [ -z "${LOCAL_OLLAMA_URL}" ] && [ -x "$INSTALL_DIR/scripts/provision_local_worker.sh" ] && [ -f "$INSTALL_DIR/docker-compose.ollama.local.yml" ]; then
  info "Bootstrapping local Ollama from packaged step 3 assets..."
  # In root mode, run the provisioner as the target user so data lands in
  # the correct home directory.
  if [ "${EUID:-$(id -u)}" -eq 0 ]; then
    ACTIVE_WORKER_CMD=(sudo -u "$TARGET_USER" -H env HOME="$TARGET_HOME")
  else
    ACTIVE_WORKER_CMD=(env HOME="$TARGET_HOME")
  fi
  if "${ACTIVE_WORKER_CMD[@]}" \
    CAELUM_STACK_ROOT="$INSTALL_DIR" \
    CAELUM_OLLAMA_COMPOSE_FILE="$INSTALL_DIR/docker-compose.ollama.local.yml" \
    CAELUM_OLLAMA_DATA="$TARGET_HOME/.nubby/ollama" \
    CAELUM_LOCAL_OLLAMA_URL="http://localhost:11435" \
    CAELUM_INSTALL_REQUIRE_CLOUD_MODELS=false \
    "$INSTALL_DIR/scripts/provision_local_worker.sh"; then
    LOCAL_OLLAMA_URL="http://localhost:11435"
    info "Local Ollama bootstrapped: ${LOCAL_OLLAMA_URL}"
  else
    warn "Local worker bootstrap completed with warnings."
  fi
fi
if [ -z "${LOCAL_OLLAMA_URL}" ] && [ -z "${OLLAMA_FOUND}" ]; then
  error "Local Ollama is still not reachable after bootstrap. Start /hybrid start or export CAELUM_LOCAL_OLLAMA_URL, then rerun this installer."
fi

# ── 11. Install Python dependencies ──────────────────────────────────────────
if [ -f "$INSTALL_DIR/requirements_cli.txt" ]; then
  info "Installing Python dependencies..."
  # In root mode, force venv so dependencies belong to the install target.
  if [ "${EUID:-$(id -u)}" -eq 0 ]; then
    info "Root mode detected: installing dependencies into $INSTALL_DIR/venv"
    "$PYTHON" -m venv "$INSTALL_DIR/venv"
    "$INSTALL_DIR/venv/bin/pip" install --quiet -r "$INSTALL_DIR/requirements_cli.txt"
    PYTHON="$INSTALL_DIR/venv/bin/python3"
  # Non-root: try user pip first, then venv fallback.
  elif "$PYTHON" -m pip install --quiet --user -r "$INSTALL_DIR/requirements_cli.txt" 2>/dev/null; then
    true
  elif "$PYTHON" -m pip install --quiet --break-system-packages --user -r "$INSTALL_DIR/requirements_cli.txt" 2>/dev/null; then
    true
  else
    info "Creating virtual environment..."
    "$PYTHON" -m venv "$INSTALL_DIR/venv"
    "$INSTALL_DIR/venv/bin/pip" install --quiet -r "$INSTALL_DIR/requirements_cli.txt"
    PYTHON="$INSTALL_DIR/venv/bin/python3"
    warn "Installed in venv. The nubby command will use $INSTALL_DIR/venv/bin/python3"
  fi
fi

# ── 11b. Select install mode ─────────────────────────────────────────────────
INSTALL_MODE="${NUBBY_INSTALL_MODE:-}"
if [ -z "$INSTALL_MODE" ] && [ "$SELF_UPGRADE" != "true" ]; then
  echo
  echo -e "${BOLD} Select install mode:${RESET}"
  echo -e " 1) Standard — DeepSeek + Gemini + NVIDIA keys (best quality)"
  echo -e " 2) Cloud — same models, zero keys needed (proxied through Caellum cloud)"
  echo
  # BUGFIX: under 'curl ... | bash' stdin is the script itself, so a plain
  # 'read' would consume script text. Prompt on the controlling terminal;
  # with no TTY available, fall back to the default choice.
  if ! read -rp " Choose [1/2, default=1]: " mode_choice </dev/tty 2>/dev/null; then
    mode_choice=""
  fi
  case "$mode_choice" in
    2) INSTALL_MODE="free" ;;
    *) INSTALL_MODE="standard" ;;
  esac
fi
INSTALL_MODE="${INSTALL_MODE:-standard}"

# ── 12. Create default config ────────────────────────────────────────────────
CONFIG_FILE="$CAELUM_DIR/config.json"
RUNTIME_ENV_FILE="$CAELUM_DIR/runtime.env"
FIRST_INSTALL=false
if [ ! -f "$RUNTIME_ENV_FILE" ]; then
  FIRST_INSTALL=true
fi
if [ ! -f "$CONFIG_FILE" ]; then
  if [ "$INSTALL_MODE" = "free" ]; then
    info "Creating free-tier configuration (OpenRouter)..."
    cat > "$CONFIG_FILE" << CONFIGEOF
{
  "caelum_install_profile": "openrouter_free",
  "deployment_class": "local",
  "caelum_caste": "espirito",
  "caelum_hybrid_mode": false,
  "caelum_local_worker_required": false,
  "caelum_prompt_personas_enabled": true,
  "caelum_plan": "sovereign"
}
CONFIGEOF
  else
    info "Creating default configuration..."
    cat > "$CONFIG_FILE" << CONFIGEOF
{
  "caelum_install_profile": "default_local",
  "deployment_class": "local",
  "caelum_caste": "agil",
  "caelum_hybrid_mode": true,
  "routing_local_ollama_url": "${LOCAL_OLLAMA_URL}",
  "caelum_local_worker_required": true,
  "caelum_prompt_personas_enabled": true,
  "caelum_plan": "sovereign"
}
CONFIGEOF
  fi
fi

if [ "$FIRST_INSTALL" = true ]; then
  info "Creating isolated runtime profile (first install)..."
  cat > "$RUNTIME_ENV_FILE" << ENVEOF
# Nubby runtime profile (created by installer on first install)
CAELUM_ISOLATED_MODE=true
CAELUM_LEGACY_PATH_FALLBACK=false
CAELUM_ODOO19_PATH=/opt/nubby/odoo19
CAELUM_CUSTOM_ADDONS_PATH=/opt/nubby/odoo19/caelum_addons
CAELUM_SESSION_ROOT=/opt/nubby/odoo19/caelum_addons
CAELUM_ODOO19_CONF_PATH=/etc/nubby_odoo19.conf
CAELUM_ODOO19_SERVICE_NAME=nubby_odoo19
CAELUM_ODOO19_COMPOSE_DIR=/opt/nubby/odoo19
CAELUM_ODOO19_LOG_PATH_LOCAL=/var/log/nubby_odoo19/nubby_odoo19.log
V79_PROOF_ADDONS_PATH=/opt/nubby/odoo19/caelum_addons
CAELUM_ATOMIZE_SOURCE_PATH=/opt/nubby/odoo19/odoo/addons
CAELUM_ATOMIZE_REPOS_ROOT=/opt/nubby/odoo19/third_party
ENVEOF
  chmod 600 "$RUNTIME_ENV_FILE"
  # Prepare the isolated runtime tree; prefer direct creation, then sudo,
  # otherwise leave it to the operator.
  if [ "${EUID:-$(id -u)}" -eq 0 ]; then
    install -d -o "$TARGET_USER" -g "$TARGET_USER" -m 775 \
      /opt/nubby/odoo19 \
      /opt/nubby/odoo19/caelum_addons \
      /opt/nubby/odoo19/third_party \
      /opt/nubby/odoo19/odoo \
      /opt/nubby/odoo19/odoo/addons
    info "Prepared isolated runtime directories at /opt/nubby/odoo19"
  elif mkdir -p /opt/nubby/odoo19/caelum_addons /opt/nubby/odoo19/third_party /opt/nubby/odoo19/odoo/addons 2>/dev/null; then
    info "Created isolated runtime directories at /opt/nubby/odoo19"
  elif command -v sudo &>/dev/null && sudo mkdir -p /opt/nubby/odoo19/caelum_addons /opt/nubby/odoo19/third_party /opt/nubby/odoo19/odoo/addons 2>/dev/null; then
    sudo chown "$(id -u):$(id -g)" /opt/nubby/odoo19 /opt/nubby/odoo19/caelum_addons /opt/nubby/odoo19/third_party /opt/nubby/odoo19/odoo /opt/nubby/odoo19/odoo/addons 2>/dev/null || true
    info "Prepared isolated runtime directories at /opt/nubby/odoo19"
  else
    warn "Could not create /opt/nubby/odoo19 automatically. Create it manually before using local deploy/proof."
  fi
fi

if [ "${EUID:-$(id -u)}" -eq 0 ]; then
  chown -R "$TARGET_USER:$TARGET_USER" "$INSTALL_DIR" "$CAELUM_DIR" 2>/dev/null || true
fi

# Determine which python to use (venv if created, system otherwise)
if [ -f "$INSTALL_DIR/venv/bin/python3" ]; then
  NUBBY_PYTHON="$INSTALL_DIR/venv/bin/python3"
else
  NUBBY_PYTHON="$PYTHON"
fi

# ── 13. Provision local models ───────────────────────────────────────────────
if [ "$SELF_UPGRADE" = "true" ]; then
  info "Self-upgrade: preserving existing local Worker and environment state..."
elif [ -n "$LOCAL_OLLAMA_URL" ]; then
  info "Provisioning local Worker and auxiliary models..."
  # BUGFIX: CAELUM_DIR must be passed explicitly — the embedded Python reads
  # it to locate config.json; without it the write fell back to the invoking
  # user's ~/.caelum (wrong under root-mode installs).
  if PYTHONPATH="$INSTALL_DIR" CAELUM_DIR="$CAELUM_DIR" CAELUM_LOCAL_OLLAMA_URL="$LOCAL_OLLAMA_URL" CAELUM_REQUIRE_CLOUD_MODELS=false "$NUBBY_PYTHON" - <<'PY'
import os
import sys

root = os.environ.get("PYTHONPATH", "")
if root and root not in sys.path:
    sys.path.insert(0, root)

from core.model_provisioner import ModelProvisioner
from core.metamorphosis import MetamorphosisEngine

def progress(msg: str):
    if msg.startswith("\r"):
        print(msg, end="", flush=True)
    else:
        print(msg, flush=True)

ollama_url = os.environ.get("CAELUM_LOCAL_OLLAMA_URL", "http://localhost:11435")
provisioner = ModelProvisioner(ollama_url=ollama_url)
worker_results = provisioner.provision(castes=["agil", "espirito"], progress_cb=progress)
worker_ok = all(
    isinstance(payload, dict)
    and payload.get("fallback") in {"registered", "already_present", "sha256_match_skip", "hub_pull"}
    for payload in worker_results.values()
)
if not worker_ok:
    raise SystemExit(2)
print("[+] Provisioning auxiliary local models...", flush=True)
aux_targets = ["vision_local"]
meta = MetamorphosisEngine()
detected_caste = meta.select_caste(meta.detect_hardware())
print(f"Auxiliary model profile: {detected_caste}", flush=True)
if detected_caste in {"sabio", "arconte"}:
    aux_targets.append("devstral_worker")
aux_results = provisioner.provision_auxiliary(model_keys=aux_targets, progress_cb=progress)

# Write detected caste and matching worker model back to config.json
import json as _json
_caste_model = {
    "espirito": "qwen2.5-coder:0.5b",
    "agil": "qwen2.5-coder:7b",
    "sabio": "qwen2.5-coder:14b",
    "arconte": "qwen2.5-coder:32b",
}.get(detected_caste, "qwen2.5-coder:7b")
_cfg_path = os.path.join(os.environ.get("CAELUM_DIR", os.path.join(os.path.expanduser("~"), ".caelum")), "config.json")
try:
    with open(_cfg_path) as _f:
        _cfg = _json.load(_f)
    _cfg["caelum_caste"] = detected_caste
    _cfg["worker_model"] = _caste_model
    _cfg["caelum_hybrid_worker_model"] = _caste_model
    with open(_cfg_path, "w") as _f:
        _json.dump(_cfg, _f, indent=2)
    print(f"[+] Caste written to config: {detected_caste} → {_caste_model}", flush=True)
except Exception as _e:
    print(f"[!] Could not write caste to config: {_e}", flush=True)
PY
  then
    :
  else
    error "Failed to provision mandatory local Worker baseline models. Ensure local Ollama can reach the model sources and rerun the installer."
  fi
else
  # Defensive branch: non-upgrade install with no reachable Ollama. The old
  # message wrongly claimed this happened "during self-upgrade".
  warn "Local Ollama was not detected; skipping Worker model provisioning. Run /hybrid status after install."
fi

# ── 14. Write nubby entrypoint + create symlink ──────────────────────────────
NUBBY_BIN="$INSTALL_DIR/nubby"
cat > "$NUBBY_BIN" << ENTRYEOF
#!/bin/bash
# Nubby CLI — https://caellum.tech
NUBBY_DIR="\${NUBBY_DIR:-$INSTALL_DIR}"
CAELUM_RUNTIME_ENV_FILE="\${CAELUM_RUNTIME_ENV_FILE:-$CAELUM_DIR/runtime.env}"
if [ -f "\$CAELUM_RUNTIME_ENV_FILE" ]; then
  set -a
  # shellcheck disable=SC1090
  . "\$CAELUM_RUNTIME_ENV_FILE"
  set +a
fi
if [ "\$1" = "--upgrade" ]; then
  exec bash -lc 'curl -fsSL https://caellum.tech/nubby-install.sh | bash'
fi
exec "$NUBBY_PYTHON" "\$NUBBY_DIR/nubby_cli.py" "\$@"
ENTRYEOF
chmod +x "$NUBBY_BIN"

# Try /usr/local/bin first, fall back to ~/bin
SYMLINK_CREATED=false
if [ -w "/usr/local/bin" ]; then
  ln -sf "$NUBBY_BIN" /usr/local/bin/nubby 2>/dev/null && SYMLINK_CREATED=true
fi
if [ "$SYMLINK_CREATED" = false ]; then
  mkdir -p "$HOME/bin"
  ln -sf "$NUBBY_BIN" "$HOME/bin/nubby"
  # Add ~/bin to PATH if not already there.
  # BUGFIX: the previous 'echo $PATH | grep -q $HOME/bin' treated $HOME as a
  # regex and matched superstrings like ~/binx; match exact PATH entries.
  case ":$PATH:" in
    *":$HOME/bin:"*) ;;
    *)
      for rc in "$HOME/.bashrc" "$HOME/.zshrc"; do
        if [ -f "$rc" ]; then
          echo 'export PATH="$HOME/bin:$PATH"' >> "$rc"
        fi
      done
      warn "Added ~/bin to PATH. Restart your terminal or run: source ~/.bashrc"
      ;;
  esac
fi

# ── 15. Done ─────────────────────────────────────────────────────────────────
echo
echo -e "${BOLD}\033[1;94m"
echo " ▄▄▄▄▄ Nubby CLI v9.81"
echo " ▄███████▄ The Sovereign Odoo Code Forge AI (v19) Edition"
echo -e "█████████ ${YELLOW}$(pwd)${RESET}${BOLD}\033[1;94m"
echo " ▀▀▀▀▀▀▀ Nubby CLI v9.81"
echo " NUBBY"
echo -e "\033[0m${RESET}"
echo
echo -e "${GREEN}${BOLD} Nubby CLI installed successfully!${RESET}"
echo
echo -e " Next steps:"
echo -e " 1. Run $(cmd nubby) to launch the CLI"
echo -e " 2. Type $(cmd /signup) to create an account or $(cmd /login) to bind an existing one"
echo -e " 3. Type $(cmd /setup) to confirm the default stack and enter ${BOLD}DEEPSEEK_API_KEY${RESET}, ${BOLD}GEMINI_API_KEY${RESET}, and ${BOLD}NVIDIA_API_KEY${RESET}"
echo -e ' 4. Type '$(cmd '/hybrid status')' to verify local Ollama and Worker model availability'
echo -e ' 5. Type '$(cmd '/deploy local')' to scaffold the local Odoo 19 runtime'
echo -e " (clones Odoo CE source, creates venv, generates service config)"
echo
ENTERPRISE_DIR="/opt/nubby/odoo19/enterprise"
if [ -d "$ENTERPRISE_DIR" ] && [ "$(ls -A "$ENTERPRISE_DIR" 2>/dev/null)" ]; then
  echo -e " ${GREEN}✅ Odoo Enterprise addons detected at ${BOLD}${ENTERPRISE_DIR}${RESET}"
else
  echo -e " ${YELLOW}${BOLD} Important: Odoo Enterprise${RESET}"
  echo -e " ${YELLOW} If you have an Odoo 19 Enterprise license, copy the enterprise"
  echo -e " addons to: ${BOLD}${ENTERPRISE_DIR}/${RESET}"
  echo -e " ${YELLOW} This step is manual — Enterprise code is not open-source.${RESET}"
fi
echo
echo -e " New to Caelum? Sign up free: ${BOLD}https://caellum.tech/caelum/signup${RESET}"
echo
echo -e " ${BOLD}First Run Sequence${RESET}"
echo -e " • Start $(cmd nubby)"
echo -e " • Run $(cmd /signup) if you do not yet have a Caellum account"
echo -e " • Run $(cmd /login) to bind this local Nubby instance to the account"
echo -e " • Run $(cmd /setup) to confirm the ${BOLD}default_local${RESET} install baseline and store ${BOLD}DEEPSEEK_API_KEY${RESET}, ${BOLD}GEMINI_API_KEY${RESET}, and ${BOLD}NVIDIA_API_KEY${RESET}"
echo -e ' • Run '$(cmd '/hybrid status')' to verify local Worker readiness'
echo -e ' • Run '$(cmd '/deploy docker')' and '$(cmd '/deploy local')' to scaffold the two local Odoo 19 environments'
echo -e " • Start normal natural-language Odoo work"
echo