#!/usr/bin/env python3
"""Launcher that runs Claude Code against any OpenAI-compatible API backend.

See: https://code.claude.com/docs/en/settings
"""
import argparse
import os
import sys


def main() -> None:
    """Parse CLI/env configuration, export Anthropic env vars, and exec `claude`.

    Unknown arguments are forwarded verbatim to the `claude` binary.
    Exits 1 on missing configuration, 127 if `claude` is not on PATH.
    """
    p = argparse.ArgumentParser(
        prog="xclaude",
        description="Run Claude Code with any OpenAI-compatible API backend.",
        epilog="""\
Environment variables (overridable by arguments):
  CLAUDE_URL   - API base URL (e.g. https://openrouter.ai/api/v1)
  CLAUDE_TOKEN - API token/key
  CLAUDE_MODEL - model name

If neither URL nor token is set, defaults to Ollama:
  OLLAMA_HOST  - Ollama server address (default: nvidia.hell), or use --host
  OLLAMA_MODEL - Ollama model (default: glm-5:cloud)

Examples:
  xclaude
  xclaude --model qwen3-coder
  xclaude --url https://openrouter.ai/api/v1 --token sk-XXX --model openai/gpt-4o
""",
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    p.add_argument("--url", default=os.environ.get("CLAUDE_URL", ""))
    p.add_argument("--token", default=os.environ.get("CLAUDE_TOKEN", ""))
    p.add_argument("--model", default=os.environ.get("CLAUDE_MODEL", ""))
    p.add_argument("--host", default=os.environ.get("OLLAMA_HOST", "nvidia.hell"))
    # parse_known_args: anything we don't recognize is passed through to claude.
    args, rest = p.parse_known_args()

    os.environ["DISABLE_TELEMETRY"] = "1"

    if args.url or args.token:
        # External API mode: URL, token, and model are all mandatory.
        # NOTE: these were previously printed as "Warning" although they are
        # fatal — they abort with exit code 1, so label them as errors.
        for attr, label in (("url", "URL"), ("token", "token"), ("model", "model")):
            if not getattr(args, attr):
                print(f"Error: no {label} specified", file=sys.stderr)
                sys.exit(1)
        os.environ["ANTHROPIC_BASE_URL"] = args.url
        os.environ["ANTHROPIC_API_KEY"] = args.token
        # An inherited auth token would shadow the API key — drop it.
        os.environ.pop("ANTHROPIC_AUTH_TOKEN", None)
        model = args.model
    else:
        # Ollama mode: point Claude Code at the (local/LAN) Ollama server.
        model = args.model or os.environ.get("OLLAMA_MODEL", "glm-5:cloud")
        os.environ["ANTHROPIC_BASE_URL"] = f"http://{args.host}:11434"
        os.environ["ANTHROPIC_AUTH_TOKEN"] = "ollama"
        os.environ["ANTHROPIC_API_KEY"] = ""

    try:
        # Replace this process with claude so signals and the exit code
        # propagate directly to the caller.
        os.execvp("claude", ["claude", "--model", model] + rest)
    except FileNotFoundError:
        print("Error: `claude` executable not found on PATH", file=sys.stderr)
        sys.exit(127)


if __name__ == "__main__":
    main()