Files
linux-workspace/bin/xclaude
2026-02-19 20:16:48 +01:00

65 lines
1.9 KiB
Python
Executable File

#!/usr/bin/env python3
# https://code.claude.com/docs/en/settings
import argparse
import os
import sys
# Alias of the live process environment: every mutation made through `env`
# is inherited by the `claude` process exec'd at the end of the script.
env = os.environ


def die(msg):
    """Write *msg* to stderr and terminate the process with exit status 1."""
    sys.stderr.write(f"{msg}\n")
    sys.exit(1)


# Help epilog, rendered verbatim thanks to RawDescriptionHelpFormatter.
_EPILOG = """\
Environment variables (overridable by arguments):
CLAUDE_URL - API base URL (e.g. https://openrouter.ai/api/v1)
CLAUDE_TOKEN - API token/key
CLAUDE_MODEL - model name
If neither URL nor token is set, defaults to Ollama:
OLLAMA_HOST - Ollama server address (default: nvidia.hell), or use --host
OLLAMA_MODEL - Ollama model (default: glm-5:cloud)
Examples:
xclaude
xclaude --model qwen3-coder
xclaude --url https://openrouter.ai/api/v1 --token sk-XXX --model openai/gpt-4o
"""

p = argparse.ArgumentParser(
    prog="xclaude",
    description="Run Claude Code with any OpenAI-compatible API backend.",
    epilog=_EPILOG,
    formatter_class=argparse.RawDescriptionHelpFormatter,
)
# Each flag falls back to its environment variable, then to a hard default.
for _flag, _var, _fallback in (
    ("--url", "CLAUDE_URL", ""),
    ("--token", "CLAUDE_TOKEN", ""),
    ("--model", "CLAUDE_MODEL", ""),
    ("--host", "OLLAMA_HOST", "nvidia.hell"),
):
    p.add_argument(_flag, default=env.get(_var, _fallback))
# Unrecognized arguments are collected and forwarded to `claude` verbatim.
args, rest = p.parse_known_args()
# Telemetry is disabled unconditionally, whichever backend is chosen.
env["DISABLE_TELEMETRY"] = "1"

# An explicit URL or token selects an external OpenAI-compatible API;
# otherwise the script falls back to an Ollama server.
is_external_api = bool(args.url or args.token)
if is_external_api:
    # All three settings are required for an external backend. Fail fast
    # with a fatal error (exit 1) rather than letting `claude` fail
    # opaquely. The messages were previously prefixed "Warning:" even
    # though die() terminates the process — they are errors.
    if not args.url:
        die("Error: no URL specified")
    if not args.token:
        die("Error: no token specified")
    if not args.model:
        die("Error: no model specified")
    env["ANTHROPIC_BASE_URL"] = args.url
    env["ANTHROPIC_API_KEY"] = args.token
    # Drop any inherited auth token so it cannot shadow the API key.
    env.pop("ANTHROPIC_AUTH_TOKEN", None)
    model = args.model
else:
    host = args.host
    model = args.model or env.get("OLLAMA_MODEL", "glm-5:cloud")
    env["ANTHROPIC_BASE_URL"] = f"http://{host}:11434"  # default Ollama port
    env["ANTHROPIC_AUTH_TOKEN"] = "ollama"
    env["ANTHROPIC_API_KEY"] = ""  # blank out any inherited real key
# Replace this process with `claude`; the environment set up above is
# inherited, and leftover CLI arguments are forwarded unchanged.
cmd = ["claude", "--model", model] + rest
os.execvp("claude", cmd)