Add ez-assistant and kerberos service folders

This commit is contained in:
kelin
2026-02-11 14:56:03 -05:00
parent e4e8ae1b87
commit 9ccfb36923
4471 changed files with 746463 additions and 0 deletions

View File

@@ -0,0 +1,89 @@
#!/bin/bash
# Auth Expiry Monitor
# Run via cron or systemd timer to get proactive notifications
# before Claude Code auth expires.
#
# Suggested cron: */30 * * * * /home/admin/moltbot/scripts/auth-monitor.sh
#
# Environment variables:
# NOTIFY_PHONE - Phone number to send Moltbot notification (e.g., +1234567890)
# NOTIFY_NTFY - ntfy.sh topic for push notifications (e.g., moltbot-alerts)
# WARN_HOURS - Hours before expiry to warn (default: 2)
set -euo pipefail
# Directory of this script; used to locate the sibling claude-auth-status.sh.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Claude Code OAuth credentials (JSON; .claudeAiOauth.expiresAt in epoch ms).
CLAUDE_CREDS="$HOME/.claude/.credentials.json"
# Holds the epoch-seconds timestamp of the last notification we sent.
STATE_FILE="$HOME/.clawdbot/auth-monitor-state"
# Configuration
WARN_HOURS="${WARN_HOURS:-2}"
NOTIFY_PHONE="${NOTIFY_PHONE:-}"
NOTIFY_NTFY="${NOTIFY_NTFY:-}"
# State tracking to avoid spam
mkdir -p "$(dirname "$STATE_FILE")"
# NOTE(review): assumes the state file holds a bare integer; a corrupted file
# would later break the arithmetic comparison in send_notification — confirm.
LAST_NOTIFIED=$(cat "$STATE_FILE" 2>/dev/null || echo "0")
NOW=$(date +%s)
# Only notify once per hour max
MIN_INTERVAL=3600
# send_notification MESSAGE [PRIORITY]
# Log MESSAGE to stdout, then deliver it via Moltbot (if NOTIFY_PHONE is set
# and our own auth still works) and/or ntfy.sh (if NOTIFY_NTFY is set).
# Rate-limited: at most one delivery attempt per MIN_INTERVAL seconds.
send_notification() {
local message="$1"
# ntfy.sh "Priority" header value; defaults to "default".
local priority="${2:-default}"
echo "$(date '+%Y-%m-%d %H:%M:%S') - $message"
# Check if we notified recently
if [ $((NOW - LAST_NOTIFIED)) -lt $MIN_INTERVAL ]; then
echo "Skipping notification (sent recently)"
return
fi
# Send via Moltbot if phone configured and auth still valid
if [ -n "$NOTIFY_PHONE" ]; then
# Check if we can still use moltbot
if "$SCRIPT_DIR/claude-auth-status.sh" simple 2>/dev/null | grep -q "OK\|EXPIRING"; then
echo "Sending via Moltbot to $NOTIFY_PHONE..."
# Best-effort: never let a send failure abort the monitor (set -e).
moltbot send --to "$NOTIFY_PHONE" --message "$message" 2>/dev/null || true
fi
fi
# Send via ntfy.sh if configured
if [ -n "$NOTIFY_NTFY" ]; then
echo "Sending via ntfy.sh to $NOTIFY_NTFY..."
curl -s -o /dev/null \
-H "Title: Moltbot Auth Alert" \
-H "Priority: $priority" \
-H "Tags: warning,key" \
-d "$message" \
"https://ntfy.sh/$NOTIFY_NTFY" || true
fi
# Update state
# Written only after a delivery attempt, so skipped calls (the early return
# above) do not reset the rate-limit window.
echo "$NOW" > "$STATE_FILE"
}
# --- Check auth status -------------------------------------------------------
if [ ! -f "$CLAUDE_CREDS" ]; then
  send_notification "Claude Code credentials missing! Run: claude setup-token" "high"
  exit 1
fi
# Expiry is milliseconds since epoch. Under `set -euo pipefail` a bare failing
# jq (missing binary, malformed JSON) would abort the monitor with no alert at
# all, so fall back to 0 and validate numerically; 0 reads as long-expired,
# which produces the loudest (urgent) notification below.
EXPIRES_AT=$(jq -r '.claudeAiOauth.expiresAt // 0' "$CLAUDE_CREDS" 2>/dev/null || echo 0)
if ! [[ "$EXPIRES_AT" =~ ^-?[0-9]+$ ]]; then
  EXPIRES_AT=0
fi
NOW_MS=$((NOW * 1000))
DIFF_MS=$((EXPIRES_AT - NOW_MS))
HOURS_LEFT=$((DIFF_MS / 3600000))
MINS_LEFT=$(((DIFF_MS % 3600000) / 60000))
if [ "$DIFF_MS" -lt 0 ]; then
  # Already expired: the bot is effectively down.
  send_notification "Claude Code auth EXPIRED! Moltbot is down. Run: ssh l36 '~/moltbot/scripts/mobile-reauth.sh'" "urgent"
  exit 1
elif [ "$HOURS_LEFT" -lt "$WARN_HOURS" ]; then
  send_notification "Claude Code auth expires in ${HOURS_LEFT}h ${MINS_LEFT}m. Consider re-auth soon." "high"
  exit 0
else
  echo "$(date '+%Y-%m-%d %H:%M:%S') - Auth OK: ${HOURS_LEFT}h ${MINS_LEFT}m remaining"
  exit 0
fi

View File

@@ -0,0 +1,144 @@
import { completeSimple, getModel, type Model } from "@mariozechner/pi-ai";
// Per-call token accounting as reported by pi-ai. All fields optional:
// providers differ in which counters they return.
type Usage = {
input?: number;
output?: number;
cacheRead?: number;
cacheWrite?: number;
totalTokens?: number;
};
// One benchmark iteration: wall-clock latency plus optional usage stats.
type RunResult = {
durationMs: number;
usage?: Usage;
};
// Minimal prompt so measured latency is dominated by round-trip overhead,
// not generation length.
const DEFAULT_PROMPT =
"Reply with a single word: ok. No punctuation or extra text.";
const DEFAULT_RUNS = 10;
/**
 * Look up the value that follows a CLI flag in process.argv.
 * Returns undefined when the flag is absent (or is the final argument).
 */
function parseArg(flag: string): string | undefined {
  const position = process.argv.indexOf(flag);
  return position >= 0 ? process.argv[position + 1] : undefined;
}
/**
 * Parse the --runs value; falls back to DEFAULT_RUNS for missing,
 * non-numeric, or non-positive input. Fractional counts are truncated.
 */
function parseRuns(raw: string | undefined): number {
  if (!raw) return DEFAULT_RUNS;
  const value = Number(raw);
  const usable = Number.isFinite(value) && value > 0;
  return usable ? Math.floor(value) : DEFAULT_RUNS;
}
/**
 * Median of a list of numbers; 0 for an empty list.
 * For an even count the two middle values are averaged and rounded to the
 * nearest integer (durations here are whole milliseconds).
 */
function median(values: number[]): number {
  if (!values.length) return 0;
  const ordered = values.slice().sort((a, b) => a - b);
  const half = ordered.length >> 1;
  return ordered.length % 2 === 1
    ? ordered[half]
    : Math.round((ordered[half - 1] + ordered[half]) / 2);
}
/**
 * Run the benchmark prompt `opts.runs` times sequentially against one model,
 * logging each run's latency and collecting duration + usage per run.
 */
async function runModel(opts: {
  label: string;
  model: Model<any>;
  apiKey: string;
  runs: number;
  prompt: string;
}): Promise<RunResult[]> {
  const collected: RunResult[] = [];
  for (let run = 0; run < opts.runs; run += 1) {
    const startedAt = Date.now();
    const response = await completeSimple(
      opts.model,
      {
        messages: [
          { role: "user", content: opts.prompt, timestamp: Date.now() },
        ],
      },
      { apiKey: opts.apiKey, maxTokens: 64 },
    );
    const elapsed = Date.now() - startedAt;
    collected.push({ durationMs: elapsed, usage: response.usage });
    console.log(`${opts.label} run ${run + 1}/${opts.runs}: ${elapsed}ms`);
  }
  return collected;
}
/**
 * Entry point: benchmarks MiniMax vs Claude Opus latency on a tiny prompt.
 * Requires ANTHROPIC_API_KEY and MINIMAX_API_KEY; MINIMAX_BASE_URL and
 * MINIMAX_MODEL optionally override the MiniMax endpoint/model id.
 */
async function main(): Promise<void> {
const runs = parseRuns(parseArg("--runs"));
const prompt = parseArg("--prompt") ?? DEFAULT_PROMPT;
const anthropicKey = process.env.ANTHROPIC_API_KEY?.trim();
const minimaxKey = process.env.MINIMAX_API_KEY?.trim();
if (!anthropicKey) {
throw new Error("Missing ANTHROPIC_API_KEY in environment.");
}
if (!minimaxKey) {
throw new Error("Missing MINIMAX_API_KEY in environment.");
}
// `||` (not `??`) so an empty-string env var also falls back to defaults.
const minimaxBaseUrl =
process.env.MINIMAX_BASE_URL?.trim() || "https://api.minimax.io/v1";
const minimaxModelId =
process.env.MINIMAX_MODEL?.trim() || "MiniMax-M2.1";
// Hand-rolled descriptor: MiniMax speaks the OpenAI completions protocol.
// Costs are zeroed — this benchmark does not track spend.
const minimaxModel: Model<"openai-completions"> = {
id: minimaxModelId,
name: `MiniMax ${minimaxModelId}`,
api: "openai-completions",
provider: "minimax",
baseUrl: minimaxBaseUrl,
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 200000,
maxTokens: 8192,
};
const opusModel = getModel("anthropic", "claude-opus-4-5");
console.log(`Prompt: ${prompt}`);
console.log(`Runs: ${runs}`);
console.log("");
// Benchmarks run one after the other (not in parallel) so the two models
// don't contend for bandwidth and skew each other's latency numbers.
const minimaxResults = await runModel({
label: "minimax",
model: minimaxModel,
apiKey: minimaxKey,
runs,
prompt,
});
const opusResults = await runModel({
label: "opus",
model: opusModel,
apiKey: anthropicKey,
runs,
prompt,
});
// Reduce each result set to median/min/max wall-clock duration.
const summarize = (label: string, results: RunResult[]) => {
const durations = results.map((r) => r.durationMs);
const med = median(durations);
const min = Math.min(...durations);
const max = Math.max(...durations);
return { label, med, min, max };
};
const summary = [summarize("minimax", minimaxResults), summarize("opus", opusResults)];
console.log("");
console.log("Summary (ms):");
for (const row of summary) {
console.log(
`${row.label.padEnd(7)} median=${row.med} min=${row.min} max=${row.max}`,
);
}
}
await main();

View File

@@ -0,0 +1,18 @@
#!/usr/bin/env bash
# Build the Moltbot macOS app (debug config) into a local build dir, stop any
# running instance, and relaunch the fresh binary in the background.
set -euo pipefail

cd "$(dirname "$0")/../apps/macos"

BUILD_PATH=".build-local"
PRODUCT="Moltbot"
BIN="$BUILD_PATH/debug/$PRODUCT"

# Pass variables as printf ARGUMENTS instead of embedding them in the format
# string, so a '%' in a value can never be misread as a format specifier.
printf '\n▶ Building %s (debug, build path: %s)\n' "$PRODUCT" "$BUILD_PATH"
swift build -c debug --product "$PRODUCT" --build-path "$BUILD_PATH"

printf '\n⏹ Stopping existing %s...\n' "$PRODUCT"
# killall exits non-zero when nothing matched; fine on first run (set -e).
killall -q "$PRODUCT" 2>/dev/null || true

printf '\n🚀 Launching %s ...\n' "$BIN"
nohup "$BIN" >/tmp/moltbot.log 2>&1 &
PID=$!
printf 'Started %s (PID %s). Logs: /tmp/moltbot.log\n' "$PRODUCT" "$PID"

View File

@@ -0,0 +1,15 @@
#!/usr/bin/env node
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
const root = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
const binDir = path.join(root, "bin");
const scriptPath = path.join(root, "scripts", "docs-list.js");
const binPath = path.join(binDir, "docs-list");
fs.mkdirSync(binDir, { recursive: true });
const wrapper = `#!/usr/bin/env node\nimport { spawnSync } from "node:child_process";\nimport path from "node:path";\nimport { fileURLToPath } from "node:url";\n\nconst here = path.dirname(fileURLToPath(import.meta.url));\nconst script = path.join(here, "..", "scripts", "docs-list.js");\n\nconst result = spawnSync(process.execPath, [script], { stdio: "inherit" });\nprocess.exit(result.status ?? 1);\n`;
fs.writeFileSync(binPath, wrapper, { mode: 0o755 });

View File

@@ -0,0 +1,59 @@
#!/usr/bin/env bash
set -euo pipefail
# Render the macOS .icon bundle to a padded .icns like Trimmy's pipeline.
# Defaults target the Moltbot assets so you can just run the script from repo root.
ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
# Positional args (all optional): [icon bundle] [basename] [output dir].
ICON_FILE=${1:-"$ROOT_DIR/apps/macos/Icon.icon"}
BASENAME=${2:-Moltbot}
OUT_ROOT=${3:-"$ROOT_DIR/apps/macos/build/icon"}
XCODE_APP=${XCODE_APP:-/Applications/Xcode.app}
# Where the final .icns should live; override DEST_ICNS to change.
DEST_ICNS=${DEST_ICNS:-"$ROOT_DIR/apps/macos/Sources/Moltbot/Resources/Moltbot.icns"}
# Icon Composer's CLI renderer; the executable name changed between Xcode
# releases, so fall back from "ictool" to "icontool".
ICTOOL="$XCODE_APP/Contents/Applications/Icon Composer.app/Contents/Executables/ictool"
if [[ ! -x "$ICTOOL" ]]; then
ICTOOL="$XCODE_APP/Contents/Applications/Icon Composer.app/Contents/Executables/icontool"
fi
if [[ ! -x "$ICTOOL" ]]; then
echo "ictool/icontool not found. Set XCODE_APP if Xcode is elsewhere." >&2
exit 1
fi
ICONSET_DIR="$OUT_ROOT/${BASENAME}.iconset"
TMP_DIR="$OUT_ROOT/tmp"
mkdir -p "$ICONSET_DIR" "$TMP_DIR"
MASTER_ART="$TMP_DIR/icon_art_824.png"
MASTER_1024="$TMP_DIR/icon_1024.png"
# Render inner art (no margin) with macOS Default appearance
# NOTE(review): "macOS Default" is intentionally unquoted and passed as two
# separate arguments ("macOS", "Default") — confirm against ictool usage.
"$ICTOOL" "$ICON_FILE" \
--export-preview macOS Default 824 824 1 -45 "$MASTER_ART"
# Pad to 1024x1024 with transparent border
sips --padToHeightWidth 1024 1024 "$MASTER_ART" --out "$MASTER_1024" >/dev/null
# Generate required sizes
sizes=(16 32 64 128 256 512 1024)
for sz in "${sizes[@]}"; do
out="$ICONSET_DIR/icon_${sz}x${sz}.png"
sips -z "$sz" "$sz" "$MASTER_1024" --out "$out" >/dev/null
# Every size except 1024 also gets an @2x variant at double resolution.
if [[ "$sz" -ne 1024 ]]; then
dbl=$((sz*2))
out2="$ICONSET_DIR/icon_${sz}x${sz}@2x.png"
sips -z "$dbl" "$dbl" "$MASTER_1024" --out "$out2" >/dev/null
fi
done
# 512x512@2x already covered by 1024; ensure it exists
cp "$MASTER_1024" "$ICONSET_DIR/icon_512x512@2x.png"
# Pack the iconset into .icns and install it into the app resources.
iconutil -c icns "$ICONSET_DIR" -o "$OUT_ROOT/${BASENAME}.icns"
mkdir -p "$(dirname "$DEST_ICNS")"
cp "$OUT_ROOT/${BASENAME}.icns" "$DEST_ICNS"
echo "Icon.icns generated at $DEST_ICNS"

View File

@@ -0,0 +1,87 @@
#!/usr/bin/env bash
# Incremental A2UI bundler: hashes the renderer + app sources and only
# re-runs tsc/rolldown when inputs changed since the last successful build.
set -euo pipefail
on_error() {
echo "A2UI bundling failed. Re-run with: pnpm canvas:a2ui:bundle" >&2
echo "If this persists, verify pnpm deps and try again." >&2
}
trap on_error ERR
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
# Hash of the last successful build's inputs, stored next to the output.
HASH_FILE="$ROOT_DIR/src/canvas-host/a2ui/.bundle.hash"
OUTPUT_FILE="$ROOT_DIR/src/canvas-host/a2ui/a2ui.bundle.js"
A2UI_RENDERER_DIR="$ROOT_DIR/vendor/a2ui/renderers/lit"
A2UI_APP_DIR="$ROOT_DIR/apps/shared/MoltbotKit/Tools/CanvasA2UI"
# Docker builds exclude vendor/apps via .dockerignore.
# In that environment we must keep the prebuilt bundle.
if [[ ! -d "$A2UI_RENDERER_DIR" || ! -d "$A2UI_APP_DIR" ]]; then
echo "A2UI sources missing; keeping prebuilt bundle."
exit 0
fi
# Everything that can affect the bundle: manifest, lockfile, both source trees.
INPUT_PATHS=(
"$ROOT_DIR/package.json"
"$ROOT_DIR/pnpm-lock.yaml"
"$A2UI_RENDERER_DIR"
"$A2UI_APP_DIR"
)
# Emit a sha256 over (repo-relative path + contents) of every file under
# INPUT_PATHS, sorted with /-normalized paths for cross-platform determinism.
# Implemented in node (quoted heredoc: no shell expansion inside).
compute_hash() {
ROOT_DIR="$ROOT_DIR" node --input-type=module - "${INPUT_PATHS[@]}" <<'NODE'
import { createHash } from "node:crypto";
import { promises as fs } from "node:fs";
import path from "node:path";
const rootDir = process.env.ROOT_DIR ?? process.cwd();
const inputs = process.argv.slice(2);
const files = [];
async function walk(entryPath) {
const st = await fs.stat(entryPath);
if (st.isDirectory()) {
const entries = await fs.readdir(entryPath);
for (const entry of entries) {
await walk(path.join(entryPath, entry));
}
return;
}
files.push(entryPath);
}
for (const input of inputs) {
await walk(input);
}
function normalize(p) {
return p.split(path.sep).join("/");
}
files.sort((a, b) => normalize(a).localeCompare(normalize(b)));
const hash = createHash("sha256");
for (const filePath of files) {
const rel = normalize(path.relative(rootDir, filePath));
hash.update(rel);
hash.update("\0");
hash.update(await fs.readFile(filePath));
hash.update("\0");
}
process.stdout.write(hash.digest("hex"));
NODE
}
current_hash="$(compute_hash)"
# Skip the (slow) build when inputs are unchanged and the output still exists.
if [[ -f "$HASH_FILE" ]]; then
previous_hash="$(cat "$HASH_FILE")"
if [[ "$previous_hash" == "$current_hash" && -f "$OUTPUT_FILE" ]]; then
echo "A2UI bundle up to date; skipping."
exit 0
fi
fi
pnpm -s exec tsc -p "$A2UI_RENDERER_DIR/tsconfig.json"
rolldown -c "$A2UI_APP_DIR/rolldown.config.mjs"
# Record the hash only after both build steps succeeded (set -e guards this).
echo "$current_hash" > "$HASH_FILE"

View File

@@ -0,0 +1,49 @@
import fs from "node:fs/promises";
import path from "node:path";
import { fileURLToPath, pathToFileURL } from "node:url";
// Repository root = parent of the scripts/ directory containing this file.
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");

/**
 * Resolve the A2UI asset source/output directories. Environment overrides
 * (CLAWDBOT_A2UI_SRC_DIR / CLAWDBOT_A2UI_OUT_DIR) win; otherwise both paths
 * are derived from the repository root.
 */
export function getA2uiPaths(env = process.env) {
  const defaultSrc = path.join(repoRoot, "src", "canvas-host", "a2ui");
  const defaultOut = path.join(repoRoot, "dist", "canvas-host", "a2ui");
  return {
    srcDir: env.CLAWDBOT_A2UI_SRC_DIR ?? defaultSrc,
    outDir: env.CLAWDBOT_A2UI_OUT_DIR ?? defaultOut,
  };
}
/**
 * Copy the prebuilt A2UI assets from srcDir into outDir.
 *
 * Verifies that index.html and a2ui.bundle.js exist first; when either is
 * missing, either warns and returns (CLAWDBOT_A2UI_SKIP_MISSING=1) or throws
 * with the original stat failure attached as `cause`.
 */
export async function copyA2uiAssets({
  srcDir,
  outDir,
}: {
  srcDir: string;
  outDir: string;
}) {
  const skipMissing = process.env.CLAWDBOT_A2UI_SKIP_MISSING === "1";
  try {
    // Checked in this order: stat throws on the first missing asset.
    for (const name of ["index.html", "a2ui.bundle.js"]) {
      await fs.stat(path.join(srcDir, name));
    }
  } catch (err) {
    const message =
      'Missing A2UI bundle assets. Run "pnpm canvas:a2ui:bundle" and retry.';
    if (skipMissing) {
      console.warn(`${message} Skipping copy (CLAWDBOT_A2UI_SKIP_MISSING=1).`);
      return;
    }
    throw new Error(message, { cause: err });
  }
  await fs.mkdir(path.dirname(outDir), { recursive: true });
  await fs.cp(srcDir, outDir, { recursive: true });
}
/** CLI entry: resolve the asset paths, then perform the copy. */
async function main() {
  const paths = getA2uiPaths();
  await copyA2uiAssets(paths);
}

// Run only when this module is executed directly, not when imported.
const invokedDirectly =
  import.meta.url === pathToFileURL(process.argv[1] ?? "").href;
if (invokedDirectly) {
  main().catch((err) => {
    console.error(String(err));
    process.exit(1);
  });
}

View File

@@ -0,0 +1,91 @@
#!/usr/bin/env bash
# Extract one version's section from CHANGELOG.md and emit it as HTML
# (release-notes fragment). Usage: <script> <version> [changelog_file]
set -euo pipefail
VERSION=${1:-}
CHANGELOG_FILE=${2:-}
if [[ -z "$VERSION" ]]; then
echo "Usage: $0 <version> [changelog_file]" >&2
exit 1
fi
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# Locate the changelog when not given explicitly: repo root (script lives in
# a subdirectory), then the current directory, then one level up.
if [[ -z "$CHANGELOG_FILE" ]]; then
if [[ -f "$SCRIPT_DIR/../CHANGELOG.md" ]]; then
CHANGELOG_FILE="$SCRIPT_DIR/../CHANGELOG.md"
elif [[ -f "CHANGELOG.md" ]]; then
CHANGELOG_FILE="CHANGELOG.md"
elif [[ -f "../CHANGELOG.md" ]]; then
CHANGELOG_FILE="../CHANGELOG.md"
else
echo "Error: Could not find CHANGELOG.md" >&2
exit 1
fi
fi
if [[ ! -f "$CHANGELOG_FILE" ]]; then
echo "Error: Changelog file '$CHANGELOG_FILE' not found" >&2
exit 1
fi
# extract_version_section VERSION FILE
# Print the changelog body between the "## VERSION" heading and the next
# "## " heading (both headings excluded).
# NOTE(review): VERSION is interpolated into the awk regex unescaped, so
# dots match any character ("1.2.3" also matches "1x2x3") — harmless for
# real version strings, but confirm versions never contain regex metachars.
extract_version_section() {
local version=$1
local file=$2
awk -v version="$version" '
BEGIN { found=0 }
/^## / {
if ($0 ~ "^##[[:space:]]+" version "([[:space:]].*|$)") { found=1; next }
if (found) { exit }
}
found { print }
' "$file"
}
# markdown_to_html TEXT
# Convert a small Markdown subset (h2-h5 headers, list items, bold, inline
# code, links) to HTML. All substitutions run in ONE sed invocation instead
# of nine echo|sed subshell pipelines; expressions are applied per line in
# the original order, so the bold-list-item rule still wins over the
# generic list and bold rules.
markdown_to_html() {
local text=$1
# printf avoids echo's option/backslash pitfalls for arbitrary changelog text.
printf '%s\n' "$text" | sed \
-e 's/^##### \(.*\)$/<h5>\1<\/h5>/' \
-e 's/^#### \(.*\)$/<h4>\1<\/h4>/' \
-e 's/^### \(.*\)$/<h3>\1<\/h3>/' \
-e 's/^## \(.*\)$/<h2>\1<\/h2>/' \
-e 's/^- \*\*\([^*]*\)\*\*\(.*\)$/<li><strong>\1<\/strong>\2<\/li>/' \
-e 's/^- \([^*].*\)$/<li>\1<\/li>/' \
-e 's/\*\*\([^*]*\)\*\*/<strong>\1<\/strong>/g' \
-e 's/`\([^`]*\)`/<code>\1<\/code>/g' \
-e 's/\[\([^]]*\)\](\([^)]*\))/<a href="\2">\1<\/a>/g'
}
# Pull the requested version's notes; fall back to a generic blurb when the
# changelog has no matching section.
version_content=$(extract_version_section "$VERSION" "$CHANGELOG_FILE")
if [[ -z "$version_content" ]]; then
echo "<h2>Moltbot $VERSION</h2>"
echo "<p>Latest Moltbot update.</p>"
echo "<p><a href=\"https://github.com/moltbot/moltbot/blob/main/CHANGELOG.md\">View full changelog</a></p>"
exit 0
fi
echo "<h2>Moltbot $VERSION</h2>"
# Walk the section line by line, wrapping each run of consecutive "- " items
# in a single <ul>…</ul> and converting everything else inline.
in_list=false
while IFS= read -r line; do
if [[ "$line" =~ ^- ]]; then
if [[ "$in_list" == false ]]; then
echo "<ul>"
in_list=true
fi
markdown_to_html "$line"
else
# Any non-list line terminates an open list.
if [[ "$in_list" == true ]]; then
echo "</ul>"
in_list=false
fi
if [[ -n "$line" ]]; then
markdown_to_html "$line"
fi
fi
done <<< "$version_content"
# Close a list that ran to the end of the section.
if [[ "$in_list" == true ]]; then
echo "</ul>"
fi
echo "<p><a href=\"https://github.com/moltbot/moltbot/blob/main/CHANGELOG.md\">View full changelog</a></p>"

View File

@@ -0,0 +1,74 @@
import { existsSync } from "node:fs";
import { readFile } from "node:fs/promises";
import { execFileSync } from "node:child_process";
type ParsedArgs = {
  /** Maximum allowed physical lines per file (default 500). */
  maxLines: number;
};

/**
 * Parse CLI args. Supports `--max <n>` where n must be a finite number > 0.
 * Throws on a missing or invalid value; previously a value like "-5", "0"
 * or "Infinity" was silently accepted and would flag every (or no) file.
 */
function parseArgs(argv: string[]): ParsedArgs {
  let maxLines = 500;
  for (let index = 0; index < argv.length; index++) {
    if (argv[index] !== "--max") continue;
    const next = argv[index + 1];
    const value = Number(next);
    // Reject empty/NaN/Infinity and non-positive limits — always a typo.
    if (!next || !Number.isFinite(value) || value <= 0) {
      throw new Error("Missing/invalid --max value");
    }
    maxLines = value;
    index++; // Skip the consumed value.
  }
  return { maxLines };
}
/**
 * List every repo file: tracked plus untracked-but-not-ignored, so local
 * refactors can't slip past the check before being committed.
 */
function gitLsFilesAll(): string[] {
  const gitArgs = ["ls-files", "--cached", "--others", "--exclude-standard"];
  const output = execFileSync("git", gitArgs, { encoding: "utf8" });
  return output
    .split("\n")
    .map((entry) => entry.trim())
    .filter((entry) => entry.length > 0);
}
/**
 * Count physical lines in a file: number of '\n' characters plus one —
 * identical to content.split("\n").length. Simple and predictable; a
 * trailing newline therefore counts as starting one more "line".
 */
async function countLines(filePath: string): Promise<number> {
  const text = await readFile(filePath, "utf8");
  let newlines = 0;
  for (let i = 0; i < text.length; i++) {
    if (text[i] === "\n") newlines++;
  }
  return newlines + 1;
}
/**
 * Entry point: find all tracked/untracked .ts/.tsx files exceeding the line
 * limit, print "<lines>\t<path>" per offender (longest first), and set a
 * non-zero exit code when any offender exists.
 */
async function main() {
// Makes `... | head` safe.
process.stdout.on("error", (error: NodeJS.ErrnoException) => {
if (error.code === "EPIPE") process.exit(0);
throw error;
});
const { maxLines } = parseArgs(process.argv.slice(2));
// existsSync filter: skip paths git lists that no longer exist on disk
// (e.g. deleted but not yet staged).
const files = gitLsFilesAll()
.filter((filePath) => existsSync(filePath))
.filter((filePath) => filePath.endsWith(".ts") || filePath.endsWith(".tsx"));
const results = await Promise.all(
files.map(async (filePath) => ({ filePath, lines: await countLines(filePath) })),
);
const offenders = results
.filter((result) => result.lines > maxLines)
.sort((a, b) => b.lines - a.lines);
if (!offenders.length) return;
// Minimal, grep-friendly output.
for (const offender of offenders) {
// eslint-disable-next-line no-console
console.log(`${offender.lines}\t${offender.filePath}`);
}
// exitCode (not process.exit) lets stdout flush before the process ends.
process.exitCode = 1;
}
await main();

View File

@@ -0,0 +1,280 @@
#!/bin/bash
# Claude Code Authentication Status Checker
# Checks both Claude Code and Moltbot auth status
set -euo pipefail
# Claude Code CLI OAuth credentials (expiresAt in ms since epoch).
CLAUDE_CREDS="$HOME/.claude/.credentials.json"
# Moltbot's own auth profile store.
CLAWDBOT_AUTH="$HOME/.clawdbot/agents/main/agent/auth-profiles.json"
# Colors for terminal output
RED='\033[0;31m'
YELLOW='\033[1;33m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color
# Output mode: "full" (default), "json", or "simple"
OUTPUT_MODE="${1:-full}"
# Preferred data source: ask moltbot itself for a consolidated status JSON.
# Best-effort — empty output means we fall back to reading the files directly.
fetch_models_status_json() {
moltbot models status --json 2>/dev/null || true
}
STATUS_JSON="$(fetch_models_status_json)"
USE_JSON=0
if [ -n "$STATUS_JSON" ]; then
USE_JSON=1
fi
# calc_status_from_expires EXPIRES_AT_MS
# Classify a millisecond-epoch expiry timestamp relative to now.
# Prints one of: MISSING | EXPIRED | EXPIRING:<m>m | OK:<h>h<m>m
# Return codes: 0 = OK, 1 = missing/expired, 2 = expiring within the hour.
# Callers rely on these exact codes — do not change them.
calc_status_from_expires() {
local expires_at="$1"
# Coerce anything non-numeric (e.g. jq printing "null") to 0 => MISSING.
if ! [[ "$expires_at" =~ ^-?[0-9]+$ ]]; then
expires_at=0
fi
local now_ms=$(( $(date +%s) * 1000 ))
local diff_ms=$((expires_at - now_ms))
local hours=$((diff_ms / 3600000))
local mins=$(((diff_ms % 3600000) / 60000))
if [ "$expires_at" -le 0 ]; then
echo "MISSING"
return 1
elif [ "$diff_ms" -lt 0 ]; then
echo "EXPIRED"
return 1
elif [ "$diff_ms" -lt 3600000 ]; then
echo "EXPIRING:${mins}m"
return 2
else
echo "OK:${hours}h${mins}m"
return 0
fi
}
# Max expiry (ms) across anthropic oauth/token profiles in the status JSON —
# used as a proxy for the Claude Code CLI's credential freshness.
json_expires_for_claude_cli() {
echo "$STATUS_JSON" | jq -r '
[.auth.oauth.profiles[]
| select(.provider == "anthropic" and (.type == "oauth" or .type == "token"))
| .expiresAt // 0]
| max // 0
' 2>/dev/null || echo "0"
}
# Max expiry (ms) across anthropic oauth profiles only.
json_expires_for_anthropic_any() {
echo "$STATUS_JSON" | jq -r '
[.auth.oauth.profiles[]
| select(.provider == "anthropic" and .type == "oauth")
| .expiresAt // 0]
| max // 0
' 2>/dev/null || echo "0"
}
# Profile id of the anthropic oauth profile with the latest expiry.
json_best_anthropic_profile() {
echo "$STATUS_JSON" | jq -r '
[.auth.oauth.profiles[]
| select(.provider == "anthropic" and .type == "oauth")
| {id: .profileId, exp: (.expiresAt // 0)}]
| sort_by(.exp) | reverse | .[0].id // "none"
' 2>/dev/null || echo "none"
}
# Count of static anthropic API keys configured in moltbot.
# NOTE(review): assumes `.profiles.apiKey` is a numeric count in the status
# JSON — confirm against the `moltbot models status --json` schema.
json_anthropic_api_key_count() {
echo "$STATUS_JSON" | jq -r '
[.auth.providers[] | select(.provider == "anthropic") | .profiles.apiKey]
| max // 0
' 2>/dev/null || echo "0"
}
# Status of the Claude Code CLI credentials.
# Prefers moltbot's status JSON; falls back to reading the creds file with jq.
# Output/return codes come from calc_status_from_expires.
check_claude_code_auth() {
if [ "$USE_JSON" -eq 1 ]; then
local expires_at
expires_at=$(json_expires_for_claude_cli)
calc_status_from_expires "$expires_at"
return $?
fi
if [ ! -f "$CLAUDE_CREDS" ]; then
echo "MISSING"
return 1
fi
local expires_at
expires_at=$(jq -r '.claudeAiOauth.expiresAt // 0' "$CLAUDE_CREDS" 2>/dev/null || echo "0")
calc_status_from_expires "$expires_at"
}
# Status of Moltbot's own anthropic auth.
# A static API key (no expiry) counts as OK; otherwise the freshest oauth
# profile's expiry decides, with calc_status_from_expires' codes.
check_moltbot_auth() {
if [ "$USE_JSON" -eq 1 ]; then
local api_keys
api_keys=$(json_anthropic_api_key_count)
# Defensive: jq output may be non-numeric on schema drift.
if ! [[ "$api_keys" =~ ^[0-9]+$ ]]; then
api_keys=0
fi
local expires_at
expires_at=$(json_expires_for_anthropic_any)
# No oauth expiry but at least one API key => auth that never expires.
if [ "$expires_at" -le 0 ] && [ "$api_keys" -gt 0 ]; then
echo "OK:static"
return 0
fi
calc_status_from_expires "$expires_at"
return $?
fi
if [ ! -f "$CLAWDBOT_AUTH" ]; then
echo "MISSING"
return 1
fi
# File fallback: take the latest expiry across all anthropic profiles.
local expires
expires=$(jq -r '
[.profiles | to_entries[] | select(.value.provider == "anthropic") | .value.expires]
| max // 0
' "$CLAWDBOT_AUTH" 2>/dev/null || echo "0")
calc_status_from_expires "$expires"
}
# JSON output mode
# Machine-readable summary of both auth sources plus a combined
# "needs_reauth" flag for automation.
if [ "$OUTPUT_MODE" = "json" ]; then
claude_status=$(check_claude_code_auth 2>/dev/null || true)
moltbot_status=$(check_moltbot_auth 2>/dev/null || true)
claude_expires=0
moltbot_expires=0
if [ "$USE_JSON" -eq 1 ]; then
claude_expires=$(json_expires_for_claude_cli)
moltbot_expires=$(json_expires_for_anthropic_any)
else
claude_expires=$(jq -r '.claudeAiOauth.expiresAt // 0' "$CLAUDE_CREDS" 2>/dev/null || echo "0")
moltbot_expires=$(jq -r '.profiles["anthropic:default"].expires // 0' "$CLAWDBOT_AUTH" 2>/dev/null || echo "0")
fi
# Assemble the JSON with jq so status strings are properly quoted/escaped.
jq -n \
--arg cs "$claude_status" \
--arg ce "$claude_expires" \
--arg bs "$moltbot_status" \
--arg be "$moltbot_expires" \
'{
claude_code: {status: $cs, expires_at_ms: ($ce | tonumber)},
moltbot: {status: $bs, expires_at_ms: ($be | tonumber)},
needs_reauth: (($cs | startswith("EXPIRED") or startswith("EXPIRING") or startswith("MISSING")) or ($bs | startswith("EXPIRED") or startswith("EXPIRING") or startswith("MISSING")))
}'
exit 0
fi
# Simple output mode (for scripts/widgets)
# One keyword on stdout; exit codes: 0 = ok, 1 = expired/missing, 2 = expiring.
if [ "$OUTPUT_MODE" = "simple" ]; then
claude_status=$(check_claude_code_auth 2>/dev/null || true)
moltbot_status=$(check_moltbot_auth 2>/dev/null || true)
if [[ "$claude_status" == EXPIRED* ]] || [[ "$claude_status" == MISSING* ]]; then
echo "CLAUDE_EXPIRED"
exit 1
elif [[ "$moltbot_status" == EXPIRED* ]] || [[ "$moltbot_status" == MISSING* ]]; then
echo "CLAWDBOT_EXPIRED"
exit 1
elif [[ "$claude_status" == EXPIRING* ]]; then
echo "CLAUDE_EXPIRING"
exit 2
elif [[ "$moltbot_status" == EXPIRING* ]]; then
echo "CLAWDBOT_EXPIRING"
exit 2
else
echo "OK"
exit 0
fi
fi
# Full output mode (default)
# Human-readable report for both auth sources plus the systemd service state.
echo "=== Claude Code Auth Status ==="
echo ""
# Claude Code credentials
echo "Claude Code (~/.claude/.credentials.json):"
if [ "$USE_JSON" -eq 1 ]; then
expires_at=$(json_expires_for_claude_cli)
else
expires_at=$(jq -r '.claudeAiOauth.expiresAt // 0' "$CLAUDE_CREDS" 2>/dev/null || echo "0")
fi
# Subscription details only live in the creds file, so read it when present.
if [ -f "$CLAUDE_CREDS" ]; then
sub_type=$(jq -r '.claudeAiOauth.subscriptionType // "unknown"' "$CLAUDE_CREDS" 2>/dev/null || echo "unknown")
rate_tier=$(jq -r '.claudeAiOauth.rateLimitTier // "unknown"' "$CLAUDE_CREDS" 2>/dev/null || echo "unknown")
echo " Subscription: $sub_type"
echo " Rate tier: $rate_tier"
fi
if [ "$expires_at" -le 0 ]; then
echo -e " Status: ${RED}NOT FOUND${NC}"
echo " Action needed: Run 'claude setup-token'"
else
now_ms=$(( $(date +%s) * 1000 ))
diff_ms=$((expires_at - now_ms))
hours=$((diff_ms / 3600000))
mins=$(((diff_ms % 3600000) / 60000))
if [ "$diff_ms" -lt 0 ]; then
echo -e " Status: ${RED}EXPIRED${NC}"
echo " Action needed: Run 'claude setup-token' or re-authenticate"
elif [ "$diff_ms" -lt 3600000 ]; then
echo -e " Status: ${YELLOW}EXPIRING SOON (${mins}m remaining)${NC}"
echo " Consider running: claude setup-token"
else
echo -e " Status: ${GREEN}OK${NC}"
# NOTE(review): `date -d @...` is GNU-specific; macOS/BSD date needs
# `-r` instead — confirm this script only runs on Linux hosts.
echo " Expires: $(date -d @$((expires_at/1000))) (${hours}h ${mins}m)"
fi
fi
echo ""
echo "Moltbot Auth (~/.clawdbot/agents/main/agent/auth-profiles.json):"
if [ "$USE_JSON" -eq 1 ]; then
best_profile=$(json_best_anthropic_profile)
expires=$(json_expires_for_anthropic_any)
api_keys=$(json_anthropic_api_key_count)
else
# File fallback: pick the anthropic profile with the latest expiry.
best_profile=$(jq -r '
.profiles | to_entries
| map(select(.value.provider == "anthropic"))
| sort_by(.value.expires) | reverse
| .[0].key // "none"
' "$CLAWDBOT_AUTH" 2>/dev/null || echo "none")
expires=$(jq -r '
[.profiles | to_entries[] | select(.value.provider == "anthropic") | .value.expires]
| max // 0
' "$CLAWDBOT_AUTH" 2>/dev/null || echo "0")
# API key count is only available via the status JSON path.
api_keys=0
fi
echo " Profile: $best_profile"
if [ "$expires" -le 0 ] && [ "$api_keys" -gt 0 ]; then
echo -e " Status: ${GREEN}OK${NC} (API key)"
elif [ "$expires" -le 0 ]; then
echo -e " Status: ${RED}NOT FOUND${NC}"
echo " Note: Run 'moltbot doctor --yes' to sync from Claude Code"
else
now_ms=$(( $(date +%s) * 1000 ))
diff_ms=$((expires - now_ms))
hours=$((diff_ms / 3600000))
mins=$(((diff_ms % 3600000) / 60000))
if [ "$diff_ms" -lt 0 ]; then
echo -e " Status: ${RED}EXPIRED${NC}"
echo " Note: Run 'moltbot doctor --yes' to sync from Claude Code"
elif [ "$diff_ms" -lt 3600000 ]; then
echo -e " Status: ${YELLOW}EXPIRING SOON (${mins}m remaining)${NC}"
else
echo -e " Status: ${GREEN}OK${NC}"
echo " Expires: $(date -d @$((expires/1000))) (${hours}h ${mins}m)"
fi
fi
echo ""
echo "=== Service Status ==="
# systemd user-service check (Linux hosts).
if systemctl --user is-active moltbot >/dev/null 2>&1; then
echo -e "Moltbot service: ${GREEN}running${NC}"
else
echo -e "Moltbot service: ${RED}NOT running${NC}"
fi

View File

@@ -0,0 +1,309 @@
#!/bin/bash
# VibeTunnel Logging Utility
# Simplifies access to VibeTunnel logs using macOS unified logging system
# NOTE(review): header still says "VibeTunnel" while the usage text brands
# this tool as clawlog/Moltbot — confirm which name is current.
set -euo pipefail
# Configuration
SUBSYSTEM="bot.molt"
DEFAULT_LEVEL="info"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Function to handle sudo password errors
# Prints instructions for configuring passwordless `sudo /usr/bin/log`
# (needed to see un-redacted log data) and exits with status 1.
handle_sudo_error() {
echo -e "\n${RED}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${YELLOW}⚠️ Password Required for Log Access${NC}"
echo -e "${RED}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}\n"
echo -e "clawlog needs to use sudo to show complete log data (Apple hides sensitive info by default)."
echo -e "\nTo avoid password prompts, configure passwordless sudo for the log command:"
echo -e "See: ${BLUE}apple/docs/logging-private-fix.md${NC}\n"
echo -e "Quick fix:"
echo -e " 1. Run: ${GREEN}sudo visudo${NC}"
echo -e " 2. Add: ${GREEN}$(whoami) ALL=(ALL) NOPASSWD: /usr/bin/log${NC}"
echo -e " 3. Save and exit (:wq)\n"
echo -e "${RED}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}\n"
exit 1
}
# Default values
STREAM_MODE=false
TIME_RANGE="5m" # Default to last 5 minutes
CATEGORY=""
LOG_LEVEL="$DEFAULT_LEVEL"
SEARCH_TEXT=""
OUTPUT_FILE=""
ERRORS_ONLY=false
SERVER_ONLY=false
TAIL_LINES=50 # Default number of lines to show
SHOW_TAIL=true
# NOTE(review): SHOW_HELP is set here but never read in this script.
SHOW_HELP=false
# Function to show usage
# Prints the CLI help text. The heredoc is unquoted (<< EOF) but contains no
# $ or backticks, so no shell expansion occurs in the body.
show_usage() {
cat << EOF
clawlog - Moltbot Logging Utility
USAGE:
clawlog [OPTIONS]
DESCRIPTION:
View Moltbot logs with full details (bypasses Apple's privacy redaction).
Requires sudo access configured for /usr/bin/log command.
LOG FLOW ARCHITECTURE:
Moltbot logs flow through the macOS unified log (subsystem: bot.molt).
LOG CATEGORIES (examples):
• voicewake - Voice wake detection/test harness
• gateway - Gateway process manager
• xpc - XPC service calls
• notifications - Notification helper
• screenshot - Screenshotter
• shell - ShellExecutor
QUICK START:
clawlog -n 100 Show last 100 lines from all components
clawlog -f Follow logs in real-time
clawlog -e Show only errors
clawlog -c ServerManager Show logs from ServerManager only
OPTIONS:
-h, --help Show this help message
-f, --follow Stream logs continuously (like tail -f)
-n, --lines NUM Number of lines to show (default: 50)
-l, --last TIME Time range to search (default: 5m)
Examples: 5m, 1h, 2d, 1w
-c, --category CAT Filter by category (e.g., ServerManager, SessionService)
-e, --errors Show only error messages
-d, --debug Show debug level logs (more verbose)
-s, --search TEXT Search for specific text in log messages
-o, --output FILE Export logs to file
--server Show only server output logs
--all Show all logs without tail limit
--list-categories List all available log categories
--json Output in JSON format
EXAMPLES:
clawlog Show last 50 lines from past 5 minutes (default)
clawlog -f Stream logs continuously
clawlog -n 100 Show last 100 lines
clawlog -e Show only recent errors
clawlog -l 30m -n 200 Show last 200 lines from past 30 minutes
clawlog -c ServerManager Show recent ServerManager logs
clawlog -s "fail" Search for "fail" in recent logs
clawlog --server -e Show recent server errors
clawlog -f -d Stream debug logs continuously
CATEGORIES:
Common categories include:
- ServerManager - Server lifecycle and configuration
- SessionService - Terminal session management
- TerminalManager - Terminal spawning and control
- GitRepository - Git integration features
- ScreencapService - Screen capture functionality
- WebRTCManager - WebRTC connections
- UnixSocket - Unix socket communication
- WindowTracker - Window tracking and focus
- NgrokService - Ngrok tunnel management
- ServerOutput - Node.js server output
TIME FORMATS:
- 5m = 5 minutes - 1h = 1 hour
- 2d = 2 days - 1w = 1 week
EOF
}
# list_categories
# Print the unique log categories seen for our subsystem in the last hour,
# one per line. Only categories with recent activity appear in `log show`.
list_categories() {
echo -e "${BLUE}Fetching Moltbot log categories from the last hour...${NC}\n"
# Extract each record's category field and dedupe. `sort -u` replaces the
# old `sort | uniq | while read; echo` chain, which was a no-op pass-through.
log show --predicate "subsystem == \"$SUBSYSTEM\"" --last 1h 2>/dev/null | \
grep -E "category: \"[^\"]+\"" | \
sed -E 's/.*category: "([^"]+)".*/\1/' | \
sort -u
echo -e "\n${YELLOW}Note: Only categories with recent activity are shown${NC}"
}
# Show help if no arguments provided
if [[ $# -eq 0 ]]; then
show_usage
exit 0
fi
# Parse command line arguments
# NOTE(review): every value-taking option (-n/-l/-c/-s/-o) reads "$2"
# unconditionally; under `set -u` a flag given without a value aborts with an
# unbound-variable error instead of a friendly usage message — confirm OK.
while [[ $# -gt 0 ]]; do
case $1 in
-h|--help)
show_usage
exit 0
;;
-f|--follow)
STREAM_MODE=true
SHOW_TAIL=false
shift
;;
-n|--lines)
TAIL_LINES="$2"
shift 2
;;
-l|--last)
TIME_RANGE="$2"
shift 2
;;
-c|--category)
CATEGORY="$2"
shift 2
;;
-e|--errors)
ERRORS_ONLY=true
shift
;;
-d|--debug)
LOG_LEVEL="debug"
shift
;;
-s|--search)
SEARCH_TEXT="$2"
shift 2
;;
-o|--output)
OUTPUT_FILE="$2"
shift 2
;;
--server)
# Server-only view is implemented as a fixed category filter.
SERVER_ONLY=true
CATEGORY="ServerOutput"
shift
;;
--list-categories)
list_categories
exit 0
;;
--json)
STYLE_ARGS="--style json"
shift
;;
--all)
SHOW_TAIL=false
shift
;;
*)
echo -e "${RED}Unknown option: $1${NC}"
echo "Use -h or --help for usage information"
exit 1
;;
esac
done
# Build the predicate
# Unified-log query predicate, always scoped to our subsystem; filters are
# ANDed on as requested.
PREDICATE="subsystem == \"$SUBSYSTEM\""
# Add category filter if specified
if [[ -n "$CATEGORY" ]]; then
PREDICATE="$PREDICATE AND category == \"$CATEGORY\""
fi
# Add error filter if specified
# Matches real error events plus messages containing "ERROR" or the ANSI red
# escape suffix "[31m" emitted by the node server's colored output.
if [[ "$ERRORS_ONLY" == true ]]; then
PREDICATE="$PREDICATE AND (eventType == \"error\" OR messageType == \"error\" OR eventMessage CONTAINS \"ERROR\" OR eventMessage CONTAINS \"[31m\")"
fi
# Add search filter if specified
# CONTAINS[c] = case-insensitive substring match in NSPredicate syntax.
if [[ -n "$SEARCH_TEXT" ]]; then
PREDICATE="$PREDICATE AND eventMessage CONTAINS[c] \"$SEARCH_TEXT\""
fi
# Build the command - always use sudo with --info to show private data
# The command is assembled as a string and run via eval later so the
# predicate survives as a single quoted argument.
if [[ "$STREAM_MODE" == true ]]; then
# Streaming mode
CMD="sudo log stream --predicate '$PREDICATE' --level $LOG_LEVEL --info"
echo -e "${GREEN}Streaming VibeTunnel logs continuously...${NC}"
echo -e "${YELLOW}Press Ctrl+C to stop${NC}\n"
else
# Show mode
CMD="sudo log show --predicate '$PREDICATE'"
# Add log level for show command
if [[ "$LOG_LEVEL" == "debug" ]]; then
CMD="$CMD --debug"
else
CMD="$CMD --info"
fi
# Add time range
CMD="$CMD --last $TIME_RANGE"
if [[ "$SHOW_TAIL" == true ]]; then
echo -e "${GREEN}Showing last $TAIL_LINES log lines from the past $TIME_RANGE${NC}"
else
echo -e "${GREEN}Showing all logs from the past $TIME_RANGE${NC}"
fi
# Show applied filters
if [[ "$ERRORS_ONLY" == true ]]; then
echo -e "${RED}Filter: Errors only${NC}"
fi
if [[ -n "$CATEGORY" ]]; then
echo -e "${BLUE}Category: $CATEGORY${NC}"
fi
if [[ -n "$SEARCH_TEXT" ]]; then
echo -e "${YELLOW}Search: \"$SEARCH_TEXT\"${NC}"
fi
echo "" # Empty line for readability
fi
# Add style arguments if specified
# STYLE_ARGS is only set by --json; default expansion guards set -u.
if [[ -n "${STYLE_ARGS:-}" ]]; then
CMD="$CMD $STYLE_ARGS"
fi
# Execute the command
if [[ -n "$OUTPUT_FILE" ]]; then
# First check if sudo works without password for the log command
# NOTE(review): detecting the prompt by grepping for "password" is
# locale-dependent — confirm hosts run with an English locale.
if sudo -n /usr/bin/log show --last 1s 2>&1 | grep -q "password"; then
handle_sudo_error
fi
echo -e "${BLUE}Exporting logs to: $OUTPUT_FILE${NC}\n"
# Tail only applies in non-streaming export; stream/--all dump everything.
if [[ "$SHOW_TAIL" == true ]] && [[ "$STREAM_MODE" == false ]]; then
eval "$CMD" 2>&1 | tail -n "$TAIL_LINES" > "$OUTPUT_FILE"
else
eval "$CMD" > "$OUTPUT_FILE" 2>&1
fi
# Check if file was created and has content
if [[ -s "$OUTPUT_FILE" ]]; then
LINE_COUNT=$(wc -l < "$OUTPUT_FILE" | tr -d ' ')
echo -e "${GREEN}✓ Exported $LINE_COUNT lines to $OUTPUT_FILE${NC}"
else
echo -e "${YELLOW}⚠ No logs found matching the criteria${NC}"
fi
else
# Run interactively
# First check if sudo works without password for the log command
if sudo -n /usr/bin/log show --last 1s 2>&1 | grep -q "password"; then
handle_sudo_error
fi
if [[ "$SHOW_TAIL" == true ]] && [[ "$STREAM_MODE" == false ]]; then
# Apply tail for non-streaming mode
eval "$CMD" 2>&1 | tail -n "$TAIL_LINES"
echo -e "\n${YELLOW}Showing last $TAIL_LINES lines. Use --all or -n to see more.${NC}"
else
eval "$CMD"
fi
fi

View File

@@ -0,0 +1,39 @@
{
"ensureLogins": [
"odrobnik",
"alphonse-arianee",
"aaronn",
"ronak-guliani",
"cpojer",
"carlulsoe",
"jdrhyne",
"latitudeki5223",
"longmaba",
"manmal",
"thesash",
"rhjoh",
"ysqander",
"atalovesyou",
"0xJonHoldsCrypto",
"hougangdev"
],
"seedCommit": "d6863f87",
"placeholderAvatar": "assets/avatar-placeholder.svg",
"displayName": {
"jdrhyne": "Jonathan D. Rhyne (DJ-D)"
},
"nameToLogin": {
"peter steinberger": "steipete",
"eng. juan combetto": "omniwired",
"mariano belinky": "mbelinky",
"vasanth rao naik sabavat": "vsabavat",
"tu nombre real": "nachx639",
"django navarro": "djangonavarro220"
},
"emailToLogin": {
"steipete@gmail.com": "steipete",
"sbarrios93@gmail.com": "sebslight",
"rltorres26+github@gmail.com": "RandyVentures",
"hixvac@gmail.com": "VACInc"
}
}

View File

@@ -0,0 +1,289 @@
#!/usr/bin/env bash
set -euo pipefail
# Codesign the Moltbot macOS app bundle: signs the main binary, the embedded
# Sparkle framework and its helpers, any other frameworks/dylibs, and finally
# the bundle itself, then audits that all Mach-O files share one Team ID.
#
# Positional arg 1: app bundle path (default: dist/Moltbot.app).
APP_BUNDLE="${1:-dist/Moltbot.app}"
# Signing configuration, all overridable via environment variables.
IDENTITY="${SIGN_IDENTITY:-}"
TIMESTAMP_MODE="${CODESIGN_TIMESTAMP:-auto}"
DISABLE_LIBRARY_VALIDATION="${DISABLE_LIBRARY_VALIDATION:-0}"
SKIP_TEAM_ID_CHECK="${SKIP_TEAM_ID_CHECK:-0}"
# Scratch plist files for the entitlement sets (deleted at the end of the script).
ENT_TMP_BASE=$(mktemp -t moltbot-entitlements-base.XXXXXX)
ENT_TMP_APP_BASE=$(mktemp -t moltbot-entitlements-app-base.XXXXXX)
ENT_TMP_RUNTIME=$(mktemp -t moltbot-entitlements-runtime.XXXXXX)
# --help / -h: print usage and exit before doing any work.
if [[ "${APP_BUNDLE}" == "--help" || "${APP_BUNDLE}" == "-h" ]]; then
cat <<'HELP'
Usage: scripts/codesign-mac-app.sh [app-bundle]
Env:
SIGN_IDENTITY="Apple Development: Your Name (TEAMID)"
ALLOW_ADHOC_SIGNING=1
CODESIGN_TIMESTAMP=auto|on|off
DISABLE_LIBRARY_VALIDATION=1 # dev-only Sparkle Team ID workaround
SKIP_TEAM_ID_CHECK=1 # bypass Team ID audit
HELP
exit 0
fi
if [ ! -d "$APP_BUNDLE" ]; then
echo "App bundle not found: $APP_BUNDLE" >&2
exit 1
fi
select_identity() {
  # Echo the name of the best available codesigning identity, trying
  # certificate types in order of trust: Developer ID Application >
  # Apple Distribution > Apple Development > any valid identity.
  # Returns 1 when no identity is available at all.
  local preferred available first
  # Prefer a Developer ID Application cert.
  preferred="$(security find-identity -p codesigning -v 2>/dev/null \
    | awk -F'\"' '/Developer ID Application/ { print $2; exit }')"
  if [ -n "$preferred" ]; then
    echo "$preferred"
    return
  fi
  # Next, try Apple Distribution.
  preferred="$(security find-identity -p codesigning -v 2>/dev/null \
    | awk -F'\"' '/Apple Distribution/ { print $2; exit }')"
  if [ -n "$preferred" ]; then
    echo "$preferred"
    return
  fi
  # Then, try Apple Development.
  preferred="$(security find-identity -p codesigning -v 2>/dev/null \
    | awk -F'\"' '/Apple Development/ { print $2; exit }')"
  if [ -n "$preferred" ]; then
    echo "$preferred"
    return
  fi
  # Fallback to the first valid signing identity. Output lines look like:
  #   1) <sha1-hash> "Identity Name (TEAMID)"
  # Fix: the previous sed pattern was over-escaped — inside single quotes
  # '\\(' reaches sed as a literal backslash followed by '(', so the capture
  # group never formed and the fallback extracted nothing.
  available="$(security find-identity -p codesigning -v 2>/dev/null \
    | sed -n 's/.*"\(.*\)".*/\1/p')"
  if [ -n "$available" ]; then
    first="$(printf '%s\n' "$available" | head -n1)"
    echo "$first"
    return
  fi
  return 1
}
# Resolve a signing identity when SIGN_IDENTITY was not provided. Fall back
# to ad-hoc signing ("-") only when explicitly allowed, since ad-hoc signed
# apps do not persist TCC permissions across rebuilds.
if [ -z "$IDENTITY" ]; then
if ! IDENTITY="$(select_identity)"; then
if [[ "${ALLOW_ADHOC_SIGNING:-}" == "1" ]]; then
echo "WARN: No signing identity found. Falling back to ad-hoc signing (-)." >&2
echo " !!! WARNING: Ad-hoc signed apps do NOT persist TCC permissions (Accessibility, etc) !!!" >&2
echo " !!! You will need to re-grant permissions every time you restart the app. !!!" >&2
IDENTITY="-"
else
echo "ERROR: No signing identity found. Set SIGN_IDENTITY to a valid codesigning certificate." >&2
echo " Alternatively, set ALLOW_ADHOC_SIGNING=1 to fallback to ad-hoc signing (limitations apply)." >&2
exit 1
fi
fi
fi
echo "Using signing identity: $IDENTITY"
# Print a loud banner when ad-hoc signing is in use.
if [[ "$IDENTITY" == "-" ]]; then
cat <<'WARN' >&2
================================================================================
!!! AD-HOC SIGNING IN USE - PERMISSIONS WILL NOT STICK (macOS RESTRICTION) !!!
macOS ties permissions to the code signature, bundle ID, and app path.
Ad-hoc signing generates a new signature every build, so macOS treats the app
as a different binary and will forget permissions (prompts may vanish).
For correct permission behavior you MUST sign with a real Apple Development or
Developer ID certificate.
If prompts disappear: remove the app entry in System Settings -> Privacy & Security,
relaunch the app, and re-grant. Some permissions only reappear after a full
macOS restart.
================================================================================
WARN
fi
# Map CODESIGN_TIMESTAMP to a codesign flag. "auto" requests a secure
# timestamp only for Developer ID identities.
timestamp_arg="--timestamp=none"
case "$TIMESTAMP_MODE" in
1|on|yes|true)
timestamp_arg="--timestamp"
;;
0|off|no|false)
timestamp_arg="--timestamp=none"
;;
auto)
if [[ "$IDENTITY" == *"Developer ID Application"* ]]; then
timestamp_arg="--timestamp"
fi
;;
*)
echo "ERROR: Unknown CODESIGN_TIMESTAMP value: $TIMESTAMP_MODE (use auto|on|off)" >&2
exit 1
;;
esac
# Ad-hoc signatures never get a timestamp.
if [[ "$IDENTITY" == "-" ]]; then
timestamp_arg="--timestamp=none"
fi
# Hardened runtime (--options runtime) only applies to real identities.
options_args=()
if [[ "$IDENTITY" != "-" ]]; then
options_args=("--options" "runtime")
fi
timestamp_args=("$timestamp_arg")
# Base entitlements: Apple Events automation, microphone, camera.
cat > "$ENT_TMP_BASE" <<'PLIST'
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.automation.apple-events</key>
<true/>
<key>com.apple.security.device.audio-input</key>
<true/>
<key>com.apple.security.device.camera</key>
<true/>
</dict>
</plist>
PLIST
# App-bundle entitlements: the base set plus location access.
cat > "$ENT_TMP_APP_BASE" <<'PLIST'
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.automation.apple-events</key>
<true/>
<key>com.apple.security.device.audio-input</key>
<true/>
<key>com.apple.security.device.camera</key>
<true/>
<key>com.apple.security.personal-information.location</key>
<true/>
</dict>
</plist>
PLIST
# Hardened-runtime JIT entitlements.
# NOTE(review): ENT_TMP_BASE and ENT_TMP_RUNTIME are generated but never
# passed to codesign below (only ENT_TMP_APP_BASE is used via
# APP_ENTITLEMENTS) — confirm whether they can be removed.
cat > "$ENT_TMP_RUNTIME" <<'PLIST'
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.cs.allow-jit</key>
<true/>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
</dict>
</plist>
PLIST
# Optionally add disable-library-validation (dev-only workaround when the
# embedded Sparkle carries a different Team ID). PlistBuddy "Add" fails when
# the key already exists, so fall back to "Set".
if [[ "$DISABLE_LIBRARY_VALIDATION" == "1" ]]; then
/usr/libexec/PlistBuddy -c "Add :com.apple.security.cs.disable-library-validation bool true" "$ENT_TMP_APP_BASE" >/dev/null 2>&1 || \
/usr/libexec/PlistBuddy -c "Set :com.apple.security.cs.disable-library-validation true" "$ENT_TMP_APP_BASE"
echo "Note: disable-library-validation entitlement enabled (DISABLE_LIBRARY_VALIDATION=1)."
fi
APP_ENTITLEMENTS="$ENT_TMP_APP_BASE"
# clear extended attributes to avoid stale signatures
xattr -cr "$APP_BUNDLE" 2>/dev/null || true
sign_item() {
  # Sign one target with the shared identity, attaching an entitlements file.
  local path_to_sign="$1" ent_file="$2"
  codesign --force ${options_args+"${options_args[@]}"} "${timestamp_args[@]}" \
    --entitlements "$ent_file" --sign "$IDENTITY" "$path_to_sign"
}
sign_plain_item() {
  # Sign one target with the shared identity, without entitlements.
  local path_to_sign="$1"
  codesign --force ${options_args+"${options_args[@]}"} "${timestamp_args[@]}" \
    --sign "$IDENTITY" "$path_to_sign"
}
team_id_for() {
  # Print the TeamIdentifier recorded in a signed item (empty when absent).
  codesign -dv --verbose=4 "$1" 2>&1 \
    | awk -F'=' '$1 == "TeamIdentifier" && NF > 1 { print $2; exit }'
}
verify_team_ids() {
# Audit every Mach-O file inside the bundle and fail the build when any of
# them carries a TeamIdentifier different from the app bundle's own.
if [[ "$SKIP_TEAM_ID_CHECK" == "1" ]]; then
echo "Note: skipping Team ID audit (SKIP_TEAM_ID_CHECK=1)."
return 0
fi
local expected
expected="$(team_id_for "$APP_BUNDLE" || true)"
if [[ -z "$expected" ]]; then
echo "WARN: TeamIdentifier missing on app bundle; skipping Team ID audit."
return 0
fi
local mismatches=()
# Walk every file; only Mach-O binaries carry signatures worth checking.
# The process substitution keeps the loop in this shell so mismatches persists.
while IFS= read -r -d '' f; do
if /usr/bin/file "$f" | /usr/bin/grep -q "Mach-O"; then
local team
team="$(team_id_for "$f" || true)"
if [[ -z "$team" ]]; then
team="not set"
fi
# When the bundle itself reports "not set" (ad-hoc), nested binaries must
# also be "not set" to match; otherwise they must equal the expected ID.
if [[ "$expected" == "not set" ]]; then
if [[ "$team" != "not set" ]]; then
mismatches+=("$f (TeamIdentifier=$team)")
fi
elif [[ "$team" != "$expected" ]]; then
mismatches+=("$f (TeamIdentifier=$team)")
fi
fi
done < <(find "$APP_BUNDLE" -type f -print0)
if [[ "${#mismatches[@]}" -gt 0 ]]; then
echo "ERROR: Team ID mismatch detected (expected: $expected)"
for entry in "${mismatches[@]}"; do
echo " - $entry"
done
echo "Hint: re-sign embedded frameworks or set DISABLE_LIBRARY_VALIDATION=1 for dev builds."
exit 1
fi
}
# Sign main binary
if [ -f "$APP_BUNDLE/Contents/MacOS/Moltbot" ]; then
echo "Signing main binary"; sign_item "$APP_BUNDLE/Contents/MacOS/Moltbot" "$APP_ENTITLEMENTS"
fi
# Sign Sparkle deeply if present: first every nested Mach-O file, then the
# helper executables, XPC services, and finally the framework itself
# (innermost-out ordering so outer signatures cover signed contents).
SPARKLE="$APP_BUNDLE/Contents/Frameworks/Sparkle.framework"
if [ -d "$SPARKLE" ]; then
echo "Signing Sparkle framework and helpers"
find "$SPARKLE" -type f -print0 | while IFS= read -r -d '' f; do
if /usr/bin/file "$f" | /usr/bin/grep -q "Mach-O"; then
sign_plain_item "$f"
fi
done
sign_plain_item "$SPARKLE/Versions/B/Sparkle"
sign_plain_item "$SPARKLE/Versions/B/Autoupdate"
sign_plain_item "$SPARKLE/Versions/B/Updater.app/Contents/MacOS/Updater"
sign_plain_item "$SPARKLE/Versions/B/Updater.app"
sign_plain_item "$SPARKLE/Versions/B/XPCServices/Downloader.xpc/Contents/MacOS/Downloader"
sign_plain_item "$SPARKLE/Versions/B/XPCServices/Downloader.xpc"
sign_plain_item "$SPARKLE/Versions/B/XPCServices/Installer.xpc/Contents/MacOS/Installer"
sign_plain_item "$SPARKLE/Versions/B/XPCServices/Installer.xpc"
sign_plain_item "$SPARKLE/Versions/B"
sign_plain_item "$SPARKLE"
fi
# Sign any other embedded frameworks/dylibs
if [ -d "$APP_BUNDLE/Contents/Frameworks" ]; then
find "$APP_BUNDLE/Contents/Frameworks" \( -name "*.framework" -o -name "*.dylib" \) ! -path "*Sparkle.framework*" -print0 | while IFS= read -r -d '' f; do
echo "Signing framework: $f"; sign_plain_item "$f"
done
fi
# Finally sign the bundle
sign_item "$APP_BUNDLE" "$APP_ENTITLEMENTS"
verify_team_ids
# Clean up the temporary entitlement plists.
rm -f "$ENT_TMP_BASE" "$ENT_TMP_APP_BASE" "$ENT_TMP_RUNTIME"
echo "Codesign complete for $APP_BUNDLE"

View File

@@ -0,0 +1,117 @@
#!/usr/bin/env bash
set -euo pipefail
# Guarded "stage + commit" helper: commits exactly the listed paths with the
# given message, refusing "." and node_modules paths. With --force, a stale
# .git/index.lock reported by git is removed and the commit retried once.
# Disable glob expansion to handle brackets in file paths
set -f
usage() {
printf 'Usage: %s [--force] "commit message" "file" ["file" ...]\n' "$(basename "$0")" >&2
exit 2
}
# Require at least a message and one file.
if [ "$#" -lt 2 ]; then
usage
fi
force_delete_lock=false
if [ "${1:-}" = "--force" ]; then
force_delete_lock=true
shift
fi
# Re-check arity after consuming --force.
if [ "$#" -lt 2 ]; then
usage
fi
commit_message=$1
shift
# Reject messages containing no non-whitespace character.
if [[ "$commit_message" != *[![:space:]]* ]]; then
printf 'Error: commit message must not be empty\n' >&2
exit 1
fi
# Catch the classic mistake of passing a path where the message belongs.
if [ -e "$commit_message" ]; then
printf 'Error: first argument looks like a file path ("%s"); provide the commit message first\n' "$commit_message" >&2
exit 1
fi
if [ "$#" -eq 0 ]; then
usage
fi
files=("$@")
# Disallow "." because it stages the entire repository and defeats the helper's safety guardrails.
for file in "${files[@]}"; do
if [ "$file" = "." ]; then
printf 'Error: "." is not allowed; list specific paths instead\n' >&2
exit 1
fi
done
# Prevent staging node_modules even if a path is forced.
for file in "${files[@]}"; do
case "$file" in
*node_modules* | */node_modules | */node_modules/* | node_modules)
printf 'Error: node_modules paths are not allowed: %s\n' "$file" >&2
exit 1
;;
esac
done
last_commit_error=''
run_git_commit() {
# Commit the staged paths with the global commit message, mirroring git's
# stderr to the terminal while capturing it into last_commit_error so the
# caller can detect a stale index.lock failure.
local stderr_log
stderr_log=$(mktemp)
# NOTE(review): the >(tee ...) process substitution runs asynchronously, so
# stderr_log could in principle be read before tee has flushed it — confirm
# whether the lock-path parse ever misses output on slow filesystems.
if git commit -m "$commit_message" -- "${files[@]}" 2> >(tee "$stderr_log" >&2); then
rm -f "$stderr_log"
last_commit_error=''
return 0
fi
last_commit_error=$(cat "$stderr_log")
rm -f "$stderr_log"
return 1
}
# Each path must exist on disk or at least be tracked by git (so deletions
# can still be committed).
for file in "${files[@]}"; do
if [ ! -e "$file" ]; then
if ! git ls-files --error-unmatch -- "$file" >/dev/null 2>&1; then
printf 'Error: file not found: %s\n' "$file" >&2
exit 1
fi
fi
done
# Start from a clean index, then stage exactly the requested paths.
git restore --staged :/
git add --force -- "${files[@]}"
if git diff --staged --quiet; then
printf 'Warning: no staged changes detected for: %s\n' "${files[*]}" >&2
exit 1
fi
committed=false
if run_git_commit; then
committed=true
elif [ "$force_delete_lock" = true ]; then
# With --force: if git reported a stale index.lock, extract its path from
# the captured stderr, remove it, and retry the commit once.
lock_path=$(
printf '%s\n' "$last_commit_error" |
awk -F"'" '/Unable to create .*\.git\/index\.lock/ { print $2; exit }'
)
if [ -n "$lock_path" ] && [ -e "$lock_path" ]; then
rm -f "$lock_path"
printf 'Removed stale git lock: %s\n' "$lock_path" >&2
if run_git_commit; then
committed=true
fi
fi
fi
if [ "$committed" = false ]; then
exit 1
fi
printf 'Committed "%s" with %d files\n' "$commit_message" "${#files[@]}"

View File

@@ -0,0 +1,53 @@
#!/usr/bin/env tsx
/**
* Copy HOOK.md files from src/hooks/bundled to dist/hooks/bundled
*/
import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const projectRoot = path.resolve(__dirname, '..');
const srcBundled = path.join(projectRoot, 'src', 'hooks', 'bundled');
const distBundled = path.join(projectRoot, 'dist', 'hooks', 'bundled');
/**
 * Mirror each bundled hook's HOOK.md from src/hooks/bundled into
 * dist/hooks/bundled, creating destination directories as needed.
 * Hook directories without a HOOK.md are reported and skipped.
 */
function copyHookMetadata() {
  if (!fs.existsSync(srcBundled)) {
    console.warn('[copy-hook-metadata] Source directory not found:', srcBundled);
    return;
  }
  // recursive mkdir is a no-op when the directory already exists.
  fs.mkdirSync(distBundled, { recursive: true });
  const hookNames = fs
    .readdirSync(srcBundled, { withFileTypes: true })
    .filter((entry) => entry.isDirectory())
    .map((entry) => entry.name);
  for (const hookName of hookNames) {
    const sourceFile = path.join(srcBundled, hookName, 'HOOK.md');
    if (!fs.existsSync(sourceFile)) {
      console.warn(`[copy-hook-metadata] No HOOK.md found for ${hookName}`);
      continue;
    }
    const targetDir = path.join(distBundled, hookName);
    fs.mkdirSync(targetDir, { recursive: true });
    fs.copyFileSync(sourceFile, path.join(targetDir, 'HOOK.md'));
    console.log(`[copy-hook-metadata] Copied ${hookName}/HOOK.md`);
  }
  console.log('[copy-hook-metadata] Done');
}
copyHookMetadata();

View File

@@ -0,0 +1,176 @@
#!/usr/bin/env bash
set -euo pipefail
# Create a styled DMG containing the app bundle + /Applications symlink.
#
# Usage:
# scripts/create-dmg.sh <app_path> [output_dmg]
#
# Env:
# DMG_VOLUME_NAME default: CFBundleName (or "Moltbot")
# DMG_BACKGROUND_PATH default: assets/dmg-background.png
# DMG_BACKGROUND_SMALL default: assets/dmg-background-small.png (recommended)
# DMG_WINDOW_BOUNDS default: "400 100 900 420" (500x320)
# DMG_ICON_SIZE default: 128
# DMG_APP_POS default: "125 160"
# DMG_APPS_POS default: "375 160"
# SKIP_DMG_STYLE=1 skip Finder styling
# DMG_EXTRA_SECTORS extra sectors to keep when shrinking RW image (default: 2048)
APP_PATH="${1:-}"
OUT_PATH="${2:-}"
if [[ -z "$APP_PATH" ]]; then
echo "Usage: $0 <app_path> [output_dmg]" >&2
exit 1
fi
if [[ ! -d "$APP_PATH" ]]; then
echo "Error: App not found: $APP_PATH" >&2
exit 1
fi
ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
BUILD_DIR="$ROOT_DIR/dist"
mkdir -p "$BUILD_DIR"
APP_NAME=$(/usr/libexec/PlistBuddy -c "Print CFBundleName" "$APP_PATH/Contents/Info.plist" 2>/dev/null || echo "Moltbot")
VERSION=$(/usr/libexec/PlistBuddy -c "Print CFBundleShortVersionString" "$APP_PATH/Contents/Info.plist" 2>/dev/null || echo "0.0.0")
DMG_NAME="${APP_NAME}-${VERSION}.dmg"
DMG_VOLUME_NAME="${DMG_VOLUME_NAME:-$APP_NAME}"
DMG_BACKGROUND_SMALL="${DMG_BACKGROUND_SMALL:-$ROOT_DIR/assets/dmg-background-small.png}"
DMG_BACKGROUND_PATH="${DMG_BACKGROUND_PATH:-$ROOT_DIR/assets/dmg-background.png}"
DMG_WINDOW_BOUNDS="${DMG_WINDOW_BOUNDS:-400 100 900 420}"
DMG_ICON_SIZE="${DMG_ICON_SIZE:-128}"
DMG_APP_POS="${DMG_APP_POS:-125 160}"
DMG_APPS_POS="${DMG_APPS_POS:-375 160}"
DMG_EXTRA_SECTORS="${DMG_EXTRA_SECTORS:-2048}"
to_applescript_list4() {
local raw="$1"
echo "$raw" | awk '{ printf "%s, %s, %s, %s", $1, $2, $3, $4 }'
}
to_applescript_pair() {
local raw="$1"
echo "$raw" | awk '{ printf "%s, %s", $1, $2 }'
}
if [[ -z "$OUT_PATH" ]]; then
OUT_PATH="$BUILD_DIR/$DMG_NAME"
fi
echo "Creating DMG: $OUT_PATH"
# Cleanup stuck volumes.
for vol in "/Volumes/$DMG_VOLUME_NAME"* "/Volumes/$APP_NAME"*; do
if [[ -d "$vol" ]]; then
hdiutil detach "$vol" -force 2>/dev/null || true
sleep 1
fi
done
DMG_TEMP="$(mktemp -d /tmp/moltbot-dmg.XXXXXX)"
trap 'hdiutil detach "/Volumes/'"$DMG_VOLUME_NAME"'" -force 2>/dev/null || true; rm -rf "$DMG_TEMP" 2>/dev/null || true' EXIT
cp -R "$APP_PATH" "$DMG_TEMP/"
ln -s /Applications "$DMG_TEMP/Applications"
APP_SIZE_MB=$(du -sm "$APP_PATH" | awk '{print $1}')
DMG_SIZE_MB=$((APP_SIZE_MB + 80))
DMG_RW_PATH="${OUT_PATH%.dmg}-rw.dmg"
rm -f "$DMG_RW_PATH" "$OUT_PATH"
hdiutil create \
-volname "$DMG_VOLUME_NAME" \
-srcfolder "$DMG_TEMP" \
-ov \
-format UDRW \
-size "${DMG_SIZE_MB}m" \
"$DMG_RW_PATH"
MOUNT_POINT="/Volumes/$DMG_VOLUME_NAME"
if [[ -d "$MOUNT_POINT" ]]; then
hdiutil detach "$MOUNT_POINT" -force 2>/dev/null || true
sleep 2
fi
hdiutil attach "$DMG_RW_PATH" -mountpoint "$MOUNT_POINT" -nobrowse
if [[ "${SKIP_DMG_STYLE:-0}" != "1" ]]; then
mkdir -p "$MOUNT_POINT/.background"
if [[ -f "$DMG_BACKGROUND_SMALL" ]]; then
cp "$DMG_BACKGROUND_SMALL" "$MOUNT_POINT/.background/background.png"
elif [[ -f "$DMG_BACKGROUND_PATH" ]]; then
cp "$DMG_BACKGROUND_PATH" "$MOUNT_POINT/.background/background.png"
else
echo "WARN: DMG background missing: $DMG_BACKGROUND_SMALL / $DMG_BACKGROUND_PATH" >&2
fi
# Volume icon: reuse the app icon if available.
ICON_SRC="$ROOT_DIR/apps/macos/Sources/Moltbot/Resources/Moltbot.icns"
if [[ -f "$ICON_SRC" ]]; then
cp "$ICON_SRC" "$MOUNT_POINT/.VolumeIcon.icns"
if command -v SetFile >/dev/null 2>&1; then
SetFile -a C "$MOUNT_POINT" 2>/dev/null || true
fi
fi
osascript <<EOF
tell application "Finder"
tell disk "$DMG_VOLUME_NAME"
open
set current view of container window to icon view
set toolbar visible of container window to false
set statusbar visible of container window to false
set the bounds of container window to {$(to_applescript_list4 "$DMG_WINDOW_BOUNDS")}
set viewOptions to the icon view options of container window
set arrangement of viewOptions to not arranged
set icon size of viewOptions to ${DMG_ICON_SIZE}
if exists file ".background:background.png" then
set background picture of viewOptions to file ".background:background.png"
end if
set text size of viewOptions to 12
set label position of viewOptions to bottom
set shows item info of viewOptions to false
set shows icon preview of viewOptions to true
set position of item "${APP_NAME}.app" of container window to {$(to_applescript_pair "$DMG_APP_POS")}
set position of item "Applications" of container window to {$(to_applescript_pair "$DMG_APPS_POS")}
update without registering applications
delay 2
close
open
delay 1
end tell
end tell
EOF
sleep 2
osascript -e 'tell application "Finder" to close every window' || true
fi
for i in {1..5}; do
if hdiutil detach "$MOUNT_POINT" -quiet 2>/dev/null; then
break
fi
if [[ "$i" == "3" ]]; then
hdiutil detach "$MOUNT_POINT" -force 2>/dev/null || true
fi
sleep 2
done
hdiutil resize -limits "$DMG_RW_PATH" >/tmp/moltbot-dmg-limits.txt 2>/dev/null || true
MIN_SECTORS="$(tail -n 1 /tmp/moltbot-dmg-limits.txt 2>/dev/null | awk '{print $1}')"
rm -f /tmp/moltbot-dmg-limits.txt
if [[ "$MIN_SECTORS" =~ ^[0-9]+$ ]] && [[ "$DMG_EXTRA_SECTORS" =~ ^[0-9]+$ ]]; then
TARGET_SECTORS=$((MIN_SECTORS + DMG_EXTRA_SECTORS))
echo "Shrinking RW image: min sectors=$MIN_SECTORS (+$DMG_EXTRA_SECTORS) -> $TARGET_SECTORS"
hdiutil resize -sectors "$TARGET_SECTORS" "$DMG_RW_PATH" >/dev/null 2>&1 || true
fi
hdiutil convert "$DMG_RW_PATH" -format ULMO -o "$OUT_PATH" -ov
rm -f "$DMG_RW_PATH"
hdiutil verify "$OUT_PATH" >/dev/null
echo "✅ DMG ready: $OUT_PATH"

View File

@@ -0,0 +1,343 @@
import crypto from "node:crypto";
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import { execFileSync } from "node:child_process";
type Args = {
  agentId: string;
  reveal: boolean;
  sessionKey?: string;
};
/** Redact a credential, keeping only a short prefix and suffix. */
const mask = (value: string) => {
  const trimmed = value.trim();
  if (!trimmed) return "missing";
  // Longer secrets keep 6 characters per side, shorter ones keep 4.
  const keep = trimmed.length >= 12 ? 6 : 4;
  return trimmed.slice(0, keep) + trimmed.slice(-keep);
};
/** Parse --agent/--reveal/--session-key from argv (defaults: "main", false, unset). */
const parseArgs = (): Args => {
  const result: Args = { agentId: "main", reveal: false };
  const argv = process.argv.slice(2);
  let i = 0;
  while (i < argv.length) {
    const flag = argv[i];
    if (flag === "--agent" && argv[i + 1]) {
      i += 1;
      result.agentId = String(argv[i]).trim() || "main";
    } else if (flag === "--reveal") {
      result.reveal = true;
    } else if (flag === "--session-key" && argv[i + 1]) {
      i += 1;
      result.sessionKey = String(argv[i]).trim() || undefined;
    }
    i += 1;
  }
  return result;
};
/**
 * Load an agent's auth-profiles.json from the moltbot state directory
 * (CLAWDBOT_STATE_DIR, defaulting to ~/.clawdbot). Throws when the file
 * does not exist.
 */
const loadAuthProfiles = (agentId: string) => {
  const configuredRoot = process.env.CLAWDBOT_STATE_DIR?.trim();
  const stateRoot = configuredRoot || path.join(os.homedir(), ".clawdbot");
  const authPath = path.join(stateRoot, "agents", agentId, "agent", "auth-profiles.json");
  if (!fs.existsSync(authPath)) throw new Error(`Missing: ${authPath}`);
  const raw = fs.readFileSync(authPath, "utf8");
  const store = JSON.parse(raw) as {
    profiles?: Record<string, { provider?: string; type?: string; token?: string; key?: string }>;
  };
  return { authPath, store };
};
/**
 * Collect Anthropic token-type credentials from an auth-profiles store.
 * Only entries with provider "anthropic", type "token", and a non-blank
 * token survive; tokens are returned trimmed.
 */
const pickAnthropicTokens = (store: {
  profiles?: Record<string, { provider?: string; type?: string; token?: string; key?: string }>;
}): Array<{ profileId: string; token: string }> => {
  return Object.entries(store.profiles ?? {}).flatMap(([profileId, cred]) => {
    if (cred?.provider !== "anthropic" || cred.type !== "token") return [];
    const token = cred.token?.trim();
    return token ? [{ profileId, token }] : [];
  });
};
/**
 * Probe Anthropic's OAuth usage endpoint with a bearer token and return the
 * raw response (HTTP status, content type, body text) for diagnostic output.
 */
const fetchAnthropicOAuthUsage = async (token: string) => {
  const headers = {
    Authorization: `Bearer ${token}`,
    Accept: "application/json",
    "anthropic-version": "2023-06-01",
    "anthropic-beta": "oauth-2025-04-20",
    "User-Agent": "moltbot-debug",
  };
  const response = await fetch("https://api.anthropic.com/api/oauth/usage", { headers });
  const body = await response.text();
  return {
    status: response.status,
    contentType: response.headers.get("content-type"),
    text: body,
  };
};
// Read the Claude Code CLI's stored OAuth credentials from the macOS login
// keychain (service "Claude Code-credentials"). Returns null on non-macOS
// platforms or when the entry is missing, unparseable, or lacks a non-empty
// claudeAiOauth.accessToken.
const readClaudeCliKeychain = (): {
accessToken: string;
expiresAt?: number;
scopes?: string[];
} | null => {
if (process.platform !== "darwin") return null;
try {
// `security ... -w` prints only the password payload, which is JSON here.
const raw = execFileSync(
"security",
["find-generic-password", "-s", "Claude Code-credentials", "-w"],
{ encoding: "utf8", stdio: ["ignore", "pipe", "ignore"], timeout: 5000 },
);
const parsed = JSON.parse(raw.trim()) as Record<string, unknown>;
const oauth = parsed?.claudeAiOauth as Record<string, unknown> | undefined;
if (!oauth || typeof oauth !== "object") return null;
const accessToken = oauth.accessToken;
if (typeof accessToken !== "string" || !accessToken.trim()) return null;
// expiresAt / scopes are optional; keep them only when well-typed.
const expiresAt =
typeof oauth.expiresAt === "number" ? oauth.expiresAt : undefined;
const scopes = Array.isArray(oauth.scopes)
? oauth.scopes.filter((v): v is string => typeof v === "string")
: undefined;
return { accessToken, expiresAt, scopes };
} catch {
// Missing keychain item, denied access, timeout, or malformed JSON.
return null;
}
};
/** Map a Chromium-family cookie DB path to its "Safe Storage" keychain service name. */
const chromeServiceNameForPath = (cookiePath: string): string => {
  const services: Array<[marker: string, service: string]> = [
    ["/Arc/", "Arc Safe Storage"],
    ["/BraveSoftware/", "Brave Safe Storage"],
    ["/Microsoft Edge/", "Microsoft Edge Safe Storage"],
    ["/Chromium/", "Chromium Safe Storage"],
  ];
  const match = services.find(([marker]) => cookiePath.includes(marker));
  return match ? match[1] : "Chrome Safe Storage";
};
/** Fetch a generic password from the macOS keychain, or null when unavailable. */
const readKeychainPassword = (service: string): string | null => {
  try {
    const stdout = execFileSync("security", ["find-generic-password", "-w", "-s", service], {
      encoding: "utf8",
      stdio: ["ignore", "pipe", "ignore"],
      timeout: 5000,
    });
    const password = stdout.trim();
    return password.length > 0 ? password : null;
  } catch {
    return null;
  }
};
// Decrypt a "v10"/"v11"-prefixed encrypted cookie value using the browser's
// Safe Storage password from the macOS keychain. The derivation parameters
// here (PBKDF2-SHA1, salt "saltysalt", 1003 iterations, AES-128-CBC with a
// 16-byte IV of spaces) follow Chromium's macOS cookie-encryption scheme.
// Returns the decrypted text, or null when decryption is not possible.
const decryptChromeCookieValue = (encrypted: Buffer, service: string): string | null => {
if (encrypted.length < 4) return null;
// The first 3 bytes are the version tag; anything else is not this scheme.
const prefix = encrypted.subarray(0, 3).toString("utf8");
if (prefix !== "v10" && prefix !== "v11") return null;
const password = readKeychainPassword(service);
if (!password) return null;
const key = crypto.pbkdf2Sync(password, "saltysalt", 1003, 16, "sha1");
const iv = Buffer.alloc(16, 0x20);
const data = encrypted.subarray(3);
try {
const decipher = crypto.createDecipheriv("aes-128-cbc", key, iv);
decipher.setAutoPadding(true);
const decrypted = Buffer.concat([decipher.update(data), decipher.final()]);
const text = decrypted.toString("utf8").trim();
return text ? text : null;
} catch {
// Wrong key or corrupted padding.
return null;
}
};
// Query a Chromium-family cookie database for the claude.ai `sessionKey`
// cookie. The SQL returns the plaintext value when present, otherwise the
// hex-encoded encrypted_value, which is then decrypted with the browser's
// Safe Storage key. Returns the key only when it looks like "sk-ant-...".
const queryChromeCookieDb = (cookieDb: string): string | null => {
try {
const out = execFileSync(
"sqlite3",
[
"-readonly",
cookieDb,
`
SELECT
COALESCE(NULLIF(value,''), hex(encrypted_value))
FROM cookies
WHERE (host_key LIKE '%claude.ai%' OR host_key = '.claude.ai')
AND name = 'sessionKey'
LIMIT 1;
`,
],
{ encoding: "utf8", stdio: ["ignore", "pipe", "ignore"], timeout: 5000 },
).trim();
if (!out) return null;
// Plaintext cookie value.
if (out.startsWith("sk-ant-")) return out;
// Otherwise treat the output as hex-encoded encrypted bytes.
const hex = out.replace(/[^0-9A-Fa-f]/g, "");
if (!hex) return null;
const buf = Buffer.from(hex, "hex");
const service = chromeServiceNameForPath(cookieDb);
const decrypted = decryptChromeCookieValue(buf, service);
return decrypted && decrypted.startsWith("sk-ant-") ? decrypted : null;
} catch {
// sqlite3 missing, DB locked, or unreadable — treat as "not found".
return null;
}
};
/**
 * Look up the claude.ai `sessionKey` cookie in a Firefox cookies.sqlite DB.
 * Returns the value only when it looks like a Claude session key
 * ("sk-ant-..."), otherwise null (including on any sqlite3 failure).
 */
const queryFirefoxCookieDb = (cookieDb: string): string | null => {
  const query = `
SELECT value
FROM moz_cookies
WHERE (host LIKE '%claude.ai%' OR host = '.claude.ai')
AND name = 'sessionKey'
LIMIT 1;
`;
  try {
    const result = execFileSync("sqlite3", ["-readonly", cookieDb, query], {
      encoding: "utf8",
      stdio: ["ignore", "pipe", "ignore"],
      timeout: 5000,
    }).trim();
    if (!result || !result.startsWith("sk-ant-")) return null;
    return result;
  } catch {
    return null;
  }
};
// Scan installed browsers (macOS only) for a claude.ai `sessionKey` cookie.
// Firefox profiles are checked first (plaintext cookie DBs), then
// Chromium-family browsers (encrypted cookie DBs). Returns the first hit
// together with a source label, or null when nothing is found.
const findClaudeSessionKey = (): { sessionKey: string; source: string } | null => {
if (process.platform !== "darwin") return null;
const firefoxRoot = path.join(
os.homedir(),
"Library",
"Application Support",
"Firefox",
"Profiles",
);
if (fs.existsSync(firefoxRoot)) {
for (const entry of fs.readdirSync(firefoxRoot)) {
const db = path.join(firefoxRoot, entry, "cookies.sqlite");
if (!fs.existsSync(db)) continue;
const value = queryFirefoxCookieDb(db);
if (value) return { sessionKey: value, source: `firefox:${db}` };
}
}
// Application-support roots for the Chromium-family browsers we check.
const chromeCandidates = [
path.join(os.homedir(), "Library", "Application Support", "Google", "Chrome"),
path.join(os.homedir(), "Library", "Application Support", "Chromium"),
path.join(os.homedir(), "Library", "Application Support", "Arc"),
path.join(os.homedir(), "Library", "Application Support", "BraveSoftware", "Brave-Browser"),
path.join(os.homedir(), "Library", "Application Support", "Microsoft Edge"),
];
for (const root of chromeCandidates) {
if (!fs.existsSync(root)) continue;
// Per-profile cookie DBs live under "Default" / "Profile N" directories.
const profiles = fs
.readdirSync(root)
.filter((name) => name === "Default" || name.startsWith("Profile "));
for (const profile of profiles) {
const db = path.join(root, profile, "Cookies");
if (!fs.existsSync(db)) continue;
const value = queryChromeCookieDb(db);
if (value) return { sessionKey: value, source: `chromium:${db}` };
}
}
return null;
};
// Probe claude.ai's web API with a browser session key: list organizations,
// then fetch usage for the first organization. Returns a discriminated
// result so the caller can report which step failed and with what HTTP
// status/body.
const fetchClaudeWebUsage = async (sessionKey: string) => {
const headers = {
Cookie: `sessionKey=${sessionKey}`,
Accept: "application/json",
// Hard-coded Safari user-agent string sent with both requests.
"User-Agent":
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.1 Safari/605.1.15",
};
const orgRes = await fetch("https://claude.ai/api/organizations", { headers });
const orgText = await orgRes.text();
if (!orgRes.ok) {
return { ok: false as const, step: "organizations", status: orgRes.status, body: orgText };
}
const orgs = JSON.parse(orgText) as Array<{ uuid?: string }>;
const orgId = orgs?.[0]?.uuid;
// A 200 with no organizations still counts as an "organizations" failure.
if (!orgId) {
return { ok: false as const, step: "organizations", status: 200, body: orgText };
}
const usageRes = await fetch(`https://claude.ai/api/organizations/${orgId}/usage`, { headers });
const usageText = await usageRes.text();
return usageRes.ok
? { ok: true as const, orgId, body: usageText }
: { ok: false as const, step: "usage", status: usageRes.status, body: usageText };
};
/**
 * Diagnostic entry point: prints (masked by default, revealed with --reveal)
 * Anthropic credentials discovered in the Claude Code CLI keychain, the
 * agent's moltbot auth profiles, and browser cookies, probing the matching
 * usage endpoint with each credential found.
 */
const main = async () => {
  const opts = parseArgs();
  const { authPath, store } = loadAuthProfiles(opts.agentId);
  console.log(`Auth file: ${authPath}`);
  // 1) Claude Code CLI keychain token (macOS only).
  const keychain = readClaudeCliKeychain();
  if (keychain) {
    console.log(
      `Claude Code CLI keychain: accessToken=${opts.reveal ? keychain.accessToken : mask(keychain.accessToken)} scopes=${keychain.scopes?.join(",") ?? "(unknown)"}`,
    );
    const oauth = await fetchAnthropicOAuthUsage(keychain.accessToken);
    console.log(
      `OAuth usage (keychain): HTTP ${oauth.status} (${oauth.contentType ?? "no content-type"})`,
    );
    console.log(oauth.text.slice(0, 200).replace(/\s+/g, " ").trim());
  } else {
    console.log("Claude Code CLI keychain: missing/unreadable");
  }
  // 2) Anthropic token profiles from the agent's auth-profiles.json.
  const anthropic = pickAnthropicTokens(store);
  if (anthropic.length === 0) {
    console.log("Auth profiles: no Anthropic token profiles found");
  } else {
    for (const entry of anthropic) {
      console.log(
        `Auth profiles: ${entry.profileId} token=${opts.reveal ? entry.token : mask(entry.token)}`,
      );
      const oauth = await fetchAnthropicOAuthUsage(entry.token);
      console.log(
        `OAuth usage (${entry.profileId}): HTTP ${oauth.status} (${oauth.contentType ?? "no content-type"})`,
      );
      console.log(oauth.text.slice(0, 200).replace(/\s+/g, " ").trim());
    }
  }
  // 3) claude.ai web session key: CLI flag > env vars > browser cookie scan.
  // Fix: resolve the key and its source label in a single pass — previously
  // findClaudeSessionKey() was invoked twice (once for the key, once for the
  // source), re-running the expensive browser/keychain scan, and a
  // whitespace-only --session-key was mislabeled as the source.
  let sessionKey = opts.sessionKey?.trim() || undefined;
  let source = sessionKey ? "--session-key" : "";
  if (!sessionKey) {
    sessionKey =
      process.env.CLAUDE_AI_SESSION_KEY?.trim() ||
      process.env.CLAUDE_WEB_SESSION_KEY?.trim() ||
      undefined;
    if (sessionKey) source = "env";
  }
  if (!sessionKey) {
    const found = findClaudeSessionKey();
    if (found) {
      sessionKey = found.sessionKey;
      source = found.source;
    }
  }
  if (!sessionKey) {
    console.log("Claude web: no sessionKey found (try --session-key or export CLAUDE_AI_SESSION_KEY)");
    return;
  }
  console.log(
    `Claude web: sessionKey=${opts.reveal ? sessionKey : mask(sessionKey)} (source: ${source})`,
  );
  const web = await fetchClaudeWebUsage(sessionKey);
  if (!web.ok) {
    console.log(`Claude web: ${web.step} HTTP ${web.status}`);
    console.log(String(web.body).slice(0, 400).replace(/\s+/g, " ").trim());
    return;
  }
  console.log(`Claude web: org=${web.orgId} OK`);
  console.log(web.body.slice(0, 400).replace(/\s+/g, " ").trim());
};
await main();

View File

@@ -0,0 +1,20 @@
# Image for the moltbot cleanup smoke test (entrypoint: moltbot-cleanup-smoke).
FROM node:22-bookworm-slim
# Minimal tooling: bash for the smoke script, plus ca-certificates and git.
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
bash \
ca-certificates \
git \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /repo
# Copy manifests first and install, so source-only edits reuse this layer.
COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./
COPY scripts/postinstall.js ./scripts/postinstall.js
RUN corepack enable \
&& pnpm install --frozen-lockfile
# Then copy the full repo and install the smoke-test entrypoint.
COPY . .
COPY scripts/docker/cleanup-smoke/run.sh /usr/local/bin/moltbot-cleanup-smoke
RUN chmod +x /usr/local/bin/moltbot-cleanup-smoke
ENTRYPOINT ["/usr/local/bin/moltbot-cleanup-smoke"]

View File

@@ -0,0 +1,32 @@
#!/usr/bin/env bash
set -euo pipefail
# Smoke test for `moltbot reset` and `moltbot uninstall`: seeds a throwaway
# state directory, then asserts each command removes exactly what it should.
cd /repo
export CLAWDBOT_STATE_DIR="/tmp/moltbot-test"
export CLAWDBOT_CONFIG_PATH="${CLAWDBOT_STATE_DIR}/moltbot.json"
echo "==> Seed state"
mkdir -p "${CLAWDBOT_STATE_DIR}/credentials"
mkdir -p "${CLAWDBOT_STATE_DIR}/agents/main/sessions"
echo '{}' >"${CLAWDBOT_CONFIG_PATH}"
echo 'creds' >"${CLAWDBOT_STATE_DIR}/credentials/marker.txt"
echo 'session' >"${CLAWDBOT_STATE_DIR}/agents/main/sessions/sessions.json"
echo "==> Reset (config+creds+sessions)"
pnpm moltbot reset --scope config+creds+sessions --yes --non-interactive
# Reset must remove the config file plus the credentials and sessions dirs.
test ! -f "${CLAWDBOT_CONFIG_PATH}"
test ! -d "${CLAWDBOT_STATE_DIR}/credentials"
test ! -d "${CLAWDBOT_STATE_DIR}/agents/main/sessions"
echo "==> Recreate minimal config"
mkdir -p "${CLAWDBOT_STATE_DIR}/credentials"
echo '{}' >"${CLAWDBOT_CONFIG_PATH}"
echo "==> Uninstall (state only)"
pnpm moltbot uninstall --state --yes --non-interactive
# Uninstall --state must remove the entire state directory.
test ! -d "${CLAWDBOT_STATE_DIR}"
echo "OK"

View File

@@ -0,0 +1,14 @@
# Image for the moltbot install end-to-end test (entrypoint: moltbot-install-e2e).
FROM node:22-bookworm-slim
# bash + curl + git for the install script; ca-certificates for HTTPS.
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
bash \
ca-certificates \
curl \
git \
&& rm -rf /var/lib/apt/lists/*
COPY run.sh /usr/local/bin/moltbot-install-e2e
RUN chmod +x /usr/local/bin/moltbot-install-e2e
ENTRYPOINT ["/usr/local/bin/moltbot-install-e2e"]

View File

@@ -0,0 +1,531 @@
#!/usr/bin/env bash
# Installer E2E: validates provider credentials, resolves the expected npm
# version, runs the public install one-liner, and verifies the installed CLI.
# Configuration comes entirely from CLAWDBOT_* environment variables.
set -euo pipefail
INSTALL_URL="${CLAWDBOT_INSTALL_URL:-https://molt.bot/install.sh}"
MODELS_MODE="${CLAWDBOT_E2E_MODELS:-both}" # both|openai|anthropic
INSTALL_TAG="${CLAWDBOT_INSTALL_TAG:-latest}"
E2E_PREVIOUS_VERSION="${CLAWDBOT_INSTALL_E2E_PREVIOUS:-}"
SKIP_PREVIOUS="${CLAWDBOT_INSTALL_E2E_SKIP_PREVIOUS:-0}"
# Default credentials to empty so `set -u` never trips on unset vars.
OPENAI_API_KEY="${OPENAI_API_KEY:-}"
ANTHROPIC_API_KEY="${ANTHROPIC_API_KEY:-}"
ANTHROPIC_API_TOKEN="${ANTHROPIC_API_TOKEN:-}"
# Fail fast (exit 2 = configuration error) when the requested mode lacks keys.
if [[ "$MODELS_MODE" != "both" && "$MODELS_MODE" != "openai" && "$MODELS_MODE" != "anthropic" ]]; then
echo "ERROR: CLAWDBOT_E2E_MODELS must be one of: both|openai|anthropic" >&2
exit 2
fi
if [[ "$MODELS_MODE" == "both" ]]; then
if [[ -z "$OPENAI_API_KEY" ]]; then
echo "ERROR: CLAWDBOT_E2E_MODELS=both requires OPENAI_API_KEY." >&2
exit 2
fi
if [[ -z "$ANTHROPIC_API_TOKEN" && -z "$ANTHROPIC_API_KEY" ]]; then
echo "ERROR: CLAWDBOT_E2E_MODELS=both requires ANTHROPIC_API_TOKEN or ANTHROPIC_API_KEY." >&2
exit 2
fi
elif [[ "$MODELS_MODE" == "openai" && -z "$OPENAI_API_KEY" ]]; then
echo "ERROR: CLAWDBOT_E2E_MODELS=openai requires OPENAI_API_KEY." >&2
exit 2
elif [[ "$MODELS_MODE" == "anthropic" && -z "$ANTHROPIC_API_TOKEN" && -z "$ANTHROPIC_API_KEY" ]]; then
echo "ERROR: CLAWDBOT_E2E_MODELS=anthropic requires ANTHROPIC_API_TOKEN or ANTHROPIC_API_KEY." >&2
exit 2
fi
echo "==> Resolve npm versions"
# The version the installer should end up with (for the requested dist-tag).
EXPECTED_VERSION="$(npm view "moltbot@${INSTALL_TAG}" version)"
if [[ -z "$EXPECTED_VERSION" || "$EXPECTED_VERSION" == "undefined" || "$EXPECTED_VERSION" == "null" ]]; then
echo "ERROR: unable to resolve moltbot@${INSTALL_TAG} version" >&2
exit 2
fi
# Pick the second-newest published version unless one was pinned explicitly;
# preinstalling it forces the installer down its upgrade path.
if [[ -n "$E2E_PREVIOUS_VERSION" ]]; then
PREVIOUS_VERSION="$E2E_PREVIOUS_VERSION"
else
PREVIOUS_VERSION="$(node - <<'NODE'
const { execSync } = require("node:child_process");
const versions = JSON.parse(execSync("npm view moltbot versions --json", { encoding: "utf8" }));
if (!Array.isArray(versions) || versions.length === 0) process.exit(1);
process.stdout.write(versions.length >= 2 ? versions[versions.length - 2] : versions[0]);
NODE
)"
fi
echo "expected=$EXPECTED_VERSION previous=$PREVIOUS_VERSION"
if [[ "$SKIP_PREVIOUS" == "1" ]]; then
echo "==> Skip preinstall previous (CLAWDBOT_INSTALL_E2E_SKIP_PREVIOUS=1)"
else
echo "==> Preinstall previous (forces installer upgrade path; avoids read() prompt)"
npm install -g "moltbot@${PREVIOUS_VERSION}"
fi
echo "==> Run official installer one-liner"
# Beta and pinned-version installs are signalled to install.sh via env vars.
if [[ "$INSTALL_TAG" == "beta" ]]; then
CLAWDBOT_BETA=1 curl -fsSL "$INSTALL_URL" | bash
elif [[ "$INSTALL_TAG" != "latest" ]]; then
CLAWDBOT_VERSION="$INSTALL_TAG" curl -fsSL "$INSTALL_URL" | bash
else
curl -fsSL "$INSTALL_URL" | bash
fi
echo "==> Verify installed version"
INSTALLED_VERSION="$(moltbot --version 2>/dev/null | head -n 1 | tr -d '\r')"
echo "installed=$INSTALLED_VERSION expected=$EXPECTED_VERSION"
if [[ "$INSTALLED_VERSION" != "$EXPECTED_VERSION" ]]; then
echo "ERROR: expected moltbot@$EXPECTED_VERSION, got moltbot@$INSTALLED_VERSION" >&2
exit 1
fi
# Try each candidate image model in order until `models set-image` accepts one.
# Prints the accepted model name on stdout; returns 1 if every candidate fails.
set_image_model() {
local profile_name="$1"
shift
local model
for model in "$@"; do
moltbot --profile "$profile_name" models set-image "$model" >/dev/null 2>&1 || continue
echo "$model"
return 0
done
echo "ERROR: could not set an image model (tried: $*)" >&2
return 1
}
# Try each candidate agent model in order until `models set` accepts one.
# Prints the accepted model name on stdout; returns 1 if every candidate fails.
set_agent_model() {
local profile_name="$1"
shift
local model
for model in "$@"; do
moltbot --profile "$profile_name" models set "$model" >/dev/null 2>&1 || continue
echo "$model"
return 0
done
echo "ERROR: could not set agent model (tried: $*)" >&2
return 1
}
# Generate a 96x64 truecolor PNG whose left half is solid red and right half
# is solid green, written to the path in $1. Implemented as an embedded Node
# script that emits the PNG chunks (IHDR/IDAT/IEND) by hand so the image
# content is fully deterministic — the image-tool E2E asserts on these colors.
write_png_lr_rg() {
local out="$1"
node - <<'NODE' "$out"
const fs = require("node:fs");
const zlib = require("node:zlib");
const out = process.argv[2];
const width = 96;
const height = 64;
const crcTable = (() => {
const table = new Uint32Array(256);
for (let i = 0; i < 256; i++) {
let c = i;
for (let k = 0; k < 8; k++) c = (c & 1) ? (0xedb88320 ^ (c >>> 1)) : (c >>> 1);
table[i] = c >>> 0;
}
return table;
})();
function crc32(buf) {
let c = 0xffffffff;
for (let i = 0; i < buf.length; i++) c = crcTable[(c ^ buf[i]) & 0xff] ^ (c >>> 8);
return (c ^ 0xffffffff) >>> 0;
}
function chunk(type, data) {
const typeBuf = Buffer.from(type, "ascii");
const len = Buffer.alloc(4);
len.writeUInt32BE(data.length, 0);
const crcBuf = Buffer.alloc(4);
crcBuf.writeUInt32BE(crc32(Buffer.concat([typeBuf, data])), 0);
return Buffer.concat([len, typeBuf, data, crcBuf]);
}
const sig = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]);
const ihdr = Buffer.alloc(13);
ihdr.writeUInt32BE(width, 0);
ihdr.writeUInt32BE(height, 4);
ihdr[8] = 8; // bit depth
ihdr[9] = 2; // color type: truecolor
ihdr[10] = 0; // compression
ihdr[11] = 0; // filter
ihdr[12] = 0; // interlace
const rows = [];
for (let y = 0; y < height; y++) {
const row = Buffer.alloc(1 + width * 3);
row[0] = 0; // filter: none
for (let x = 0; x < width; x++) {
const i = 1 + x * 3;
const left = x < width / 2;
row[i + 0] = left ? 255 : 0;
row[i + 1] = left ? 0 : 255;
row[i + 2] = 0;
}
rows.push(row);
}
const raw = Buffer.concat(rows);
const idat = zlib.deflateSync(raw, { level: 9 });
const png = Buffer.concat([
sig,
chunk("IHDR", ihdr),
chunk("IDAT", idat),
chunk("IEND", Buffer.alloc(0)),
]);
fs.writeFileSync(out, png);
NODE
}
# Send one message to the agent on a given session and capture the JSON
# response into a file. Args: profile, session id, prompt, output path.
run_agent_turn() {
local profile_name="$1"
local sid="$2"
local message="$3"
local dest="$4"
moltbot --profile "$profile_name" agent \
--session-id "$sid" \
--message "$message" \
--thinking off \
--json >"$dest"
}
# Fail (exit 1 from the embedded Node check) unless the agent JSON response
# at $1 contains at least one non-empty text payload, looking in both the
# `result.payloads` and top-level `payloads` shapes.
assert_agent_json_has_text() {
local path="$1"
node - <<'NODE' "$path"
const fs = require("node:fs");
const p = JSON.parse(fs.readFileSync(process.argv[2], "utf8"));
const payloads =
Array.isArray(p?.result?.payloads) ? p.result.payloads :
Array.isArray(p?.payloads) ? p.payloads :
[];
const texts = payloads.map((x) => String(x?.text ?? "").trim()).filter(Boolean);
if (texts.length === 0) process.exit(1);
NODE
}
# Validate an agent JSON response file ($1): the gateway status must be
# ok/accepted, no payload may carry isError, the combined text must not look
# rate-limited, and — when the response reports a provider — it must match the
# expected provider in $2. Exits non-zero via the embedded Node check on failure.
assert_agent_json_ok() {
local json_path="$1"
local expect_provider="$2"
node - <<'NODE' "$json_path" "$expect_provider"
const fs = require("node:fs");
const jsonPath = process.argv[2];
const expectProvider = process.argv[3];
const p = JSON.parse(fs.readFileSync(jsonPath, "utf8"));
if (typeof p?.status === "string" && p.status !== "ok" && p.status !== "accepted") {
console.error(`ERROR: gateway status=${p.status}`);
process.exit(1);
}
const result = p?.result ?? p;
const payloads = Array.isArray(result?.payloads) ? result.payloads : [];
const anyError = payloads.some((pl) => pl && pl.isError === true);
const combinedText = payloads.map((pl) => String(pl?.text ?? "")).filter(Boolean).join("\n").trim();
if (anyError) {
console.error(`ERROR: agent returned error payload: ${combinedText}`);
process.exit(1);
}
// Fix: use \b (word boundary). The quoted heredoc passes text verbatim, so the
// previous /^429\\b/ matched "429" followed by a literal backslash and never
// detected real rate-limit text such as "429 Too Many Requests".
if (/rate_limit_error/i.test(combinedText) || /^429\b/.test(combinedText)) {
console.error(`ERROR: agent rate limited: ${combinedText}`);
process.exit(1);
}
const meta = result?.meta;
const provider =
(typeof meta?.agentMeta?.provider === "string" && meta.agentMeta.provider.trim()) ||
(typeof meta?.provider === "string" && meta.provider.trim()) ||
"";
if (expectProvider && provider && provider !== expectProvider) {
console.error(`ERROR: expected provider=${expectProvider}, got provider=${provider}`);
process.exit(1);
}
NODE
}
# From the agent JSON response at $1, print the text payload that exactly
# equals $2; if none matches, fall back to the first non-empty payload (or
# empty output). Lets callers compare against an expected marker string.
extract_matching_text() {
local path="$1"
local expected="$2"
node - <<'NODE' "$path" "$expected"
const fs = require("node:fs");
const p = JSON.parse(fs.readFileSync(process.argv[2], "utf8"));
const expected = String(process.argv[3] ?? "");
const payloads =
Array.isArray(p?.result?.payloads) ? p.result.payloads :
Array.isArray(p?.payloads) ? p.payloads :
[];
const texts = payloads.map((x) => String(x?.text ?? "").trim()).filter(Boolean);
const match = texts.find((text) => text === expected);
process.stdout.write(match ?? texts[0] ?? "");
NODE
}
# Assert that the session transcript (JSONL file, $1) shows every tool named
# in the remaining args being used. The embedded Node script walks every JSON
# value in every line, collecting tool names from several transcript shapes
# (Anthropic-style tool_use blocks, name+input objects, OpenAI-style
# tool_calls / function entries), then fails listing any missing tools.
assert_session_used_tools() {
local jsonl="$1"
shift
node - <<'NODE' "$jsonl" "$@"
const fs = require("node:fs");
const jsonl = process.argv[2];
const required = new Set(process.argv.slice(3));
const raw = fs.readFileSync(jsonl, "utf8");
const lines = raw.split("\n").map((l) => l.trim()).filter(Boolean);
const seen = new Set();
const toolTypes = new Set([
"tool_use",
"tool_result",
"tool",
"tool-call",
"tool_call",
"tooluse",
"tool-use",
"toolresult",
"tool-result",
]);
function walk(node, parent) {
if (!node) return;
if (Array.isArray(node)) {
for (const item of node) walk(item, node);
return;
}
if (typeof node !== "object") return;
const obj = node;
const t = typeof obj.type === "string" ? obj.type : null;
if (t && (toolTypes.has(t) || /tool/i.test(t))) {
const name =
typeof obj.name === "string" ? obj.name :
typeof obj.toolName === "string" ? obj.toolName :
typeof obj.tool_name === "string" ? obj.tool_name :
(obj.tool && typeof obj.tool.name === "string") ? obj.tool.name :
null;
if (name) seen.add(name);
}
if (typeof obj.name === "string" && typeof obj.input === "object" && obj.input) {
// Many tool-use blocks look like { type: "...", name: "exec", input: {...} }
// but some transcripts omit/rename type.
seen.add(obj.name);
}
// OpenAI-ish tool call shapes.
if (Array.isArray(obj.tool_calls)) {
for (const c of obj.tool_calls) {
const fn = c?.function;
if (fn && typeof fn.name === "string") seen.add(fn.name);
}
}
if (obj.function && typeof obj.function.name === "string") seen.add(obj.function.name);
for (const v of Object.values(obj)) walk(v, obj);
}
for (const line of lines) {
try {
const entry = JSON.parse(line);
walk(entry, null);
} catch {
// ignore unparsable lines
}
}
const missing = [...required].filter((t) => !seen.has(t));
if (missing.length > 0) {
console.error(`Missing tools in transcript: ${missing.join(", ")}`);
console.error(`Seen tools: ${[...seen].sort().join(", ")}`);
console.error("Transcript head:");
console.error(lines.slice(0, 5).join("\n"));
process.exit(1);
}
NODE
}
# Run the full E2E flow for one profile: onboard, configure models, start a
# gateway, drive four agent turns (read/write/exec/image tools), then verify
# tool usage from the session transcript. Exits non-zero on any failure.
# Args: profile name, gateway port, workspace dir, provider ("openai"|"anthropic").
run_profile() {
local profile="$1"
local port="$2"
local workspace="$3"
local agent_model_provider="$4" # "openai"|"anthropic"
echo "==> Onboard ($profile)"
# Three onboarding variants: OpenAI API key, Anthropic token, or Anthropic
# API key — all non-interactive with a loopback token-auth gateway.
if [[ "$agent_model_provider" == "openai" ]]; then
moltbot --profile "$profile" onboard \
--non-interactive \
--accept-risk \
--flow quickstart \
--auth-choice openai-api-key \
--openai-api-key "$OPENAI_API_KEY" \
--gateway-port "$port" \
--gateway-bind loopback \
--gateway-auth token \
--workspace "$workspace" \
--skip-health
elif [[ -n "$ANTHROPIC_API_TOKEN" ]]; then
moltbot --profile "$profile" onboard \
--non-interactive \
--accept-risk \
--flow quickstart \
--auth-choice token \
--token-provider anthropic \
--token "$ANTHROPIC_API_TOKEN" \
--gateway-port "$port" \
--gateway-bind loopback \
--gateway-auth token \
--workspace "$workspace" \
--skip-health
else
moltbot --profile "$profile" onboard \
--non-interactive \
--accept-risk \
--flow quickstart \
--auth-choice apiKey \
--anthropic-api-key "$ANTHROPIC_API_KEY" \
--gateway-port "$port" \
--gateway-bind loopback \
--gateway-auth token \
--workspace "$workspace" \
--skip-health
fi
echo "==> Verify workspace identity files ($profile)"
# Onboarding is expected to seed these identity files in the workspace.
test -f "$workspace/AGENTS.md"
test -f "$workspace/IDENTITY.md"
test -f "$workspace/USER.md"
test -f "$workspace/SOUL.md"
test -f "$workspace/TOOLS.md"
echo "==> Configure models ($profile)"
local agent_model
local image_model
# Candidates are tried in order; the first one the CLI accepts wins.
if [[ "$agent_model_provider" == "openai" ]]; then
agent_model="$(set_agent_model "$profile" \
"openai/gpt-4.1-mini" \
"openai/gpt-4.1" \
"openai/gpt-4o-mini" \
"openai/gpt-4o")"
image_model="$(set_image_model "$profile" \
"openai/gpt-4.1" \
"openai/gpt-4o-mini" \
"openai/gpt-4o" \
"openai/gpt-4.1-mini")"
else
agent_model="$(set_agent_model "$profile" \
"anthropic/claude-opus-4-5" \
"claude-opus-4-5")"
image_model="$(set_image_model "$profile" \
"anthropic/claude-opus-4-5" \
"claude-opus-4-5")"
fi
echo "model=$agent_model"
echo "imageModel=$image_model"
echo "==> Prepare tool fixtures ($profile)"
PROOF_TXT="$workspace/proof.txt"
PROOF_COPY="$workspace/copy.txt"
HOSTNAME_TXT="$workspace/hostname.txt"
IMAGE_PNG="$workspace/proof.png"
IMAGE_TXT="$workspace/image.txt"
SESSION_ID="e2e-tools-${profile}"
# NOTE(review): assumes the container runs as root (HOME=/root) — confirm.
SESSION_JSONL="/root/.clawdbot-${profile}/agents/main/sessions/${SESSION_ID}.jsonl"
# Random hex proof value makes the read-tool assertion unforgeable.
PROOF_VALUE="$(node -e 'console.log(require("node:crypto").randomBytes(16).toString("hex"))')"
echo -n "$PROOF_VALUE" >"$PROOF_TXT"
write_png_lr_rg "$IMAGE_PNG"
EXPECTED_HOSTNAME="$(cat /etc/hostname | tr -d '\r\n')"
echo "==> Start gateway ($profile)"
GATEWAY_LOG="$workspace/gateway.log"
moltbot --profile "$profile" gateway --port "$port" --bind loopback >"$GATEWAY_LOG" 2>&1 &
GATEWAY_PID="$!"
# Ensure the background gateway is reaped even if an assertion below fails.
cleanup_profile() {
if kill -0 "$GATEWAY_PID" 2>/dev/null; then
kill "$GATEWAY_PID" 2>/dev/null || true
wait "$GATEWAY_PID" 2>/dev/null || true
fi
}
trap cleanup_profile EXIT
echo "==> Wait for health ($profile)"
# Poll up to ~15s for the gateway to come up, then do one strict health check.
for _ in $(seq 1 60); do
if moltbot --profile "$profile" health --timeout 2000 --json >/dev/null 2>&1; then
break
fi
sleep 0.25
done
moltbot --profile "$profile" health --timeout 10000 --json >/dev/null
echo "==> Agent turns ($profile)"
TURN1_JSON="/tmp/agent-${profile}-1.json"
TURN2_JSON="/tmp/agent-${profile}-2.json"
TURN3_JSON="/tmp/agent-${profile}-3.json"
TURN4_JSON="/tmp/agent-${profile}-4.json"
# Turn 1: read tool must return the exact proof value.
run_agent_turn "$profile" "$SESSION_ID" \
"Use the read tool (not exec) to read proof.txt. Reply with the exact contents only (no extra whitespace)." \
"$TURN1_JSON"
assert_agent_json_has_text "$TURN1_JSON"
assert_agent_json_ok "$TURN1_JSON" "$agent_model_provider"
local reply1
reply1="$(extract_matching_text "$TURN1_JSON" "$PROOF_VALUE" | tr -d '\r\n')"
if [[ "$reply1" != "$PROOF_VALUE" ]]; then
echo "ERROR: agent did not read proof.txt correctly ($profile): $reply1" >&2
exit 1
fi
# Turn 2: write tool round-trip — write the proof into copy.txt, read it back.
local prompt2
prompt2=$'Use the write tool (not exec) to write exactly this string into copy.txt:\n'"${reply1}"$'\nThen use the read tool (not exec) to read copy.txt and reply with the exact contents only (no extra whitespace).'
run_agent_turn "$profile" "$SESSION_ID" "$prompt2" "$TURN2_JSON"
assert_agent_json_has_text "$TURN2_JSON"
assert_agent_json_ok "$TURN2_JSON" "$agent_model_provider"
local copy_value
copy_value="$(cat "$PROOF_COPY" 2>/dev/null | tr -d '\r\n' || true)"
if [[ "$copy_value" != "$PROOF_VALUE" ]]; then
echo "ERROR: copy.txt did not match proof.txt ($profile)" >&2
exit 1
fi
local reply2
reply2="$(extract_matching_text "$TURN2_JSON" "$PROOF_VALUE" | tr -d '\r\n')"
if [[ "$reply2" != "$PROOF_VALUE" ]]; then
echo "ERROR: agent did not read copy.txt correctly ($profile): $reply2" >&2
exit 1
fi
# Turn 3: exec tool — capture the container hostname into hostname.txt.
local prompt3
prompt3=$'Use the exec tool to run: cat /etc/hostname\nThen use the write tool to write the exact stdout (trim trailing newline) into hostname.txt. Reply with the hostname only.'
run_agent_turn "$profile" "$SESSION_ID" "$prompt3" "$TURN3_JSON"
assert_agent_json_has_text "$TURN3_JSON"
assert_agent_json_ok "$TURN3_JSON" "$agent_model_provider"
if [[ "$(cat "$HOSTNAME_TXT" 2>/dev/null | tr -d '\r\n' || true)" != "$EXPECTED_HOSTNAME" ]]; then
echo "ERROR: hostname.txt did not match /etc/hostname ($profile)" >&2
exit 1
fi
# Turn 4: image tool — the generated PNG is red-left/green-right by construction.
run_agent_turn "$profile" "$SESSION_ID" \
"Use the image tool on proof.png. Determine which color is on the left half and which is on the right half. Then use the write tool to write exactly: LEFT=RED RIGHT=GREEN into image.txt. Reply with exactly: LEFT=RED RIGHT=GREEN" \
"$TURN4_JSON"
assert_agent_json_has_text "$TURN4_JSON"
assert_agent_json_ok "$TURN4_JSON" "$agent_model_provider"
if [[ "$(cat "$IMAGE_TXT" 2>/dev/null | tr -d '\r\n' || true)" != "LEFT=RED RIGHT=GREEN" ]]; then
echo "ERROR: image.txt did not contain expected marker ($profile)" >&2
exit 1
fi
local reply4
reply4="$(extract_matching_text "$TURN4_JSON" "LEFT=RED RIGHT=GREEN")"
if [[ "$reply4" != "LEFT=RED RIGHT=GREEN" ]]; then
echo "ERROR: agent reply did not contain expected marker ($profile): $reply4" >&2
exit 1
fi
echo "==> Verify tool usage via session transcript ($profile)"
# Give the gateway a moment to flush transcripts.
sleep 1
if [[ ! -f "$SESSION_JSONL" ]]; then
echo "ERROR: missing session transcript ($profile): $SESSION_JSONL" >&2
ls -la "/root/.clawdbot-${profile}/agents/main/sessions" >&2 || true
exit 1
fi
assert_session_used_tools "$SESSION_JSONL" read write exec image
# Tear down the gateway and clear the trap so the next profile starts clean.
cleanup_profile
trap - EXIT
}
# Run one isolated profile per requested provider (distinct ports/workspaces).
if [[ "$MODELS_MODE" == "openai" || "$MODELS_MODE" == "both" ]]; then
run_profile "e2e-openai" "18789" "/tmp/clawd-e2e-openai" "openai"
fi
if [[ "$MODELS_MODE" == "anthropic" || "$MODELS_MODE" == "both" ]]; then
run_profile "e2e-anthropic" "18799" "/tmp/clawd-e2e-anthropic" "anthropic"
fi
echo "OK"

View File

@@ -0,0 +1,29 @@
# Image for the non-root installer test: plain Ubuntu (no Node preinstalled)
# with a passwordless-sudo user, mirroring a fresh end-user machine.
FROM ubuntu:24.04
# Retry apt-get update up to 3 times to ride out transient mirror failures.
RUN set -eux; \
for attempt in 1 2 3; do \
if apt-get update -o Acquire::Retries=3; then break; fi; \
echo "apt-get update failed (attempt ${attempt})" >&2; \
if [ "${attempt}" -eq 3 ]; then exit 1; fi; \
sleep 3; \
done; \
apt-get -o Acquire::Retries=3 install -y --no-install-recommends \
bash \
ca-certificates \
curl \
sudo \
&& rm -rf /var/lib/apt/lists/*
# Unprivileged user with passwordless sudo — the installer must cope with both.
RUN useradd -m -s /bin/bash app \
&& echo "app ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/app
USER app
WORKDIR /home/app
# Quiet npm output in CI logs.
ENV NPM_CONFIG_FUND=false
ENV NPM_CONFIG_AUDIT=false
COPY run.sh /usr/local/bin/moltbot-install-nonroot
RUN sudo chmod +x /usr/local/bin/moltbot-install-nonroot
ENTRYPOINT ["/usr/local/bin/moltbot-install-nonroot"]

View File

@@ -0,0 +1,70 @@
#!/usr/bin/env bash
# Non-root installer test: starting from a machine without git, run the public
# install one-liner as an unprivileged user and verify that git gets installed
# and the CLI lands on PATH at the expected version. Handles both package
# names (moltbot / clawdbot) since the installer may ship either.
set -euo pipefail
INSTALL_URL="${CLAWDBOT_INSTALL_URL:-https://molt.bot/install.sh}"
# Infer the default package name from the install URL unless overridden.
DEFAULT_PACKAGE="moltbot"
if [[ -z "${CLAWDBOT_INSTALL_PACKAGE:-}" && "$INSTALL_URL" == *"clawd.bot"* ]]; then
DEFAULT_PACKAGE="clawdbot"
fi
PACKAGE_NAME="${CLAWDBOT_INSTALL_PACKAGE:-$DEFAULT_PACKAGE}"
if [[ "$PACKAGE_NAME" == "moltbot" ]]; then
ALT_PACKAGE_NAME="clawdbot"
else
ALT_PACKAGE_NAME="moltbot"
fi
echo "==> Pre-flight: ensure git absent"
# The test's premise is a git-less machine; bail if the image drifted.
if command -v git >/dev/null; then
echo "git is present unexpectedly" >&2
exit 1
fi
echo "==> Run installer (non-root user)"
curl -fsSL "$INSTALL_URL" | bash
# Ensure PATH picks up user npm prefix
export PATH="$HOME/.npm-global/bin:$PATH"
echo "==> Verify git installed"
command -v git >/dev/null
EXPECTED_VERSION="${CLAWDBOT_INSTALL_EXPECT_VERSION:-}"
if [[ -n "$EXPECTED_VERSION" ]]; then
LATEST_VERSION="$EXPECTED_VERSION"
else
LATEST_VERSION="$(npm view "$PACKAGE_NAME" version)"
fi
# Locate the CLI: try the expected name on PATH, then the alternate name,
# then both names directly inside the user npm prefix.
CLI_NAME="$PACKAGE_NAME"
CMD_PATH="$(command -v "$CLI_NAME" || true)"
if [[ -z "$CMD_PATH" ]]; then
CLI_NAME="$ALT_PACKAGE_NAME"
CMD_PATH="$(command -v "$CLI_NAME" || true)"
fi
if [[ -z "$CMD_PATH" && -x "$HOME/.npm-global/bin/$PACKAGE_NAME" ]]; then
CLI_NAME="$PACKAGE_NAME"
CMD_PATH="$HOME/.npm-global/bin/$PACKAGE_NAME"
fi
if [[ -z "$CMD_PATH" && -x "$HOME/.npm-global/bin/$ALT_PACKAGE_NAME" ]]; then
CLI_NAME="$ALT_PACKAGE_NAME"
CMD_PATH="$HOME/.npm-global/bin/$ALT_PACKAGE_NAME"
fi
if [[ -z "$CMD_PATH" ]]; then
echo "Neither $PACKAGE_NAME nor $ALT_PACKAGE_NAME is on PATH" >&2
exit 1
fi
# If we fell back to the alternate package, re-resolve its latest version.
if [[ -z "$EXPECTED_VERSION" && "$CLI_NAME" != "$PACKAGE_NAME" ]]; then
LATEST_VERSION="$(npm view "$CLI_NAME" version)"
fi
echo "==> Verify CLI installed: $CLI_NAME"
INSTALLED_VERSION="$("$CMD_PATH" --version 2>/dev/null | head -n 1 | tr -d '\r')"
echo "cli=$CLI_NAME installed=$INSTALLED_VERSION expected=$LATEST_VERSION"
if [[ "$INSTALLED_VERSION" != "$LATEST_VERSION" ]]; then
echo "ERROR: expected ${CLI_NAME}@${LATEST_VERSION}, got ${CLI_NAME}@${INSTALLED_VERSION}" >&2
exit 1
fi
echo "==> Sanity: CLI runs"
"$CMD_PATH" --help >/dev/null
echo "OK"

View File

@@ -0,0 +1,21 @@
# Image for the installer smoke test: Node preinstalled, plus the tools the
# installer script needs (curl, git, sudo).
FROM node:22-bookworm-slim
# Retry apt-get update up to 3 times to ride out transient mirror failures.
RUN set -eux; \
for attempt in 1 2 3; do \
if apt-get update -o Acquire::Retries=3; then break; fi; \
echo "apt-get update failed (attempt ${attempt})" >&2; \
if [ "${attempt}" -eq 3 ]; then exit 1; fi; \
sleep 3; \
done; \
apt-get -o Acquire::Retries=3 install -y --no-install-recommends \
bash \
ca-certificates \
curl \
git \
sudo \
&& rm -rf /var/lib/apt/lists/*
COPY run.sh /usr/local/bin/moltbot-install-smoke
RUN chmod +x /usr/local/bin/moltbot-install-smoke
ENTRYPOINT ["/usr/local/bin/moltbot-install-smoke"]

View File

@@ -0,0 +1,87 @@
#!/usr/bin/env bash
# Installer smoke test: preinstall the previous published version (to force
# the installer's upgrade path), run the public install one-liner, and verify
# the CLI resolves to the latest version. Handles both package names
# (moltbot / clawdbot).
set -euo pipefail
INSTALL_URL="${CLAWDBOT_INSTALL_URL:-https://molt.bot/install.sh}"
SMOKE_PREVIOUS_VERSION="${CLAWDBOT_INSTALL_SMOKE_PREVIOUS:-}"
SKIP_PREVIOUS="${CLAWDBOT_INSTALL_SMOKE_SKIP_PREVIOUS:-0}"
# Infer the default package name from the install URL unless overridden.
DEFAULT_PACKAGE="moltbot"
if [[ -z "${CLAWDBOT_INSTALL_PACKAGE:-}" && "$INSTALL_URL" == *"clawd.bot"* ]]; then
DEFAULT_PACKAGE="clawdbot"
fi
PACKAGE_NAME="${CLAWDBOT_INSTALL_PACKAGE:-$DEFAULT_PACKAGE}"
if [[ "$PACKAGE_NAME" == "moltbot" ]]; then
ALT_PACKAGE_NAME="clawdbot"
else
ALT_PACKAGE_NAME="moltbot"
fi
echo "==> Resolve npm versions"
LATEST_VERSION="$(npm view "$PACKAGE_NAME" version)"
# Previous version: either pinned via env, or the entry just before latest in
# the registry's version list (computed by the embedded Node script).
if [[ -n "$SMOKE_PREVIOUS_VERSION" ]]; then
PREVIOUS_VERSION="$SMOKE_PREVIOUS_VERSION"
else
VERSIONS_JSON="$(npm view "$PACKAGE_NAME" versions --json)"
PREVIOUS_VERSION="$(VERSIONS_JSON="$VERSIONS_JSON" LATEST_VERSION="$LATEST_VERSION" node - <<'NODE'
const raw = process.env.VERSIONS_JSON || "[]";
const latest = process.env.LATEST_VERSION || "";
let versions;
try {
versions = JSON.parse(raw);
} catch {
versions = raw ? [raw] : [];
}
if (!Array.isArray(versions)) {
versions = [versions];
}
if (versions.length === 0) {
process.exit(1);
}
const latestIndex = latest ? versions.lastIndexOf(latest) : -1;
if (latestIndex > 0) {
process.stdout.write(String(versions[latestIndex - 1]));
process.exit(0);
}
process.stdout.write(String(latest || versions[versions.length - 1]));
NODE
)"
fi
echo "package=$PACKAGE_NAME latest=$LATEST_VERSION previous=$PREVIOUS_VERSION"
if [[ "$SKIP_PREVIOUS" == "1" ]]; then
echo "==> Skip preinstall previous (CLAWDBOT_INSTALL_SMOKE_SKIP_PREVIOUS=1)"
else
echo "==> Preinstall previous (forces installer upgrade path)"
npm install -g "${PACKAGE_NAME}@${PREVIOUS_VERSION}"
fi
echo "==> Run official installer one-liner"
curl -fsSL "$INSTALL_URL" | bash
echo "==> Verify installed version"
# Fall back to the alternate CLI name if the expected one is not on PATH.
CLI_NAME="$PACKAGE_NAME"
if ! command -v "$CLI_NAME" >/dev/null 2>&1; then
if command -v "$ALT_PACKAGE_NAME" >/dev/null 2>&1; then
CLI_NAME="$ALT_PACKAGE_NAME"
LATEST_VERSION="$(npm view "$CLI_NAME" version)"
echo "==> Detected alternate CLI: $CLI_NAME"
else
echo "ERROR: neither $PACKAGE_NAME nor $ALT_PACKAGE_NAME is on PATH" >&2
exit 1
fi
fi
# Optionally export the resolved latest version for the caller.
if [[ -n "${CLAWDBOT_INSTALL_LATEST_OUT:-}" ]]; then
printf "%s" "$LATEST_VERSION" > "$CLAWDBOT_INSTALL_LATEST_OUT"
fi
INSTALLED_VERSION="$("$CLI_NAME" --version 2>/dev/null | head -n 1 | tr -d '\r')"
echo "cli=$CLI_NAME installed=$INSTALLED_VERSION expected=$LATEST_VERSION"
if [[ "$INSTALLED_VERSION" != "$LATEST_VERSION" ]]; then
echo "ERROR: expected ${CLI_NAME}@${LATEST_VERSION}, got ${CLI_NAME}@${INSTALLED_VERSION}" >&2
exit 1
fi
echo "==> Sanity: CLI runs"
"$CLI_NAME" --help >/dev/null
echo "OK"

View File

@@ -0,0 +1,167 @@
#!/usr/bin/env node
// docs:list — prints every markdown file under ./docs with its front-matter
// summary and "read when" hints. Must be run from the repo root.
import { existsSync, readdirSync, readFileSync, statSync } from 'node:fs';
import { join, relative } from 'node:path';
// Exit quietly when stdout is closed early (e.g. piped into `head`).
process.stdout.on('error', (error) => {
if (error?.code === 'EPIPE') {
process.exit(0);
}
throw error;
});
const DOCS_DIR = join(process.cwd(), 'docs');
// Guard against running from the wrong directory or a weird checkout.
if (!existsSync(DOCS_DIR)) {
console.error('docs:list: missing docs directory. Run from repo root.');
process.exit(1);
}
if (!statSync(DOCS_DIR).isDirectory()) {
console.error('docs:list: docs path is not a directory.');
process.exit(1);
}
// Directories intentionally left out of the listing.
const EXCLUDED_DIRS = new Set(['archive', 'research']);
/**
 * Normalize an arbitrary list of values into trimmed, non-empty strings.
 * Null and undefined entries are dropped; everything else is stringified.
 *
 * @param {unknown[]} values
 * @returns {string[]}
 */
function compactStrings(values) {
  return values
    .filter((value) => value !== null && value !== undefined)
    .map((value) => String(value).trim())
    .filter((text) => text.length > 0);
}
/**
 * Recursively collect markdown (.md) file paths under `dir`, expressed
 * relative to `base`. Hidden entries and directories named in EXCLUDED_DIRS
 * are skipped; each level's results are returned locale-sorted.
 *
 * @param {string} dir
 * @param {string} base
 * @returns {string[]}
 */
function walkMarkdownFiles(dir, base = dir) {
  const collected = [];
  for (const entry of readdirSync(dir, { withFileTypes: true })) {
    if (entry.name.startsWith('.')) continue;
    const fullPath = join(dir, entry.name);
    if (entry.isDirectory()) {
      if (!EXCLUDED_DIRS.has(entry.name)) {
        collected.push(...walkMarkdownFiles(fullPath, base));
      }
    } else if (entry.isFile() && entry.name.endsWith('.md')) {
      collected.push(relative(base, fullPath));
    }
  }
  return collected.sort((a, b) => a.localeCompare(b));
}
/**
 * Parse the YAML-ish front matter of a markdown file for a `summary:` value
 * and an optional `read_when:` list (inline `[...]` or dash items). This is a
 * line-oriented hand parser, not a YAML library — it tolerates malformed
 * input and reports a reason string instead of throwing.
 *
 * @param {string} fullPath
 * @returns {{ summary: string | null; readWhen: string[]; error?: string }}
 */
function extractMetadata(fullPath) {
const content = readFileSync(fullPath, 'utf8');
// Front matter must start at the very first byte with "---".
if (!content.startsWith('---')) {
return { summary: null, readWhen: [], error: 'missing front matter' };
}
const endIndex = content.indexOf('\n---', 3);
if (endIndex === -1) {
return { summary: null, readWhen: [], error: 'unterminated front matter' };
}
const frontMatter = content.slice(3, endIndex).trim();
const lines = frontMatter.split('\n');
let summaryLine = null;
const readWhen = [];
// Tracks whether we are inside a multi-line `read_when:` dash list.
let collectingField = null;
for (const rawLine of lines) {
const line = rawLine.trim();
if (line.startsWith('summary:')) {
summaryLine = line;
collectingField = null;
continue;
}
if (line.startsWith('read_when:')) {
collectingField = 'read_when';
// Inline array form: read_when: ['a', 'b'] — single quotes are swapped
// for double quotes so JSON.parse can handle the common simple case.
const inline = line.slice('read_when:'.length).trim();
if (inline.startsWith('[') && inline.endsWith(']')) {
try {
const parsed = JSON.parse(inline.replace(/'/g, '"'));
if (Array.isArray(parsed)) {
readWhen.push(...compactStrings(parsed));
}
} catch {
// ignore malformed inline arrays
}
}
continue;
}
if (collectingField === 'read_when') {
if (line.startsWith('- ')) {
const hint = line.slice(2).trim();
if (hint) {
readWhen.push(hint);
}
} else if (line === '') {
// allow blank lines inside the list
} else {
// Any other line terminates the dash list.
collectingField = null;
}
}
}
if (!summaryLine) {
return { summary: null, readWhen, error: 'summary key missing' };
}
// Strip one surrounding quote pair and collapse internal whitespace.
const summaryValue = summaryLine.slice('summary:'.length).trim();
const normalized = summaryValue
.replace(/^['"]|['"]$/g, '')
.replace(/\s+/g, ' ')
.trim();
if (!normalized) {
return { summary: null, readWhen, error: 'summary is empty' };
}
return { summary: normalized, readWhen };
}
console.log('Listing all markdown files in docs folder:');
const markdownFiles = walkMarkdownFiles(DOCS_DIR);
// One line per file: "path - summary", plus an indented read-when hint line.
// Files with missing/broken front matter show the parse error in brackets.
for (const relativePath of markdownFiles) {
const fullPath = join(DOCS_DIR, relativePath);
const { summary, readWhen, error } = extractMetadata(fullPath);
if (summary) {
console.log(`${relativePath} - ${summary}`);
if (readWhen.length > 0) {
console.log(`  Read when: ${readWhen.join('; ')}`);
}
} else {
const reason = error ? ` - [${error}]` : '';
console.log(`${relativePath}${reason}`);
}
}
console.log(
'\nReminder: keep docs up to date as behavior changes. When your task matches any "Read when" hint above (React hooks, cache directives, database work, tests, etc.), read that doc before coding, and suggest new coverage when it is missing.'
);

View File

@@ -0,0 +1,23 @@
# Full build image for the e2e suites: installs workspace deps, builds the
# server and UI, then drops into bash so the test driver can exec commands.
FROM node:22-bookworm
RUN corepack enable
WORKDIR /app
# Silence Node experimental warnings in test output.
ENV NODE_OPTIONS="--disable-warning=ExperimentalWarning"
# Copy manifests and sources explicitly (rather than COPY . .) so unrelated
# repo files do not invalidate the install/build layers.
COPY package.json pnpm-lock.yaml pnpm-workspace.yaml tsconfig.json vitest.config.ts vitest.e2e.config.ts ./
COPY src ./src
COPY test ./test
COPY scripts ./scripts
COPY docs ./docs
COPY skills ./skills
COPY patches ./patches
COPY ui ./ui
COPY extensions/memory-core ./extensions/memory-core
RUN pnpm install --frozen-lockfile
RUN pnpm build
RUN pnpm ui:build
CMD ["bash"]

View File

@@ -0,0 +1,9 @@
# Minimal image: full repo copy with dependencies installed via pnpm.
# No build step and no explicit CMD — callers supply the command.
FROM node:22-bookworm
RUN corepack enable
WORKDIR /app
COPY . .
RUN pnpm install --frozen-lockfile

View File

@@ -0,0 +1,147 @@
#!/usr/bin/env bash
# Doctor install-switch E2E: inside a Docker container, install the gateway
# service from one install variant (npm-global vs git checkout) and verify
# that running `doctor --repair` from the other variant rewrites the systemd
# unit's ExecStart entrypoint to point at the invoking variant. systemctl and
# loginctl are stubbed since Docker has no systemd.
# NOTE: the entire payload below runs inside the container via a single-quoted
# `bash -lc` string — edit it with shell-quoting care.
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
IMAGE_NAME="moltbot-doctor-install-switch-e2e"
echo "Building Docker image..."
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR"
echo "Running doctor install switch E2E..."
docker run --rm -t "$IMAGE_NAME" bash -lc '
set -euo pipefail
# Keep logs focused; the npm global install step can emit noisy deprecation warnings.
export npm_config_loglevel=error
export npm_config_fund=false
export npm_config_audit=false
# Stub systemd/loginctl so doctor + daemon flows work in Docker.
export PATH="/tmp/moltbot-bin:$PATH"
mkdir -p /tmp/moltbot-bin
cat > /tmp/moltbot-bin/systemctl <<"SYSTEMCTL"
#!/usr/bin/env bash
set -euo pipefail
args=("$@")
if [[ "${args[0]:-}" == "--user" ]]; then
args=("${args[@]:1}")
fi
cmd="${args[0]:-}"
case "$cmd" in
status)
exit 0
;;
is-enabled)
unit="${args[1]:-}"
unit_path="$HOME/.config/systemd/user/${unit}"
if [ -f "$unit_path" ]; then
exit 0
fi
exit 1
;;
show)
echo "ActiveState=inactive"
echo "SubState=dead"
echo "MainPID=0"
echo "ExecMainStatus=0"
echo "ExecMainCode=0"
exit 0
;;
*)
exit 0
;;
esac
SYSTEMCTL
chmod +x /tmp/moltbot-bin/systemctl
cat > /tmp/moltbot-bin/loginctl <<"LOGINCTL"
#!/usr/bin/env bash
set -euo pipefail
if [[ "$*" == *"show-user"* ]]; then
echo "Linger=yes"
exit 0
fi
if [[ "$*" == *"enable-linger"* ]]; then
exit 0
fi
exit 0
LOGINCTL
chmod +x /tmp/moltbot-bin/loginctl
# Install the npm-global variant from the local /app source.
# `npm pack` can emit script output; keep only the tarball name.
pkg_tgz="$(npm pack --silent /app | tail -n 1 | tr -d '\r')"
if [ ! -f "/app/$pkg_tgz" ]; then
echo "npm pack failed (expected /app/$pkg_tgz)"
exit 1
fi
npm install -g --prefix /tmp/npm-prefix "/app/$pkg_tgz"
npm_bin="/tmp/npm-prefix/bin/moltbot"
npm_entry="/tmp/npm-prefix/lib/node_modules/moltbot/dist/entry.js"
git_entry="/app/dist/entry.js"
assert_entrypoint() {
local unit_path="$1"
local expected="$2"
local exec_line=""
exec_line=$(grep -m1 "^ExecStart=" "$unit_path" || true)
if [ -z "$exec_line" ]; then
echo "Missing ExecStart in $unit_path"
exit 1
fi
exec_line="${exec_line#ExecStart=}"
entrypoint=$(echo "$exec_line" | awk "{print \$2}")
entrypoint="${entrypoint%\"}"
entrypoint="${entrypoint#\"}"
if [ "$entrypoint" != "$expected" ]; then
echo "Expected entrypoint $expected, got $entrypoint"
exit 1
fi
}
# Each flow: install service with one variant, run doctor from the other,
# and verify ExecStart entrypoint switches accordingly.
run_flow() {
local name="$1"
local install_cmd="$2"
local install_expected="$3"
local doctor_cmd="$4"
local doctor_expected="$5"
echo "== Flow: $name =="
home_dir=$(mktemp -d "/tmp/moltbot-switch-${name}.XXXXXX")
export HOME="$home_dir"
export USER="testuser"
eval "$install_cmd"
unit_path="$HOME/.config/systemd/user/moltbot-gateway.service"
if [ ! -f "$unit_path" ]; then
echo "Missing unit file: $unit_path"
exit 1
fi
assert_entrypoint "$unit_path" "$install_expected"
eval "$doctor_cmd"
assert_entrypoint "$unit_path" "$doctor_expected"
}
run_flow \
"npm-to-git" \
"$npm_bin daemon install --force" \
"$npm_entry" \
"node $git_entry doctor --repair --force" \
"$git_entry"
run_flow \
"git-to-npm" \
"node $git_entry daemon install --force" \
"$git_entry" \
"$npm_bin doctor --repair --force" \
"$npm_entry"
'

View File

@@ -0,0 +1,115 @@
#!/usr/bin/env bash
# Gateway network E2E: start the gateway in one container bound to a shared
# Docker network, then run a second container that connects over WebSocket,
# performs the token-auth `connect` handshake, and checks `health`.
# NOTE: the client payload runs inside a double-quoted `bash -lc` heredoc —
# edit it with shell-quoting care.
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
IMAGE_NAME="moltbot-gateway-network-e2e"
PORT="18789"
# Per-run token and resource names ($$ = PID) so parallel runs cannot collide.
TOKEN="e2e-$(date +%s)-$$"
NET_NAME="moltbot-net-e2e-$$"
GW_NAME="moltbot-gateway-e2e-$$"
# Always remove the gateway container and network, even on failure.
cleanup() {
docker rm -f "$GW_NAME" >/dev/null 2>&1 || true
docker network rm "$NET_NAME" >/dev/null 2>&1 || true
}
trap cleanup EXIT
echo "Building Docker image..."
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR"
echo "Creating Docker network..."
docker network create "$NET_NAME" >/dev/null
echo "Starting gateway container..."
# CLAWDBOT_SKIP_* env vars disable subsystems irrelevant to this test.
docker run --rm -d \
--name "$GW_NAME" \
--network "$NET_NAME" \
-e "CLAWDBOT_GATEWAY_TOKEN=$TOKEN" \
-e "CLAWDBOT_SKIP_CHANNELS=1" \
-e "CLAWDBOT_SKIP_GMAIL_WATCHER=1" \
-e "CLAWDBOT_SKIP_CRON=1" \
-e "CLAWDBOT_SKIP_CANVAS_HOST=1" \
"$IMAGE_NAME" \
bash -lc "node dist/index.js gateway --port $PORT --bind lan --allow-unconfigured > /tmp/gateway-net-e2e.log 2>&1"
echo "Waiting for gateway to come up..."
# Poll the gateway log (up to ~10s) for the listening banner.
for _ in $(seq 1 20); do
if docker exec "$GW_NAME" bash -lc "grep -q \"listening on ws://\" /tmp/gateway-net-e2e.log"; then
break
fi
sleep 0.5
done
docker exec "$GW_NAME" bash -lc "tail -n 50 /tmp/gateway-net-e2e.log"
echo "Running client container (connect + health)..."
docker run --rm \
--network "$NET_NAME" \
-e "GW_URL=ws://$GW_NAME:$PORT" \
-e "GW_TOKEN=$TOKEN" \
"$IMAGE_NAME" \
bash -lc "node - <<'NODE'
import { WebSocket } from \"ws\";
import { PROTOCOL_VERSION } from \"./dist/gateway/protocol/index.js\";
const url = process.env.GW_URL;
const token = process.env.GW_TOKEN;
if (!url || !token) throw new Error(\"missing GW_URL/GW_TOKEN\");
const ws = new WebSocket(url);
await new Promise((resolve, reject) => {
const t = setTimeout(() => reject(new Error(\"ws open timeout\")), 5000);
ws.once(\"open\", () => {
clearTimeout(t);
resolve();
});
});
function onceFrame(filter, timeoutMs = 5000) {
return new Promise((resolve, reject) => {
const t = setTimeout(() => reject(new Error(\"timeout\")), timeoutMs);
const handler = (data) => {
const obj = JSON.parse(String(data));
if (!filter(obj)) return;
clearTimeout(t);
ws.off(\"message\", handler);
resolve(obj);
};
ws.on(\"message\", handler);
});
}
ws.send(
JSON.stringify({
type: \"req\",
id: \"c1\",
method: \"connect\",
params: {
minProtocol: PROTOCOL_VERSION,
maxProtocol: PROTOCOL_VERSION,
client: {
id: \"test\",
displayName: \"docker-net-e2e\",
version: \"dev\",
platform: process.platform,
mode: \"test\",
},
caps: [],
auth: { token },
},
}),
);
const connectRes = await onceFrame((o) => o?.type === \"res\" && o?.id === \"c1\");
if (!connectRes.ok) throw new Error(\"connect failed: \" + (connectRes.error?.message ?? \"unknown\"));
ws.send(JSON.stringify({ type: \"req\", id: \"h1\", method: \"health\" }));
const healthRes = await onceFrame((o) => o?.type === \"res\" && o?.id === \"h1\", 10000);
if (!healthRes.ok) throw new Error(\"health failed: \" + (healthRes.error?.message ?? \"unknown\"));
if (healthRes.payload?.ok !== true) throw new Error(\"unexpected health payload\");
ws.close();
console.log(\"ok\");
NODE"
echo "OK"

View File

@@ -0,0 +1,545 @@
#!/usr/bin/env bash
# Onboarding E2E: drives the interactive onboarding/configure wizards
# inside Docker, feeding keystrokes through a FIFO attached to a PTY.
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
IMAGE_NAME="moltbot-onboard-e2e"
echo "Building Docker image..."
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR"
echo "Running onboarding E2E..."
docker run --rm -t "$IMAGE_NAME" bash -lc '
set -euo pipefail
trap "" PIPE
export TERM=xterm-256color
ONBOARD_FLAGS="--flow quickstart --auth-choice skip --skip-channels --skip-skills --skip-daemon --skip-ui"
# Provide a minimal trash shim to avoid noisy "missing trash" logs in containers.
export PATH="/tmp/moltbot-bin:$PATH"
mkdir -p /tmp/moltbot-bin
cat > /tmp/moltbot-bin/trash <<'"'"'TRASH'"'"'
#!/usr/bin/env bash
set -euo pipefail
trash_dir="$HOME/.Trash"
mkdir -p "$trash_dir"
for target in "$@"; do
[ -e "$target" ] || continue
base="$(basename "$target")"
dest="$trash_dir/$base"
if [ -e "$dest" ]; then
dest="$trash_dir/${base}-$(date +%s)-$$"
fi
mv "$target" "$dest"
done
TRASH
chmod +x /tmp/moltbot-bin/trash
# Write a keystroke payload to the wizard FIFO (fd 3) after a short delay.
send() {
local payload="$1"
local delay="${2:-0.4}"
# Let prompts render before sending keystrokes.
sleep "$delay"
printf "%b" "$payload" >&3 2>/dev/null || true
}
# Block until the wizard log contains the needle. Tries an exact match
# first, then a match with everything compacted to alphanumerics so ANSI
# escapes and line wrapping do not break the comparison.
wait_for_log() {
local needle="$1"
local timeout_s="${2:-45}"
local needle_compact
needle_compact="$(printf "%s" "$needle" | tr -cd "[:alnum:]")"
local start_s
start_s="$(date +%s)"
while true; do
if [ -n "${WIZARD_LOG_PATH:-}" ] && [ -f "$WIZARD_LOG_PATH" ]; then
if grep -a -F -q "$needle" "$WIZARD_LOG_PATH"; then
return 0
fi
if NEEDLE=\"$needle_compact\" node --input-type=module -e "
import fs from \"node:fs\";
const file = process.env.WIZARD_LOG_PATH;
const needle = process.env.NEEDLE ?? \"\";
let text = \"\";
try { text = fs.readFileSync(file, \"utf8\"); } catch { process.exit(1); }
if (text.length > 20000) text = text.slice(-20000);
const stripAnsi = (value) => value.replace(/\\x1b\\[[0-9;]*[A-Za-z]/g, \"\");
const compact = (value) => stripAnsi(value).toLowerCase().replace(/[^a-z0-9]+/g, \"\");
const haystack = compact(text);
const compactNeedle = compact(needle);
if (!compactNeedle) process.exit(1);
process.exit(haystack.includes(compactNeedle) ? 0 : 1);
"; then
return 0
fi
fi
if [ $(( $(date +%s) - start_s )) -ge "$timeout_s" ]; then
echo "Timeout waiting for log: $needle"
if [ -n "${WIZARD_LOG_PATH:-}" ] && [ -f "$WIZARD_LOG_PATH" ]; then
tail -n 140 "$WIZARD_LOG_PATH" || true
fi
return 1
fi
sleep 0.2
done
}
# Launch a loopback gateway in the background; PID lands in GATEWAY_PID.
start_gateway() {
node dist/index.js gateway --port 18789 --bind loopback --allow-unconfigured > /tmp/gateway-e2e.log 2>&1 &
GATEWAY_PID="$!"
}
# Wait up to ~20s until the gateway accepts TCP connects on 18789, or
# until the listen line appears in its log and the process is still alive.
wait_for_gateway() {
for _ in $(seq 1 20); do
if node --input-type=module -e "
import net from 'node:net';
const socket = net.createConnection({ host: '127.0.0.1', port: 18789 });
const timeout = setTimeout(() => {
socket.destroy();
process.exit(1);
}, 500);
socket.on('connect', () => {
clearTimeout(timeout);
socket.end();
process.exit(0);
});
socket.on('error', () => {
clearTimeout(timeout);
process.exit(1);
});
" >/dev/null 2>&1; then
return 0
fi
if [ -f /tmp/gateway-e2e.log ] && grep -E -q "listening on ws://[^ ]+:18789" /tmp/gateway-e2e.log; then
if [ -n "${GATEWAY_PID:-}" ] && kill -0 "$GATEWAY_PID" 2>/dev/null; then
return 0
fi
fi
sleep 1
done
echo "Gateway failed to start"
cat /tmp/gateway-e2e.log || true
return 1
}
# Terminate a background gateway (if one was started) and reap it.
stop_gateway() {
local gw_pid="$1"
if [ -n "$gw_pid" ]; then
kill "$gw_pid" 2>/dev/null || true
wait "$gw_pid" || true
fi
}
# Run one wizard case: launch the command under a PTY (via `script`) with
# stdin wired to a FIFO, optionally start a local gateway first, replay
# the keystroke function, then validate. Exits the whole run on failure.
# Args: case_name home_dir command send_fn [with_gateway=false] [validate_fn]
run_wizard_cmd() {
  local case_name="$1"
  local home_dir="$2"
  local command="$3"
  local send_fn="$4"
  local with_gateway="${5:-false}"
  local validate_fn="${6:-}"
  echo "== Wizard case: $case_name =="
  export HOME="$home_dir"
  mkdir -p "$HOME"
  input_fifo="$(mktemp -u "/tmp/moltbot-onboard-${case_name}.XXXXXX")"
  mkfifo "$input_fifo"
  local log_path="/tmp/moltbot-onboard-${case_name}.log"
  WIZARD_LOG_PATH="$log_path"
  export WIZARD_LOG_PATH
  # Run under script to keep an interactive TTY for clack prompts.
  script -q -f -c "$command" "$log_path" < "$input_fifo" &
  wizard_pid=$!
  exec 3> "$input_fifo"
  local gw_pid=""
  if [ "$with_gateway" = "true" ]; then
    start_gateway
    gw_pid="$GATEWAY_PID"
    wait_for_gateway
  fi
  "$send_fn"
  # BUG FIX: the old form read $? inside "if ! wait ...", but $? there is
  # the status of the negated pipeline, which is always 0 - so a failing
  # wizard used to exit 0 and the case was treated as a pass. Capture the
  # real status with "|| wizard_status=$?" instead.
  local wizard_status=0
  wait "$wizard_pid" || wizard_status=$?
  if [ "$wizard_status" -ne 0 ]; then
    exec 3>&-
    rm -f "$input_fifo"
    stop_gateway "$gw_pid"
    echo "Wizard exited with status $wizard_status"
    if [ -f "$log_path" ]; then
      tail -n 160 "$log_path" || true
    fi
    exit "$wizard_status"
  fi
  exec 3>&-
  rm -f "$input_fifo"
  stop_gateway "$gw_pid"
  if [ -n "$validate_fn" ]; then
    "$validate_fn" "$log_path"
  fi
}
# Convenience wrapper: run the default onboarding command with a gateway.
run_wizard() {
local case_name="$1"
local home_dir="$2"
local send_fn="$3"
local validate_fn="${4:-}"
# Default onboarding command wrapper.
run_wizard_cmd "$case_name" "$home_dir" "node dist/index.js onboard $ONBOARD_FLAGS" "$send_fn" true "$validate_fn"
}
# Fresh throwaway HOME directory for one test case.
make_home() {
mktemp -d "/tmp/moltbot-e2e-$1.XXXXXX"
}
# Abort the whole run if the given file is missing.
assert_file() {
local file_path="$1"
if [ ! -f "$file_path" ]; then
echo "Missing file: $file_path"
exit 1
fi
}
# Abort the whole run if the given directory is missing.
assert_dir() {
local dir_path="$1"
if [ ! -d "$dir_path" ]; then
echo "Missing dir: $dir_path"
exit 1
fi
}
select_skip_hooks() {
# Hooks multiselect: pick "Skip for now".
wait_for_log "Enable hooks?" 60 || true
send $'"'"' \r'"'"' 0.6
}
# Keystroke script for the plain local-gateway onboarding path.
send_local_basic() {
# Risk acknowledgement (default is "No").
wait_for_log "Continue?" 60
send $'"'"'y\r'"'"' 0.6
# Choose local gateway, accept defaults, skip channels/skills/daemon, skip UI.
if wait_for_log "Where will the Gateway run?" 20; then
send $'"'"'\r'"'"' 0.5
fi
select_skip_hooks
}
# Keystroke script for resetting an existing config (config-only scope).
# \e[B is the Down arrow escape sequence.
send_reset_config_only() {
# Risk acknowledgement (default is "No").
wait_for_log "Continue?" 40 || true
send $'"'"'y\r'"'"' 0.8
# Select reset flow for existing config.
wait_for_log "Config handling" 40 || true
send $'"'"'\e[B'"'"' 0.3
send $'"'"'\e[B'"'"' 0.3
send $'"'"'\r'"'"' 0.4
# Reset scope -> Config only (default).
wait_for_log "Reset scope" 40 || true
send $'"'"'\r'"'"' 0.4
select_skip_hooks
}
send_channels_flow() {
# Configure channels via configure wizard.
# Prompts are interactive; notes are not. Use conservative delays to stay in sync.
# Where will the Gateway run? -> Local (default)
send $'"'"'\r'"'"' 1.2
# Channels mode -> Configure/link (default)
send $'"'"'\r'"'"' 1.5
# Select a channel -> Finished (last option; clack wraps on Up)
send $'"'"'\e[A\r'"'"' 2.0
# Keep stdin open until wizard exits.
send "" 2.5
}
send_skills_flow() {
# Select skills section and skip optional installs.
wait_for_log "Where will the Gateway run?" 60 || true
send $'"'"'\r'"'"' 0.6
# Configure skills now? -> No
wait_for_log "Configure skills now?" 60 || true
send $'"'"'n\r'"'"' 0.8
send "" 1.0
}
# Case: fully non-interactive local onboarding; asserts the written config
# values and the workspace scaffolding files.
run_case_local_basic() {
local home_dir
home_dir="$(make_home local-basic)"
export HOME="$home_dir"
mkdir -p "$HOME"
node dist/index.js onboard \
--non-interactive \
--accept-risk \
--flow quickstart \
--mode local \
--skip-channels \
--skip-skills \
--skip-daemon \
--skip-ui \
--skip-health
# Assert config + workspace scaffolding.
workspace_dir="$HOME/clawd"
config_path="$HOME/.clawdbot/moltbot.json"
sessions_dir="$HOME/.clawdbot/agents/main/sessions"
assert_file "$config_path"
assert_dir "$sessions_dir"
for file in AGENTS.md BOOTSTRAP.md IDENTITY.md SOUL.md TOOLS.md USER.md; do
assert_file "$workspace_dir/$file"
done
# Validate the written config from node; JSON5 because the config format
# allows comments and trailing commas.
CONFIG_PATH="$config_path" WORKSPACE_DIR="$workspace_dir" node --input-type=module - <<'"'"'NODE'"'"'
import fs from "node:fs";
import JSON5 from "json5";
const cfg = JSON5.parse(fs.readFileSync(process.env.CONFIG_PATH, "utf-8"));
const expectedWorkspace = process.env.WORKSPACE_DIR;
const errors = [];
if (cfg?.agents?.defaults?.workspace !== expectedWorkspace) {
errors.push(
`agents.defaults.workspace mismatch (got ${cfg?.agents?.defaults?.workspace ?? "unset"})`,
);
}
if (cfg?.gateway?.mode !== "local") {
errors.push(`gateway.mode mismatch (got ${cfg?.gateway?.mode ?? "unset"})`);
}
if (cfg?.gateway?.bind !== "loopback") {
errors.push(`gateway.bind mismatch (got ${cfg?.gateway?.bind ?? "unset"})`);
}
if ((cfg?.gateway?.tailscale?.mode ?? "off") !== "off") {
errors.push(
`gateway.tailscale.mode mismatch (got ${cfg?.gateway?.tailscale?.mode ?? "unset"})`,
);
}
if (!cfg?.wizard?.lastRunAt) {
errors.push("wizard.lastRunAt missing");
}
if (!cfg?.wizard?.lastRunVersion) {
errors.push("wizard.lastRunVersion missing");
}
if (cfg?.wizard?.lastRunCommand !== "onboard") {
errors.push(
`wizard.lastRunCommand mismatch (got ${cfg?.wizard?.lastRunCommand ?? "unset"})`,
);
}
if (cfg?.wizard?.lastRunMode !== "local") {
errors.push(
`wizard.lastRunMode mismatch (got ${cfg?.wizard?.lastRunMode ?? "unset"})`,
);
}
if (errors.length > 0) {
console.error(errors.join("\n"));
process.exit(1);
}
NODE
}
# Case: non-interactive remote-mode onboarding must persist the remote
# gateway URL and token in the config.
run_case_remote_non_interactive() {
local home_dir
home_dir="$(make_home remote-non-interactive)"
export HOME="$home_dir"
mkdir -p "$HOME"
# Smoke test non-interactive remote config write.
node dist/index.js onboard --non-interactive --accept-risk \
--mode remote \
--remote-url ws://gateway.local:18789 \
--remote-token remote-token \
--skip-skills \
--skip-health
config_path="$HOME/.clawdbot/moltbot.json"
assert_file "$config_path"
CONFIG_PATH="$config_path" node --input-type=module - <<'"'"'NODE'"'"'
import fs from "node:fs";
import JSON5 from "json5";
const cfg = JSON5.parse(fs.readFileSync(process.env.CONFIG_PATH, "utf-8"));
const errors = [];
if (cfg?.gateway?.mode !== "remote") {
errors.push(`gateway.mode mismatch (got ${cfg?.gateway?.mode ?? "unset"})`);
}
if (cfg?.gateway?.remote?.url !== "ws://gateway.local:18789") {
errors.push(`gateway.remote.url mismatch (got ${cfg?.gateway?.remote?.url ?? "unset"})`);
}
if (cfg?.gateway?.remote?.token !== "remote-token") {
errors.push(`gateway.remote.token mismatch (got ${cfg?.gateway?.remote?.token ?? "unset"})`);
}
if (cfg?.wizard?.lastRunMode !== "remote") {
errors.push(`wizard.lastRunMode mismatch (got ${cfg?.wizard?.lastRunMode ?? "unset"})`);
}
if (errors.length > 0) {
console.error(errors.join("\n"));
process.exit(1);
}
NODE
}
# Case: --reset over a seeded remote config must produce a clean local
# config with the old remote URL/token dropped.
run_case_reset() {
local home_dir
home_dir="$(make_home reset-config)"
export HOME="$home_dir"
mkdir -p "$HOME/.clawdbot"
# Seed a remote config to exercise reset path.
cat > "$HOME/.clawdbot/moltbot.json" <<'"'"'JSON'"'"'
{
"agents": { "defaults": { "workspace": "/root/old" } },
"gateway": {
"mode": "remote",
"remote": { "url": "ws://old.example:18789", "token": "old-token" }
}
}
JSON
node dist/index.js onboard \
--non-interactive \
--accept-risk \
--flow quickstart \
--mode local \
--reset \
--skip-channels \
--skip-skills \
--skip-daemon \
--skip-ui \
--skip-health
config_path="$HOME/.clawdbot/moltbot.json"
assert_file "$config_path"
CONFIG_PATH="$config_path" node --input-type=module - <<'"'"'NODE'"'"'
import fs from "node:fs";
import JSON5 from "json5";
const cfg = JSON5.parse(fs.readFileSync(process.env.CONFIG_PATH, "utf-8"));
const errors = [];
if (cfg?.gateway?.mode !== "local") {
errors.push(`gateway.mode mismatch (got ${cfg?.gateway?.mode ?? "unset"})`);
}
if (cfg?.gateway?.remote?.url) {
errors.push(`gateway.remote.url should be cleared (got ${cfg?.gateway?.remote?.url})`);
}
if (cfg?.wizard?.lastRunMode !== "local") {
errors.push(`wizard.lastRunMode mismatch (got ${cfg?.wizard?.lastRunMode ?? "unset"})`);
}
if (errors.length > 0) {
console.error(errors.join("\n"));
process.exit(1);
}
NODE
}
# Case: the interactive channels wizard exits without configuring any
# channel and must not write any channel tokens.
run_case_channels() {
local home_dir
home_dir="$(make_home channels)"
# Channels-only configure flow.
run_wizard_cmd channels "$home_dir" "node dist/index.js configure --section channels" send_channels_flow
config_path="$HOME/.clawdbot/moltbot.json"
assert_file "$config_path"
CONFIG_PATH="$config_path" node --input-type=module - <<'"'"'NODE'"'"'
import fs from "node:fs";
import JSON5 from "json5";
const cfg = JSON5.parse(fs.readFileSync(process.env.CONFIG_PATH, "utf-8"));
const errors = [];
if (cfg?.telegram?.botToken) {
errors.push(`telegram.botToken should be unset (got ${cfg?.telegram?.botToken})`);
}
if (cfg?.discord?.token) {
errors.push(`discord.token should be unset (got ${cfg?.discord?.token})`);
}
if (cfg?.slack?.botToken || cfg?.slack?.appToken) {
errors.push(
`slack tokens should be unset (got bot=${cfg?.slack?.botToken ?? "unset"}, app=${cfg?.slack?.appToken ?? "unset"})`,
);
}
if (cfg?.wizard?.lastRunCommand !== "configure") {
errors.push(
`wizard.lastRunCommand mismatch (got ${cfg?.wizard?.lastRunCommand ?? "unset"})`,
);
}
if (errors.length > 0) {
console.error(errors.join("\n"));
process.exit(1);
}
NODE
}
# Case: the skills wizard must leave a pre-seeded skills config untouched.
run_case_skills() {
local home_dir
home_dir="$(make_home skills)"
export HOME="$home_dir"
mkdir -p "$HOME/.clawdbot"
# Seed skills config to ensure it survives the wizard.
cat > "$HOME/.clawdbot/moltbot.json" <<'"'"'JSON'"'"'
{
"skills": {
"allowBundled": ["__none__"],
"install": { "nodeManager": "bun" }
}
}
JSON
run_wizard_cmd skills "$home_dir" "node dist/index.js configure --section skills" send_skills_flow
config_path="$HOME/.clawdbot/moltbot.json"
assert_file "$config_path"
CONFIG_PATH="$config_path" node --input-type=module - <<'"'"'NODE'"'"'
import fs from "node:fs";
import JSON5 from "json5";
const cfg = JSON5.parse(fs.readFileSync(process.env.CONFIG_PATH, "utf-8"));
const errors = [];
if (cfg?.skills?.install?.nodeManager !== "bun") {
errors.push(`skills.install.nodeManager mismatch (got ${cfg?.skills?.install?.nodeManager ?? "unset"})`);
}
if (!Array.isArray(cfg?.skills?.allowBundled) || cfg.skills.allowBundled[0] !== "__none__") {
errors.push("skills.allowBundled missing");
}
if (cfg?.wizard?.lastRunMode !== "local") {
errors.push(`wizard.lastRunMode mismatch (got ${cfg?.wizard?.lastRunMode ?? "unset"})`);
}
if (errors.length > 0) {
console.error(errors.join("\n"));
process.exit(1);
}
NODE
}
# Abort the run when the given log file contains the needle.
assert_log_not_contains() {
  local file_path="$1"
  local needle="$2"
  # Guard clause: a clean log (or unreadable file) passes straight through.
  if ! grep -q "$needle" "$file_path"; then
    return 0
  fi
  echo "Unexpected log output: $needle"
  exit 1
}
# The local-basic wizard log must not mention systemd problems; that would
# mean the daemon step ran despite --skip-daemon.
validate_local_basic_log() {
local log_path="$1"
assert_log_not_contains "$log_path" "systemctl --user unavailable"
}
# Run every case in order; set -e aborts the container on the first failure.
run_case_local_basic
run_case_remote_non_interactive
run_case_reset
run_case_channels
run_case_skills
'
echo "E2E complete."

View File

@@ -0,0 +1,178 @@
#!/usr/bin/env bash
# Plugins E2E: exercises plugin discovery and metadata via four install
# paths (extensions dir, tgz archive, local folder, file: npm spec).
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
IMAGE_NAME="moltbot-plugins-e2e"
echo "Building Docker image..."
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR"
echo "Running plugins Docker E2E..."
docker run --rm -t "$IMAGE_NAME" bash -lc '
set -euo pipefail
home_dir=$(mktemp -d "/tmp/moltbot-plugins-e2e.XXXXXX")
export HOME="$home_dir"
# Path 1: a bare .js dropped into the extensions dir is auto-discovered.
mkdir -p "$HOME/.clawdbot/extensions"
cat > "$HOME/.clawdbot/extensions/demo-plugin.js" <<'"'"'JS'"'"'
module.exports = {
id: "demo-plugin",
name: "Demo Plugin",
description: "Docker E2E demo plugin",
register(api) {
api.registerTool(() => null, { name: "demo_tool" });
api.registerGatewayMethod("demo.ping", async () => ({ ok: true }));
api.registerCli(() => {}, { commands: ["demo"] });
api.registerService({ id: "demo-service", start: () => {} });
},
};
JS
node dist/index.js plugins list --json > /tmp/plugins.json
# Verify the plugin loaded and every registration surface is reported.
node - <<'"'"'NODE'"'"'
const fs = require("node:fs");
const data = JSON.parse(fs.readFileSync("/tmp/plugins.json", "utf8"));
const plugin = (data.plugins || []).find((entry) => entry.id === "demo-plugin");
if (!plugin) throw new Error("plugin not found");
if (plugin.status !== "loaded") {
throw new Error(`unexpected status: ${plugin.status}`);
}
const assertIncludes = (list, value, label) => {
if (!Array.isArray(list) || !list.includes(value)) {
throw new Error(`${label} missing: ${value}`);
}
};
assertIncludes(plugin.toolNames, "demo_tool", "tool");
assertIncludes(plugin.gatewayMethods, "demo.ping", "gateway method");
assertIncludes(plugin.cliCommands, "demo", "cli command");
assertIncludes(plugin.services, "demo-service", "service");
const diagErrors = (data.diagnostics || []).filter((diag) => diag.level === "error");
if (diagErrors.length > 0) {
throw new Error(`diagnostics errors: ${diagErrors.map((diag) => diag.message).join("; ")}`);
}
console.log("ok");
NODE
echo "Testing tgz install flow..."
# Path 2: install from an npm-pack style tarball (package/ top folder).
pack_dir="$(mktemp -d "/tmp/moltbot-plugin-pack.XXXXXX")"
mkdir -p "$pack_dir/package"
cat > "$pack_dir/package/package.json" <<'"'"'JSON'"'"'
{
"name": "@moltbot/demo-plugin-tgz",
"version": "0.0.1",
"moltbot": { "extensions": ["./index.js"] }
}
JSON
cat > "$pack_dir/package/index.js" <<'"'"'JS'"'"'
module.exports = {
id: "demo-plugin-tgz",
name: "Demo Plugin TGZ",
register(api) {
api.registerGatewayMethod("demo.tgz", async () => ({ ok: true }));
},
};
JS
tar -czf /tmp/demo-plugin-tgz.tgz -C "$pack_dir" package
node dist/index.js plugins install /tmp/demo-plugin-tgz.tgz
node dist/index.js plugins list --json > /tmp/plugins2.json
node - <<'"'"'NODE'"'"'
const fs = require("node:fs");
const data = JSON.parse(fs.readFileSync("/tmp/plugins2.json", "utf8"));
const plugin = (data.plugins || []).find((entry) => entry.id === "demo-plugin-tgz");
if (!plugin) throw new Error("tgz plugin not found");
if (plugin.status !== "loaded") {
throw new Error(`unexpected status: ${plugin.status}`);
}
if (!Array.isArray(plugin.gatewayMethods) || !plugin.gatewayMethods.includes("demo.tgz")) {
throw new Error("expected gateway method demo.tgz");
}
console.log("ok");
NODE
echo "Testing install from local folder (plugins.load.paths)..."
# Path 3: install straight from an unpacked plugin directory.
dir_plugin="$(mktemp -d "/tmp/moltbot-plugin-dir.XXXXXX")"
cat > "$dir_plugin/package.json" <<'"'"'JSON'"'"'
{
"name": "@moltbot/demo-plugin-dir",
"version": "0.0.1",
"moltbot": { "extensions": ["./index.js"] }
}
JSON
cat > "$dir_plugin/index.js" <<'"'"'JS'"'"'
module.exports = {
id: "demo-plugin-dir",
name: "Demo Plugin DIR",
register(api) {
api.registerGatewayMethod("demo.dir", async () => ({ ok: true }));
},
};
JS
node dist/index.js plugins install "$dir_plugin"
node dist/index.js plugins list --json > /tmp/plugins3.json
node - <<'"'"'NODE'"'"'
const fs = require("node:fs");
const data = JSON.parse(fs.readFileSync("/tmp/plugins3.json", "utf8"));
const plugin = (data.plugins || []).find((entry) => entry.id === "demo-plugin-dir");
if (!plugin) throw new Error("dir plugin not found");
if (plugin.status !== "loaded") {
throw new Error(`unexpected status: ${plugin.status}`);
}
if (!Array.isArray(plugin.gatewayMethods) || !plugin.gatewayMethods.includes("demo.dir")) {
throw new Error("expected gateway method demo.dir");
}
console.log("ok");
NODE
echo "Testing install from npm spec (file:)..."
# Path 4: install via an npm file: specifier.
file_pack_dir="$(mktemp -d "/tmp/moltbot-plugin-filepack.XXXXXX")"
mkdir -p "$file_pack_dir/package"
cat > "$file_pack_dir/package/package.json" <<'"'"'JSON'"'"'
{
"name": "@moltbot/demo-plugin-file",
"version": "0.0.1",
"moltbot": { "extensions": ["./index.js"] }
}
JSON
cat > "$file_pack_dir/package/index.js" <<'"'"'JS'"'"'
module.exports = {
id: "demo-plugin-file",
name: "Demo Plugin FILE",
register(api) {
api.registerGatewayMethod("demo.file", async () => ({ ok: true }));
},
};
JS
node dist/index.js plugins install "file:$file_pack_dir/package"
node dist/index.js plugins list --json > /tmp/plugins4.json
node - <<'"'"'NODE'"'"'
const fs = require("node:fs");
const data = JSON.parse(fs.readFileSync("/tmp/plugins4.json", "utf8"));
const plugin = (data.plugins || []).find((entry) => entry.id === "demo-plugin-file");
if (!plugin) throw new Error("file plugin not found");
if (plugin.status !== "loaded") {
throw new Error(`unexpected status: ${plugin.status}`);
}
if (!Array.isArray(plugin.gatewayMethods) || !plugin.gatewayMethods.includes("demo.file")) {
throw new Error("expected gateway method demo.file");
}
console.log("ok");
NODE
'
echo "OK"

View File

@@ -0,0 +1,11 @@
#!/usr/bin/env bash
# Smoke test: the built image can import qrcode-terminal and render a QR.
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
IMAGE_NAME="${CLAWDBOT_QR_SMOKE_IMAGE:-moltbot-qr-smoke}"
echo "Building Docker image..."
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile.qr-import" "$ROOT_DIR"
echo "Running qrcode-terminal import smoke..."
docker run --rm -t "$IMAGE_NAME" node -e "import('qrcode-terminal').then((m)=>m.default.generate('qr-smoke',{small:true}))"

View File

@@ -0,0 +1,131 @@
import { extractReadableContent, fetchFirecrawlContent } from "../src/agents/tools/web-tools.js";
// Candidate pages used when no URLs are passed on the command line.
const DEFAULT_URLS = [
"https://en.wikipedia.org/wiki/Web_scraping",
"https://news.ycombinator.com/",
"https://www.apple.com/iphone/",
"https://www.nytimes.com/",
"https://www.reddit.com/r/javascript/",
];
const urls = process.argv.slice(2);
const targets = urls.length > 0 ? urls : DEFAULT_URLS;
// Firecrawl comparison is optional; it is skipped when no API key is set.
const apiKey = process.env.FIRECRAWL_API_KEY;
const baseUrl = process.env.FIRECRAWL_BASE_URL ?? "https://api.firecrawl.dev";
// Desktop-Chrome UA so sites serve the same HTML a browser would get.
const userAgent =
"Mozilla/5.0 (Macintosh; Intel Mac OS X 14_7_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36";
// Per-request abort deadline for the local fetch path.
const timeoutMs = 30_000;
/**
 * Clamp a string to at most `max` characters for compact log output.
 * Returns "" for empty/falsy input; longer values are hard-truncated
 * with no ellipsis appended (matching the original output format).
 */
function truncate(value: string, max = 180): string {
  if (!value) return "";
  // The old code wrapped the slice in a template literal (`${...}`) for
  // no effect; return the slice directly.
  return value.length > max ? value.slice(0, max) : value;
}
/**
 * GET a URL with a hard `timeoutMs` abort and return the status,
 * content type, body text, and final (post-redirect) URL.
 */
async function fetchHtml(url: string): Promise<{
  ok: boolean;
  status: number;
  contentType: string;
  finalUrl: string;
  body: string;
}> {
  const abort = new AbortController();
  const deadline = setTimeout(() => abort.abort(), timeoutMs);
  try {
    const response = await fetch(url, {
      method: "GET",
      headers: { Accept: "*/*", "User-Agent": userAgent },
      signal: abort.signal,
    });
    const body = await response.text();
    return {
      ok: response.ok,
      status: response.status,
      contentType: response.headers.get("content-type") ?? "application/octet-stream",
      finalUrl: response.url || url,
      body,
    };
  } finally {
    // Always clear the timer so the process can exit promptly.
    clearTimeout(deadline);
  }
}
// Fetch each target URL twice: once locally (fetch + readable-content
// extraction) and, when FIRECRAWL_API_KEY is set, once via the Firecrawl
// API, printing a side-by-side summary for manual comparison.
async function run() {
if (!apiKey) {
console.log("FIRECRAWL_API_KEY not set. Firecrawl comparisons will be skipped.");
}
for (const url of targets) {
console.log(`\n=== ${url}`);
let localStatus = "skipped";
let localTitle = "";
let localText = "";
let localError: string | undefined;
try {
const res = await fetchHtml(url);
if (!res.ok) {
localStatus = `http ${res.status}`;
} else if (!res.contentType.includes("text/html")) {
localStatus = `non-html (${res.contentType})`;
} else {
const readable = await extractReadableContent({
html: res.body,
url: res.finalUrl,
extractMode: "markdown",
});
if (readable?.text) {
localStatus = "readability";
localTitle = readable.title ?? "";
localText = readable.text;
} else {
localStatus = "readability-empty";
}
}
} catch (error) {
localStatus = "error";
localError = error instanceof Error ? error.message : String(error);
}
console.log(
`local: ${localStatus} len=${localText.length} title=${truncate(localTitle, 80)}`
);
if (localError) console.log(`local error: ${localError}`);
if (localText) console.log(`local sample: ${truncate(localText)}`);
if (apiKey) {
try {
// maxAgeMs 172_800_000 = 48h acceptable cache age on the Firecrawl side.
const firecrawl = await fetchFirecrawlContent({
url,
extractMode: "markdown",
apiKey,
baseUrl,
onlyMainContent: true,
maxAgeMs: 172_800_000,
proxy: "auto",
storeInCache: true,
timeoutSeconds: 60,
});
console.log(
`firecrawl: ok len=${firecrawl.text.length} title=${truncate(
firecrawl.title ?? "",
80,
)} status=${firecrawl.status ?? "n/a"}`
);
if (firecrawl.warning) console.log(`firecrawl warning: ${firecrawl.warning}`);
if (firecrawl.text) console.log(`firecrawl sample: ${truncate(firecrawl.text)}`);
} catch (error) {
const message = error instanceof Error ? error.message : String(error);
console.log(`firecrawl: error ${message}`);
}
}
}
// Explicit exit so lingering sockets/timers do not keep the process alive.
process.exit(0);
}
run().catch((error) => {
console.error(error);
process.exit(1);
});

View File

@@ -0,0 +1,148 @@
import fs from "node:fs";
import path from "node:path";
import { spawnSync } from "node:child_process";
import { fileURLToPath } from "node:url";
// File extensions oxfmt can format; staged paths are checked against this.
const OXFMT_EXTENSIONS = new Set([
".cjs",
".js",
".json",
".jsonc",
".jsx",
".mjs",
".ts",
".tsx",
]);
// Repo root = parent of the directory containing this script.
function getRepoRoot() {
const here = path.dirname(fileURLToPath(import.meta.url));
return path.resolve(here, "..");
}
// Run git synchronously with captured (default) or inherited stdio.
// Returns the spawnSync result; callers inspect `status` themselves.
function runGitCommand(args, options = {}) {
return spawnSync("git", args, {
cwd: options.cwd,
encoding: "utf-8",
stdio: options.stdio ?? "pipe",
});
}
// Split `git -z` output (NUL-separated records) into non-empty entries.
function splitNullDelimited(value) {
  if (!value) return [];
  const entries = String(value).split("\0");
  return entries.filter((entry) => entry.length > 0);
}
// Normalize Windows-style backslashes to forward slashes so paths from
// different git invocations compare equal.
function normalizeGitPath(filePath) {
  return filePath.split("\\").join("/");
}
// Keep only paths under src/ or test/ whose extension oxfmt handles.
function filterOxfmtTargets(paths) {
  const isTarget = (filePath) =>
    (filePath.startsWith("src/") || filePath.startsWith("test/")) &&
    OXFMT_EXTENSIONS.has(path.posix.extname(filePath));
  return paths.map(normalizeGitPath).filter(isTarget);
}
// A file both staged and still modified in the worktree is "partially
// staged"; formatting it would mix staged and unstaged hunks.
function findPartiallyStagedFiles(stagedFiles, unstagedFiles) {
  const unstagedSet = new Set(unstagedFiles.map((filePath) => normalizeGitPath(filePath)));
  return stagedFiles.filter((filePath) => unstagedSet.has(normalizeGitPath(filePath)));
}
// Drop partially staged paths from the format target list.
function filterOutPartialTargets(targets, partialTargets) {
  if (partialTargets.length === 0) return targets;
  const excluded = new Set(partialTargets.map((filePath) => normalizeGitPath(filePath)));
  return targets.filter((filePath) => !excluded.has(normalizeGitPath(filePath)));
}
// Prefer the repo-local oxfmt binary; fall back to one on PATH.
// Returns { command, args } or null when oxfmt is unavailable.
function resolveOxfmtCommand(repoRoot) {
const binName = process.platform === "win32" ? "oxfmt.cmd" : "oxfmt";
const local = path.join(repoRoot, "node_modules", ".bin", binName);
if (fs.existsSync(local)) {
return { command: local, args: [] };
}
// Probe PATH by running `oxfmt --version` and checking the exit status.
const result = spawnSync("oxfmt", ["--version"], { stdio: "ignore" });
if (result.status === 0) {
return { command: "oxfmt", args: [] };
}
return null;
}
// List paths produced by a git invocation; empty list on any git failure.
function getGitPaths(args, repoRoot) {
const result = runGitCommand(args, { cwd: repoRoot });
if (result.status !== 0) return [];
return splitNullDelimited(result.stdout ?? "");
}
// Rewrite files in place with oxfmt; returns true on success.
function formatFiles(repoRoot, oxfmt, files) {
const result = spawnSync(oxfmt.command, ["--write", ...oxfmt.args, ...files], {
cwd: repoRoot,
stdio: "inherit",
});
return result.status === 0;
}
// Re-stage the formatted files so the commit picks up the rewrites.
function stageFiles(repoRoot, files) {
if (files.length === 0) return true;
const result = runGitCommand(["add", "--", ...files], { cwd: repoRoot, stdio: "inherit" });
return result.status === 0;
}
// Pre-commit entry point: format staged src/test files with oxfmt,
// skipping partially staged paths, then re-stage whatever was rewritten.
function main() {
const repoRoot = getRepoRoot();
// Staged additions/copies/modifications/renames only (-z for NUL output).
const staged = getGitPaths([
"diff",
"--cached",
"--name-only",
"-z",
"--diff-filter=ACMR",
], repoRoot);
const targets = filterOxfmtTargets(staged);
if (targets.length === 0) return;
const unstaged = getGitPaths(["diff", "--name-only", "-z"], repoRoot);
const partial = findPartiallyStagedFiles(targets, unstaged);
if (partial.length > 0) {
process.stderr.write("[pre-commit] Skipping partially staged files:\n");
for (const filePath of partial) {
process.stderr.write(`- ${filePath}\n`);
}
process.stderr.write("Stage full files to format them automatically.\n");
}
const filteredTargets = filterOutPartialTargets(targets, partial);
if (filteredTargets.length === 0) return;
const oxfmt = resolveOxfmtCommand(repoRoot);
if (!oxfmt) {
// Missing formatter is a soft failure: warn but do not block the commit.
process.stderr.write("[pre-commit] oxfmt not found; skipping format.\n");
return;
}
// Formatting or re-staging failures do block the commit.
if (!formatFiles(repoRoot, oxfmt, filteredTargets)) {
process.exitCode = 1;
return;
}
if (!stageFiles(repoRoot, filteredTargets)) {
process.exitCode = 1;
}
}
// Helpers are exported for unit tests; main runs only when executed directly.
export {
filterOxfmtTargets,
filterOutPartialTargets,
findPartiallyStagedFiles,
getRepoRoot,
normalizeGitPath,
resolveOxfmtCommand,
splitNullDelimited,
};
if (process.argv[1] && path.resolve(process.argv[1]) === fileURLToPath(import.meta.url)) {
main();
}

View File

@@ -0,0 +1,15 @@
#!/usr/bin/env bash
set -euo pipefail
# Resolve the 10-character Apple Developer Team ID: prefer the team Xcode
# already has configured, then fall back to installed signing identities.
team_id="$(defaults read com.apple.dt.Xcode IDEProvisioningTeamIdentifiers 2>/dev/null | grep -Eo '[A-Z0-9]{10}' | head -n1 || true)"
if [[ -z "$team_id" ]]; then
  # BUG FIX: the pattern used doubled backslashes ('\\(' / '\\)'), which in
  # ERE match a literal backslash character, so it could never match the
  # "(TEAMID1234)" suffix printed by `security find-identity`. A single
  # backslash escapes the parentheses as intended.
  team_id="$(security find-identity -p codesigning -v 2>/dev/null | grep -Eo '\([A-Z0-9]{10}\)' | head -n1 | tr -d '()' || true)"
fi
if [[ -z "$team_id" ]]; then
  echo "No Apple Team ID found. Open Xcode or install signing certificates first." >&2
  exit 1
fi
echo "$team_id"

View File

@@ -0,0 +1,69 @@
#!/usr/bin/env bash
# Generate/refresh the Sparkle appcast.xml for a packaged Moltbot zip.
set -euo pipefail
ROOT=$(cd "$(dirname "$0")/.." && pwd)
ZIP=${1:?"Usage: $0 Moltbot-<ver>.zip"}
FEED_URL=${2:-"https://raw.githubusercontent.com/moltbot/moltbot/main/appcast.xml"}
PRIVATE_KEY_FILE=${SPARKLE_PRIVATE_KEY_FILE:-}
if [[ -z "$PRIVATE_KEY_FILE" ]]; then
echo "Set SPARKLE_PRIVATE_KEY_FILE to your ed25519 private key (Sparkle)." >&2
exit 1
fi
if [[ ! -f "$ZIP" ]]; then
echo "Zip not found: $ZIP" >&2
exit 1
fi
ZIP_DIR=$(cd "$(dirname "$ZIP")" && pwd)
ZIP_NAME=$(basename "$ZIP")
ZIP_BASE="${ZIP_NAME%.zip}"
# Version comes from SPARKLE_RELEASE_VERSION or the Moltbot-<ver>.zip name.
VERSION=${SPARKLE_RELEASE_VERSION:-}
if [[ -z "$VERSION" ]]; then
if [[ "$ZIP_NAME" =~ ^Moltbot-([0-9]+(\.[0-9]+){1,2}([-.][^.]*)?)\.zip$ ]]; then
VERSION="${BASH_REMATCH[1]}"
else
echo "Could not infer version from $ZIP_NAME; set SPARKLE_RELEASE_VERSION." >&2
exit 1
fi
fi
# Staging dir for generate_appcast input; removed by the EXIT trap below.
TMP_DIR="$(mktemp -d)"
# EXIT-trap cleanup: always drop the staging dir; delete the generated
# release notes unless the caller asked to keep them.
cleanup() {
  rm -rf "$TMP_DIR"
  # BUG FIX: NOTES_HTML is only assigned later in the script. If the EXIT
  # trap fired before that point (e.g. changelog-to-html.sh missing),
  # "$NOTES_HTML" was an unbound variable under `set -u` and the trap
  # itself errored. Guard with a :-default and skip when empty.
  if [[ "${KEEP_SPARKLE_NOTES:-0}" != "1" && -n "${NOTES_HTML:-}" ]]; then
    rm -f "$NOTES_HTML"
  fi
}
trap cleanup EXIT
cp -f "$ZIP" "$TMP_DIR/$ZIP_NAME"
# Copy the current appcast into the staging dir (generate_appcast
# presumably merges prior release entries — confirm against Sparkle docs).
if [[ -f "$ROOT/appcast.xml" ]]; then
cp -f "$ROOT/appcast.xml" "$TMP_DIR/appcast.xml"
fi
# Release notes HTML is generated from the changelog next to the zip.
NOTES_HTML="${ZIP_DIR}/${ZIP_BASE}.html"
if [[ -x "$ROOT/scripts/changelog-to-html.sh" ]]; then
"$ROOT/scripts/changelog-to-html.sh" "$VERSION" >"$NOTES_HTML"
else
echo "Missing scripts/changelog-to-html.sh; cannot generate HTML release notes." >&2
exit 1
fi
cp -f "$NOTES_HTML" "$TMP_DIR/${ZIP_BASE}.html"
DOWNLOAD_URL_PREFIX=${SPARKLE_DOWNLOAD_URL_PREFIX:-"https://github.com/moltbot/moltbot/releases/download/v${VERSION}/"}
# Sparkle CLI tools are built by SwiftPM into the macOS app artifacts dir.
export PATH="$ROOT/apps/macos/.build/artifacts/sparkle/Sparkle/bin:$PATH"
if ! command -v generate_appcast >/dev/null; then
echo "generate_appcast not found in PATH. Build Sparkle tools via SwiftPM." >&2
exit 1
fi
generate_appcast \
--ed-key-file "$PRIVATE_KEY_FILE" \
--download-url-prefix "$DOWNLOAD_URL_PREFIX" \
--embed-release-notes \
--link "$FEED_URL" \
"$TMP_DIR"
cp -f "$TMP_DIR/appcast.xml" "$ROOT/appcast.xml"
echo "Appcast generated (appcast.xml). Upload alongside $ZIP at $FEED_URL"

View File

@@ -0,0 +1,84 @@
#!/bin/bash
# Mobile-friendly Claude Code re-authentication
# Designed for use via SSH from Termux
#
# Flow:
#   1. Probe the current auth state via claude-auth-status.sh
#   2. Walk the user through creating/pasting an API key on their phone
#   3. Run `claude setup-token` and restart the moltbot service on success
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# ANSI colors for terminal feedback
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
NC='\033[0m'

echo "=== Claude Code Mobile Re-Auth ==="
echo ""

# Probe current state; any probe failure maps to ERROR so we fall through to
# the refresh path instead of aborting under `set -e`.
echo "Checking auth status..."
auth_state=$("$SCRIPT_DIR/claude-auth-status.sh" simple 2>/dev/null || echo "ERROR")

if [[ "$auth_state" == "OK" ]]; then
  echo -e "${GREEN}Auth is valid!${NC}"
  "$SCRIPT_DIR/claude-auth-status.sh" full
  exit 0
elif [[ "$auth_state" == "CLAUDE_EXPIRING" || "$auth_state" == "CLAWDBOT_EXPIRING" ]]; then
  echo -e "${YELLOW}Auth is expiring soon.${NC}"
  echo ""
else
  echo -e "${RED}Auth needs refresh.${NC}"
  echo ""
fi

echo "Starting long-lived token setup..."
echo ""
echo -e "${CYAN}Instructions:${NC}"
echo "1. Open this URL on your phone:"
echo ""
echo -e " ${CYAN}https://console.anthropic.com/settings/api-keys${NC}"
echo ""
echo "2. Sign in if needed"
echo "3. Create a new API key or use existing 'Claude Code' key"
echo "4. Copy the key (starts with sk-ant-...)"
echo "5. When prompted below, paste the key"
echo ""
echo "Press Enter when ready to continue..."
read -r

# Interactive token setup; `claude` prompts for the pasted key itself.
echo ""
echo "Running 'claude setup-token'..."
echo "(Follow the prompts and paste your API key when asked)"
echo ""
if claude setup-token; then
  echo ""
  echo -e "${GREEN}Authentication successful!${NC}"
  echo ""
  "$SCRIPT_DIR/claude-auth-status.sh" full
  # Make the running service pick up the fresh credentials, if it exists.
  if systemctl --user is-active moltbot >/dev/null 2>&1; then
    echo ""
    echo "Restarting moltbot service..."
    systemctl --user restart moltbot
    echo -e "${GREEN}Service restarted.${NC}"
  fi
else
  echo ""
  echo -e "${RED}Authentication failed.${NC}"
  echo "Please try again or check the Claude Code documentation."
  exit 1
fi

View File

@@ -0,0 +1,65 @@
#!/usr/bin/env bash
set -euo pipefail
# Notarize a macOS artifact (zip/dmg/pkg) and optionally staple the app bundle.
#
# Usage:
#   STAPLE_APP_PATH=dist/Moltbot.app scripts/notarize-mac-artifact.sh <artifact>
#
# Auth (pick one):
#   NOTARYTOOL_PROFILE   keychain profile created via `xcrun notarytool store-credentials`
#   NOTARYTOOL_KEY       path to App Store Connect API key (.p8)
#   NOTARYTOOL_KEY_ID    API key ID
#   NOTARYTOOL_ISSUER    API issuer ID

ARTIFACT="${1:-}"
STAPLE_APP_PATH="${STAPLE_APP_PATH:-}"

# --- argument & tooling sanity checks --------------------------------------
if [[ -z "$ARTIFACT" ]]; then
  echo "Usage: $0 <artifact>" >&2
  exit 1
fi
if [[ ! -e "$ARTIFACT" ]]; then
  echo "Error: artifact not found: $ARTIFACT" >&2
  exit 1
fi
if ! command -v xcrun >/dev/null 2>&1; then
  echo "Error: xcrun not found; install Xcode command line tools." >&2
  exit 1
fi

# --- pick notarytool credentials -------------------------------------------
# Prefer a stored keychain profile; fall back to an API key triple.
notary_auth=()
if [[ -n "${NOTARYTOOL_PROFILE:-}" ]]; then
  notary_auth+=(--keychain-profile "$NOTARYTOOL_PROFILE")
elif [[ -n "${NOTARYTOOL_KEY:-}" && -n "${NOTARYTOOL_KEY_ID:-}" && -n "${NOTARYTOOL_ISSUER:-}" ]]; then
  notary_auth+=(--key "$NOTARYTOOL_KEY" --key-id "$NOTARYTOOL_KEY_ID" --issuer "$NOTARYTOOL_ISSUER")
else
  echo "Error: Notary auth missing. Set NOTARYTOOL_PROFILE or NOTARYTOOL_KEY/NOTARYTOOL_KEY_ID/NOTARYTOOL_ISSUER." >&2
  exit 1
fi

# --- submit and wait for Apple's verdict -----------------------------------
echo "🧾 Notarizing: $ARTIFACT"
xcrun notarytool submit "$ARTIFACT" "${notary_auth[@]}" --wait

# Disk images and installer packages can carry the notarization ticket
# themselves; plain zips cannot, so they are skipped here.
if [[ "$ARTIFACT" == *.dmg || "$ARTIFACT" == *.pkg ]]; then
  echo "📌 Stapling artifact: $ARTIFACT"
  xcrun stapler staple "$ARTIFACT"
  xcrun stapler validate "$ARTIFACT"
fi

# Optionally staple the unpacked .app bundle as well.
if [[ -n "$STAPLE_APP_PATH" ]]; then
  if [[ -d "$STAPLE_APP_PATH" ]]; then
    echo "📌 Stapling app: $STAPLE_APP_PATH"
    xcrun stapler staple "$STAPLE_APP_PATH"
    xcrun stapler validate "$STAPLE_APP_PATH"
  else
    echo "Warn: STAPLE_APP_PATH not found: $STAPLE_APP_PATH" >&2
  fi
fi

echo "✅ Notarization complete"

View File

@@ -0,0 +1,261 @@
#!/usr/bin/env bash
set -euo pipefail
# Build and bundle Moltbot into a minimal .app we can open.
# Outputs to dist/Moltbot.app
ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"
APP_ROOT="$ROOT_DIR/dist/Moltbot.app"
BUILD_ROOT="$ROOT_DIR/apps/macos/.build"
PRODUCT="Moltbot"
# Default bundle id carries a .debug suffix; release builds override it.
BUNDLE_ID="${BUNDLE_ID:-bot.molt.mac.debug}"
# Marketing version from package.json; build metadata from git (best-effort).
PKG_VERSION="$(cd "$ROOT_DIR" && node -p "require('./package.json').version" 2>/dev/null || echo "0.0.0")"
BUILD_TS=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
GIT_COMMIT=$(cd "$ROOT_DIR" && git rev-parse --short HEAD 2>/dev/null || echo "unknown")
GIT_BUILD_NUMBER=$(cd "$ROOT_DIR" && git rev-list --count HEAD 2>/dev/null || echo "0")
APP_VERSION="${APP_VERSION:-$PKG_VERSION}"
APP_BUILD="${APP_BUILD:-$GIT_BUILD_NUMBER}"
BUILD_CONFIG="${BUILD_CONFIG:-debug}"
# BUILD_ARCHS: "all" expands to a universal build; default is the host arch.
BUILD_ARCHS_VALUE="${BUILD_ARCHS:-$(uname -m)}"
if [[ "${BUILD_ARCHS_VALUE}" == "all" ]]; then
  BUILD_ARCHS_VALUE="arm64 x86_64"
fi
IFS=' ' read -r -a BUILD_ARCHS <<< "$BUILD_ARCHS_VALUE"
PRIMARY_ARCH="${BUILD_ARCHS[0]}"
# Sparkle update settings; debug bundles disable the feed and auto-checks.
SPARKLE_PUBLIC_ED_KEY="${SPARKLE_PUBLIC_ED_KEY:-AGCY8w5vHirVfGGDGc8Szc5iuOqupZSh9pMj/Qs67XI=}"
SPARKLE_FEED_URL="${SPARKLE_FEED_URL:-https://raw.githubusercontent.com/moltbot/moltbot/main/appcast.xml}"
AUTO_CHECKS=true
if [[ "$BUNDLE_ID" == *.debug ]]; then
  SPARKLE_FEED_URL=""
  AUTO_CHECKS=false
fi
# Sparkle compares CFBundleVersion numerically, so it must be an integer.
if [[ "$AUTO_CHECKS" == "true" && ! "$APP_BUILD" =~ ^[0-9]+$ ]]; then
  echo "ERROR: APP_BUILD must be numeric for Sparkle compare (CFBundleVersion). Got: $APP_BUILD" >&2
  exit 1
fi
# Print the SwiftPM build directory for the given architecture ($1).
build_path_for_arch() {
  printf '%s\n' "$BUILD_ROOT/$1"
}
# Print the built product binary path for the given architecture ($1).
bin_for_arch() {
  printf '%s\n' "$(build_path_for_arch "$1")/$BUILD_CONFIG/$PRODUCT"
}
# Print the Sparkle.framework build output path for the given architecture ($1).
sparkle_framework_for_arch() {
  printf '%s\n' "$(build_path_for_arch "$1")/$BUILD_CONFIG/Sparkle.framework"
}
# Merge per-arch framework builds into a universal destination copy.
#   $1 = primary framework dir (already copied into the bundle at $2)
#   $2 = destination framework dir inside the app bundle
#   $@ = remaining per-arch framework dirs to fold in
# For every Mach-O file in the primary tree, architectures present only in
# the other builds are thinned out and lipo'd into the destination copy.
merge_framework_machos() {
  local primary="$1"
  local dest="$2"
  shift 2
  local others=("$@")
  # List the architectures contained in a (possibly fat) Mach-O file.
  archs_for() {
    /usr/bin/lipo -info "$1" | /usr/bin/sed -E 's/.*are: //; s/.*architecture: //'
  }
  # Succeed iff $1 appears among the remaining arguments.
  arch_in_list() {
    local needle="$1"
    shift
    for item in "$@"; do
      if [[ "$item" == "$needle" ]]; then
        return 0
      fi
    done
    return 1
  }
  # NUL-delimited walk so paths with spaces survive.
  while IFS= read -r -d '' file; do
    if /usr/bin/file "$file" | /usr/bin/grep -q "Mach-O"; then
      local rel="${file#$primary/}"
      local primary_archs
      primary_archs=$(archs_for "$file")
      IFS=' ' read -r -a primary_arch_array <<< "$primary_archs"
      local missing_files=()
      local tmp_dir
      tmp_dir=$(mktemp -d)
      for fw in "${others[@]}"; do
        local other_file="$fw/$rel"
        # Every secondary build must mirror the primary's file layout.
        if [[ ! -f "$other_file" ]]; then
          echo "ERROR: Missing $rel in $fw" >&2
          rm -rf "$tmp_dir"
          exit 1
        fi
        if /usr/bin/file "$other_file" | /usr/bin/grep -q "Mach-O"; then
          local other_archs
          other_archs=$(archs_for "$other_file")
          IFS=' ' read -r -a other_arch_array <<< "$other_archs"
          for arch in "${other_arch_array[@]}"; do
            # Only fold in slices the primary build does not already carry.
            if ! arch_in_list "$arch" "${primary_arch_array[@]}"; then
              local thin_file="$tmp_dir/$(echo "$rel" | tr '/' '_')-$arch"
              /usr/bin/lipo -thin "$arch" "$other_file" -output "$thin_file"
              missing_files+=("$thin_file")
              primary_arch_array+=("$arch")
            fi
          done
        fi
      done
      # Only rewrite the destination file when new slices were collected.
      if [[ "${#missing_files[@]}" -gt 0 ]]; then
        /usr/bin/lipo -create "$file" "${missing_files[@]}" -output "$dest/$rel"
      fi
      rm -rf "$tmp_dir"
    fi
  done < <(find "$primary" -type f -print0)
}
# --- JS / Control UI prerequisites -----------------------------------------
echo "📦 Ensuring deps (pnpm install)"
(cd "$ROOT_DIR" && pnpm install --no-frozen-lockfile --config.node-linker=hoisted)
if [[ "${SKIP_TSC:-0}" != "1" ]]; then
  echo "📦 Building JS (pnpm exec tsc)"
  (cd "$ROOT_DIR" && pnpm exec tsc -p tsconfig.json)
else
  echo "📦 Skipping TS build (SKIP_TSC=1)"
fi
if [[ "${SKIP_UI_BUILD:-0}" != "1" ]]; then
  echo "🖥 Building Control UI (ui:build)"
  (cd "$ROOT_DIR" && node scripts/ui.js build)
else
  echo "🖥 Skipping Control UI build (SKIP_UI_BUILD=1)"
fi
# --- Swift build: one pass per requested architecture ----------------------
cd "$ROOT_DIR/apps/macos"
echo "🔨 Building $PRODUCT ($BUILD_CONFIG) [${BUILD_ARCHS[*]}]"
for arch in "${BUILD_ARCHS[@]}"; do
  BUILD_PATH="$(build_path_for_arch "$arch")"
  # rpath points into the bundle's Frameworks dir for embedded Sparkle.
  swift build -c "$BUILD_CONFIG" --product "$PRODUCT" --build-path "$BUILD_PATH" --arch "$arch" -Xlinker -rpath -Xlinker @executable_path/../Frameworks
done
BIN_PRIMARY="$(bin_for_arch "$PRIMARY_ARCH")"
echo "pkg: binary $BIN_PRIMARY" >&2
# --- Assemble the .app skeleton --------------------------------------------
echo "🧹 Cleaning old app bundle"
rm -rf "$APP_ROOT"
mkdir -p "$APP_ROOT/Contents/MacOS"
mkdir -p "$APP_ROOT/Contents/Resources"
mkdir -p "$APP_ROOT/Contents/Frameworks"
echo "📄 Copying Info.plist template"
INFO_PLIST_SRC="$ROOT_DIR/apps/macos/Sources/Moltbot/Resources/Info.plist"
if [ ! -f "$INFO_PLIST_SRC" ]; then
  echo "ERROR: Info.plist template missing at $INFO_PLIST_SRC" >&2
  exit 1
fi
cp "$INFO_PLIST_SRC" "$APP_ROOT/Contents/Info.plist"
# Stamp bundle metadata; `|| true` tolerates keys absent from the template,
# and Set-then-Add handles keys that may or may not pre-exist.
/usr/libexec/PlistBuddy -c "Set :CFBundleIdentifier ${BUNDLE_ID}" "$APP_ROOT/Contents/Info.plist" || true
/usr/libexec/PlistBuddy -c "Set :CFBundleShortVersionString ${APP_VERSION}" "$APP_ROOT/Contents/Info.plist" || true
/usr/libexec/PlistBuddy -c "Set :CFBundleVersion ${APP_BUILD}" "$APP_ROOT/Contents/Info.plist" || true
/usr/libexec/PlistBuddy -c "Set :MoltbotBuildTimestamp ${BUILD_TS}" "$APP_ROOT/Contents/Info.plist" || true
/usr/libexec/PlistBuddy -c "Set :MoltbotGitCommit ${GIT_COMMIT}" "$APP_ROOT/Contents/Info.plist" || true
/usr/libexec/PlistBuddy -c "Set :SUFeedURL ${SPARKLE_FEED_URL}" "$APP_ROOT/Contents/Info.plist" \
  || /usr/libexec/PlistBuddy -c "Add :SUFeedURL string ${SPARKLE_FEED_URL}" "$APP_ROOT/Contents/Info.plist" || true
/usr/libexec/PlistBuddy -c "Set :SUPublicEDKey ${SPARKLE_PUBLIC_ED_KEY}" "$APP_ROOT/Contents/Info.plist" \
  || /usr/libexec/PlistBuddy -c "Add :SUPublicEDKey string ${SPARKLE_PUBLIC_ED_KEY}" "$APP_ROOT/Contents/Info.plist" || true
if /usr/libexec/PlistBuddy -c "Set :SUEnableAutomaticChecks ${AUTO_CHECKS}" "$APP_ROOT/Contents/Info.plist"; then
  true
else
  /usr/libexec/PlistBuddy -c "Add :SUEnableAutomaticChecks bool ${AUTO_CHECKS}" "$APP_ROOT/Contents/Info.plist" || true
fi
# --- Binary: copy primary arch, then lipo in the rest for universal builds -
echo "🚚 Copying binary"
cp "$BIN_PRIMARY" "$APP_ROOT/Contents/MacOS/Moltbot"
if [[ "${#BUILD_ARCHS[@]}" -gt 1 ]]; then
  BIN_INPUTS=()
  for arch in "${BUILD_ARCHS[@]}"; do
    BIN_INPUTS+=("$(bin_for_arch "$arch")")
  done
  /usr/bin/lipo -create "${BIN_INPUTS[@]}" -output "$APP_ROOT/Contents/MacOS/Moltbot"
fi
chmod +x "$APP_ROOT/Contents/MacOS/Moltbot"
# SwiftPM outputs ad-hoc signed binaries; strip the signature before install_name_tool to avoid warnings.
/usr/bin/codesign --remove-signature "$APP_ROOT/Contents/MacOS/Moltbot" 2>/dev/null || true
# --- Embed Sparkle.framework (universal-merged when multi-arch) ------------
SPARKLE_FRAMEWORK_PRIMARY="$(sparkle_framework_for_arch "$PRIMARY_ARCH")"
if [ -d "$SPARKLE_FRAMEWORK_PRIMARY" ]; then
  echo "✨ Embedding Sparkle.framework"
  cp -R "$SPARKLE_FRAMEWORK_PRIMARY" "$APP_ROOT/Contents/Frameworks/"
  if [[ "${#BUILD_ARCHS[@]}" -gt 1 ]]; then
    OTHER_FRAMEWORKS=()
    for arch in "${BUILD_ARCHS[@]}"; do
      if [[ "$arch" == "$PRIMARY_ARCH" ]]; then
        continue
      fi
      OTHER_FRAMEWORKS+=("$(sparkle_framework_for_arch "$arch")")
    done
    merge_framework_machos "$SPARKLE_FRAMEWORK_PRIMARY" "$APP_ROOT/Contents/Frameworks/Sparkle.framework" "${OTHER_FRAMEWORKS[@]}"
  fi
  chmod -R a+rX "$APP_ROOT/Contents/Frameworks/Sparkle.framework"
fi
# --- Bundle resources ------------------------------------------------------
echo "📦 Copying Swift 6.2 compatibility libraries"
SWIFT_COMPAT_LIB="$(xcode-select -p)/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-6.2/macosx/libswiftCompatibilitySpan.dylib"
if [ -f "$SWIFT_COMPAT_LIB" ]; then
  cp "$SWIFT_COMPAT_LIB" "$APP_ROOT/Contents/Frameworks/"
  chmod +x "$APP_ROOT/Contents/Frameworks/libswiftCompatibilitySpan.dylib"
else
  echo "WARN: Swift compatibility library not found at $SWIFT_COMPAT_LIB (continuing)" >&2
fi
echo "🖼 Copying app icon"
cp "$ROOT_DIR/apps/macos/Sources/Moltbot/Resources/Moltbot.icns" "$APP_ROOT/Contents/Resources/Moltbot.icns"
echo "📦 Copying device model resources"
rm -rf "$APP_ROOT/Contents/Resources/DeviceModels"
cp -R "$ROOT_DIR/apps/macos/Sources/Moltbot/Resources/DeviceModels" "$APP_ROOT/Contents/Resources/DeviceModels"
echo "📦 Copying model catalog"
MODEL_CATALOG_SRC="$ROOT_DIR/node_modules/@mariozechner/pi-ai/dist/models.generated.js"
MODEL_CATALOG_DEST="$APP_ROOT/Contents/Resources/models.generated.js"
if [ -f "$MODEL_CATALOG_SRC" ]; then
  cp "$MODEL_CATALOG_SRC" "$MODEL_CATALOG_DEST"
else
  echo "WARN: model catalog missing at $MODEL_CATALOG_SRC (continuing)" >&2
fi
echo "📦 Copying MoltbotKit resources"
MOLTBOTKIT_BUNDLE="$(build_path_for_arch "$PRIMARY_ARCH")/$BUILD_CONFIG/MoltbotKit_MoltbotKit.bundle"
if [ -d "$MOLTBOTKIT_BUNDLE" ]; then
  rm -rf "$APP_ROOT/Contents/Resources/MoltbotKit_MoltbotKit.bundle"
  cp -R "$MOLTBOTKIT_BUNDLE" "$APP_ROOT/Contents/Resources/MoltbotKit_MoltbotKit.bundle"
else
  echo "WARN: MoltbotKit resource bundle not found at $MOLTBOTKIT_BUNDLE (continuing)" >&2
fi
echo "📦 Copying Textual resources"
# The bundle name's casing varies across SwiftPM versions; check both, then
# fall back to a full search of the build tree.
TEXTUAL_BUNDLE_DIR="$(build_path_for_arch "$PRIMARY_ARCH")/$BUILD_CONFIG"
TEXTUAL_BUNDLE=""
for candidate in \
  "$TEXTUAL_BUNDLE_DIR/textual_Textual.bundle" \
  "$TEXTUAL_BUNDLE_DIR/Textual_Textual.bundle"
do
  if [ -d "$candidate" ]; then
    TEXTUAL_BUNDLE="$candidate"
    break
  fi
done
if [ -z "$TEXTUAL_BUNDLE" ]; then
  TEXTUAL_BUNDLE="$(find "$BUILD_ROOT" -type d \( -name "textual_Textual.bundle" -o -name "Textual_Textual.bundle" \) -print -quit)"
fi
if [ -n "$TEXTUAL_BUNDLE" ] && [ -d "$TEXTUAL_BUNDLE" ]; then
  rm -rf "$APP_ROOT/Contents/Resources/$(basename "$TEXTUAL_BUNDLE")"
  cp -R "$TEXTUAL_BUNDLE" "$APP_ROOT/Contents/Resources/"
else
  if [[ "${ALLOW_MISSING_TEXTUAL_BUNDLE:-0}" == "1" ]]; then
    echo "WARN: Textual resource bundle not found (continuing due to ALLOW_MISSING_TEXTUAL_BUNDLE=1)" >&2
  else
    echo "ERROR: Textual resource bundle not found. Set ALLOW_MISSING_TEXTUAL_BUNDLE=1 to bypass." >&2
    exit 1
  fi
fi
# --- Finalize: stop running copies and sign --------------------------------
echo "⏹ Stopping any running Moltbot"
killall -q Moltbot 2>/dev/null || true
echo "🔏 Signing bundle (auto-selects signing identity if SIGN_IDENTITY is unset)"
"$ROOT_DIR/scripts/codesign-mac-app.sh" "$APP_ROOT"
echo "✅ Bundle ready at $APP_ROOT"

View File

@@ -0,0 +1,56 @@
#!/usr/bin/env bash
set -euo pipefail
# Build the mac app bundle, then create a zip (Sparkle) + styled DMG (humans).
#
# Output:
#   - dist/Moltbot.app
#   - dist/Moltbot-<version>.zip
#   - dist/Moltbot-<version>.dmg

ROOT_DIR="$(cd "$(dirname "$0")/.." && pwd)"

# Default to universal binary for distribution builds (supports both Apple Silicon and Intel Macs)
export BUILD_ARCHS="${BUILD_ARCHS:-all}"
"$ROOT_DIR/scripts/package-mac-app.sh"

APP="$ROOT_DIR/dist/Moltbot.app"
if [[ ! -d "$APP" ]]; then
  echo "Error: missing app bundle at $APP" >&2
  exit 1
fi

# Artifact names derive from the bundle's marketing version.
VERSION=$(/usr/libexec/PlistBuddy -c "Print CFBundleShortVersionString" "$APP/Contents/Info.plist" 2>/dev/null || echo "0.0.0")
ZIP="$ROOT_DIR/dist/Moltbot-$VERSION.zip"
DMG="$ROOT_DIR/dist/Moltbot-$VERSION.dmg"
NOTARY_ZIP="$ROOT_DIR/dist/Moltbot-$VERSION.notary.zip"

# Notarization is on by default; SKIP_NOTARIZE=1 turns it off.
if [[ "${SKIP_NOTARIZE:-0}" == "1" ]]; then
  NOTARIZE=0
else
  NOTARIZE=1
fi

if [[ "$NOTARIZE" == "1" ]]; then
  # Apple requires an archive for submission; the ticket is stapled onto the
  # .app itself, so this throwaway zip is deleted afterwards.
  echo "📦 Notary zip: $NOTARY_ZIP"
  rm -f "$NOTARY_ZIP"
  ditto -c -k --sequesterRsrc --keepParent "$APP" "$NOTARY_ZIP"
  STAPLE_APP_PATH="$APP" "$ROOT_DIR/scripts/notarize-mac-artifact.sh" "$NOTARY_ZIP"
  rm -f "$NOTARY_ZIP"
fi

# Rebuild the distributable zip from the (possibly stapled) bundle.
echo "📦 Zip: $ZIP"
rm -f "$ZIP"
ditto -c -k --sequesterRsrc --keepParent "$APP" "$ZIP"

echo "💿 DMG: $DMG"
"$ROOT_DIR/scripts/create-dmg.sh" "$APP" "$DMG"

if [[ "$NOTARIZE" == "1" ]]; then
  # Sign the DMG when an identity is provided, then notarize it too.
  if [[ -n "${SIGN_IDENTITY:-}" ]]; then
    echo "🔏 Signing DMG: $DMG"
    /usr/bin/codesign --force --sign "$SIGN_IDENTITY" --timestamp "$DMG"
  fi
  "$ROOT_DIR/scripts/notarize-mac-artifact.sh" "$DMG"
fi

View File

@@ -0,0 +1,300 @@
import fs from "node:fs";
import path from "node:path";
import { spawnSync } from "node:child_process";
import { fileURLToPath } from "node:url";
import { setupGitHooks } from "./setup-git-hooks.js";
/**
 * Identify the package manager running this script from the
 * npm_config_user_agent string, which starts with "<pm>/<version> ...".
 * Examples:
 *   - "pnpm/10.23.0 npm/? node/v22.21.1 darwin arm64"
 *   - "npm/10.9.4 node/v22.12.0 linux x64"
 *   - "bun/1.2.2"
 * Returns "pnpm" | "bun" | "npm" | "yarn" | "unknown".
 */
function detectPackageManager(ua = process.env.npm_config_user_agent ?? "") {
  const agent = String(ua).trim();
  for (const pm of ["pnpm", "bun", "npm", "yarn"]) {
    if (agent.startsWith(`${pm}/`)) return pm;
  }
  return "unknown";
}
/**
 * Decide whether we need to apply pnpm.patchedDependencies ourselves.
 * pnpm already applies them natively; re-applying on top would fail, so the
 * manual fallback is only for every other package manager.
 */
function shouldApplyPnpmPatchedDependenciesFallback(pm = detectPackageManager()) {
  const isPnpm = pm === "pnpm";
  return !isPnpm;
}
/** Resolve the repository root: one directory up from this script's folder. */
function getRepoRoot() {
  const scriptsDir = path.dirname(fileURLToPath(import.meta.url));
  return path.resolve(scriptsDir, "..");
}
/**
 * Make targetPath executable (0o755) when it exists and is not already
 * owner-executable. No-op on Windows and for missing paths; chmod failures
 * are logged as warnings rather than thrown.
 */
function ensureExecutable(targetPath) {
  const isWindows = process.platform === "win32";
  if (isWindows || !fs.existsSync(targetPath)) return;
  try {
    const permissions = fs.statSync(targetPath).mode & 0o777;
    const ownerExecutable = (permissions & 0o100) !== 0;
    if (!ownerExecutable) {
      fs.chmodSync(targetPath, 0o755);
    }
  } catch (err) {
    console.warn(`[postinstall] chmod failed: ${err}`);
  }
}
/** Probe for a usable git binary; exit status 0 from `git --version` wins. */
function hasGit(repoRoot) {
  const probe = spawnSync("git", ["--version"], { cwd: repoRoot, stdio: "ignore" });
  return probe.status === 0;
}
/**
 * Strip the version specifier from a patchedDependencies key.
 * Keys look like "name@version" or "@scope/name@version"; scoped names keep
 * their leading "@" and split on the first "@" after it, while unscoped
 * names split on the last "@".
 */
function extractPackageName(key) {
  if (key.startsWith("@")) {
    const versionAt = key.indexOf("@", 1);
    return versionAt === -1 ? key : key.slice(0, versionAt);
  }
  const versionAt = key.lastIndexOf("@");
  return versionAt <= 0 ? key : key.slice(0, versionAt);
}
/** Drop the git-diff "a/" (old) or "b/" (new) path prefix when present. */
function stripPrefix(p) {
  const hasDiffPrefix = p.startsWith("a/") || p.startsWith("b/");
  return hasDiffPrefix ? p.slice(2) : p;
}
/**
 * Parse one side of a hunk header range: "-12,5" or "+7".
 * The sign character is skipped; a missing count defaults to 1.
 * @returns {{ start: number, count: number }}
 */
function parseRange(segment) {
  const pieces = segment.slice(1).split(",");
  const start = Number.parseInt(pieces[0], 10);
  const count = pieces[1] ? Number.parseInt(pieces[1], 10) : 1;
  if (Number.isNaN(start) || Number.isNaN(count)) {
    throw new Error(`invalid hunk range: ${segment}`);
  }
  return { start, count };
}
// Parse a unified git diff into an array of { oldPath, newPath, hunks }
// records. Only files that have a newPath and at least one hunk are kept;
// blank lines and "\ No newline at end of file" markers are skipped.
function parsePatch(patchText) {
  const lines = patchText.split("\n");
  const files = [];
  let i = 0;
  while (i < lines.length) {
    // Scan forward to the next per-file header.
    if (!lines[i].startsWith("diff --git ")) {
      i += 1;
      continue;
    }
    const file = { oldPath: null, newPath: null, hunks: [] };
    i += 1;
    // Skip index line(s)
    while (i < lines.length && lines[i].startsWith("index ")) i += 1;
    // Old/new paths come without their a/ b/ prefixes.
    if (i < lines.length && lines[i].startsWith("--- ")) {
      file.oldPath = stripPrefix(lines[i].slice(4).trim());
      i += 1;
    }
    if (i < lines.length && lines[i].startsWith("+++ ")) {
      file.newPath = stripPrefix(lines[i].slice(4).trim());
      i += 1;
    }
    // Collect consecutive hunks until the next hunk/file header.
    while (i < lines.length && lines[i].startsWith("@@")) {
      const header = lines[i];
      const match = /^@@\s+(-\d+(?:,\d+)?)\s+(\+\d+(?:,\d+)?)\s+@@/.exec(header);
      if (!match) throw new Error(`invalid hunk header: ${header}`);
      const oldRange = parseRange(match[1]);
      const newRange = parseRange(match[2]);
      i += 1;
      const hunkLines = [];
      while (i < lines.length) {
        const line = lines[i];
        if (line.startsWith("@@") || line.startsWith("diff --git ")) break;
        if (line === "") {
          i += 1;
          continue;
        }
        if (line.startsWith("\\ No newline at end of file")) {
          i += 1;
          continue;
        }
        hunkLines.push(line);
        i += 1;
      }
      file.hunks.push({
        oldStart: oldRange.start,
        oldLines: oldRange.count,
        newStart: newRange.start,
        newLines: newRange.count,
        lines: hunkLines,
      });
    }
    // Deletions (no newPath) and hunk-less entries are intentionally dropped.
    if (file.newPath && file.hunks.length > 0) {
      files.push(file);
    }
  }
  return files;
}
/**
 * Read targetPath as UTF-8 and split into lines, remembering whether the
 * file ended with a newline so writeFileLines can restore it exactly.
 * Throws when the file does not exist.
 */
function readFileLines(targetPath) {
  if (!fs.existsSync(targetPath)) {
    throw new Error(`target file missing: ${targetPath}`);
  }
  const raw = fs.readFileSync(targetPath, "utf-8");
  const endsWithNewline = raw.endsWith("\n");
  const lines = raw.split("\n");
  // Drop the empty trailing element produced by a final newline.
  if (endsWithNewline) lines.pop();
  return { lines, hasTrailingNewline: endsWithNewline };
}
/** Join lines back into a file, restoring the original trailing newline. */
function writeFileLines(targetPath, lines, hadTrailingNewline) {
  const body = lines.join("\n");
  fs.writeFileSync(targetPath, hadTrailingNewline ? `${body}\n` : body, "utf-8");
}
// Apply a single hunk to `lines` in place. `offset` is the cumulative line
// delta from earlier hunks; the updated offset is returned. The operation is
// idempotent: if the hunk's post-image is already present at the target
// position (e.g. postinstall re-run over already-patched deps), it's a no-op.
function applyHunk(lines, hunk, offset) {
  let cursor = hunk.oldStart - 1 + offset;
  // Build the hunk's post-image: context lines plus additions.
  const expected = [];
  for (const raw of hunk.lines) {
    const marker = raw[0];
    if (marker === " " || marker === "+") {
      expected.push(raw.slice(1));
    }
  }
  // Already-applied fast path: compare the post-image against the file.
  if (cursor >= 0 && cursor + expected.length <= lines.length) {
    let alreadyApplied = true;
    for (let i = 0; i < expected.length; i += 1) {
      if (lines[cursor + i] !== expected[i]) {
        alreadyApplied = false;
        break;
      }
    }
    if (alreadyApplied) {
      const delta = hunk.newLines - hunk.oldLines;
      return offset + delta;
    }
  }
  // Replay the hunk, verifying every context/delete line before mutating.
  for (const raw of hunk.lines) {
    const marker = raw[0];
    const text = raw.slice(1);
    if (marker === " ") {
      if (lines[cursor] !== text) {
        throw new Error(
          `context mismatch at line ${cursor + 1}: expected "${text}", found "${lines[cursor] ?? "<eof>"}"`,
        );
      }
      cursor += 1;
    } else if (marker === "-") {
      if (lines[cursor] !== text) {
        throw new Error(
          `delete mismatch at line ${cursor + 1}: expected "${text}", found "${lines[cursor] ?? "<eof>"}"`,
        );
      }
      // Deletion: cursor intentionally stays put (next line shifts down).
      lines.splice(cursor, 1);
    } else if (marker === "+") {
      lines.splice(cursor, 0, text);
      cursor += 1;
    } else {
      throw new Error(`unexpected hunk marker: ${marker}`);
    }
  }
  const delta = hunk.newLines - hunk.oldLines;
  return offset + delta;
}
/**
 * Apply one parsed file-patch to the matching file under targetDir.
 * File deletions (newPath === "/dev/null") are ignored — our patch set
 * never deletes files.
 */
function applyPatchToFile(targetDir, filePatch) {
  if (filePatch.newPath === "/dev/null") {
    // deletion not needed for our patches
    return;
  }
  const relPath = stripPrefix(filePatch.newPath ?? filePatch.oldPath ?? "");
  const targetPath = path.join(targetDir, relPath);
  const state = readFileLines(targetPath);
  // Thread the cumulative line-offset through the hunks in order.
  filePatch.hunks.reduce((offset, hunk) => applyHunk(state.lines, hunk, offset), 0);
  writeFileLines(targetPath, state.lines, state.hasTrailingNewline);
}
/**
 * Parse patchText and apply every file-patch in it under targetDir.
 * A missing or non-directory target is skipped with a warning so optional
 * dependencies don't break the install.
 */
function applyPatchSet({ patchText, targetDir }) {
  let resolved = path.resolve(targetDir);
  const isDirectory = fs.existsSync(resolved) && fs.statSync(resolved).isDirectory();
  if (!isDirectory) {
    console.warn(`[postinstall] skip missing target: ${resolved}`);
    return;
  }
  // Resolve symlinks (e.g. pnpm's node_modules layout) before patching.
  resolved = fs.realpathSync(resolved);
  for (const filePatch of parsePatch(patchText)) {
    applyPatchToFile(resolved, filePatch);
  }
}
/** Load a patch file from disk and apply it under targetDir. */
function applyPatchFile({ patchPath, targetDir }) {
  const absPatchPath = path.resolve(patchPath);
  if (!fs.existsSync(absPatchPath)) {
    throw new Error(`missing patch: ${absPatchPath}`);
  }
  const patchText = fs.readFileSync(absPatchPath, "utf-8");
  applyPatchSet({ patchText, targetDir });
}
// Postinstall entry: fix permissions, install git hooks, and (for non-pnpm
// managers) replay pnpm.patchedDependencies onto node_modules.
function main() {
  const repoRoot = getRepoRoot();
  // Run from the repo root so the relative node_modules paths below resolve.
  process.chdir(repoRoot);
  ensureExecutable(path.join(repoRoot, "dist", "entry.js"));
  setupGitHooks({ repoRoot });
  // pnpm applies pnpm.patchedDependencies natively; nothing more to do.
  if (!shouldApplyPnpmPatchedDependenciesFallback()) {
    return;
  }
  const pkgPath = path.join(repoRoot, "package.json");
  const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf-8"));
  const patched = pkg?.pnpm?.patchedDependencies ?? {};
  // Bun does not support pnpm.patchedDependencies. Apply these patch files to
  // node_modules packages as a best-effort compatibility layer.
  for (const [key, relPatchPath] of Object.entries(patched)) {
    if (typeof relPatchPath !== "string" || !relPatchPath.trim()) continue;
    const pkgName = extractPackageName(String(key));
    if (!pkgName) continue;
    applyPatchFile({
      targetDir: path.join("node_modules", ...pkgName.split("/")),
      patchPath: relPatchPath,
    });
  }
}
// Skip entirely during test runs or when explicitly disabled; any other
// failure is fatal so a broken install surfaces immediately.
// NOTE(review): the env var still uses the legacy CLAWDBOT_ prefix — confirm
// whether it should track the Moltbot rename before changing it.
try {
  const skip =
    process.env.CLAWDBOT_SKIP_POSTINSTALL === "1" ||
    process.env.VITEST === "true" ||
    process.env.NODE_ENV === "test";
  if (!skip) {
    main();
  }
} catch (err) {
  console.error(String(err));
  process.exit(1);
}
// Re-exported for unit tests and other tooling.
export {
  applyPatchFile,
  applyPatchSet,
  applyPatchToFile,
  detectPackageManager,
  parsePatch,
  shouldApplyPnpmPatchedDependenciesFallback,
};

View File

@@ -0,0 +1,31 @@
#!/usr/bin/env bash
set -euo pipefail
# Run a node_modules CLI tool via whichever package manager this checkout
# uses: pnpm first, then bun, then npm, then bare npx.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"

if [[ $# -lt 1 ]]; then
  echo "usage: run-node-tool.sh <tool> [args...]" >&2
  exit 2
fi
tool_name="$1"
shift

# pnpm checkout (pnpm-lock.yaml present) and pnpm available.
if [[ -f "$ROOT_DIR/pnpm-lock.yaml" ]] && command -v pnpm >/dev/null 2>&1; then
  exec pnpm exec "$tool_name" "$@"
fi
# bun checkout (either lockfile flavor) and bun available.
if { [[ -f "$ROOT_DIR/bun.lockb" ]] || [[ -f "$ROOT_DIR/bun.lock" ]]; } && command -v bun >/dev/null 2>&1; then
  exec bunx --bun "$tool_name" "$@"
fi
# Generic fallbacks: npm exec, then npx.
if command -v npm >/dev/null 2>&1; then
  exec npm exec -- "$tool_name" "$@"
fi
if command -v npx >/dev/null 2>&1; then
  exec npx "$tool_name" "$@"
fi
echo "Missing package manager: pnpm, bun, or npm required." >&2
exit 1

View File

@@ -0,0 +1,245 @@
import { promises as fs } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import {
ErrorCodes,
PROTOCOL_VERSION,
ProtocolSchemas,
} from "../src/gateway/protocol/schema.js";
// Minimal JSON-Schema subset: only the fields this generator inspects.
type JsonSchema = {
  type?: string | string[];
  properties?: Record<string, JsonSchema>;
  required?: string[];
  items?: JsonSchema;
  enum?: string[];
  patternProperties?: Record<string, JsonSchema>;
};
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const repoRoot = path.resolve(__dirname, "..");
// The generated Swift file is written to both app targets.
const outPaths = [
  path.join(
    repoRoot,
    "apps",
    "macos",
    "Sources",
    "MoltbotProtocol",
    "GatewayModels.swift",
  ),
  path.join(
    repoRoot,
    "apps",
    "shared",
    "MoltbotKit",
    "Sources",
    "MoltbotProtocol",
    "GatewayModels.swift",
  ),
];
// Swift file preamble: protocol version constant plus the ErrorCode enum
// derived from ErrorCodes (relies on camelCase's function hoisting).
const header = `// Generated by scripts/protocol-gen-swift.ts — do not edit by hand\nimport Foundation\n\npublic let GATEWAY_PROTOCOL_VERSION = ${PROTOCOL_VERSION}\n\npublic enum ErrorCode: String, Codable, Sendable {\n${Object.values(ErrorCodes)
  .map((c) => ` case ${camelCase(c)} = "${c}"`)
  .join("\n")}\n}\n`;
// Swift keywords that cannot be used as bare identifiers; safeName prefixes
// any collision with an underscore.
const reserved = new Set([
  "associatedtype",
  "class",
  "deinit",
  "enum",
  "extension",
  "fileprivate",
  "func",
  "import",
  "init",
  "inout",
  "internal",
  "let",
  "open",
  "operator",
  "private",
  "precedencegroup",
  "protocol",
  "public",
  "rethrows",
  "static",
  "struct",
  "subscript",
  "typealias",
  "var",
]);
/** Convert an arbitrary identifier (kebab/snake/spaced) to lowerCamelCase. */
function camelCase(input: string) {
  const words = input
    .replace(/[^a-zA-Z0-9]+/g, " ")
    .trim()
    .toLowerCase()
    .split(/\s+/);
  let result = "";
  for (let i = 0; i < words.length; i += 1) {
    const word = words[i];
    result += i === 0 ? word : word[0].toUpperCase() + word.slice(1);
  }
  return result;
}
// Map a JSON property name onto a legal Swift identifier, prefixing an
// underscore when the camelCased result collides with a reserved keyword.
function safeName(name: string) {
  const candidate = camelCase(name.replace(/-/g, "_"));
  return reserved.has(candidate) ? `_${candidate}` : candidate;
}
// filled later once schemas are loaded
const schemaNameByObject = new Map<object, string>();

/**
 * Map a JSON-Schema node to a Swift type name. Registered schema objects
 * resolve to their struct names; primitives map to the obvious Swift types;
 * unrecognized shapes fall back to AnyCodable. Non-required types get "?".
 */
function swiftType(schema: JsonSchema, required: boolean): string {
  const registered = schemaNameByObject.get(schema as object);
  let base: string;
  if (registered) {
    base = registered;
  } else {
    switch (schema.type) {
      case "string":
        base = "String";
        break;
      case "integer":
        base = "Int";
        break;
      case "number":
        base = "Double";
        break;
      case "boolean":
        base = "Bool";
        break;
      case "array":
        // Untyped items deliberately produce the AnyCodable fallback.
        base = `[${swiftType(schema.items ?? { type: "Any" }, true)}]`;
        break;
      default:
        if (schema.enum) {
          base = "String";
        } else if (schema.patternProperties || schema.type === "object") {
          base = "[String: AnyCodable]";
        } else {
          base = "AnyCodable";
        }
    }
  }
  return required ? base : `${base}?`;
}
// Emit one Codable Swift struct for an object schema: stored properties, a
// memberwise initializer, and CodingKeys mapping back to the JSON names.
function emitStruct(name: string, schema: JsonSchema): string {
  const props = schema.properties ?? {};
  const required = new Set(schema.required ?? []);
  const lines: string[] = [];
  lines.push(`public struct ${name}: Codable, Sendable {`);
  // Property-less schemas collapse to an empty struct.
  if (Object.keys(props).length === 0) {
    lines.push("}\n");
    return lines.join("\n");
  }
  const codingKeys: string[] = [];
  for (const [key, propSchema] of Object.entries(props)) {
    const propName = safeName(key);
    const propType = swiftType(propSchema, required.has(key));
    lines.push(` public let ${propName}: ${propType}`);
    // Explicit raw value only when the Swift name differs from the JSON key.
    if (propName !== key) {
      codingKeys.push(` case ${propName} = "${key}"`);
    } else {
      codingKeys.push(` case ${propName}`);
    }
  }
  // Memberwise init (optional params for non-required fields) + CodingKeys.
  lines.push("\n public init(\n" +
    Object.entries(props)
      .map(([key, prop]) => {
        const propName = safeName(key);
        const req = required.has(key);
        return ` ${propName}: ${swiftType(prop, true)}${req ? "" : "?"}`;
      })
      .join(",\n") +
    "\n ) {\n" +
    Object.entries(props)
      .map(([key]) => {
        const propName = safeName(key);
        return ` self.${propName} = ${propName}`;
      })
      .join("\n") +
    "\n }\n" +
    " private enum CodingKeys: String, CodingKey {\n" +
    codingKeys.join("\n") +
    "\n }\n}");
  lines.push("");
  return lines.join("\n");
}
// Emit the GatewayFrame enum: a tagged union over req/res/event frames with
// hand-written Codable conformance that switches on the JSON "type" field
// and round-trips unrecognized frames through the `unknown` case.
function emitGatewayFrame(): string {
  const cases = ["req", "res", "event"];
  // Wire tag -> associated Swift payload struct.
  const associated: Record<string, string> = {
    req: "RequestFrame",
    res: "ResponseFrame",
    event: "EventFrame",
  };
  const caseLines = cases.map((c) => ` case ${safeName(c)}(${associated[c]})`);
  const initLines = `
 private enum CodingKeys: String, CodingKey {
 case type
 }
 public init(from decoder: Decoder) throws {
 let typeContainer = try decoder.container(keyedBy: CodingKeys.self)
 let type = try typeContainer.decode(String.self, forKey: .type)
 switch type {
 case "req":
 self = .req(try RequestFrame(from: decoder))
 case "res":
 self = .res(try ResponseFrame(from: decoder))
 case "event":
 self = .event(try EventFrame(from: decoder))
 default:
 let container = try decoder.singleValueContainer()
 let raw = try container.decode([String: AnyCodable].self)
 self = .unknown(type: type, raw: raw)
 }
 }
 public func encode(to encoder: Encoder) throws {
 switch self {
 case .req(let v): try v.encode(to: encoder)
 case .res(let v): try v.encode(to: encoder)
 case .event(let v): try v.encode(to: encoder)
 case .unknown(_, let raw):
 var container = encoder.singleValueContainer()
 try container.encode(raw)
 }
 }
`;
  return [
    "public enum GatewayFrame: Codable, Sendable {",
    ...caseLines,
    " case unknown(type: String, raw: [String: AnyCodable])",
    initLines,
    "}",
    "",
  ].join("\n");
}
/**
 * Generate GatewayModels.swift from ProtocolSchemas and write it to every
 * configured output path.
 */
async function generate() {
  const definitions = Object.entries(ProtocolSchemas) as Array<
    [string, JsonSchema]
  >;
  // Register every schema object first so swiftType can resolve nested
  // references back to their struct names.
  for (const [name, schema] of definitions) {
    schemaNameByObject.set(schema as object, name);
  }
  // Value structs come first; the GatewayFrame enum refers to them.
  const structSections = definitions
    .filter(([name, schema]) => name !== "GatewayFrame" && schema.type === "object")
    .map(([name, schema]) => emitStruct(name, schema));
  const content = [header, ...structSections, emitGatewayFrame()].join("\n");
  for (const outPath of outPaths) {
    await fs.mkdir(path.dirname(outPath), { recursive: true });
    await fs.writeFile(outPath, content);
    console.log(`wrote ${outPath}`);
  }
}
// Entry point: regenerate the Swift protocol models; exit non-zero on error.
generate().catch((err) => {
  console.error(err);
  process.exit(1);
});

View File

@@ -0,0 +1,51 @@
import { promises as fs } from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { ProtocolSchemas } from "../src/gateway/protocol/schema.js";
// ESM has no __dirname; derive it from the module URL, then the repo root.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const repoRoot = path.resolve(__dirname, "..");
/**
 * Write dist/protocol.schema.json: a draft-07 schema whose root is a oneOf
 * over the three gateway frame kinds, discriminated by the "type" field.
 * Returns the output path and the compact serialized schema string.
 */
async function writeJsonSchema() {
  // Every named protocol schema becomes a reusable definition.
  const definitions: Record<string, unknown> = Object.fromEntries(
    Object.entries(ProtocolSchemas),
  );
  const rootSchema = {
    $schema: "http://json-schema.org/draft-07/schema#",
    $id: "https://moltbot.dev/protocol.schema.json",
    title: "Moltbot Gateway Protocol",
    description: "Handshake, request/response, and event frames for the Gateway WebSocket.",
    oneOf: [
      { $ref: "#/definitions/RequestFrame" },
      { $ref: "#/definitions/ResponseFrame" },
      { $ref: "#/definitions/EventFrame" },
    ],
    discriminator: {
      propertyName: "type",
      mapping: {
        req: "#/definitions/RequestFrame",
        res: "#/definitions/ResponseFrame",
        event: "#/definitions/EventFrame",
      },
    },
    definitions,
  };
  const distDir = path.join(repoRoot, "dist");
  await fs.mkdir(distDir, { recursive: true });
  const jsonSchemaPath = path.join(distDir, "protocol.schema.json");
  await fs.writeFile(jsonSchemaPath, JSON.stringify(rootSchema, null, 2));
  console.log(`wrote ${jsonSchemaPath}`);
  return { jsonSchemaPath, schemaString: JSON.stringify(rootSchema) };
}
// Entry point wrapper; additional outputs can be added here later.
async function main() {
  await writeJsonSchema();
}
main().catch((err) => {
  console.error(err);
  process.exit(1);
});

View File

@@ -0,0 +1,60 @@
import { createWebFetchTool } from "../src/agents/tools/web-tools.js";
// Default smoke-test targets covering a spread of real-world page shapes.
const DEFAULT_URLS = [
  "https://example.com/",
  "https://news.ycombinator.com/",
  "https://www.reddit.com/r/javascript/",
  "https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent",
  "https://httpbin.org/html",
];
// Any CLI arguments replace the default URL list entirely.
const urls = process.argv.slice(2);
const targets = urls.length > 0 ? urls : DEFAULT_URLS;
/**
 * Fetch a URL through the web_fetch tool with readability extraction
 * enabled and caching/Firecrawl disabled; returns the tool's details blob.
 * Rejects when readability is false (basic extraction was removed).
 */
async function runFetch(url: string, readability: boolean) {
  if (!readability) {
    throw new Error("Basic extraction removed. Set readability=true or enable Firecrawl.");
  }
  const fetchTool = createWebFetchTool({
    config: {
      tools: {
        web: { fetch: { readability, cacheTtlMinutes: 0, firecrawl: { enabled: false } } },
      },
    },
    sandboxed: false,
  });
  if (!fetchTool) throw new Error("web_fetch tool is disabled");
  const result = await fetchTool.execute("test", { url, extractMode: "markdown" });
  return result.details as {
    text?: string;
    title?: string;
    extractor?: string;
    length?: number;
    truncated?: boolean;
  };
}
// Hard-cut a string to at most `max` characters; falsy input becomes "".
// NOTE(review): no ellipsis is appended on truncation — the original's
// pointless template wrapper suggests a "…" suffix may have been dropped;
// current behavior is preserved here.
function truncate(value: string, max = 160): string {
  if (!value) return "";
  return value.length > max ? value.slice(0, max) : value;
}
/** Fetch every target URL sequentially and print a short readability summary. */
async function run() {
  for (const url of targets) {
    console.log(`\n=== ${url}`);
    const readable = await runFetch(url, true);
    const extractor = readable.extractor ?? "unknown";
    const length = readable.length ?? 0;
    const title = truncate(readable.title ?? "", 80);
    console.log(`readability: ${extractor} len=${length} title=${title}`);
    if (readable.text) {
      console.log(`readability sample: ${truncate(readable.text)}`);
    }
  }
}

run().catch((error) => {
  console.error(error);
  process.exit(1);
});

View File

@@ -0,0 +1,106 @@
#!/usr/bin/env -S node --import tsx
import { execSync } from "node:child_process";
import { readdirSync, readFileSync } from "node:fs";
import { join, resolve } from "node:path";
// Shape of the JSON emitted by `npm pack --json`.
type PackFile = { path: string };
type PackResult = { files?: PackFile[] };
// Build outputs that must be present in the published npm tarball.
const requiredPaths = [
  "dist/discord/send.js",
  "dist/hooks/gmail.js",
  "dist/whatsapp/normalize.js",
];
// Paths that must never leak into the tarball (packaged macOS app bundle).
const forbiddenPrefixes = ["dist/Moltbot.app/"];
// Minimal slice of package.json used by this script.
type PackageJson = {
  name?: string;
  version?: string;
};
/** Run `npm pack --dry-run` and parse its JSON report of tarball contents. */
function runPackDry(): PackResult[] {
  const output = execSync("npm pack --dry-run --json --ignore-scripts", {
    encoding: "utf8",
    stdio: ["ignore", "pipe", "pipe"],
  });
  const parsed = JSON.parse(output) as PackResult[];
  return parsed;
}
/**
 * Ensure every extension's package.json version matches the root package
 * version. Directories without a parseable package.json, or missing name or
 * version fields, are skipped. Exits the process with code 1 on any mismatch.
 */
function checkPluginVersions() {
  const rootPkg = JSON.parse(readFileSync(resolve("package.json"), "utf8")) as PackageJson;
  const expected = rootPkg.version;
  if (!expected) {
    console.error("release-check: root package.json missing version.");
    process.exit(1);
  }
  const extensionsDir = resolve("extensions");
  const badPlugins: string[] = [];
  for (const dirent of readdirSync(extensionsDir, { withFileTypes: true })) {
    if (!dirent.isDirectory()) continue;
    const pluginPackagePath = join(extensionsDir, dirent.name, "package.json");
    let pluginPkg: PackageJson;
    try {
      pluginPkg = JSON.parse(readFileSync(pluginPackagePath, "utf8")) as PackageJson;
    } catch {
      // Not every directory is a plugin; skip unreadable/invalid manifests.
      continue;
    }
    if (!pluginPkg.name || !pluginPkg.version) continue;
    if (pluginPkg.version !== expected) {
      badPlugins.push(`${pluginPkg.name} (${pluginPkg.version})`);
    }
  }
  if (badPlugins.length > 0) {
    console.error(`release-check: plugin versions must match ${expected}:`);
    for (const item of badPlugins) {
      console.error(`  - ${item}`);
    }
    console.error("release-check: run `pnpm plugins:sync` to align plugin versions.");
    process.exit(1);
  }
}
function main() {
checkPluginVersions();
const results = runPackDry();
const files = results.flatMap((entry) => entry.files ?? []);
const paths = new Set(files.map((file) => file.path));
const missing = requiredPaths.filter((path) => !paths.has(path));
const forbidden = [...paths].filter((path) =>
forbiddenPrefixes.some((prefix) => path.startsWith(prefix)),
);
if (missing.length > 0 || forbidden.length > 0) {
if (missing.length > 0) {
console.error("release-check: missing files in npm pack:");
for (const path of missing) {
console.error(` - ${path}`);
}
}
if (forbidden.length > 0) {
console.error("release-check: forbidden files in npm pack:");
for (const path of forbidden) {
console.error(` - ${path}`);
}
}
process.exit(1);
}
console.log("release-check: npm pack contents look OK.");
}
main();

View File

@@ -0,0 +1,3 @@
// Minimal reproduction: importing the logging subsystem under tsx should
// succeed; the log line below confirms the module loaded.
import "../../src/logging/subsystem.js";
console.log("tsx-name-repro: loaded logging/subsystem");

View File

@@ -0,0 +1,269 @@
#!/usr/bin/env bash
# Reset Moltbot like Trimmy: kill running instances, rebuild, repackage, relaunch, verify.
set -euo pipefail
# Repository root (this script lives one directory below it).
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
# Optional override for the installed app bundle location.
APP_BUNDLE="${CLAWDBOT_APP_BUNDLE:-}"
# pgrep/pkill patterns for every Moltbot build flavor we may need to stop.
APP_PROCESS_PATTERN="Moltbot.app/Contents/MacOS/Moltbot"
DEBUG_PROCESS_PATTERN="${ROOT_DIR}/apps/macos/.build/debug/Moltbot"
LOCAL_PROCESS_PATTERN="${ROOT_DIR}/apps/macos/.build-local/debug/Moltbot"
RELEASE_PROCESS_PATTERN="${ROOT_DIR}/apps/macos/.build/release/Moltbot"
LAUNCH_AGENT="${HOME}/Library/LaunchAgents/bot.molt.mac.plist"
# Per-checkout restart lock: key derived from ROOT_DIR so parallel checkouts don't collide.
LOCK_KEY="$(printf '%s' "${ROOT_DIR}" | shasum -a 256 | cut -c1-8)"
LOCK_DIR="${TMPDIR:-/tmp}/moltbot-restart-${LOCK_KEY}"
LOCK_PID_FILE="${LOCK_DIR}/pid"
WAIT_FOR_LOCK=0
LOG_PATH="${CLAWDBOT_RESTART_LOG:-/tmp/moltbot-restart.log}"
# Signing flags; AUTO_DETECT_SIGNING picks a mode when neither --sign nor --no-sign is forced.
NO_SIGN=0
SIGN=0
AUTO_DETECT_SIGNING=1
GATEWAY_WAIT_SECONDS="${CLAWDBOT_GATEWAY_WAIT_SECONDS:-0}"
# Marker file that suppresses LaunchAgent writes for unsigned dev builds.
LAUNCHAGENT_DISABLE_MARKER="${HOME}/.clawdbot/disable-launchagent"
ATTACH_ONLY=1
log() { printf '%s\n' "$*"; }
fail() { printf 'ERROR: %s\n' "$*" >&2; exit 1; }
# Ensure local node binaries (rolldown, tsc, pnpm) are discoverable for the steps below.
export PATH="${ROOT_DIR}/node_modules/.bin:${PATH}"
# Run a labeled step, echoing the label first; abort the script on failure.
run_step() {
  local label="$1"
  shift
  log "==> ${label}"
  "$@" || fail "${label} failed"
}
# Remove the restart lock directory (registered as EXIT/INT/TERM trap below).
cleanup() {
  if [[ -d "${LOCK_DIR}" ]]; then
    rm -rf "${LOCK_DIR}"
  fi
}
# Take the per-checkout restart lock via atomic mkdir. If another live restart
# holds it, either wait (--wait) or exit 0; locks held by dead pids are
# reclaimed and the loop retries.
acquire_lock() {
  while true; do
    if mkdir "${LOCK_DIR}" 2>/dev/null; then
      echo "$$" > "${LOCK_PID_FILE}"
      return 0
    fi
    local existing_pid=""
    if [[ -f "${LOCK_PID_FILE}" ]]; then
      existing_pid="$(cat "${LOCK_PID_FILE}" 2>/dev/null || true)"
    fi
    if [[ -n "${existing_pid}" ]] && kill -0 "${existing_pid}" 2>/dev/null; then
      if [[ "${WAIT_FOR_LOCK}" == "1" ]]; then
        log "==> Another restart is running (pid ${existing_pid}); waiting..."
        while kill -0 "${existing_pid}" 2>/dev/null; do
          sleep 1
        done
        continue
      fi
      log "==> Another restart is running (pid ${existing_pid}); re-run with --wait."
      exit 0
    fi
    # Lock holder is gone (or pid file unreadable); clear the stale lock and retry.
    rm -rf "${LOCK_DIR}"
  done
}
# True when the keychain holds a usable codesigning identity.
check_signing_keys() {
  security find-identity -p codesigning -v 2>/dev/null \
    | grep -Eq '(Developer ID Application|Apple Distribution|Apple Development)'
}
trap cleanup EXIT INT TERM
# Parse CLI flags; unrecognized args are silently ignored.
for arg in "$@"; do
  case "${arg}" in
    --wait|-w) WAIT_FOR_LOCK=1 ;;
    --no-sign) NO_SIGN=1; AUTO_DETECT_SIGNING=0 ;;
    --sign) SIGN=1; AUTO_DETECT_SIGNING=0 ;;
    --attach-only) ATTACH_ONLY=1 ;;
    --no-attach-only) ATTACH_ONLY=0 ;;
    --help|-h)
      log "Usage: $(basename "$0") [--wait] [--no-sign] [--sign] [--attach-only|--no-attach-only]"
      log "  --wait             Wait for other restart to complete instead of exiting"
      log "  --no-sign          Force no code signing (fastest for development)"
      log "  --sign             Force code signing (will fail if no signing key available)"
      log "  --attach-only      Launch app with --attach-only (skip launchd install)"
      log "  --no-attach-only   Launch app without attach-only override"
      log ""
      log "Env:"
      log "  CLAWDBOT_GATEWAY_WAIT_SECONDS=0   Wait time before gateway port check (unsigned only)"
      log ""
      log "Unsigned recovery:"
      log "  node dist/entry.js daemon install --force --runtime node"
      log "  node dist/entry.js daemon restart"
      log ""
      log "Reset unsigned overrides:"
      log "  rm ~/.clawdbot/disable-launchagent"
      log ""
      log "Default behavior: Auto-detect signing keys, fallback to --no-sign if none found"
      exit 0
      ;;
    *) ;;
  esac
done
if [[ "$NO_SIGN" -eq 1 && "$SIGN" -eq 1 ]]; then
  fail "Cannot use --sign and --no-sign together"
fi
# Mirror all output to LOG_PATH (fresh file each run) for postmortems.
mkdir -p "$(dirname "$LOG_PATH")"
rm -f "$LOG_PATH"
exec > >(tee "$LOG_PATH") 2>&1
log "==> Log: ${LOG_PATH}"
if [[ "$NO_SIGN" -eq 1 ]]; then
  log "==> Using --no-sign (unsigned flow enabled)"
fi
if [[ "$ATTACH_ONLY" -eq 1 ]]; then
  log "==> Using --attach-only (skip launchd install)"
fi
acquire_lock
# Kill every known Moltbot process variant (installed app, debug/local/release
# builds, and the bare binary). Retries up to 10 times with a short pause.
# Previously the loop could exhaust all retries and fall through silently even
# though processes survived; we now log a warning in that case. Still returns 0
# so `set -e` callers continue (matching the original behavior).
kill_all_moltbot() {
  for _ in {1..10}; do
    pkill -f "${APP_PROCESS_PATTERN}" 2>/dev/null || true
    pkill -f "${DEBUG_PROCESS_PATTERN}" 2>/dev/null || true
    pkill -f "${LOCAL_PROCESS_PATTERN}" 2>/dev/null || true
    pkill -f "${RELEASE_PROCESS_PATTERN}" 2>/dev/null || true
    pkill -x "Moltbot" 2>/dev/null || true
    if ! pgrep -f "${APP_PROCESS_PATTERN}" >/dev/null 2>&1 \
      && ! pgrep -f "${DEBUG_PROCESS_PATTERN}" >/dev/null 2>&1 \
      && ! pgrep -f "${LOCAL_PROCESS_PATTERN}" >/dev/null 2>&1 \
      && ! pgrep -f "${RELEASE_PROCESS_PATTERN}" >/dev/null 2>&1 \
      && ! pgrep -x "Moltbot" >/dev/null 2>&1; then
      return 0
    fi
    sleep 0.3
  done
  log "WARN: Moltbot processes still running after 10 kill attempts"
  return 0
}
# Unload the per-user LaunchAgent job so launchd does not respawn the app.
stop_launch_agent() {
  launchctl bootout gui/"$UID"/bot.molt.mac 2>/dev/null || true
}
# 1) Kill all running instances first.
log "==> Killing existing Moltbot instances"
kill_all_moltbot
stop_launch_agent
# Bundle Gateway-hosted Canvas A2UI assets.
run_step "bundle canvas a2ui" bash -lc "cd '${ROOT_DIR}' && pnpm canvas:a2ui:bundle"
# 2) Rebuild into the same path the packager consumes (.build).
run_step "clean build cache" bash -lc "cd '${ROOT_DIR}/apps/macos' && rm -rf .build .build-swift .swiftpm 2>/dev/null || true"
run_step "swift build" bash -lc "cd '${ROOT_DIR}/apps/macos' && swift build -q --product Moltbot"
# Decide the signing mode when neither --sign nor --no-sign was given.
if [ "$AUTO_DETECT_SIGNING" -eq 1 ]; then
  if check_signing_keys; then
    log "==> Signing keys detected, will code sign"
    SIGN=1
  else
    log "==> No signing keys found, will skip code signing (--no-sign)"
    NO_SIGN=1
  fi
fi
if [ "$NO_SIGN" -eq 1 ]; then
  # Unsigned: ad-hoc sign and stop the app from (re)installing its LaunchAgent.
  export ALLOW_ADHOC_SIGNING=1
  export SIGN_IDENTITY="-"
  mkdir -p "${HOME}/.clawdbot"
  run_step "disable launchagent writes" /usr/bin/touch "${LAUNCHAGENT_DISABLE_MARKER}"
elif [ "$SIGN" -eq 1 ]; then
  if ! check_signing_keys; then
    fail "No signing identity found. Use --no-sign or install a signing key."
  fi
  unset ALLOW_ADHOC_SIGNING
  unset SIGN_IDENTITY
fi
# 3) Package app (no embedded gateway).
run_step "package app" bash -lc "cd '${ROOT_DIR}' && SKIP_TSC=${SKIP_TSC:-1} '${ROOT_DIR}/scripts/package-mac-app.sh'"
# Resolve which bundle to launch: explicit override, /Applications, then dist/.
choose_app_bundle() {
  if [[ -n "${APP_BUNDLE}" && -d "${APP_BUNDLE}" ]]; then
    return 0
  fi
  if [[ -d "/Applications/Moltbot.app" ]]; then
    APP_BUNDLE="/Applications/Moltbot.app"
    return 0
  fi
  if [[ -d "${ROOT_DIR}/dist/Moltbot.app" ]]; then
    APP_BUNDLE="${ROOT_DIR}/dist/Moltbot.app"
    if [[ ! -d "${APP_BUNDLE}/Contents/Frameworks/Sparkle.framework" ]]; then
      fail "dist/Moltbot.app missing Sparkle after packaging"
    fi
    return 0
  fi
  fail "App bundle not found. Set CLAWDBOT_APP_BUNDLE to your installed Moltbot.app"
}
choose_app_bundle
# When signed, clear any previous launchagent override marker.
if [[ "$NO_SIGN" -ne 1 && "$ATTACH_ONLY" -ne 1 && -f "${LAUNCHAGENT_DISABLE_MARKER}" ]]; then
  run_step "clear launchagent disable marker" /bin/rm -f "${LAUNCHAGENT_DISABLE_MARKER}"
fi
# When unsigned, ensure the gateway LaunchAgent targets the repo CLI (before the app launches).
# This reduces noisy "could not connect" errors during app startup.
if [ "$NO_SIGN" -eq 1 ] && [ "$ATTACH_ONLY" -ne 1 ]; then
  run_step "install gateway launch agent (unsigned)" bash -lc "cd '${ROOT_DIR}' && node dist/entry.js daemon install --force --runtime node"
  run_step "restart gateway daemon (unsigned)" bash -lc "cd '${ROOT_DIR}' && node dist/entry.js daemon restart"
  if [[ "${GATEWAY_WAIT_SECONDS}" -gt 0 ]]; then
    run_step "wait for gateway (unsigned)" sleep "${GATEWAY_WAIT_SECONDS}"
  fi
  # Read the configured gateway port (default 18789) from ~/.clawdbot/moltbot.json.
  GATEWAY_PORT="$(
    node -e '
      const fs = require("node:fs");
      const path = require("node:path");
      try {
        const raw = fs.readFileSync(path.join(process.env.HOME, ".clawdbot", "moltbot.json"), "utf8");
        const cfg = JSON.parse(raw);
        const port = cfg && cfg.gateway && typeof cfg.gateway.port === "number" ? cfg.gateway.port : 18789;
        process.stdout.write(String(port));
      } catch {
        process.stdout.write("18789");
      }
    '
  )"
  run_step "verify gateway port ${GATEWAY_PORT} (unsigned)" bash -lc "lsof -iTCP:${GATEWAY_PORT} -sTCP:LISTEN | head -n 5 || true"
fi
# Extra open(1) args when launching in attach-only mode.
ATTACH_ONLY_ARGS=()
if [[ "$ATTACH_ONLY" -eq 1 ]]; then
  ATTACH_ONLY_ARGS+=(--args --attach-only)
fi
# 4) Launch the installed app in the foreground so the menu bar extra appears.
# LaunchServices can inherit a huge environment from this shell (secrets, prompt vars, etc.).
# That can cause launchd spawn failures and is undesirable for a GUI app anyway.
# env -i starts from a clean slate and whitelists only the basics.
run_step "launch app" env -i \
  HOME="${HOME}" \
  USER="${USER:-$(id -un)}" \
  LOGNAME="${LOGNAME:-$(id -un)}" \
  TMPDIR="${TMPDIR:-/tmp}" \
  PATH="/usr/bin:/bin:/usr/sbin:/sbin" \
  LANG="${LANG:-en_US.UTF-8}" \
  /usr/bin/open "${APP_BUNDLE}" ${ATTACH_ONLY_ARGS[@]:+"${ATTACH_ONLY_ARGS[@]}"}
# 5) Verify the app is alive.
sleep 1.5
if pgrep -f "${APP_PROCESS_PATTERN}" >/dev/null 2>&1; then
  log "OK: Moltbot is running."
else
  fail "App exited immediately. Check ${LOG_PATH} or Console.app (User Reports)."
fi
# For unsigned installs, dump the gateway LaunchAgent plist for debugging.
if [ "$NO_SIGN" -eq 1 ] && [ "$ATTACH_ONLY" -ne 1 ]; then
  run_step "show gateway launch agent args (unsigned)" bash -lc "/usr/bin/plutil -p '${HOME}/Library/LaunchAgents/bot.molt.gateway.plist' | head -n 40 || true"
fi

View File

@@ -0,0 +1,140 @@
#!/usr/bin/env node
import { spawn } from "node:child_process";
import fs from "node:fs";
import path from "node:path";
import process from "node:process";
// CLI args forwarded verbatim to dist/entry.js.
const args = process.argv.slice(2);
const env = { ...process.env };
const cwd = process.cwd();
// TypeScript compiler: tsgo by default, tsc when CLAWDBOT_TS_COMPILER=tsc.
const compiler = env.CLAWDBOT_TS_COMPILER === "tsc" ? "tsc" : "tsgo";
const projectArgs = ["--project", "tsconfig.json"];
// Build output locations and the freshness stamp used to skip rebuilds.
const distRoot = path.join(cwd, "dist");
const distEntry = path.join(distRoot, "entry.js");
const buildStampPath = path.join(distRoot, ".buildstamp");
const srcRoot = path.join(cwd, "src");
// Files whose modification always forces a rebuild.
const configFiles = [path.join(cwd, "tsconfig.json"), path.join(cwd, "package.json")];
// Last-modified time in milliseconds for a path, or null when unreadable/missing.
const statMtime = (filePath) => {
  try {
    const { mtimeMs } = fs.statSync(filePath);
    return mtimeMs;
  } catch {
    return null;
  }
};
// True for files under srcRoot that should not trigger rebuilds (tests/helpers).
// Paths outside srcRoot are never excluded.
const isExcludedSource = (filePath) => {
  const relativePath = path.relative(srcRoot, filePath);
  if (relativePath.startsWith("..")) return false;
  const excludedSuffixes = [".test.ts", ".test.tsx", "test-helpers.ts"];
  return excludedSuffixes.some((suffix) => relativePath.endsWith(suffix));
};
// Newest file mtime (ms) anywhere under dirPath, skipping files for which
// shouldSkip returns true. Unreadable directories are ignored; returns null
// when no file contributes a timestamp.
const findLatestMtime = (dirPath, shouldSkip) => {
  let newest = null;
  const pending = [dirPath];
  while (pending.length > 0) {
    const dir = pending.pop();
    if (!dir) continue;
    let dirEntries;
    try {
      dirEntries = fs.readdirSync(dir, { withFileTypes: true });
    } catch {
      continue;
    }
    for (const dirent of dirEntries) {
      const entryPath = path.join(dir, dirent.name);
      if (dirent.isDirectory()) {
        pending.push(entryPath);
      } else if (dirent.isFile() && !shouldSkip?.(entryPath)) {
        const mtime = statMtime(entryPath);
        if (mtime != null && (newest == null || mtime > newest)) {
          newest = mtime;
        }
      }
    }
  }
  return newest;
};
// Decide whether dist/ must be rebuilt: rebuild when forced via
// CLAWDBOT_FORCE_BUILD=1, when the stamp or entry output is missing, or when
// any config file or non-test source file is newer than the stamp.
const shouldBuild = () => {
  if (env.CLAWDBOT_FORCE_BUILD === "1") return true;
  const stamp = statMtime(buildStampPath);
  if (stamp == null || statMtime(distEntry) == null) return true;
  const configChanged = configFiles.some((filePath) => {
    const mtime = statMtime(filePath);
    return mtime != null && mtime > stamp;
  });
  if (configChanged) return true;
  const latestSource = findLatestMtime(srcRoot, isExcludedSource);
  return latestSource != null && latestSource > stamp;
};
// Runner diagnostics on stderr; silenced by CLAWDBOT_RUNNER_LOG=0.
const logRunner = (message) => {
  if (env.CLAWDBOT_RUNNER_LOG !== "0") {
    process.stderr.write(`[moltbot] ${message}\n`);
  }
};
// Spawn dist/entry.js with the forwarded args and mirror its exit status.
// A signal-terminated child is mapped to exit code 1.
const runNode = () => {
  const child = spawn(process.execPath, ["dist/entry.js", ...args], {
    cwd,
    env,
    stdio: "inherit",
  });
  child.on("exit", (code, signal) => {
    process.exit(signal ? 1 : (code ?? 1));
  });
};
// Record a successful build by writing a timestamp stamp file.
// Failures are logged but never block startup (best-effort).
const writeBuildStamp = () => {
  try {
    fs.mkdirSync(distRoot, { recursive: true });
    const stampContents = `${Date.now()}\n`;
    fs.writeFileSync(buildStampPath, stampContents);
  } catch (error) {
    const reason = error?.message ?? "unknown error";
    logRunner(`Failed to write build stamp: ${reason}`);
  }
};
// Fast path: dist is up to date, just run the CLI.
if (!shouldBuild()) {
  runNode();
} else {
  logRunner("Building TypeScript (dist is stale).");
  const pnpmArgs = ["exec", compiler, ...projectArgs];
  // On Windows pnpm must be invoked through cmd.exe.
  const buildCmd = process.platform === "win32" ? "cmd.exe" : "pnpm";
  const buildArgs =
    process.platform === "win32" ? ["/d", "/s", "/c", "pnpm", ...pnpmArgs] : pnpmArgs;
  const build = spawn(buildCmd, buildArgs, {
    cwd,
    env,
    stdio: "inherit",
  });
  build.on("exit", (code, signal) => {
    if (signal) {
      process.exit(1);
      return;
    }
    if (code !== 0 && code !== null) {
      process.exit(code);
      return;
    }
    // Build succeeded: stamp it so the next run can skip, then start the CLI.
    writeBuildStamp();
    runNode();
  });
}

View File

@@ -0,0 +1,67 @@
#!/usr/bin/env bash
set -euo pipefail
# Container entrypoint: start Xvfb, Chromium with a CDP endpoint, and
# optionally VNC/noVNC so a human can watch/drive the headful browser.
export DISPLAY=:1
export HOME=/tmp/moltbot-home
export XDG_CONFIG_HOME="${HOME}/.config"
export XDG_CACHE_HOME="${HOME}/.cache"
CDP_PORT="${CLAWDBOT_BROWSER_CDP_PORT:-9222}"
VNC_PORT="${CLAWDBOT_BROWSER_VNC_PORT:-5900}"
NOVNC_PORT="${CLAWDBOT_BROWSER_NOVNC_PORT:-6080}"
ENABLE_NOVNC="${CLAWDBOT_BROWSER_ENABLE_NOVNC:-1}"
HEADLESS="${CLAWDBOT_BROWSER_HEADLESS:-0}"
mkdir -p "${HOME}" "${HOME}/.chrome" "${XDG_CONFIG_HOME}" "${XDG_CACHE_HOME}"
# Virtual framebuffer for the (possibly headful) browser.
Xvfb :1 -screen 0 1280x800x24 -ac -nolisten tcp &
if [[ "${HEADLESS}" == "1" ]]; then
  CHROME_ARGS=(
    "--headless=new"
    "--disable-gpu"
  )
else
  CHROME_ARGS=()
fi
# Chromium only binds CDP to localhost; it listens on an adjacent port and
# socat (below) re-exposes it on CDP_PORT for external clients.
if [[ "${CDP_PORT}" -ge 65535 ]]; then
  CHROME_CDP_PORT="$((CDP_PORT - 1))"
else
  CHROME_CDP_PORT="$((CDP_PORT + 1))"
fi
CHROME_ARGS+=(
  "--remote-debugging-address=127.0.0.1"
  "--remote-debugging-port=${CHROME_CDP_PORT}"
  "--user-data-dir=${HOME}/.chrome"
  "--no-first-run"
  "--no-default-browser-check"
  "--disable-dev-shm-usage"
  "--disable-background-networking"
  "--disable-features=TranslateUI"
  "--disable-breakpad"
  "--disable-crash-reporter"
  "--metrics-recording-only"
  "--no-sandbox"
)
chromium "${CHROME_ARGS[@]}" about:blank &
# Wait up to ~5s for the DevTools endpoint to come up.
for _ in $(seq 1 50); do
  if curl -sS --max-time 1 "http://127.0.0.1:${CHROME_CDP_PORT}/json/version" >/dev/null; then
    break
  fi
  sleep 0.1
done
# Forward the external CDP port to Chromium's localhost-only endpoint.
socat \
  TCP-LISTEN:"${CDP_PORT}",fork,reuseaddr,bind=0.0.0.0 \
  TCP:127.0.0.1:"${CHROME_CDP_PORT}" &
if [[ "${ENABLE_NOVNC}" == "1" && "${HEADLESS}" != "1" ]]; then
  x11vnc -display :1 -rfbport "${VNC_PORT}" -shared -forever -nopw -localhost &
  websockify --web /usr/share/novnc/ "${NOVNC_PORT}" "localhost:${VNC_PORT}" &
fi
# Exit as soon as any background service dies so the container can restart.
wait -n

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -euo pipefail
# Build the sandbox browser Docker image from Dockerfile.sandbox-browser.
IMAGE_NAME="moltbot-sandbox-browser:bookworm-slim"
docker build -t "${IMAGE_NAME}" -f Dockerfile.sandbox-browser .
echo "Built ${IMAGE_NAME}"

View File

@@ -0,0 +1,65 @@
#!/usr/bin/env bash
set -euo pipefail
# Build a "common" sandbox image on top of the base sandbox image,
# preinstalling a broad toolchain plus optional pnpm, bun, and Homebrew.
# Every knob below is overridable via environment variables.
BASE_IMAGE="${BASE_IMAGE:-moltbot-sandbox:bookworm-slim}"
TARGET_IMAGE="${TARGET_IMAGE:-moltbot-sandbox-common:bookworm-slim}"
PACKAGES="${PACKAGES:-curl wget jq coreutils grep nodejs npm python3 git ca-certificates golang-go rustc cargo unzip pkg-config libasound2-dev build-essential file}"
INSTALL_PNPM="${INSTALL_PNPM:-1}"
INSTALL_BUN="${INSTALL_BUN:-1}"
BUN_INSTALL_DIR="${BUN_INSTALL_DIR:-/opt/bun}"
INSTALL_BREW="${INSTALL_BREW:-1}"
BREW_INSTALL_DIR="${BREW_INSTALL_DIR:-/home/linuxbrew/.linuxbrew}"
# Build the base image first if it is not present locally.
if ! docker image inspect "${BASE_IMAGE}" >/dev/null 2>&1; then
  echo "Base image missing: ${BASE_IMAGE}"
  echo "Building base image via scripts/sandbox-setup.sh..."
  scripts/sandbox-setup.sh
fi
echo "Building ${TARGET_IMAGE} with: ${PACKAGES}"
# The Dockerfile is fed via stdin heredoc. Unescaped ${VAR} expands in this
# shell; \${VAR} survives into the Dockerfile as a build ARG/ENV reference.
docker build \
  -t "${TARGET_IMAGE}" \
  --build-arg INSTALL_PNPM="${INSTALL_PNPM}" \
  --build-arg INSTALL_BUN="${INSTALL_BUN}" \
  --build-arg BUN_INSTALL_DIR="${BUN_INSTALL_DIR}" \
  --build-arg INSTALL_BREW="${INSTALL_BREW}" \
  --build-arg BREW_INSTALL_DIR="${BREW_INSTALL_DIR}" \
  - <<EOF
FROM ${BASE_IMAGE}
ENV DEBIAN_FRONTEND=noninteractive
ARG INSTALL_PNPM=1
ARG INSTALL_BUN=1
ARG BUN_INSTALL_DIR=/opt/bun
ARG INSTALL_BREW=1
ARG BREW_INSTALL_DIR=/home/linuxbrew/.linuxbrew
ENV BUN_INSTALL=\${BUN_INSTALL_DIR}
ENV HOMEBREW_PREFIX="\${BREW_INSTALL_DIR}"
ENV HOMEBREW_CELLAR="\${BREW_INSTALL_DIR}/Cellar"
ENV HOMEBREW_REPOSITORY="\${BREW_INSTALL_DIR}/Homebrew"
ENV PATH="\${BUN_INSTALL_DIR}/bin:\${BREW_INSTALL_DIR}/bin:\${BREW_INSTALL_DIR}/sbin:\${PATH}"
RUN apt-get update \\
  && apt-get install -y --no-install-recommends ${PACKAGES} \\
  && rm -rf /var/lib/apt/lists/*
RUN if [ "\${INSTALL_PNPM}" = "1" ]; then npm install -g pnpm; fi
RUN if [ "\${INSTALL_BUN}" = "1" ]; then \\
  curl -fsSL https://bun.sh/install | bash; \\
  ln -sf "\${BUN_INSTALL_DIR}/bin/bun" /usr/local/bin/bun; \\
  fi
RUN if [ "\${INSTALL_BREW}" = "1" ]; then \\
  if ! id -u linuxbrew >/dev/null 2>&1; then useradd -m -s /bin/bash linuxbrew; fi; \\
  mkdir -p "\${BREW_INSTALL_DIR}"; \\
  chown -R linuxbrew:linuxbrew "\$(dirname "\${BREW_INSTALL_DIR}")"; \\
  su - linuxbrew -c "NONINTERACTIVE=1 CI=1 /bin/bash -c '\$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)'"; \\
  if [ ! -e "\${BREW_INSTALL_DIR}/Library" ]; then ln -s "\${BREW_INSTALL_DIR}/Homebrew/Library" "\${BREW_INSTALL_DIR}/Library"; fi; \\
  if [ ! -x "\${BREW_INSTALL_DIR}/bin/brew" ]; then echo "brew install failed"; exit 1; fi; \\
  ln -sf "\${BREW_INSTALL_DIR}/bin/brew" /usr/local/bin/brew; \\
  fi
EOF
cat <<NOTE
Built ${TARGET_IMAGE}.
To use it, set agents.defaults.sandbox.docker.image to "${TARGET_IMAGE}" and restart.
If you want a clean re-create, remove old sandbox containers:
  docker rm -f \$(docker ps -aq --filter label=moltbot.sandbox=1)
NOTE

View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -euo pipefail
# Build the base sandbox Docker image from Dockerfile.sandbox.
IMAGE_NAME="moltbot-sandbox:bookworm-slim"
docker build -t "${IMAGE_NAME}" -f Dockerfile.sandbox .
echo "Built ${IMAGE_NAME}"

View File

@@ -0,0 +1,119 @@
#!/bin/bash
# Setup Moltbot Auth Management System
# Run this once to set up:
#   1. Long-lived Claude Code token
#   2. Auth monitoring with notifications
#   3. Instructions for Termux widgets
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
echo "=== Moltbot Auth System Setup ==="
echo ""
# Step 1: Check current auth status (best-effort; never aborts setup).
echo "Step 1: Checking current auth status..."
"$SCRIPT_DIR/claude-auth-status.sh" full || true
echo ""
# Step 2: Set up long-lived token
echo "Step 2: Long-lived token setup"
echo ""
echo "Option A: Use 'claude setup-token' (recommended)"
echo "  - Creates a long-lived API token"
echo "  - No daily re-auth needed"
echo "  - Run: claude setup-token"
echo ""
echo "Would you like to set up a long-lived token now? [y/N]"
read -r SETUP_TOKEN
# Any answer starting with y/Y proceeds; everything else skips.
if [[ "$SETUP_TOKEN" =~ ^[Yy] ]]; then
  echo ""
  echo "Opening https://console.anthropic.com/settings/api-keys"
  echo "Create a new key or copy existing one, then paste below."
  echo ""
  claude setup-token
fi
echo ""
# Step 3: Set up auth monitoring
echo "Step 3: Auth monitoring setup"
echo ""
echo "The auth monitor checks expiry every 30 minutes and notifies you."
echo ""
echo "Configure notification channels:"
echo ""
# Check for ntfy
echo "  ntfy.sh: Free push notifications to your phone"
echo "    1. Install ntfy app on your phone"
echo "    2. Subscribe to a topic (e.g., 'moltbot-alerts')"
echo ""
echo "Enter ntfy.sh topic (or leave blank to skip):"
read -r NTFY_TOPIC
# Phone notification
echo ""
echo "  Moltbot message: Send warning via Moltbot itself"
echo "Enter your phone number for alerts (or leave blank to skip):"
read -r PHONE_NUMBER
# Uncomment/replace the Environment= lines in the unit template with the
# user's answers.
# NOTE(review): sed -i with no suffix assumes GNU sed; on BSD/macOS this
# would need `sed -i ''` — confirm target platforms.
SERVICE_FILE="$SCRIPT_DIR/systemd/moltbot-auth-monitor.service"
if [ -n "$NTFY_TOPIC" ]; then
  sed -i "s|# Environment=NOTIFY_NTFY=.*|Environment=NOTIFY_NTFY=$NTFY_TOPIC|" "$SERVICE_FILE"
fi
if [ -n "$PHONE_NUMBER" ]; then
  sed -i "s|# Environment=NOTIFY_PHONE=.*|Environment=NOTIFY_PHONE=$PHONE_NUMBER|" "$SERVICE_FILE"
fi
# Install systemd units into the per-user systemd directory and start the timer.
echo ""
echo "Installing systemd timer..."
mkdir -p ~/.config/systemd/user
cp "$SCRIPT_DIR/systemd/moltbot-auth-monitor.service" ~/.config/systemd/user/
cp "$SCRIPT_DIR/systemd/moltbot-auth-monitor.timer" ~/.config/systemd/user/
systemctl --user daemon-reload
systemctl --user enable --now moltbot-auth-monitor.timer
echo "Auth monitor installed and running."
echo ""
# Step 4: Termux widget setup (manual instructions only; nothing executed here).
echo "Step 4: Termux widget setup (for phone)"
echo ""
echo "To set up quick auth from your phone:"
echo ""
echo "1. Install Termux and Termux:Widget from F-Droid"
echo "2. Create ~/.shortcuts/ directory in Termux:"
echo "   mkdir -p ~/.shortcuts"
echo ""
echo "3. Copy the widget scripts:"
echo "   scp $SCRIPT_DIR/termux-quick-auth.sh phone:~/.shortcuts/ClawdAuth"
echo "   scp $SCRIPT_DIR/termux-auth-widget.sh phone:~/.shortcuts/ClawdAuth-Full"
echo ""
echo "4. Make them executable on phone:"
echo "   ssh phone 'chmod +x ~/.shortcuts/Clawd*'"
echo ""
echo "5. Add Termux:Widget to your home screen"
echo "6. Tap the widget to see your auth scripts"
echo ""
echo "The quick widget (ClawdAuth) shows status and opens auth URL if needed."
echo "The full widget (ClawdAuth-Full) provides guided re-auth flow."
echo ""
# Summary
echo "=== Setup Complete ==="
echo ""
echo "What's configured:"
echo "  - Auth status:    $SCRIPT_DIR/claude-auth-status.sh"
echo "  - Mobile re-auth: $SCRIPT_DIR/mobile-reauth.sh"
echo "  - Auth monitor:   systemctl --user status moltbot-auth-monitor.timer"
echo ""
echo "Quick commands:"
echo "  Check auth:   $SCRIPT_DIR/claude-auth-status.sh"
echo "  Re-auth:      $SCRIPT_DIR/mobile-reauth.sh"
echo "  Test monitor: $SCRIPT_DIR/auth-monitor.sh"
echo ""

View File

@@ -0,0 +1,96 @@
import fs from "node:fs";
import path from "node:path";
import { spawnSync } from "node:child_process";
import { fileURLToPath } from "node:url";
// Repo-relative directory that holds the versioned git hooks.
const DEFAULT_HOOKS_PATH = "git-hooks";
// Hook file that must be executable for commit gating to work.
const PRE_COMMIT_HOOK = "pre-commit";
// Absolute path of the repository root (one level above this script's directory).
function getRepoRoot() {
  const scriptDir = path.dirname(fileURLToPath(import.meta.url));
  return path.resolve(scriptDir, "..");
}
// Thin wrapper over spawnSync("git", ...); output captured as utf-8 and
// stdio defaults to "pipe" unless overridden.
function runGitCommand(args, options = {}) {
  const { cwd, stdio = "pipe" } = options;
  return spawnSync("git", args, { cwd, encoding: "utf-8", stdio });
}
// Best-effort: give targetPath mode 0755 when it lacks the owner-execute bit.
// No-op on Windows and for missing paths; chmod failures only warn.
function ensureExecutable(targetPath) {
  if (process.platform === "win32" || !fs.existsSync(targetPath)) return;
  try {
    const ownerExec = fs.statSync(targetPath).mode & 0o100;
    if (!ownerExec) fs.chmodSync(targetPath, 0o755);
  } catch (err) {
    console.warn(`[setup-git-hooks] chmod failed: ${err}`);
  }
}
// True when a `git` binary responds to --version from repoRoot.
function isGitAvailable({ repoRoot = getRepoRoot(), runGit = runGitCommand } = {}) {
  const probe = runGit(["--version"], { cwd: repoRoot, stdio: "ignore" });
  return probe.status === 0;
}
// True when repoRoot sits inside a git work tree.
function isGitRepo({ repoRoot = getRepoRoot(), runGit = runGitCommand } = {}) {
  const probe = runGit(["rev-parse", "--is-inside-work-tree"], {
    cwd: repoRoot,
    stdio: "pipe",
  });
  return probe.status === 0 && String(probe.stdout ?? "").trim() === "true";
}
// Point git's core.hooksPath at the versioned hooks directory; true on success.
function setHooksPath({
  repoRoot = getRepoRoot(),
  hooksPath = DEFAULT_HOOKS_PATH,
  runGit = runGitCommand,
} = {}) {
  const outcome = runGit(["config", "core.hooksPath", hooksPath], {
    cwd: repoRoot,
    stdio: "ignore",
  });
  return outcome.status === 0;
}
// Configure the repo to use the versioned hooks directory and make the
// pre-commit hook executable. Returns { ok: true } on success, or
// { ok: false, reason } with reason in: git-missing | not-repo | config-failed.
function setupGitHooks({
  repoRoot = getRepoRoot(),
  hooksPath = DEFAULT_HOOKS_PATH,
  runGit = runGitCommand,
} = {}) {
  const deps = { repoRoot, runGit };
  if (!isGitAvailable(deps)) return { ok: false, reason: "git-missing" };
  if (!isGitRepo(deps)) return { ok: false, reason: "not-repo" };
  if (!setHooksPath({ repoRoot, hooksPath, runGit })) {
    return { ok: false, reason: "config-failed" };
  }
  ensureExecutable(path.join(repoRoot, hooksPath, PRE_COMMIT_HOOK));
  return { ok: true };
}
// Public surface: exported so other scripts/tests can reuse the helpers.
export {
  DEFAULT_HOOKS_PATH,
  PRE_COMMIT_HOOK,
  ensureExecutable,
  getRepoRoot,
  isGitAvailable,
  isGitRepo,
  runGitCommand,
  setHooksPath,
  setupGitHooks,
};
// Run setup only when executed directly (not when imported as a module).
if (process.argv[1] && path.resolve(process.argv[1]) === fileURLToPath(import.meta.url)) {
  setupGitHooks();
}

View File

@@ -0,0 +1,40 @@
// Smoke test for sqlite-vec: load the extension into an in-memory database,
// insert a few 4-d vectors, and run a cosine-distance query.
import { DatabaseSync } from "node:sqlite";
import { load, getLoadablePath } from "sqlite-vec";
// Pack a JS number array into the float32 blob format sqlite-vec expects.
function vec(values) {
  return Buffer.from(new Float32Array(values).buffer);
}
const db = new DatabaseSync(":memory:", { allowExtension: true });
try {
  load(db);
} catch (err) {
  // Report the failure plus the path the loader expected, then exit non-zero.
  const message = err instanceof Error ? err.message : String(err);
  console.error("sqlite-vec load failed:");
  console.error(message);
  console.error("expected extension path:", getLoadablePath());
  process.exit(1);
}
db.exec(`
CREATE VIRTUAL TABLE v USING vec0(
  id TEXT PRIMARY KEY,
  embedding FLOAT[4]
);
`);
const insert = db.prepare("INSERT INTO v (id, embedding) VALUES (?, ?)");
insert.run("a", vec([1, 0, 0, 0]));
insert.run("b", vec([0, 1, 0, 0]));
insert.run("c", vec([0.2, 0.2, 0, 0]));
// Query nearest neighbours of [1,0,0,0] by cosine distance ("a" is exact).
const query = vec([1, 0, 0, 0]);
const rows = db
  .prepare(
    "SELECT id, vec_distance_cosine(embedding, ?) AS dist FROM v ORDER BY dist ASC"
  )
  .all(query);
console.log("sqlite-vec ok");
console.log(rows);

View File

@@ -0,0 +1,107 @@
import { execFileSync } from "node:child_process";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
// Label metadata as returned by the GitHub labels API.
type RepoLabel = {
  name: string;
  color?: string;
};
// Hex colors assigned by label prefix (the part before ":").
const COLOR_BY_PREFIX = new Map<string, string>([
  ["channel", "1d76db"],
  ["app", "6f42c1"],
  ["extensions", "0e8a16"],
  ["docs", "0075ca"],
  ["cli", "f9d0c4"],
  ["gateway", "d4c5f9"],
]);
// Read label names from the labeler config and create any that are missing
// in the repository via the GitHub CLI.
const configPath = resolve(".github/labeler.yml");
const labelNames = extractLabelNames(readFileSync(configPath, "utf8"));
if (!labelNames.length) {
  throw new Error("labeler.yml must declare at least one label.");
}
const repo = resolveRepo();
const existing = fetchExistingLabels(repo);
const missing = labelNames.filter((label) => !existing.has(label));
if (!missing.length) {
  console.log("All labeler labels already exist.");
  process.exit(0);
}
for (const label of missing) {
  const color = pickColor(label);
  // POST /repos/{repo}/labels with the label name and chosen color.
  execFileSync(
    "gh",
    [
      "api",
      "-X",
      "POST",
      `repos/${repo}/labels`,
      "-f",
      `name=${label}`,
      "-f",
      `color=${color}`,
    ],
    { stdio: "inherit" },
  );
  console.log(`Created label: ${label}`);
}
/**
 * Parse the top-level mapping keys (label names) out of labeler.yml text.
 * Blank lines, comment lines, and indented continuation lines are skipped.
 * Supports both quoted ("name":) and bare (name:) keys.
 */
function extractLabelNames(contents: string): string[] {
  const names: string[] = [];
  for (const rawLine of contents.split("\n")) {
    const isBlankOrComment = !rawLine.trim() || rawLine.trimStart().startsWith("#");
    if (isBlankOrComment || /^\s/.test(rawLine)) continue;
    const quoted = rawLine.match(/^(["'])(.+)\1\s*:/);
    const match = quoted ?? rawLine.match(/^([^:]+):/);
    if (!match) continue;
    const name = (match[2] ?? match[1] ?? "").trim();
    if (name) names.push(name);
  }
  return names;
}
/** Color for a label, keyed by its prefix before ":"; gray fallback. */
function pickColor(label: string): string {
  const hasScope = label.includes(":");
  const prefix = (hasScope ? label.split(":", 1)[0] : label).trim();
  return COLOR_BY_PREFIX.get(prefix) ?? "ededed";
}
/**
 * Derive "owner/repo" from the git origin remote URL.
 * Handles SSH (git@github.com:) and HTTPS (https://github.com/) forms;
 * throws for a missing or non-GitHub remote.
 */
function resolveRepo(): string {
  const remote = execFileSync("git", ["config", "--get", "remote.origin.url"], {
    encoding: "utf8",
  }).trim();
  if (!remote) {
    throw new Error("Unable to determine repository from git remote.");
  }
  const githubPrefixes = ["git@github.com:", "https://github.com/"];
  for (const prefix of githubPrefixes) {
    if (remote.startsWith(prefix)) {
      return remote.slice(prefix.length).replace(/\.git$/, "");
    }
  }
  throw new Error(`Unsupported GitHub remote: ${remote}`);
}
/** Fetch all labels for a repo via `gh api` (paginated), keyed by label name. */
function fetchExistingLabels(repo: string): Map<string, RepoLabel> {
  const response = execFileSync(
    "gh",
    ["api", `repos/${repo}/labels?per_page=100`, "--paginate"],
    { encoding: "utf8" },
  );
  const parsed = JSON.parse(response) as RepoLabel[];
  return new Map(parsed.map((label) => [label.name, label] as const));
}

View File

@@ -0,0 +1,128 @@
import { readFile, writeFile } from "node:fs/promises";
import path from "node:path";
import { fileURLToPath } from "node:url";
import {
MOONSHOT_KIMI_K2_CONTEXT_WINDOW,
MOONSHOT_KIMI_K2_COST,
MOONSHOT_KIMI_K2_INPUT,
MOONSHOT_KIMI_K2_MAX_TOKENS,
MOONSHOT_KIMI_K2_MODELS,
} from "../ui/src/ui/data/moonshot-kimi-k2";
// Directory of this script and the repository root derived from it.
const here = path.dirname(fileURLToPath(import.meta.url));
const repoRoot = path.resolve(here, "..");
/**
 * Replace the span between startMarker and endMarker (marker lines included)
 * with `lines`, re-emitting the markers and content at the indentation that
 * preceded startMarker on its own line. Throws when a marker is missing.
 */
function replaceBlockLines(
  text: string,
  startMarker: string,
  endMarker: string,
  lines: string[],
): string {
  const startIndex = text.indexOf(startMarker);
  if (startIndex === -1) {
    throw new Error(`Missing start marker: ${startMarker}`);
  }
  const endIndex = text.indexOf(endMarker, startIndex);
  if (endIndex === -1) {
    throw new Error(`Missing end marker: ${endMarker}`);
  }
  const newlineBeforeStart = text.lastIndexOf("\n", startIndex);
  const blockStart = newlineBeforeStart === -1 ? 0 : newlineBeforeStart + 1;
  const indent = text.slice(blockStart, startIndex);
  const newlineAfterEnd = text.indexOf("\n", endIndex);
  const blockEnd = newlineAfterEnd === -1 ? text.length : newlineAfterEnd + 1;
  const body = [startMarker, ...lines, endMarker]
    .map((line) => `${indent}${line}`)
    .join("\n");
  const head = text.slice(0, blockStart);
  const tail = text.slice(blockEnd);
  return tail ? `${head}${body}\n${tail}` : `${head}${body}`;
}
/** Markdown bullet list of model ids, each optionally namespaced by `prefix`. */
function renderKimiK2Ids(prefix: string) {
  const bullets: string[] = [];
  for (const model of MOONSHOT_KIMI_K2_MODELS) {
    bullets.push(`- \`${prefix}${model.id}\``);
  }
  return bullets;
}
/** Alias map entry lines; every line but the last carries a trailing comma. */
function renderMoonshotAliases() {
  const lastIndex = MOONSHOT_KIMI_K2_MODELS.length - 1;
  return MOONSHOT_KIMI_K2_MODELS.map((model, index) => {
    const suffix = index === lastIndex ? "" : ",";
    return `"moonshot/${model.id}": { alias: "${model.alias}" }${suffix}`;
  });
}
/** Object-literal lines for each model entry in the docs code block. */
function renderMoonshotModels() {
  const input = JSON.stringify([...MOONSHOT_KIMI_K2_INPUT]);
  const cost = `input: ${MOONSHOT_KIMI_K2_COST.input}, output: ${MOONSHOT_KIMI_K2_COST.output}, cacheRead: ${MOONSHOT_KIMI_K2_COST.cacheRead}, cacheWrite: ${MOONSHOT_KIMI_K2_COST.cacheWrite}`;
  const lastIndex = MOONSHOT_KIMI_K2_MODELS.length - 1;
  return MOONSHOT_KIMI_K2_MODELS.flatMap((model, index) => [
    "{",
    `  id: "${model.id}",`,
    `  name: "${model.name}",`,
    `  reasoning: ${model.reasoning},`,
    `  input: ${input},`,
    `  cost: { ${cost} },`,
    `  contextWindow: ${MOONSHOT_KIMI_K2_CONTEXT_WINDOW},`,
    `  maxTokens: ${MOONSHOT_KIMI_K2_MAX_TOKENS}`,
    index === lastIndex ? "}" : "},",
  ]);
}
/**
 * Regenerate the marker-delimited Moonshot/Kimi-K2 sections in two docs
 * files from the model constants, then write both files back in place.
 * Throws (via replaceBlockLines) if any marker pair is missing.
 */
async function syncMoonshotDocs() {
  const moonshotDoc = path.join(repoRoot, "docs/providers/moonshot.md");
  const conceptsDoc = path.join(
    repoRoot,
    "docs/concepts/model-providers.md",
  );
  let moonshotText = await readFile(moonshotDoc, "utf8");
  // Bare model ids (HTML-comment markers live in markdown prose).
  moonshotText = replaceBlockLines(
    moonshotText,
    "<!-- moonshot-kimi-k2-ids:start -->",
    "<!-- moonshot-kimi-k2-ids:end -->",
    renderKimiK2Ids(""),
  );
  // Alias map and model list (// markers live inside fenced code samples).
  moonshotText = replaceBlockLines(
    moonshotText,
    "// moonshot-kimi-k2-aliases:start",
    "// moonshot-kimi-k2-aliases:end",
    renderMoonshotAliases(),
  );
  moonshotText = replaceBlockLines(
    moonshotText,
    "// moonshot-kimi-k2-models:start",
    "// moonshot-kimi-k2-models:end",
    renderMoonshotModels(),
  );
  let conceptsText = await readFile(conceptsDoc, "utf8");
  // Concepts doc references ids with the "moonshot/" provider prefix.
  conceptsText = replaceBlockLines(
    conceptsText,
    "<!-- moonshot-kimi-k2-model-refs:start -->",
    "<!-- moonshot-kimi-k2-model-refs:end -->",
    renderKimiK2Ids("moonshot/"),
  );
  await writeFile(moonshotDoc, moonshotText);
  await writeFile(conceptsDoc, conceptsText);
}
// Script entry point: report failures and set a non-zero exit code without
// aborting mid-write via an unhandled rejection.
syncMoonshotDocs().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});

View File

@@ -0,0 +1,71 @@
import { existsSync, readdirSync, readFileSync, writeFileSync } from "node:fs";
import { join, resolve } from "node:path";
// Minimal shape of the package.json fields this script reads/writes.
type PackageJson = {
  name?: string;
  version?: string;
};
// NOTE(review): `root` is never used below — confirm it can be removed.
const root = resolve(".");
// The root package.json's version is the target every extension is synced to.
const rootPackagePath = resolve("package.json");
const rootPackage = JSON.parse(readFileSync(rootPackagePath, "utf8")) as PackageJson;
const targetVersion = rootPackage.version;
if (!targetVersion) {
  throw new Error("Root package.json missing version.");
}
// One directory per extension under extensions/.
const extensionsDir = resolve("extensions");
const dirs = readdirSync(extensionsDir, { withFileTypes: true }).filter((entry) => entry.isDirectory());
// Accumulators for the summary line printed at the end.
const updated: string[] = [];
const changelogged: string[] = [];
const skipped: string[] = [];
/**
 * Insert a boilerplate "version alignment" entry for `version` into the
 * changelog at `changelogPath`, directly under the "# Changelog" header
 * (creating that header when absent).
 *
 * Returns true when the file was modified; false when the file does not
 * exist or already mentions `## <version>`.
 */
function ensureChangelogEntry(changelogPath: string, version: string): boolean {
  if (!existsSync(changelogPath)) return false;
  const current = readFileSync(changelogPath, "utf8");
  if (current.includes(`## ${version}`)) return false;
  const entry = `## ${version}\n\n### Changes\n- Version alignment with core Moltbot release numbers.\n\n`;
  const header = "# Changelog\n\n";
  if (current.startsWith(header)) {
    // Slot the entry right after the existing header.
    writeFileSync(changelogPath, current.replace(header, `${header}${entry}`));
    return true;
  }
  // No recognised header: prepend one, then the entry, then the old body.
  const rebuilt = `# Changelog\n\n${entry}${current.trimStart()}`;
  writeFileSync(changelogPath, `${rebuilt}\n`);
  return true;
}
// Walk each extension directory: align its package.json version with the
// root version and make sure its changelog has an entry for that version.
for (const dir of dirs) {
  const packagePath = join(extensionsDir, dir.name, "package.json");
  let pkg: PackageJson;
  try {
    pkg = JSON.parse(readFileSync(packagePath, "utf8")) as PackageJson;
  } catch {
    // Missing or malformed package.json: not a publishable extension, skip silently.
    continue;
  }
  if (!pkg.name) {
    skipped.push(dir.name);
    continue;
  }
  // Changelog is stamped even when the version is already in sync.
  const changelogPath = join(extensionsDir, dir.name, "CHANGELOG.md");
  if (ensureChangelogEntry(changelogPath, targetVersion)) {
    changelogged.push(pkg.name);
  }
  if (pkg.version === targetVersion) {
    skipped.push(pkg.name);
    continue;
  }
  pkg.version = targetVersion;
  // Two-space indent + trailing newline matches conventional npm formatting.
  writeFileSync(packagePath, `${JSON.stringify(pkg, null, 2)}\n`);
  updated.push(pkg.name);
}
console.log(
  `Synced plugin versions to ${targetVersion}. Updated: ${updated.length}. Changelogged: ${changelogged.length}. Skipped: ${skipped.length}.`
);

View File

@@ -0,0 +1,14 @@
# Oneshot service that runs a single auth-expiry check per activation.
# Intended to be driven by the matching .timer unit (WantedBy=default.target
# installs it in the per-user session).
[Unit]
Description=Moltbot Auth Expiry Monitor
After=network.target
[Service]
Type=oneshot
ExecStart=/home/admin/moltbot/scripts/auth-monitor.sh
# Configure notification channels via environment
Environment=WARN_HOURS=2
# Environment=NOTIFY_PHONE=+1234567890
# Environment=NOTIFY_NTFY=moltbot-alerts
[Install]
WantedBy=default.target

View File

@@ -0,0 +1,10 @@
# Timer driving the auth-monitor service: first run 5 minutes after boot,
# then every 30 minutes. Persistent=true fires a missed run immediately
# after downtime instead of waiting for the next interval.
[Unit]
Description=Check Moltbot auth expiry every 30 minutes
[Timer]
OnBootSec=5min
OnUnitActiveSec=30min
Persistent=true
[Install]
WantedBy=timers.target

View File

@@ -0,0 +1,81 @@
#!/data/data/com.termux/files/usr/bin/bash
# Moltbot Auth Widget for Termux
# Place in ~/.shortcuts/ for Termux:Widget
#
# This widget checks auth status and helps with re-auth if needed.
# It's designed for quick one-tap checking from phone home screen.

# Server hostname (via Tailscale or SSH config)
SERVER="${CLAWDBOT_SERVER:-l36}"

# Check auth status
termux-toast "Checking Moltbot auth..."
STATUS=$(ssh "$SERVER" '$HOME/moltbot/scripts/claude-auth-status.sh simple' 2>&1)
# NOTE(review): EXIT_CODE is captured but never used below — confirm whether
# the fall-through *) branch should inspect it instead of relying on $STATUS.
EXIT_CODE=$?

case "$STATUS" in
  OK)
    # Get remaining time
    DETAILS=$(ssh "$SERVER" '$HOME/moltbot/scripts/claude-auth-status.sh json' 2>&1)
    # Extract the "NNh" hours token from the status string; fall back to "?".
    # NOTE(review): grep -oP needs GNU grep with PCRE — confirm it exists on Termux.
    HOURS=$(echo "$DETAILS" | jq -r '.claude_code.status' | grep -oP '\d+(?=h)' || echo "?")
    termux-vibrate -d 50
    termux-toast "Auth OK (${HOURS}h left)"
    ;;
  CLAUDE_EXPIRING|CLAWDBOT_EXPIRING)
    termux-vibrate -d 100
    # Ask if user wants to re-auth now
    CHOICE=$(termux-dialog radio -t "Auth Expiring Soon" -v "Re-auth now,Check later,Dismiss")
    # termux-dialog returns JSON; .text holds the selected radio label.
    SELECTED=$(echo "$CHOICE" | jq -r '.text // "Dismiss"')
    case "$SELECTED" in
      "Re-auth now")
        termux-toast "Opening auth page..."
        termux-open-url "https://console.anthropic.com/settings/api-keys"
        # Show instructions
        # (The embedded newlines below are part of the dialog text — do not indent.)
        termux-dialog confirm -t "Re-auth Instructions" -i "1. Create/copy API key from browser
2. Return here and tap OK
3. SSH to server and paste key"
        # Open terminal to server
        am start -n com.termux/com.termux.app.TermuxActivity -a android.intent.action.MAIN
        termux-toast "Run: ssh $SERVER '$HOME/moltbot/scripts/mobile-reauth.sh'"
        ;;
      *)
        termux-toast "Reminder: Auth expires soon"
        ;;
    esac
    ;;
  CLAUDE_EXPIRED|CLAWDBOT_EXPIRED)
    # Longer vibration to signal the urgent state.
    termux-vibrate -d 300
    CHOICE=$(termux-dialog radio -t "Auth Expired!" -v "Re-auth now,Dismiss")
    SELECTED=$(echo "$CHOICE" | jq -r '.text // "Dismiss"')
    case "$SELECTED" in
      "Re-auth now")
        termux-toast "Opening auth page..."
        termux-open-url "https://console.anthropic.com/settings/api-keys"
        termux-dialog confirm -t "Re-auth Steps" -i "1. Create/copy API key from browser
2. Return here and tap OK to SSH"
        am start -n com.termux/com.termux.app.TermuxActivity -a android.intent.action.MAIN
        termux-toast "Run: ssh $SERVER '$HOME/moltbot/scripts/mobile-reauth.sh'"
        ;;
      *)
        termux-toast "Warning: Moltbot won't work until re-auth"
        ;;
    esac
    ;;
  *)
    # Unknown output — most likely an ssh failure; surface it raw.
    termux-vibrate -d 200
    termux-toast "Error: $STATUS"
    ;;
esac

View File

@@ -0,0 +1,30 @@
#!/data/data/com.termux/files/usr/bin/bash
# Quick Auth Check - Minimal widget for Termux
# Place in ~/.shortcuts/ for Termux:Widget
#
# One-tap: shows status toast
# If expired: directly opens auth URL

SERVER="${CLAWDBOT_SERVER:-l36}"

# Short connect timeout so a dead server fails fast instead of hanging the widget.
STATUS=$(ssh -o ConnectTimeout=5 "$SERVER" '$HOME/moltbot/scripts/claude-auth-status.sh simple' 2>&1)

# Glob patterns match both the CLAUDE_* and CLAWDBOT_* status prefixes.
case "$STATUS" in
  OK)
    termux-toast -s "Auth OK"
    ;;
  *EXPIRING*)
    termux-vibrate -d 100
    termux-toast "Auth expiring soon - tap again if needed"
    ;;
  *EXPIRED*|*MISSING*)
    termux-vibrate -d 200
    termux-toast "Auth expired - opening console..."
    termux-open-url "https://console.anthropic.com/settings/api-keys"
    # Brief pause so the toast is visible before the notification lands.
    sleep 2
    termux-notification -t "Moltbot Re-Auth" -c "After getting key, run: ssh $SERVER '~/moltbot/scripts/mobile-reauth.sh'" --id clawd-auth
    ;;
  *)
    # Anything else is ssh/network noise rather than a status word.
    termux-toast "Connection error"
    ;;
esac

View File

@@ -0,0 +1,24 @@
#!/data/data/com.termux/files/usr/bin/bash
# Moltbot OAuth Sync Widget
# Syncs Claude Code tokens to Moltbot on l36 server
# Place in ~/.shortcuts/ on phone for Termux:Widget
# NOTE(review): server name "l36" is hard-coded here, while the sibling
# widgets honour CLAWDBOT_SERVER — confirm whether this should too.

termux-toast "Syncing Moltbot auth..."

# Run sync on l36 server
RESULT=$(ssh l36 '/home/admin/moltbot/scripts/sync-claude-code-auth.sh' 2>&1)
EXIT_CODE=$?

if [ $EXIT_CODE -eq 0 ]; then
  # Extract expiry time from output
  # (cut -f2- keeps everything after the first colon, so times like 12:30 survive.)
  EXPIRY=$(echo "$RESULT" | grep "Token expires:" | cut -d: -f2-)
  termux-vibrate -d 100
  termux-toast "Moltbot synced! Expires:${EXPIRY}"
  # Optional: restart moltbot service
  ssh l36 'systemctl --user restart moltbot' 2>/dev/null
else
  termux-vibrate -d 300
  termux-toast "Sync failed: ${RESULT}"
fi

View File

@@ -0,0 +1,14 @@
#!/usr/bin/env bash
# Build and run the cleanup smoke-test image. Exits non-zero if either the
# docker build or the containerised test fails (set -e).
set -euo pipefail

# Repo root = parent of this script's directory.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
IMAGE_NAME="${CLAWDBOT_CLEANUP_SMOKE_IMAGE:-moltbot-cleanup-smoke:local}"

echo "==> Build image: $IMAGE_NAME"
docker build \
  -t "$IMAGE_NAME" \
  -f "$ROOT_DIR/scripts/docker/cleanup-smoke/Dockerfile" \
  "$ROOT_DIR"

echo "==> Run cleanup smoke test"
docker run --rm -t "$IMAGE_NAME"

View File

@@ -0,0 +1,62 @@
#!/usr/bin/env -S node --import tsx
import os from "node:os";
import path from "node:path";
import { spawnSync } from "node:child_process";
import { forceFreePort, type PortProcess } from "../src/cli/ports.js";
const DEFAULT_PORT = 18789;
/**
 * Terminate any process listening on `port` via forceFreePort, logging what
 * was killed. Failures are reported but swallowed (returns [] so callers can
 * proceed to run the tests anyway).
 */
function killGatewayListeners(port: number): PortProcess[] {
  try {
    const terminated = forceFreePort(port);
    if (terminated.length === 0) {
      console.log(`port ${port} already free`);
      return terminated;
    }
    const summary = terminated
      .map((p) => `${p.command} (pid ${p.pid})`)
      .join(", ");
    console.log(`freed port ${port}; terminated: ${summary}`);
    return terminated;
  } catch (err) {
    console.error(`failed to free port ${port}: ${String(err)}`);
    return [];
  }
}
/**
 * Run the vitest suite via pnpm and exit this process with its status.
 * A unique per-run gateway lock file is used unless the caller pinned one
 * via CLAWDBOT_GATEWAY_LOCK, so parallel invocations cannot collide.
 */
function runTests() {
  const fallbackLock = path.join(
    os.tmpdir(),
    `moltbot-gateway.lock.test.${Date.now()}`,
  );
  const lockPath = process.env.CLAWDBOT_GATEWAY_LOCK ?? fallbackLock;
  const outcome = spawnSync("pnpm", ["vitest", "run"], {
    stdio: "inherit",
    env: {
      ...process.env,
      CLAWDBOT_GATEWAY_LOCK: lockPath,
    },
  });
  if (outcome.error) {
    // spawn itself failed (e.g. pnpm missing) — distinct from test failures.
    console.error(`pnpm test failed to start: ${String(outcome.error)}`);
    process.exit(1);
  }
  process.exit(outcome.status ?? 1);
}
/**
 * Entry point: free the gateway port (env override or default), then hand
 * off to the test runner, which exits the process.
 */
function main() {
  const rawPort = process.env.CLAWDBOT_GATEWAY_PORT ?? `${DEFAULT_PORT}`;
  const port = Number.parseInt(rawPort, 10);
  console.log(`🧹 test:force - clearing gateway on port ${port}`);
  if (killGatewayListeners(port).length === 0) {
    console.log("no listeners to kill");
  }
  console.log("running pnpm test…");
  runTests();
}
main();

View File

@@ -0,0 +1,70 @@
#!/usr/bin/env bash
# Installer smoke tests: exercises the hosted install.sh as root, as a
# non-root user, and finally the CLI installer — each inside docker.
set -euo pipefail

ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
SMOKE_IMAGE="${CLAWDBOT_INSTALL_SMOKE_IMAGE:-moltbot-install-smoke:local}"
NONROOT_IMAGE="${CLAWDBOT_INSTALL_NONROOT_IMAGE:-moltbot-install-nonroot:local}"
INSTALL_URL="${CLAWDBOT_INSTALL_URL:-https://molt.bot/install.sh}"
CLI_INSTALL_URL="${CLAWDBOT_INSTALL_CLI_URL:-https://molt.bot/install-cli.sh}"
SKIP_NONROOT="${CLAWDBOT_INSTALL_SMOKE_SKIP_NONROOT:-0}"

# Temp dir shared with the first container so it can report the version it
# installed; the non-root run then asserts it gets the same version.
LATEST_DIR="$(mktemp -d)"
LATEST_FILE="${LATEST_DIR}/latest"

echo "==> Build smoke image (upgrade, root): $SMOKE_IMAGE"
docker build \
  -t "$SMOKE_IMAGE" \
  -f "$ROOT_DIR/scripts/docker/install-sh-smoke/Dockerfile" \
  "$ROOT_DIR/scripts/docker/install-sh-smoke"

echo "==> Run installer smoke test (root): $INSTALL_URL"
docker run --rm -t \
  -v "${LATEST_DIR}:/out" \
  -e CLAWDBOT_INSTALL_URL="$INSTALL_URL" \
  -e CLAWDBOT_INSTALL_LATEST_OUT="/out/latest" \
  -e CLAWDBOT_INSTALL_SMOKE_PREVIOUS="${CLAWDBOT_INSTALL_SMOKE_PREVIOUS:-}" \
  -e CLAWDBOT_INSTALL_SMOKE_SKIP_PREVIOUS="${CLAWDBOT_INSTALL_SMOKE_SKIP_PREVIOUS:-0}" \
  -e CLAWDBOT_NO_ONBOARD=1 \
  -e DEBIAN_FRONTEND=noninteractive \
  "$SMOKE_IMAGE"

# Version the root run installed (empty if the container did not write it).
LATEST_VERSION=""
if [[ -f "$LATEST_FILE" ]]; then
  LATEST_VERSION="$(cat "$LATEST_FILE")"
fi

if [[ "$SKIP_NONROOT" == "1" ]]; then
  echo "==> Skip non-root installer smoke (CLAWDBOT_INSTALL_SMOKE_SKIP_NONROOT=1)"
else
  echo "==> Build non-root image: $NONROOT_IMAGE"
  docker build \
    -t "$NONROOT_IMAGE" \
    -f "$ROOT_DIR/scripts/docker/install-sh-nonroot/Dockerfile" \
    "$ROOT_DIR/scripts/docker/install-sh-nonroot"
  echo "==> Run installer non-root test: $INSTALL_URL"
  docker run --rm -t \
    -e CLAWDBOT_INSTALL_URL="$INSTALL_URL" \
    -e CLAWDBOT_INSTALL_EXPECT_VERSION="$LATEST_VERSION" \
    -e CLAWDBOT_NO_ONBOARD=1 \
    -e DEBIAN_FRONTEND=noninteractive \
    "$NONROOT_IMAGE"
fi

# The CLI installer test reuses the non-root image, so it is skipped both on
# explicit request and whenever the non-root stage itself was skipped.
if [[ "${CLAWDBOT_INSTALL_SMOKE_SKIP_CLI:-0}" == "1" ]]; then
  echo "==> Skip CLI installer smoke (CLAWDBOT_INSTALL_SMOKE_SKIP_CLI=1)"
  exit 0
fi
if [[ "$SKIP_NONROOT" == "1" ]]; then
  echo "==> Skip CLI installer smoke (non-root image skipped)"
  exit 0
fi

echo "==> Run CLI installer non-root test (same image)"
docker run --rm -t \
  --entrypoint /bin/bash \
  -e CLAWDBOT_INSTALL_URL="$INSTALL_URL" \
  -e CLAWDBOT_INSTALL_CLI_URL="$CLI_INSTALL_URL" \
  -e CLAWDBOT_NO_ONBOARD=1 \
  -e DEBIAN_FRONTEND=noninteractive \
  "$NONROOT_IMAGE" -lc "curl -fsSL \"$CLI_INSTALL_URL\" | bash -s -- --set-npm-prefix --no-onboard"

View File

@@ -0,0 +1,29 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
IMAGE_NAME="${CLAWDBOT_INSTALL_E2E_IMAGE:-moltbot-install-e2e:local}"
INSTALL_URL="${CLAWDBOT_INSTALL_URL:-https://molt.bot/install.sh}"
OPENAI_API_KEY="${OPENAI_API_KEY:-}"
ANTHROPIC_API_KEY="${ANTHROPIC_API_KEY:-}"
ANTHROPIC_API_TOKEN="${ANTHROPIC_API_TOKEN:-}"
CLAWDBOT_E2E_MODELS="${CLAWDBOT_E2E_MODELS:-}"
echo "==> Build image: $IMAGE_NAME"
docker build \
-t "$IMAGE_NAME" \
-f "$ROOT_DIR/scripts/docker/install-sh-e2e/Dockerfile" \
"$ROOT_DIR/scripts/docker/install-sh-e2e"
echo "==> Run E2E installer test"
docker run --rm \
-e CLAWDBOT_INSTALL_URL="$INSTALL_URL" \
-e CLAWDBOT_INSTALL_TAG="${CLAWDBOT_INSTALL_TAG:-latest}" \
-e CLAWDBOT_E2E_MODELS="$CLAWDBOT_E2E_MODELS" \
-e CLAWDBOT_INSTALL_E2E_PREVIOUS="${CLAWDBOT_INSTALL_E2E_PREVIOUS:-}" \
-e CLAWDBOT_INSTALL_E2E_SKIP_PREVIOUS="${CLAWDBOT_INSTALL_E2E_SKIP_PREVIOUS:-0}" \
-e OPENAI_API_KEY="$OPENAI_API_KEY" \
-e ANTHROPIC_API_KEY="$ANTHROPIC_API_KEY" \
-e ANTHROPIC_API_TOKEN="$ANTHROPIC_API_TOKEN" \
"$IMAGE_NAME"

View File

@@ -0,0 +1,32 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
IMAGE_NAME="${CLAWDBOT_IMAGE:-moltbot:local}"
CONFIG_DIR="${CLAWDBOT_CONFIG_DIR:-$HOME/.clawdbot}"
WORKSPACE_DIR="${CLAWDBOT_WORKSPACE_DIR:-$HOME/clawd}"
PROFILE_FILE="${CLAWDBOT_PROFILE_FILE:-$HOME/.profile}"
PROFILE_MOUNT=()
if [[ -f "$PROFILE_FILE" ]]; then
PROFILE_MOUNT=(-v "$PROFILE_FILE":/home/node/.profile:ro)
fi
echo "==> Build image: $IMAGE_NAME"
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/Dockerfile" "$ROOT_DIR"
echo "==> Run gateway live model tests (profile keys)"
docker run --rm -t \
--entrypoint bash \
-e COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
-e HOME=/home/node \
-e NODE_OPTIONS=--disable-warning=ExperimentalWarning \
-e CLAWDBOT_LIVE_TEST=1 \
-e CLAWDBOT_LIVE_GATEWAY_MODELS="${CLAWDBOT_LIVE_GATEWAY_MODELS:-all}" \
-e CLAWDBOT_LIVE_GATEWAY_PROVIDERS="${CLAWDBOT_LIVE_GATEWAY_PROVIDERS:-}" \
-e CLAWDBOT_LIVE_GATEWAY_MODEL_TIMEOUT_MS="${CLAWDBOT_LIVE_GATEWAY_MODEL_TIMEOUT_MS:-}" \
-v "$CONFIG_DIR":/home/node/.clawdbot \
-v "$WORKSPACE_DIR":/home/node/clawd \
"${PROFILE_MOUNT[@]}" \
"$IMAGE_NAME" \
-lc "set -euo pipefail; [ -f \"$HOME/.profile\" ] && source \"$HOME/.profile\" || true; cd /app && pnpm test:live"

View File

@@ -0,0 +1,33 @@
#!/usr/bin/env bash
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
IMAGE_NAME="${CLAWDBOT_IMAGE:-moltbot:local}"
CONFIG_DIR="${CLAWDBOT_CONFIG_DIR:-$HOME/.clawdbot}"
WORKSPACE_DIR="${CLAWDBOT_WORKSPACE_DIR:-$HOME/clawd}"
PROFILE_FILE="${CLAWDBOT_PROFILE_FILE:-$HOME/.profile}"
PROFILE_MOUNT=()
if [[ -f "$PROFILE_FILE" ]]; then
PROFILE_MOUNT=(-v "$PROFILE_FILE":/home/node/.profile:ro)
fi
echo "==> Build image: $IMAGE_NAME"
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/Dockerfile" "$ROOT_DIR"
echo "==> Run live model tests (profile keys)"
docker run --rm -t \
--entrypoint bash \
-e COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
-e HOME=/home/node \
-e NODE_OPTIONS=--disable-warning=ExperimentalWarning \
-e CLAWDBOT_LIVE_TEST=1 \
-e CLAWDBOT_LIVE_MODELS="${CLAWDBOT_LIVE_MODELS:-all}" \
-e CLAWDBOT_LIVE_PROVIDERS="${CLAWDBOT_LIVE_PROVIDERS:-}" \
-e CLAWDBOT_LIVE_MODEL_TIMEOUT_MS="${CLAWDBOT_LIVE_MODEL_TIMEOUT_MS:-}" \
-e CLAWDBOT_LIVE_REQUIRE_PROFILE_KEYS="${CLAWDBOT_LIVE_REQUIRE_PROFILE_KEYS:-}" \
-v "$CONFIG_DIR":/home/node/.clawdbot \
-v "$WORKSPACE_DIR":/home/node/clawd \
"${PROFILE_MOUNT[@]}" \
"$IMAGE_NAME" \
-lc "set -euo pipefail; [ -f \"$HOME/.profile\" ] && source \"$HOME/.profile\" || true; cd /app && pnpm test:live"

View File

@@ -0,0 +1,102 @@
import { spawn } from "node:child_process";
import os from "node:os";
// Orchestrates the three vitest groups (unit / extensions / gateway):
// unit+extensions run in parallel, gateway runs serially afterwards — except
// on Windows CI where everything runs serially and sharded.
const pnpm = process.platform === "win32" ? "pnpm.cmd" : "pnpm";
const runs = [
  {
    name: "unit",
    args: ["vitest", "run", "--config", "vitest.unit.config.ts"],
  },
  {
    name: "extensions",
    args: ["vitest", "run", "--config", "vitest.extensions.config.ts"],
  },
  {
    name: "gateway",
    args: ["vitest", "run", "--config", "vitest.gateway.config.ts"],
  },
];
// Live child processes, tracked so SIGINT/SIGTERM can be forwarded.
const children = new Set();
const isCI = process.env.CI === "true" || process.env.GITHUB_ACTIONS === "true";
const isMacOS = process.platform === "darwin" || process.env.RUNNER_OS === "macOS";
const isWindows = process.platform === "win32" || process.env.RUNNER_OS === "Windows";
const isWindowsCi = isCI && isWindows;
// Windows CI shards each group (default 2 shards; CLAWDBOT_TEST_SHARDS can raise it).
const shardOverride = Number.parseInt(process.env.CLAWDBOT_TEST_SHARDS ?? "", 10);
const shardCount = isWindowsCi ? (Number.isFinite(shardOverride) && shardOverride > 1 ? shardOverride : 2) : 1;
const windowsCiArgs = isWindowsCi ? ["--no-file-parallelism", "--dangerouslyIgnoreUnhandledErrors"] : [];
// Explicit worker override wins over all heuristics below.
const overrideWorkers = Number.parseInt(process.env.CLAWDBOT_TEST_WORKERS ?? "", 10);
const resolvedOverride = Number.isFinite(overrideWorkers) && overrideWorkers > 0 ? overrideWorkers : null;
const parallelRuns = isWindowsCi ? [] : runs.filter((entry) => entry.name !== "gateway");
const serialRuns = isWindowsCi ? runs : runs.filter((entry) => entry.name === "gateway");
// Split the local CPU budget (clamped to 4..16) across the parallel groups.
const localWorkers = Math.max(4, Math.min(16, os.cpus().length));
const parallelCount = Math.max(1, parallelRuns.length);
const perRunWorkers = Math.max(1, Math.floor(localWorkers / parallelCount));
const macCiWorkers = isCI && isMacOS ? 1 : perRunWorkers;
// Keep worker counts predictable for local runs; trim macOS CI workers to avoid worker crashes/OOM.
// In CI on linux/windows, prefer Vitest defaults to avoid cross-test interference from lower worker counts.
const maxWorkers = resolvedOverride ?? (isCI && !isMacOS ? null : macCiWorkers);
const WARNING_SUPPRESSION_FLAGS = [
  "--disable-warning=ExperimentalWarning",
  "--disable-warning=DEP0040",
  "--disable-warning=DEP0060",
];
// Spawn one vitest group once; resolves with its exit code (never rejects).
const runOnce = (entry, extraArgs = []) =>
  new Promise((resolve) => {
    const args = maxWorkers
      ? [...entry.args, "--maxWorkers", String(maxWorkers), ...windowsCiArgs, ...extraArgs]
      : [...entry.args, ...windowsCiArgs, ...extraArgs];
    // Append the warning-suppression flags to NODE_OPTIONS without duplicating
    // any flag the caller already set.
    const nodeOptions = process.env.NODE_OPTIONS ?? "";
    const nextNodeOptions = WARNING_SUPPRESSION_FLAGS.reduce(
      (acc, flag) => (acc.includes(flag) ? acc : `${acc} ${flag}`.trim()),
      nodeOptions,
    );
    const child = spawn(pnpm, args, {
      stdio: "inherit",
      env: { ...process.env, VITEST_GROUP: entry.name, NODE_OPTIONS: nextNodeOptions },
      shell: process.platform === "win32",
    });
    children.add(child);
    child.on("exit", (code, signal) => {
      children.delete(child);
      // Signal-killed children count as failures (exit code 1).
      resolve(code ?? (signal ? 1 : 0));
    });
  });
// Run a group, sharded when shardCount > 1; stops at the first failing shard.
const run = async (entry) => {
  if (shardCount <= 1) return runOnce(entry);
  for (let shardIndex = 1; shardIndex <= shardCount; shardIndex += 1) {
    // eslint-disable-next-line no-await-in-loop
    const code = await runOnce(entry, ["--shard", `${shardIndex}/${shardCount}`]);
    if (code !== 0) return code;
  }
  return 0;
};
// Forward termination signals to every live child.
const shutdown = (signal) => {
  for (const child of children) {
    child.kill(signal);
  }
};
process.on("SIGINT", () => shutdown("SIGINT"));
process.on("SIGTERM", () => shutdown("SIGTERM"));
// Phase 1: parallel groups; exit with the first non-zero code.
const parallelCodes = await Promise.all(parallelRuns.map(run));
const failedParallel = parallelCodes.find((code) => code !== 0);
if (failedParallel !== undefined) {
  process.exit(failedParallel);
}
// Phase 2: serial groups (gateway, or everything on Windows CI).
for (const entry of serialRuns) {
  // eslint-disable-next-line no-await-in-loop
  const code = await run(entry);
  if (code !== 0) {
    process.exit(code);
  }
}
process.exit(0);

View File

@@ -0,0 +1,133 @@
#!/usr/bin/env node
import { spawn, spawnSync } from "node:child_process";
import fs from "node:fs";
import { createRequire } from "node:module";
import path from "node:path";
import { fileURLToPath } from "node:url";
// Resolve the repo-relative ui/ directory from this script's own location so
// the wrapper works from any CWD.
const here = path.dirname(fileURLToPath(import.meta.url));
const repoRoot = path.resolve(here, "..");
const uiDir = path.join(repoRoot, "ui");
/** Print the one-line usage banner to stderr. */
function usage() {
  // keep this tiny; it's invoked from npm scripts too
  const banner = "Usage: node scripts/ui.js <install|dev|build|test> [...args]\n";
  process.stderr.write(banner);
}
/**
 * Locate `cmd` on the search path, returning the first existing candidate
 * path or null. On Windows each PATHEXT extension is probed; elsewhere the
 * bare command name is checked in each PATH entry. All filesystem/lookup
 * errors are swallowed and treated as "not found".
 */
function which(cmd) {
  try {
    const pathKey = process.platform === "win32" ? "Path" : "PATH";
    const searchDirs = (process.env[pathKey] ?? process.env.PATH ?? "")
      .split(path.delimiter)
      .filter(Boolean);
    const suffixes =
      process.platform === "win32"
        ? (process.env.PATHEXT ?? ".EXE;.CMD;.BAT;.COM").split(";").filter(Boolean)
        : [""];
    for (const dir of searchDirs) {
      for (const suffix of suffixes) {
        const fileName = process.platform === "win32" ? `${cmd}${suffix}` : cmd;
        const candidate = path.join(dir, fileName);
        try {
          if (fs.existsSync(candidate)) return candidate;
        } catch {
          // ignore per-candidate probe failures
        }
      }
    }
  } catch {
    // ignore — fall through to "not found"
  }
  return null;
}
/** Find a package-manager runner for the ui workspace (pnpm only), or null. */
function resolveRunner() {
  const pnpmPath = which("pnpm");
  return pnpmPath ? { cmd: pnpmPath, kind: "pnpm" } : null;
}
/**
 * Spawn `cmd args` in the ui/ directory with inherited stdio and mirror its
 * exit status onto this process (signal deaths map to exit code 1).
 */
function run(cmd, args) {
  const child = spawn(cmd, args, {
    cwd: uiDir,
    stdio: "inherit",
    env: process.env,
    shell: process.platform === "win32",
  });
  child.on("exit", (code, signal) => {
    process.exit(signal ? 1 : (code ?? 1));
  });
}
/**
 * Synchronously run `cmd args` in ui/ and exit this process on any failure
 * (signal death or non-zero status). Returns normally only on success.
 */
function runSync(cmd, args, envOverride) {
  const result = spawnSync(cmd, args, {
    cwd: uiDir,
    stdio: "inherit",
    env: envOverride ?? process.env,
    shell: process.platform === "win32",
  });
  if (result.signal) process.exit(1);
  const status = result.status ?? 1;
  if (status !== 0) process.exit(status);
}
/**
 * Probe whether the ui workspace's dependencies resolve from ui/. The
 * "test" kind additionally requires the vitest/playwright stack. Any
 * resolution failure means "not installed".
 */
function depsInstalled(kind) {
  try {
    const probe = createRequire(path.join(uiDir, "package.json"));
    const needed = ["vite", "dompurify"];
    if (kind === "test") {
      needed.push("vitest", "@vitest/browser-playwright", "playwright");
    }
    for (const dep of needed) {
      probe.resolve(dep);
    }
    return true;
  } catch {
    return false;
  }
}
// CLI dispatch: map the first argument to an action, make sure pnpm and the
// workspace deps are available, then delegate to pnpm in ui/.
const [, , action, ...rest] = process.argv;
if (!action) {
  usage();
  process.exit(2);
}
const runner = resolveRunner();
if (!runner) {
  process.stderr.write("Missing UI runner: install pnpm, then retry.\n");
  process.exit(1);
}
// Allow-list the action → pnpm script name; "install" is handled specially.
const script =
  action === "install"
    ? null
    : action === "dev"
      ? "dev"
      : action === "build"
        ? "build"
        : action === "test"
          ? "test"
          : null;
if (action !== "install" && !script) {
  usage();
  process.exit(2);
}
if (action === "install") run(runner.cmd, ["install", ...rest]);
else {
  // Auto-install missing deps first: builds get a production-only install,
  // tests need devDependencies too.
  if (!depsInstalled(action === "test" ? "test" : "build")) {
    const installEnv =
      action === "build"
        ? { ...process.env, NODE_ENV: "production" }
        : process.env;
    const installArgs =
      action === "build" ? ["install", "--prod"] : ["install"];
    runSync(runner.cmd, installArgs, installEnv);
  }
  run(runner.cmd, ["run", script, ...rest]);
}

View File

@@ -0,0 +1,473 @@
import { execSync } from "node:child_process";
import { readFileSync, writeFileSync } from "node:fs";
import { resolve } from "node:path";
import type { ApiContributor, Entry, MapConfig, User } from "./update-clawtributors.types.js";
// Configuration: name/email→login overrides and display names come from the
// checked-in map file; contributor data comes from the GitHub API via `gh`.
const REPO = "moltbot/moltbot";
// Avatars rendered per README row.
const PER_LINE = 10;
const mapPath = resolve("scripts/clawtributors-map.json");
const mapConfig = JSON.parse(readFileSync(mapPath, "utf8")) as MapConfig;
const displayName = mapConfig.displayName ?? {};
const nameToLogin = normalizeMap(mapConfig.nameToLogin ?? {});
const emailToLogin = normalizeMap(mapConfig.emailToLogin ?? {});
const ensureLogins = (mapConfig.ensureLogins ?? []).map((login) => login.toLowerCase());
const readmePath = resolve("README.md");
const placeholderAvatar = mapConfig.placeholderAvatar ?? "assets/avatar-placeholder.svg");
const seedCommit = mapConfig.seedCommit ?? null;
// Optional seed: entries parsed out of the README as of a past commit.
const seedEntries = seedCommit ? parseReadmeEntries(run(`git show ${seedCommit}:README.md`)) : [];
const raw = run(`gh api "repos/${REPO}/contributors?per_page=100&anon=1" --paginate`);
const contributors = parsePaginatedJson(raw) as ApiContributor[];
// Index API contributors by lowercased login; anonymous entries (no login)
// are handled later. Logins listed in ensureLogins are fetched individually
// if the paginated listing missed them.
const apiByLogin = new Map<string, User>();
const contributionsByLogin = new Map<string, number>();
for (const item of contributors) {
  if (!item?.login || !item?.html_url || !item?.avatar_url) {
    continue;
  }
  if (typeof item.contributions === "number") {
    contributionsByLogin.set(item.login.toLowerCase(), item.contributions);
  }
  apiByLogin.set(item.login.toLowerCase(), {
    login: item.login,
    html_url: item.html_url,
    avatar_url: normalizeAvatar(item.avatar_url),
  });
}
for (const login of ensureLogins) {
  if (!apiByLogin.has(login)) {
    const user = fetchUser(login);
    if (user) {
      apiByLogin.set(user.login.toLowerCase(), user);
    }
  }
}
// Count changed lines (adds + deletes) per resolved login by parsing
// `git log --numstat`. The custom --format emits "name|email" author lines
// (%x7c is the pipe), followed by tab-separated numstat rows.
const log = run("git log --format=%aN%x7c%aE --numstat");
const linesByLogin = new Map<string, number>();
let currentName: string | null = null;
let currentEmail: string | null = null;
for (const line of log.split("\n")) {
  if (!line.trim()) {
    continue;
  }
  // Author header: contains "|" and does not start like a numstat row
  // (digits, or "-" for binary files).
  if (line.includes("|") && !/^[0-9-]/.test(line)) {
    const [name, email] = line.split("|", 2);
    currentName = name?.trim() ?? null;
    currentEmail = email?.trim().toLowerCase() ?? null;
    continue;
  }
  if (!currentName) {
    continue;
  }
  const parts = line.split("\t");
  if (parts.length < 2) {
    continue;
  }
  const adds = parseCount(parts[0]);
  const dels = parseCount(parts[1]);
  const total = adds + dels;
  if (!total) {
    continue;
  }
  // NOTE(review): `login` is never reassigned — could be const.
  let login = resolveLogin(currentName, currentEmail, apiByLogin, nameToLogin, emailToLogin);
  if (!login) {
    continue;
  }
  const key = login.toLowerCase();
  linesByLogin.set(key, (linesByLogin.get(key) ?? 0) + total);
}
// Guarantee ensureLogins appear even with zero counted lines.
for (const login of ensureLogins) {
  if (!linesByLogin.has(login)) {
    linesByLogin.set(login, 0);
  }
}
// Merge pass 1: seed entries parsed from the historical README. Keyed by
// login when resolvable, otherwise by normalized display name ("name:…").
const entriesByKey = new Map<string, Entry>();
for (const seed of seedEntries) {
  const login = loginFromUrl(seed.html_url);
  const resolvedLogin =
    login ?? resolveLogin(seed.display, null, apiByLogin, nameToLogin, emailToLogin);
  const key = resolvedLogin ? resolvedLogin.toLowerCase() : `name:${normalizeName(seed.display)}`;
  // GitHub's "ghost" avatar means a deleted account — use the placeholder.
  const avatar =
    seed.avatar_url && !isGhostAvatar(seed.avatar_url)
      ? normalizeAvatar(seed.avatar_url)
      : placeholderAvatar;
  const existing = entriesByKey.get(key);
  if (!existing) {
    const user = resolvedLogin ? apiByLogin.get(key) : null;
    entriesByKey.set(key, {
      key,
      login: resolvedLogin ?? login ?? undefined,
      display: seed.display,
      html_url: user?.html_url ?? seed.html_url,
      avatar_url: user?.avatar_url ?? avatar,
      lines: 0,
    });
  } else {
    // Duplicate seed for the same key: fill gaps without clobbering better data.
    existing.display = existing.display || seed.display;
    if (existing.avatar_url === placeholderAvatar || !existing.avatar_url) {
      existing.avatar_url = avatar;
    }
    if (!existing.html_url || existing.html_url.includes("/search?q=")) {
      existing.html_url = seed.html_url;
    }
  }
}
// Merge pass 2: API contributors (including anonymous ones). Resolved logins
// merge into the keyed entries; unresolvable ones become "name:…" entries
// with the placeholder avatar. Line counts win over API contribution counts.
for (const item of contributors) {
  const baseName = item.name?.trim() || item.email?.trim() || item.login?.trim();
  if (!baseName) {
    continue;
  }
  const resolvedLogin = item.login
    ? item.login
    : resolveLogin(baseName, item.email ?? null, apiByLogin, nameToLogin, emailToLogin);
  if (resolvedLogin) {
    const key = resolvedLogin.toLowerCase();
    const existing = entriesByKey.get(key);
    if (!existing) {
      // NOTE(review): `user` is never reassigned — could be const; the
      // `existing?.display` below is always undefined on this branch.
      let user = apiByLogin.get(key) ?? fetchUser(resolvedLogin);
      if (user) {
        const lines = linesByLogin.get(key) ?? 0;
        const contributions = contributionsByLogin.get(key) ?? 0;
        entriesByKey.set(key, {
          key,
          login: user.login,
          display: pickDisplay(baseName, user.login, existing?.display),
          html_url: user.html_url,
          avatar_url: normalizeAvatar(user.avatar_url),
          lines: lines > 0 ? lines : contributions,
        });
      }
    } else if (existing) {
      existing.login = existing.login ?? resolvedLogin;
      existing.display = pickDisplay(baseName, existing.login, existing.display);
      if (existing.avatar_url === placeholderAvatar || !existing.avatar_url) {
        const user = apiByLogin.get(key) ?? fetchUser(resolvedLogin);
        if (user) {
          existing.html_url = user.html_url;
          existing.avatar_url = normalizeAvatar(user.avatar_url);
        }
      }
      const lines = linesByLogin.get(key) ?? 0;
      const contributions = contributionsByLogin.get(key) ?? 0;
      existing.lines = Math.max(existing.lines, lines > 0 ? lines : contributions);
    }
    continue;
  }
  // Anonymous contributor: keyed by normalized display name only.
  const anonKey = `name:${normalizeName(baseName)}`;
  const existingAnon = entriesByKey.get(anonKey);
  if (!existingAnon) {
    entriesByKey.set(anonKey, {
      key: anonKey,
      display: baseName,
      html_url: fallbackHref(baseName),
      avatar_url: placeholderAvatar,
      lines: item.contributions ?? 0,
    });
  } else {
    existingAnon.lines = Math.max(existingAnon.lines, item.contributions ?? 0);
  }
}
// Merge pass 3: logins that only appeared in git history (no API entry yet).
// A profile is fetched when possible; otherwise a placeholder entry is made.
for (const [login, lines] of linesByLogin.entries()) {
  if (entriesByKey.has(login)) {
    continue;
  }
  let user = apiByLogin.get(login);
  if (!user) {
    user = fetchUser(login);
  }
  if (user) {
    const contributions = contributionsByLogin.get(login) ?? 0;
    entriesByKey.set(login, {
      key: login,
      login: user.login,
      display: displayName[user.login.toLowerCase()] ?? user.login,
      html_url: user.html_url,
      avatar_url: normalizeAvatar(user.avatar_url),
      lines: lines > 0 ? lines : contributions,
    });
  } else {
    entriesByKey.set(login, {
      key: login,
      display: login,
      html_url: fallbackHref(login),
      avatar_url: placeholderAvatar,
      lines,
    });
  }
}
// Sort by changed lines (desc), then display name, render avatar rows and
// splice them into the README's existing <p align="left">…</p> block.
const entries = Array.from(entriesByKey.values());
entries.sort((a, b) => {
  if (b.lines !== a.lines) {
    return b.lines - a.lines;
  }
  return a.display.localeCompare(b.display);
});
const lines: string[] = [];
for (let i = 0; i < entries.length; i += PER_LINE) {
  const chunk = entries.slice(i, i + PER_LINE);
  const parts = chunk.map((entry) => {
    return `<a href=\"${entry.html_url}\"><img src=\"${entry.avatar_url}\" width=\"48\" height=\"48\" alt=\"${entry.display}\" title=\"${entry.display}\"/></a>`;
  });
  lines.push(`  ${parts.join(" ")}`);
}
const block = `${lines.join("\n")}\n`;
const readme = readFileSync(readmePath, "utf8");
// NOTE(review): this finds the FIRST <p align="left"> in README.md — confirm
// no earlier paragraph uses that attribute, or the wrong block gets replaced.
const start = readme.indexOf('<p align="left">');
const end = readme.indexOf("</p>", start);
if (start === -1 || end === -1) {
  throw new Error("README.md missing clawtributors block");
}
const next = `${readme.slice(0, start)}<p align=\"left\">\n${block}${readme.slice(end)}`;
writeFileSync(readmePath, next);
console.log(`Updated README clawtributors: ${entries.length} entries`);
/**
 * Run a shell command and return its trimmed stdout. stderr is piped (not
 * inherited) and the buffer is raised to 200 MiB because full-history
 * `git log --numstat` output exceeds Node's 1 MiB default.
 */
function run(cmd: string): string {
  const stdout = execSync(cmd, {
    encoding: "utf8",
    stdio: ["ignore", "pipe", "pipe"],
    maxBuffer: 1024 * 1024 * 200,
  });
  return stdout.trim();
}
/**
 * Flatten `gh api --paginate` output into one array: each non-blank line is
 * parsed as JSON; arrays are spread, scalars/objects appended.
 *
 * NOTE(review): assumes each page is newline-terminated — confirm against
 * the gh version in use (pages concatenated without newlines would break this).
 */
function parsePaginatedJson(raw: string): any[] {
  const out: any[] = [];
  for (const chunk of raw.split("\n")) {
    if (!chunk.trim()) {
      continue;
    }
    const value = JSON.parse(chunk);
    if (Array.isArray(value)) {
      out.push(...value);
    } else {
      out.push(value);
    }
  }
  return out;
}
function normalizeMap(map: Record<string, string>): Record<string, string> {
const out: Record<string, string> = {};
for (const [key, value] of Object.entries(map)) {
out[normalizeName(key)] = value;
}
return out;
}
/** Canonical author-name form: lowercase, trimmed, inner whitespace collapsed. */
function normalizeName(value: string): string {
  const collapsed = value.replace(/\s+/g, " ");
  return collapsed.trim().toLowerCase();
}
/** Parse a numstat cell: digits become a number, anything else (e.g. "-" for binary files) is 0. */
function parseCount(value: string): number {
  if (!/^\d+$/.test(value)) {
    return 0;
  }
  return Number(value);
}
/**
 * Ensure an absolute avatar URL requests a 48px image by appending `s=48`.
 *
 * Relative/local paths (e.g. the placeholder asset) are returned unchanged,
 * as is any URL that already carries an `s` or `size` query parameter.
 *
 * Fix: the previous implementation searched for the substrings "s=" /
 * "size=" anywhere in the lowercased URL, so unrelated query parameters
 * (e.g. "?versions=2") or path segments containing "s=" wrongly suppressed
 * the size hint. Parse the query string instead.
 */
function normalizeAvatar(url: string): string {
  if (!/^https?:/i.test(url)) {
    return url;
  }
  const parsed = (() => {
    try {
      return new URL(url);
    } catch {
      return null;
    }
  })();
  if (!parsed) {
    // Malformed absolute URL — leave it untouched rather than throw.
    return url;
  }
  if (parsed.searchParams.has("s") || parsed.searchParams.has("size")) {
    return url;
  }
  // Append to the raw string (not parsed.toString()) to avoid re-encoding
  // the rest of the URL.
  const sep = url.includes("?") ? "&" : "?";
  return `${url}${sep}s=48`;
}
/** True when the URL points at GitHub's "ghost" (deleted account) avatar. */
function isGhostAvatar(url: string): boolean {
  return url.toLowerCase().indexOf("ghost.png") !== -1;
}
/**
 * Fetch a single GitHub user profile via `gh api users/<login>`.
 * Returns null on any failure: command error, unparsable output, or a
 * response missing login/html_url/avatar_url.
 */
function fetchUser(login: string): User | null {
  try {
    const data = execSync(`gh api users/${login}`, {
      encoding: "utf8",
      stdio: ["ignore", "pipe", "pipe"],
    });
    const parsed = JSON.parse(data);
    if (!parsed?.login || !parsed?.html_url || !parsed?.avatar_url) {
      return null;
    }
    return {
      login: parsed.login,
      html_url: parsed.html_url,
      // Ensure the avatar carries the 48px size hint, like all others.
      avatar_url: normalizeAvatar(parsed.avatar_url),
    };
  } catch {
    // gh missing, auth failure, 404, etc. — treat all as "user unknown".
    return null;
  }
}
/**
 * Best-effort mapping from a git author (name + optional email) to a GitHub
 * login. Checks are ordered from most to least authoritative:
 *   1. explicit email→login override,
 *   2. email local-part matching the name against known API logins,
 *   3. GitHub noreply addresses (login embedded in the local part),
 *   4. "@github.com" addresses whose local part is a known login,
 *   5. explicit name→login override (normalized, then space-compacted),
 *   6. the normalized/compacted name itself being a known login.
 * Returns null when nothing matches.
 */
function resolveLogin(
  name: string,
  email: string | null,
  apiByLogin: Map<string, User>,
  nameToLogin: Record<string, string>,
  emailToLogin: Record<string, string>
): string | null {
  if (email && emailToLogin[email]) {
    return emailToLogin[email];
  }
  if (email && name) {
    const guessed = guessLoginFromEmailName(name, email, apiByLogin);
    if (guessed) {
      return guessed;
    }
  }
  if (email && email.endsWith("@users.noreply.github.com")) {
    // Noreply form is "<id>+<login>@…" or "<login>@…".
    const local = email.split("@", 1)[0];
    const login = local.includes("+") ? local.split("+")[1] : local;
    return login || null;
  }
  if (email && email.endsWith("@github.com")) {
    const login = email.split("@", 1)[0];
    if (apiByLogin.has(login.toLowerCase())) {
      return login;
    }
  }
  const normalized = normalizeName(name);
  if (nameToLogin[normalized]) {
    return nameToLogin[normalized];
  }
  // Also try the name with all spaces removed ("Jane Doe" -> "janedoe").
  const compact = normalized.replace(/\s+/g, "");
  if (nameToLogin[compact]) {
    return nameToLogin[compact];
  }
  if (apiByLogin.has(normalized)) {
    return normalized;
  }
  if (apiByLogin.has(compact)) {
    return compact;
  }
  return null;
}
function guessLoginFromEmailName(
  name: string,
  email: string,
  apiByLogin: Map<string, User>
): string | null {
  // Heuristic: if the email's local part (raw, or with ./_/- stripped)
  // matches the author name after identifier-normalization, and the API
  // already knows that login, use it.
  const local = email.split("@", 1)[0]?.trim();
  if (!local) {
    return null;
  }
  const target = normalizeIdentifier(name);
  if (!target) {
    return null;
  }
  for (const candidate of new Set([local, local.replace(/[._-]/g, "")])) {
    if (!candidate || normalizeIdentifier(candidate) !== target) {
      continue;
    }
    const key = candidate.toLowerCase();
    if (apiByLogin.has(key)) {
      return key;
    }
  }
  return null;
}
function normalizeIdentifier(value: string): string {
  // Keep only lowercase alphanumerics so "Jane.Doe" and "janedoe" compare equal.
  const keep = /[a-z0-9]/;
  return [...value.toLowerCase()].filter((ch) => keep.test(ch)).join("");
}
function parseReadmeEntries(
  content: string
): Array<{ display: string; html_url: string; avatar_url: string }> {
  // Extract contributor entries from the README's first <p align="left"> block.
  const start = content.indexOf('<p align="left">');
  const end = content.indexOf("</p>", start);
  if (start === -1 || end === -1) {
    return [];
  }
  const block = content.slice(start, end);
  const entries: Array<{ display: string; html_url: string; avatar_url: string }> = [];
  // Pass 1: avatars wrapped in a profile link.
  const linked = /<a href=\"([^\"]+)\"><img src=\"([^\"]+)\"[^>]*alt=\"([^\"]+)\"[^>]*>/g;
  let match: RegExpExecArray | null;
  while ((match = linked.exec(block)) !== null) {
    const href = match[1];
    const src = match[2];
    const alt = match[3];
    if (href && src && alt) {
      entries.push({ html_url: href, avatar_url: src, display: alt });
    }
  }
  // Pass 2: bare <img> tags, skipping any already captured above; these get
  // a synthesized fallback link.
  const standalone = /<img src=\"([^\"]+)\"[^>]*alt=\"([^\"]+)\"[^>]*>/g;
  while ((match = standalone.exec(block)) !== null) {
    const src = match[1];
    const alt = match[2];
    if (!src || !alt) {
      continue;
    }
    const seen = entries.some((entry) => entry.display === alt && entry.avatar_url === src);
    if (!seen) {
      entries.push({ html_url: fallbackHref(alt), avatar_url: src, display: alt });
    }
  }
  return entries;
}
function loginFromUrl(url: string): string | null {
  // Pull the first path segment off a github.com URL. "search" is excluded
  // because fallbackHref generates search links, which are not profiles.
  const match = url.match(/^https?:\/\/github\.com\/([^\/?#]+)/i);
  const login = match?.[1];
  if (!login) {
    return null;
  }
  return login.toLowerCase() === "search" ? null : login;
}
function fallbackHref(value: string): string {
  // Link unmatched contributor names to a GitHub search; plain github.com
  // when the name is empty after trimming.
  const query = encodeURIComponent(value.trim());
  if (!query) {
    return "https://github.com";
  }
  return `https://github.com/search?q=${query}`;
}
function pickDisplay(baseName: string | null | undefined, login: string, existing?: string): string {
  // Precedence: per-login override table, then any existing display value,
  // then the commit author name, finally the login itself. Truthiness (||)
  // is deliberate so empty strings fall through to the next candidate.
  const override = displayName[login.toLowerCase()];
  return override || existing || baseName || login;
}

View File

@@ -0,0 +1,32 @@
// Manual override configuration for the contributors map generator.
// NOTE(review): field semantics below are inferred from how the generator
// script consumes them — confirm against the loader.
export type MapConfig = {
  // Logins to include in the output explicitly.
  ensureLogins?: string[];
  // Lowercased login -> preferred display name.
  displayName?: Record<string, string>;
  // Normalized author name -> GitHub login.
  nameToLogin?: Record<string, string>;
  // Exact commit email -> GitHub login.
  emailToLogin?: Record<string, string>;
  // Avatar URL used when a contributor has none — presumably; verify.
  placeholderAvatar?: string;
  // Starting commit for history scans — NOTE(review): not visible here; confirm.
  seedCommit?: string;
};
// Loose shape of a contributor record from the GitHub API; every field may
// be absent until validated.
export type ApiContributor = {
  login?: string;
  html_url?: string;
  avatar_url?: string;
  name?: string;
  email?: string;
  contributions?: number;
};
// A validated GitHub user: all three identity fields guaranteed present.
export type User = {
  login: string;
  html_url: string;
  avatar_url: string;
};
// One rendered contributor entry.
export type Entry = {
  // Stable key used to dedupe entries.
  key: string;
  // GitHub login when resolved; absent for name-only contributors.
  login?: string;
  display: string;
  html_url: string;
  avatar_url: string;
  // Line count attributed to this contributor — presumably lines changed; confirm.
  lines: number;
};

View File

@@ -0,0 +1,59 @@
#!/usr/bin/env node
// Dev harness: compile the project once, then run the TypeScript compiler
// and the built entry point side by side in watch mode. Extra CLI args are
// forwarded to dist/entry.js. Both children are torn down together on exit.
import { spawn, spawnSync } from "node:child_process";
import process from "node:process";
const args = process.argv.slice(2);
const env = { ...process.env };
const cwd = process.cwd();
// Default compiler is tsgo; set CLAWDBOT_TS_COMPILER=tsc to use stock tsc.
const compiler = env.CLAWDBOT_TS_COMPILER === "tsc" ? "tsc" : "tsgo";
const projectArgs = ["--project", "tsconfig.json"];
// Blocking initial build so dist/entry.js exists before node --watch starts.
const initialBuild = spawnSync("pnpm", ["exec", compiler, ...projectArgs], {
  cwd,
  env,
  stdio: "inherit",
});
if (initialBuild.status !== 0) {
  process.exit(initialBuild.status ?? 1);
}
// tsc gets --preserveWatchOutput so it doesn't clear the shared terminal.
const watchArgs =
  compiler === "tsc"
    ? [...projectArgs, "--watch", "--preserveWatchOutput"]
    : [...projectArgs, "--watch"];
const compilerProcess = spawn("pnpm", ["exec", compiler, ...watchArgs], {
  cwd,
  env,
  stdio: "inherit",
});
// node --watch restarts the app whenever the compiler rewrites dist output.
const nodeProcess = spawn(process.execPath, ["--watch", "dist/entry.js", ...args], {
  cwd,
  env,
  stdio: "inherit",
});
let exiting = false;
// Terminate both children exactly once, then exit with the given code.
function cleanup(code = 0) {
  if (exiting) return;
  exiting = true;
  nodeProcess.kill("SIGTERM");
  compilerProcess.kill("SIGTERM");
  process.exit(code);
}
// Conventional exit codes: 128 + SIGINT(2) / SIGTERM(15).
process.on("SIGINT", () => cleanup(130));
process.on("SIGTERM", () => cleanup(143));
compilerProcess.on("exit", (code) => {
  if (exiting) return;
  cleanup(code ?? 1);
});
nodeProcess.on("exit", (code, signal) => {
  // NOTE(review): a signal-killed node process leaves the compiler running —
  // presumably intentional (node --watch manages its own restarts), but confirm.
  if (signal || exiting) return;
  cleanup(code ?? 1);
});

View File

@@ -0,0 +1,48 @@
import { execSync } from "node:child_process";
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
// This script lives in a subdirectory; the package root is one level up.
const rootDir = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");
const distDir = path.join(rootDir, "dist");
const pkgPath = path.join(rootDir, "package.json");
// Read the "version" field from package.json; null when the file is
// unreadable or not valid JSON.
const readPackageVersion = () => {
  try {
    const contents = fs.readFileSync(pkgPath, "utf8");
    const pkg = JSON.parse(contents) as { version?: string };
    return pkg.version ?? null;
  } catch {
    return null;
  }
};
// Determine the commit hash for the build: prefer CI-provided env vars
// (GIT_COMMIT, then GIT_SHA), otherwise ask git; null when neither works.
const resolveCommit = () => {
  const fromEnv = process.env.GIT_COMMIT?.trim() || process.env.GIT_SHA?.trim();
  if (fromEnv) {
    return fromEnv;
  }
  try {
    const output = execSync("git rev-parse HEAD", {
      cwd: rootDir,
      stdio: ["ignore", "pipe", "ignore"],
    });
    return output.toString().trim();
  } catch {
    // Not a git checkout (e.g. tarball build).
    return null;
  }
};
// Emit dist/build-info.json describing this build. Missing data is recorded
// as null rather than failing the build.
const version = readPackageVersion();
const commit = resolveCommit();
const buildInfo = {
  version,
  commit,
  builtAt: new Date().toISOString(),
};
fs.mkdirSync(distDir, { recursive: true });
fs.writeFileSync(
  path.join(distDir, "build-info.json"),
  `${JSON.stringify(buildInfo, null, 2)}\n`,
);

View File

@@ -0,0 +1,185 @@
import { randomUUID } from "node:crypto";
import { spawn } from "node:child_process";
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
// Outcome of a child-process run: exit code/signal plus captured output
// (output is also mirrored to our own stdio as it arrives).
type RunResult = {
  code: number | null;
  signal: NodeJS.Signals | null;
  stdout: string;
  stderr: string;
};
function pickAnthropicEnv(): { type: "oauth" | "api"; value: string } | null {
  // OAuth token takes precedence over the API key; blank/whitespace-only
  // values are treated as absent.
  for (const [type, name] of [
    ["oauth", "ANTHROPIC_OAUTH_TOKEN"],
    ["api", "ANTHROPIC_API_KEY"],
  ] as const) {
    const value = process.env[name]?.trim();
    if (value) {
      return { type, value };
    }
  }
  return null;
}
function pickZaiKey(): string | null {
  // Prefer ZAI_API_KEY, falling back to Z_AI_API_KEY.
  //
  // Fix: the previous `??` chain did not fall through when ZAI_API_KEY was
  // set but empty — `""?.trim()` yields "" which is non-nullish, so a blank
  // primary var masked a valid Z_AI_API_KEY. Use truthiness instead, which
  // also matches pickAnthropicEnv's handling of blank values.
  const primary = process.env.ZAI_API_KEY?.trim();
  if (primary) {
    return primary;
  }
  return process.env.Z_AI_API_KEY?.trim() || null;
}
async function runCommand(
  label: string,
  args: string[],
  env: NodeJS.ProcessEnv,
): Promise<RunResult> {
  // Run `pnpm <args>`, mirroring child output to our stdio while also
  // capturing it. Non-zero exits RESOLVE (not reject) so callers can inspect
  // the code; only spawn failures reject.
  //
  // Fix: the close handler resolved in two separate branches and logged the
  // failure summary after the promise was already resolved. Resolve once,
  // and log the summary before resolving.
  return await new Promise((resolve, reject) => {
    const child = spawn("pnpm", args, {
      env,
      stdio: ["ignore", "pipe", "pipe"],
    });
    let stdout = "";
    let stderr = "";
    child.stdout.on("data", (chunk) => {
      const text = String(chunk);
      stdout += text;
      process.stdout.write(text);
    });
    child.stderr.on("data", (chunk) => {
      const text = String(chunk);
      stderr += text;
      process.stderr.write(text);
    });
    child.on("error", (err) => reject(err));
    child.on("close", (code, signal) => {
      if (code !== 0) {
        const summary = signal
          ? `${label} exited with signal ${signal}`
          : `${label} exited with code ${code}`;
        console.error(summary);
      }
      resolve({ code, signal, stdout, stderr });
    });
  });
}
// End-to-end check that an agent session started against the Anthropic
// primary model can fail over to the Z.AI fallback model while retaining
// the session's tool-call history. Exits non-zero on any failure.
async function main() {
  const anthropic = pickAnthropicEnv();
  const zaiKey = pickZaiKey();
  if (!anthropic) {
    console.error("Missing ANTHROPIC_OAUTH_TOKEN or ANTHROPIC_API_KEY.");
    process.exit(1);
  }
  if (!zaiKey) {
    console.error("Missing ZAI_API_KEY or Z_AI_API_KEY.");
    process.exit(1);
  }
  // Fresh temp config/state so the test never touches the user's real setup.
  const baseDir = await fs.mkdtemp(
    path.join(os.tmpdir(), "moltbot-zai-fallback-"),
  );
  const stateDir = path.join(baseDir, "state");
  const configPath = path.join(baseDir, "moltbot.json");
  await fs.mkdir(stateDir, { recursive: true });
  // Anthropic primary with a single Z.AI fallback model.
  const config = {
    agents: {
      defaults: {
        model: {
          primary: "anthropic/claude-opus-4-5",
          fallbacks: ["zai/glm-4.7"],
        },
        models: {
          "anthropic/claude-opus-4-5": {},
          "zai/glm-4.7": {},
        },
      },
    },
  };
  await fs.writeFile(configPath, JSON.stringify(config, null, 2), "utf8");
  // A caller-supplied session id lets reruns resume existing history.
  const sessionId =
    process.env.CLAWDBOT_ZAI_FALLBACK_SESSION_ID ?? randomUUID();
  const baseEnv: NodeJS.ProcessEnv = {
    ...process.env,
    CLAWDBOT_CONFIG_PATH: configPath,
    CLAWDBOT_STATE_DIR: stateDir,
    ZAI_API_KEY: zaiKey,
    // Blank the alternate name so only ZAI_API_KEY is consulted downstream.
    Z_AI_API_KEY: "",
  };
  // Run 1 uses real Anthropic credentials; run 2 deliberately corrupts them
  // to force the agent onto the Z.AI fallback.
  const envValidAnthropic: NodeJS.ProcessEnv = {
    ...baseEnv,
    ANTHROPIC_OAUTH_TOKEN: anthropic.type === "oauth" ? anthropic.value : "",
    ANTHROPIC_API_KEY: anthropic.type === "api" ? anthropic.value : "",
  };
  const envInvalidAnthropic: NodeJS.ProcessEnv = {
    ...baseEnv,
    ANTHROPIC_OAUTH_TOKEN: anthropic.type === "oauth" ? "invalid" : "",
    ANTHROPIC_API_KEY: anthropic.type === "api" ? "invalid" : "",
  };
  console.log("== Run 1: create tool history (primary only)");
  const toolPrompt =
    "Use the exec tool to create a file named zai-fallback-tool.txt with the content tool-ok. " +
    "Then use the read tool to display the file contents. Reply with just the file contents.";
  const run1 = await runCommand(
    "run1",
    [
      "moltbot",
      "agent",
      "--local",
      "--session-id",
      sessionId,
      "--message",
      toolPrompt,
    ],
    envValidAnthropic,
  );
  if (run1.code !== 0) {
    process.exit(run1.code ?? 1);
  }
  // Sanity check: run 1 should have produced tool-call entries in the session
  // transcript. Warn only — the fallback behavior is what run 2 verifies.
  const sessionFile = path.join(
    stateDir,
    "agents",
    "main",
    "sessions",
    `${sessionId}.jsonl`,
  );
  const transcript = await fs.readFile(sessionFile, "utf8").catch(() => "");
  if (!transcript.includes('"toolResult"')) {
    console.warn("Warning: no toolResult entries detected in session history.");
  }
  console.log("== Run 2: force auth failover to Z.AI");
  const followupPrompt =
    "What is the content of zai-fallback-tool.txt? Reply with just the contents.";
  const run2 = await runCommand(
    "run2",
    [
      "moltbot",
      "agent",
      "--local",
      "--session-id",
      sessionId,
      "--message",
      followupPrompt,
    ],
    envInvalidAnthropic,
  );
  // Success means the second run completed despite invalid Anthropic auth,
  // i.e. the fallback model answered using the preserved session history.
  if (run2.code === 0) {
    console.log("PASS: fallback succeeded.");
    process.exit(0);
  }
  console.error("FAIL: fallback failed.");
  process.exit(run2.code ?? 1);
}
// Entry point: any unhandled error aborts the test with a non-zero exit.
main().catch((err) => {
  console.error(err);
  process.exit(1);
});