scarf/scarf/scarf/scarfApp.swift
Alan Wizemann da721fa276 feat(hermes-v12): provider catalog + auxiliary swap (Phase B)
Adds the five v0.12 inference providers to ModelCatalogService.overlayOnlyProviders
so the model picker reaches them. IDs match HERMES_OVERLAYS verbatim:

- gmi → GMI Cloud (api_key)
- azure-foundry → Azure AI Foundry (api_key)
- lmstudio → LM Studio (api_key, promoted from custom-endpoint alias)
- minimax-oauth → MiniMax (OAuth, oauth_external)
- tencent-tokenhub → Tencent TokenHub (api_key)
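
For reference, the new rows follow the same shape as the existing
overlay-only entries. A rough sketch of the addition, where the
`OverlayProvider` type and its field names are illustrative stand-ins
rather than the actual ScarfCore declarations; only the IDs, display
names, and auth types come from HERMES_OVERLAYS:

    // Sketch only: type and field names are illustrative, not the
    // real ScarfCore API. IDs, names, and auth types are verbatim
    // from HERMES_OVERLAYS.
    public static let overlayOnlyProviders: [OverlayProvider] = [
        OverlayProvider(id: "gmi",              name: "GMI Cloud",        auth: .apiKey),
        OverlayProvider(id: "azure-foundry",    name: "Azure AI Foundry", auth: .apiKey),
        OverlayProvider(id: "lmstudio",         name: "LM Studio",        auth: .apiKey),
        OverlayProvider(id: "minimax-oauth",    name: "MiniMax",          auth: .oauthExternal),
        OverlayProvider(id: "tencent-tokenhub", name: "Tencent TokenHub", auth: .apiKey),
    ]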

Auxiliary tasks: drop the `flush_memories` row (Hermes removed it
entirely in v0.12) and add `auxiliary.curator` so users can configure
the model the autonomous curator's review fork uses. The Curator row is
gated on HermesCapabilities.hasCuratorAux, so v0.11 hosts don't see a
control that writes a key Hermes ignores. AuxiliarySettings, the YAML
parser, and HealthViewModel's Tool Gateway breakdown are all updated.
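
The gate itself amounts to a one-line capability check around the
settings row. A sketch: only `HermesCapabilities.hasCuratorAux` and the
`auxiliary.curator` key are real; the row view and binding names are
invented for illustration:

    // Hypothetical sketch; only hasCuratorAux and the
    // auxiliary.curator key are real.
    if capabilities.hasCuratorAux {
        AuxiliaryModelRow(
            title: "Curator",
            yamlKey: "auxiliary.curator",      // the key Hermes v0.12 reads
            selection: $settings.curatorModel  // invented binding name
        )
    }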

Side fixes:

- CredentialPoolsGatingTests was missing `import ScarfCore` after
  ModelCatalogService moved to the package (broke the test target's
  compile against pure-Mac scarf).
- Promoted `ModelCatalogService.overlayOnlyProviders` to public so the
  new `v012OverlayProvidersCarryCorrectAuthTypes` lock-in test can
  reach it.
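
The lock-in test reduces to a handful of assertions over the
now-public list. A sketch, assuming the Swift Testing framework (the
prefix-less test name suggests it) and the illustrative `auth` field
from the catalog sketch above:

    import Testing
    import ScarfCore

    // Sketch of the lock-in test; field names follow the
    // illustrative OverlayProvider shape, not the real API.
    @Test func v012OverlayProvidersCarryCorrectAuthTypes() {
        let byID = Dictionary(
            uniqueKeysWithValues: ModelCatalogService.overlayOnlyProviders.map { ($0.id, $0) }
        )
        #expect(byID["gmi"]?.auth == .apiKey)
        #expect(byID["azure-foundry"]?.auth == .apiKey)
        #expect(byID["lmstudio"]?.auth == .apiKey)
        #expect(byID["minimax-oauth"]?.auth == .oauthExternal)
        #expect(byID["tencent-tokenhub"]?.auth == .apiKey)
    }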

Tests: 14 ToolGateway tests pass; 209 ScarfCore tests pass; both Mac
and iOS schemes build clean.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-01 12:16:37 +02:00

Swift

import SwiftUI
import ScarfCore

@main
struct ScarfApp: App {
    /// User-editable list of remote servers. Loaded from
    /// `~/Library/Application Support/scarf/servers.json` at launch.
    @State private var registry = ServerRegistry()

    /// One live status per registered server (Local + every remote). Polled
    /// in the background to keep the menu bar fresh without making it own
    /// per-window state.
    @State private var liveRegistry: ServerLiveStatusRegistry

    @State private var updater = UpdaterService()

    init() {
        let registry = ServerRegistry()
        let live = ServerLiveStatusRegistry(registry: registry)
        // Re-fan-out statuses whenever the user adds/removes/renames a
        // server in the picker. Without this, new servers wouldn't appear
        // in the menu bar until the next full app launch.
        registry.onEntriesChanged = { [weak live] in live?.rebuild() }
        _registry = State(initialValue: registry)
        _liveRegistry = State(initialValue: live)

        // Prune snapshot cache dirs whose server UUIDs aren't in the
        // registry anymore; handles the case where a server was removed
        // while Scarf wasn't running. Cheap: just an `ls` of the
        // snapshots root.
        registry.sweepOrphanCaches()

        // Wire ScarfCore's SSHTransport to the Mac-target login-shell env
        // probe. Without this, `ssh`/`scp` subprocesses spawned from Scarf
        // can't reach 1Password / Secretive / `.zshrc`-exported ssh-agent
        // sockets, and auth fails with "Permission denied" (exit 255) even
        // though terminal ssh works fine. iOS leaves this unset; Citadel
        // owns the agent there.
        SSHTransport.environmentEnricher = { HermesFileService.enrichedEnvironment() }

        // Warm up the login-shell env probe off-main at launch. Without
        // this, the first MainActor caller (chat preflight, OAuth flow,
        // signal-cli detect, etc.) blocks for 5-8 seconds while
        // `zsh -l -i` runs. Doing it eagerly on a detached task means the
        // static let is already populated by the time any UI needs it.
        Task.detached(priority: .utility) {
            _ = HermesFileService.enrichedEnvironment()
        }
    }
    var body: some Scene {
        // Multi-window: each window is bound to one `ServerID`. Opening a
        // second server via `openWindow(value:)` creates a second window
        // with its own coordinator + services; they're independent and can
        // run side-by-side. SwiftUI handles window-state restoration
        // automatically; quit + relaunch reopens the same windows with the
        // same server bindings.
        WindowGroup("Hermes", for: ServerID.self) { $serverID in
            // `nil` means the user removed this server since the window was
            // last open. Show a dedicated "server removed" view rather than
            // silently falling back to local; falling back would mislead
            // the user into thinking they're looking at the right server.
            if let ctx = registry.context(for: serverID) {
                ContextBoundRoot(context: ctx)
                    .environment(registry)
                    .environment(liveRegistry)
                    .environment(\.serverContext, ctx)
                    .environment(updater)
                    // Sync the live-status set whenever a window appears;
                    // covers the case where the user added a server in
                    // another window since this one last opened.
                    .onAppear { liveRegistry.rebuild() }
                    // scarf://install?url= deep-link handler. Stages the
                    // URL on the process-wide router; ProjectsView picks it
                    // up and presents the install sheet. Activating the
                    // app here ensures a cold launch from a browser click
                    // surfaces the sheet without the user having to click
                    // into Scarf first.
                    .onOpenURL { url in
                        TemplateURLRouter.shared.handle(url)
                        NSApplication.shared.activate()
                    }
            } else {
                // MissingServerView is a dead-end "server was removed" pane
                // with no ProjectsView, so no observer of the router's
                // pendingInstallURL exists in this window. Routing a
                // scarf://install URL here would silently drop it. Leave
                // onOpenURL off this branch; ContextBoundRoot windows in
                // the same app instance will still handle it.
                MissingServerView(removedServerID: serverID)
                    .environment(registry)
                    .environment(updater)
            }
        } defaultValue: {
            // Honour the user's "open on launch" choice from the Manage
            // Servers popover. Falls back to Local when no entry is flagged
            // (the default behaviour for fresh installs) or when the
            // flagged entry was removed while the app was closed.
            registry.defaultServerID
        }
        .defaultSize(width: 1100, height: 700)
        // Without an explicit resizability, `WindowGroup` defaults to
        // `.automatic`, which on macOS evaluates to `.contentSize`,
        // meaning the window is BOUND to its content's ideal size
        // rather than bounded below by it. Any section whose content's
        // intrinsic height changes (Chat's message list, the v2.3
        // per-project Sessions tab, Insights charts) would resize the
        // window on every section switch, snap back against user
        // resize, and sometimes push the whole window past the
        // screen. `.contentMinSize` turns the content's ideal height
        // into a minimum floor: user resize works freely, the window
        // stays put across section switches, and it still can't shrink
        // smaller than a section's minimum render.
        .windowResizability(.contentMinSize)
        .commands {
            CommandGroup(after: .appInfo) {
                Button("Check for Updates…") { updater.checkForUpdates() }
            }
            // File ▸ Open Server submenu: one entry per registered server
            // (including Local). Each opens or focuses a window bound to
            // that server.
            CommandGroup(after: .newItem) {
                OpenServerCommands()
                    .environment(registry)
            }
        }

        MenuBarExtra(
            "Scarf",
            systemImage: liveRegistry.anyRunning ? "hare.fill" : "hare"
        ) {
            MenuBarMenu(liveRegistry: liveRegistry, updater: updater)
        }
    }
}
/// Renders the `File ▸ Open Server` submenu plus per-server number
/// shortcuts (⌘1–⌘9). Uses `@Environment(\.openWindow)` so each menu item
/// opens (or focuses) a window keyed to that server's `ServerID`. Extracted
/// into its own View so the `@Environment` access happens inside a View
/// context; `.commands` closures can't access it directly.
private struct OpenServerCommands: View {
    @Environment(ServerRegistry.self) private var registry
    @Environment(\.openWindow) private var openWindow

    var body: some View {
        Menu("Open Server") {
            // Local is always slot 1 (⌘1).
            Button {
                openWindow(value: ServerContext.local.id)
            } label: {
                Label("Local", systemImage: "laptopcomputer")
            }
            .keyboardShortcut("1", modifiers: .command)

            if !registry.entries.isEmpty {
                Divider()
                // First 8 remote entries get ⌘2–⌘9. Beyond 9 servers,
                // entries lose their shortcut but remain clickable.
                ForEach(Array(registry.entries.prefix(8).enumerated()), id: \.element.id) { index, entry in
                    Button {
                        openWindow(value: entry.id)
                    } label: {
                        Label(entry.displayName, systemImage: "server.rack")
                    }
                    // index 0 maps to ⌘2, index 7 to ⌘9.
                    .keyboardShortcut(KeyEquivalent(Character("\(index + 2)")), modifiers: .command)
                }
                if registry.entries.count > 8 {
                    ForEach(registry.entries.dropFirst(8)) { entry in
                        Button {
                            openWindow(value: entry.id)
                        } label: {
                            Label(entry.displayName, systemImage: "server.rack")
                        }
                    }
                }
            }

            Divider()
            // Quick "open the picker" shortcut. Uses ⇧⌘S because ⌘O is
            // commonly bound to "Open in new tab" by browser/IDE muscle
            // memory, and we want the shortcut to feel additive, not
            // conflicting.
            Button {
                openWindow(value: ServerContext.local.id)
            } label: {
                Label("Manage Servers…", systemImage: "server.rack")
            }
            .keyboardShortcut("s", modifiers: [.command, .shift])
        }
    }
}
/// Wrapper View whose lifetime is scoped to one `ServerContext`. All
/// per-server `@State` (file watcher, coordinator, chat) lives here so
/// that the enclosing `.id(context.id)` modifier in `ScarfApp` cleanly
/// reinitializes everything when the user switches servers.
private struct ContextBoundRoot: View {
    let context: ServerContext
    @State private var coordinator: AppCoordinator
    @State private var fileWatcher: HermesFileWatcher
    @State private var chatViewModel: ChatViewModel

    /// Per-window snapshot of the target Hermes installation's capability
    /// flags. Drives sidebar visibility (Curator and Kanban only on
    /// v0.12+), settings rows (curator aux added in v0.12), and version
    /// banners. Refreshes once on init; an explicit `refresh()` call
    /// re-runs it after a `hermes update`.
    @State private var capabilities: HermesCapabilitiesStore

    init(context: ServerContext) {
        self.context = context
        _coordinator = State(initialValue: AppCoordinator())
        _fileWatcher = State(initialValue: HermesFileWatcher(context: context))
        _chatViewModel = State(initialValue: ChatViewModel(context: context))
        _capabilities = State(initialValue: HermesCapabilitiesStore(context: context))
    }

    var body: some View {
        ContentView()
            .environment(coordinator)
            .environment(fileWatcher)
            .environment(chatViewModel)
            .environment(capabilities)
            .hermesCapabilities(capabilities)
            // Per-window title shows which server this window is bound to.
            // Local: "Scarf — Local". Remote: "Scarf — Mardon Mac Mini".
            // The colored dot lives inside the toolbar switcher; the window
            // title gives macOS Mission Control / ⌘` cycling a meaningful
            // label so users can pick the right window without focusing it.
            .navigationTitle("Scarf — \(context.displayName)")
            .onAppear { fileWatcher.startWatching() }
            .onDisappear { fileWatcher.stopWatching() }
    }
}
/// Per-server live state for the menu bar: is hermes running on this
/// server, is its gateway up, and the file service used to start/stop it.
/// One of these per registered server (plus local) so the menu bar can
/// fan out across multiple Hermes installations.
@Observable
@MainActor
final class ServerLiveStatus: Identifiable {
    let context: ServerContext
    private let fileService: HermesFileService
    private var pollTask: Task<Void, Never>?

    var hermesRunning = false
    var gatewayRunning = false

    var id: ServerID { context.id }

    init(context: ServerContext) {
        self.context = context
        self.fileService = HermesFileService(context: context)
    }

    func startPolling() {
        stopPolling()
        pollTask = Task { [weak self] in
            // Exponential backoff on consecutive failures. Healthy servers
            // poll every 10s. When a registered remote goes unreachable,
            // pgrep + gateway_state.json reads fail every tick; without
            // backoff that's a log warning + a 5s pgrep timeout every 10s
            // for as long as the remote stays down. Reset to 10s on the
            // first probe that fully succeeds.
            var consecutiveFailures = 0
            while !Task.isCancelled {
                let ok = await self?.pollOnce() ?? false
                if Task.isCancelled { return }
                consecutiveFailures = ok ? 0 : consecutiveFailures + 1
                let delaySec: UInt64
                switch consecutiveFailures {
                case 0: delaySec = 10
                case 1: delaySec = 30
                case 2: delaySec = 60
                case 3: delaySec = 120
                default: delaySec = 300
                }
                try? await Task.sleep(nanoseconds: delaySec * 1_000_000_000)
            }
        }
    }

    func stopPolling() {
        pollTask?.cancel()
        pollTask = nil
    }

    func startHermes() {
        Task.detached { [context] in
            _ = context.runHermes(["gateway", "start"])
        }
        // Refresh after a short delay to pick up the new state.
        Task { [weak self] in
            try? await Task.sleep(nanoseconds: 3_000_000_000)
            self?.refresh()
        }
    }

    func stopHermes() {
        Task.detached { [fileService] in _ = fileService.stopHermes() }
        Task { [weak self] in
            try? await Task.sleep(nanoseconds: 2_000_000_000)
            self?.refresh()
        }
    }

    func restartHermes() {
        Task.detached { [fileService] in
            _ = fileService.stopHermes()
        }
        Task { [weak self] in
            try? await Task.sleep(nanoseconds: 2_000_000_000)
            self?.startHermes()
        }
    }

    private func refresh() {
        Task { [weak self] in _ = await self?.pollOnce() }
    }

    /// Single probe used by both the polling loop (which needs the
    /// success/failure signal for backoff) and the fire-and-forget
    /// `refresh()` callers (start/stop/restart). Returns `true` only when
    /// both the pgrep call AND the gateway_state.json read returned a
    /// transport-level success; `.success(nil)` (file missing because
    /// hermes is stopped) still counts as a successful probe.
    private func pollOnce() async -> Bool {
        let svc = fileService
        struct ProbeResult: Sendable {
            let running: Bool
            let gatewayRunning: Bool
            let ok: Bool
        }
        let probe = await Task.detached { () -> ProbeResult in
            let pgrep = svc.hermesPIDResult()
            let gateway = svc.loadGatewayStateResult()
            let running: Bool
            switch pgrep {
            case .success(let pid): running = (pid != nil)
            case .failure: running = false
            }
            let gatewayRunning: Bool
            switch gateway {
            case .success(let state): gatewayRunning = state?.isRunning ?? false
            case .failure: gatewayRunning = false
            }
            let pgrepOK: Bool
            if case .failure = pgrep { pgrepOK = false } else { pgrepOK = true }
            let gatewayOK: Bool
            if case .failure = gateway { gatewayOK = false } else { gatewayOK = true }
            return ProbeResult(running: running, gatewayRunning: gatewayRunning, ok: pgrepOK && gatewayOK)
        }.value
        hermesRunning = probe.running
        gatewayRunning = probe.gatewayRunning
        return probe.ok
    }
}
/// App-scoped registry of `ServerLiveStatus`, one per known server. Adds/
/// removes in lockstep with `ServerRegistry`, so the menu bar accurately
/// reflects the current set of registered servers.
@Observable
@MainActor
final class ServerLiveStatusRegistry {
    private(set) var statuses: [ServerLiveStatus] = []
    private let registry: ServerRegistry

    init(registry: ServerRegistry) {
        self.registry = registry
        rebuild()
    }

    /// Recompute the status list from the source registry. Re-uses any
    /// existing status object whose ID still matches so we don't lose
    /// in-flight polling state on a server add/rename.
    func rebuild() {
        var newStatuses: [ServerLiveStatus] = []
        let allContexts = registry.allContexts
        for ctx in allContexts {
            if let existing = statuses.first(where: { $0.id == ctx.id }) {
                newStatuses.append(existing)
            } else {
                let status = ServerLiveStatus(context: ctx)
                status.startPolling()
                newStatuses.append(status)
            }
        }
        // Stop polling on statuses that were removed.
        for old in statuses where !newStatuses.contains(where: { $0.id == old.id }) {
            old.stopPolling()
        }
        statuses = newStatuses
    }

    /// True if any registered server reports hermes running. Drives the
    /// menu bar icon (filled vs. outline hare).
    var anyRunning: Bool { statuses.contains(where: { $0.hermesRunning }) }
}
struct MenuBarMenu: View {
    let liveRegistry: ServerLiveStatusRegistry
    let updater: UpdaterService
    @Environment(\.openWindow) private var openWindow

    var body: some View {
        VStack(alignment: .leading, spacing: 0) {
            // One section per server with its run state + start/stop/restart.
            // Iterating registered statuses keeps the menu in sync as the
            // user adds/removes servers in the picker.
            ForEach(liveRegistry.statuses) { status in
                serverSection(status)
                Divider()
            }
            Button("Open Scarf") {
                NSApplication.shared.activate()
            }
            Divider()
            Button("Check for Updates…") { updater.checkForUpdates() }
            Divider()
            Button("Quit Scarf") {
                NSApplication.shared.terminate(nil)
            }
            .keyboardShortcut("q")
        }
    }

    @ViewBuilder
    private func serverSection(_ status: ServerLiveStatus) -> some View {
        Group {
            // Server name as a header, with the open-window action on click.
            Button {
                openWindow(value: status.context.id)
                NSApplication.shared.activate()
            } label: {
                HStack(spacing: 4) {
                    Image(systemName: status.context.isRemote ? "server.rack" : "laptopcomputer")
                    Text(status.context.displayName).bold()
                }
            }
            Label(
                status.hermesRunning ? "Hermes Running" : "Hermes Stopped",
                systemImage: status.hermesRunning ? "circle.fill" : "circle"
            )
            Label(
                status.gatewayRunning ? "Messaging Gateway Running" : "Messaging Gateway Stopped",
                systemImage: status.gatewayRunning ? "circle.fill" : "circle"
            )
            Button("Start Hermes") { status.startHermes() }
                .disabled(status.hermesRunning)
            Button("Stop Hermes") { status.stopHermes() }
                .disabled(!status.hermesRunning)
            Button("Restart Hermes") { status.restartHermes() }
                .disabled(!status.hermesRunning)
        }
    }
}
}