Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 11 additions & 10 deletions Sources/Fluid/ContentView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -959,7 +959,7 @@ struct ContentView: View {
.listRowBackground(self.sidebarRowBackground(for: .voiceEngine))

NavigationLink(value: SidebarItem.aiEnhancements) {
Label("AI Enhancements", systemImage: "brain")
Label("AI Enhancement", systemImage: "brain")
.font(.system(size: 15, weight: .medium))
.padding(.leading, 18)
}
Expand Down Expand Up @@ -1578,7 +1578,7 @@ struct ContentView: View {
return self.buildSystemPrompt(appInfo: appInfo, dictationSlot: dictationSlot)
}()

// Dictation cleanup folds the prompt + transcript into a single user
// Dictation enhancement folds the prompt + transcript into a single user
// turn (substituting `${transcript}` when present, otherwise appending
// the transcript after a blank line). Non-dictation callers — the AI
// chat tab specifically — keep the legacy two-message layout where
Expand Down Expand Up @@ -1631,7 +1631,7 @@ struct ContentView: View {
}
self.logDictationPromptTrace("Selected context text", value: "<none (dictation mode)>")
}
DebugLogger.shared.debug("Using Apple Intelligence for transcription cleanup", source: "ContentView")
DebugLogger.shared.debug("Using Apple Intelligence for transcription enhancement", source: "ContentView")
let output = try await provider.process(systemPrompt: systemPrompt, userText: userMessageContent)
if self.shouldTracePromptProcessing {
self.logDictationPromptTrace("Model answer (A)", value: output)
Expand Down Expand Up @@ -1710,7 +1710,7 @@ struct ContentView: View {
)
}

// Build messages array. For dictation cleanup the whole prompt +
// Build messages array. For dictation enhancement the whole prompt +
// transcript is folded into a single user message, so we omit the
// (empty) system role. Non-dictation callers keep the legacy
// system + user shape.
Expand All @@ -1722,7 +1722,7 @@ struct ContentView: View {

// NOTE: Transcription doesn't need streaming - the full result appears at once
// Streaming is only useful for Command/Rewrite modes where real-time display helps
// Using non-streaming is simpler and more reliable for transcription cleanup
// Using non-streaming is simpler and more reliable for transcription enhancement
let enableStreaming = false // Hardcoded off for transcription

// Build LLMClient configuration
Expand Down Expand Up @@ -1881,9 +1881,11 @@ struct ContentView: View {

var finalText: String
var aiFallbackReason: String?
let appInfo = self.recordingAppInfo ?? self.getCurrentAppInfo()

let shouldUseAI = activeDictationSlot.map { DictationAIPostProcessingGate.isConfigured(for: $0) } ??
DictationAIPostProcessingGate.isConfigured()
let shouldUseAI = activeDictationSlot.map {
DictationAIPostProcessingGate.isConfigured(for: $0, appBundleID: appInfo.bundleId)
} ?? DictationAIPostProcessingGate.isConfigured(for: .primary, appBundleID: appInfo.bundleId)
let transcriptionModelInfo = self.currentTranscriptionModelInfo()

if shouldUseAI {
Expand Down Expand Up @@ -1973,7 +1975,6 @@ struct ContentView: View {

// Save to transcription history (transcription mode only, if enabled)
if shouldPersistOutputs, SettingsStore.shared.saveTranscriptionHistory {
let appInfo = self.recordingAppInfo ?? self.getCurrentAppInfo()
TranscriptionHistoryStore.shared.addEntry(
rawText: transcribedText,
processedText: finalText,
Expand Down Expand Up @@ -2194,7 +2195,8 @@ struct ContentView: View {

var finalText = transcribedText
var aiFallbackReason: String?
let shouldUseAI = DictationAIPostProcessingGate.isConfigured()
let appInfo = self.getCurrentAppInfo()
let shouldUseAI = DictationAIPostProcessingGate.isConfigured(for: .primary, appBundleID: appInfo.bundleId)
if shouldUseAI {
do {
finalText = try await self.processTextWithAI(transcribedText)
Expand All @@ -2213,7 +2215,6 @@ struct ContentView: View {
self.menuBarManager.setProcessing(false)

finalText = ASRService.applyGAAVFormatting(finalText)
let appInfo = self.getCurrentAppInfo()

if SettingsStore.shared.saveTranscriptionHistory {
TranscriptionHistoryStore.shared.addEntry(
Expand Down
2 changes: 1 addition & 1 deletion Sources/Fluid/Networking/AppleIntelligenceProvider.swift
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ enum AppleIntelligenceService {
#if canImport(FoundationModels)
@available(macOS 26.0, *)
final class AppleIntelligenceProvider {
/// Process text with a system prompt (for transcription cleanup)
/// Process text with a system prompt (for transcription enhancement)
func process(systemPrompt: String, userText: String) async throws -> String {
let session = LanguageModelSession()

Expand Down
2 changes: 2 additions & 0 deletions Sources/Fluid/Persistence/BackupService.swift
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,9 @@ struct SettingsBackupPayload: Codable, Equatable {
let customDictionaryEntries: [SettingsStore.CustomDictionaryEntry]
let selectedDictationPromptID: String?
let dictationPromptOff: Bool?
let dictationPromptRoutingScope: SettingsStore.PromptRoutingScope?
let selectedEditPromptID: String?
let editPromptRoutingScope: SettingsStore.PromptRoutingScope?
let defaultDictationPromptOverride: String?
let defaultEditPromptOverride: String?
}
Expand Down
64 changes: 64 additions & 0 deletions Sources/Fluid/Persistence/SettingsStore+PromptRouting.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
import Combine
import Foundation

extension SettingsStore {
    /// Where a prompt applies: to every app, or only to apps the user
    /// explicitly bound a prompt to.
    enum PromptRoutingScope: String, Codable, CaseIterable, Identifiable {
        case allApps
        case selectedAppsOnly

        var id: String { self.rawValue }
    }

    /// Routing scope for dictation prompts, persisted in `UserDefaults`.
    /// Defaults to `.allApps` when unset or unrecognized.
    var dictationPromptRoutingScope: PromptRoutingScope {
        get { Self.storedScope(forKey: PromptRoutingKeys.dictation) }
        set { self.persistScope(newValue, forKey: PromptRoutingKeys.dictation) }
    }

    /// Routing scope for edit prompts, persisted in `UserDefaults`.
    /// Defaults to `.allApps` when unset or unrecognized.
    var editPromptRoutingScope: PromptRoutingScope {
        get { Self.storedScope(forKey: PromptRoutingKeys.edit) }
        set { self.persistScope(newValue, forKey: PromptRoutingKeys.edit) }
    }

    /// Returns the routing scope that governs `mode`.
    /// Write-family modes (`edit`, `write`, `rewrite`) share the edit scope.
    func promptRoutingScope(for mode: PromptMode) -> PromptRoutingScope {
        switch mode.normalized {
        case .dictate:
            return self.dictationPromptRoutingScope
        case .edit, .write, .rewrite:
            return self.editPromptRoutingScope
        }
    }

    /// Stores `scope` as the routing scope for `mode`, routing write-family
    /// modes to the shared edit scope.
    func setPromptRoutingScope(_ scope: PromptRoutingScope, for mode: PromptMode) {
        switch mode.normalized {
        case .dictate:
            self.dictationPromptRoutingScope = scope
        case .edit, .write, .rewrite:
            self.editPromptRoutingScope = scope
        }
    }

    /// Reads a scope from `UserDefaults`, falling back to `.allApps` for a
    /// missing key or an unrecognized raw value.
    private static func storedScope(forKey key: String) -> PromptRoutingScope {
        guard let raw = UserDefaults.standard.string(forKey: key) else { return .allApps }
        return PromptRoutingScope(rawValue: raw) ?? .allApps
    }

    /// Writes `scope` to `UserDefaults`, notifying observers first so views
    /// observing the store refresh.
    private func persistScope(_ scope: PromptRoutingScope, forKey key: String) {
        objectWillChange.send()
        UserDefaults.standard.set(scope.rawValue, forKey: key)
    }
}

/// `UserDefaults` keys backing the prompt-routing scopes.
/// These raw strings are persisted on users' machines — do not rename them
/// without a migration, or existing routing settings will silently reset.
private enum PromptRoutingKeys {
    /// Key for the dictation prompt routing scope.
    static let dictation = "DictationPromptRoutingScope"
    /// Key for the edit prompt routing scope.
    static let edit = "EditPromptRoutingScope"
}
34 changes: 29 additions & 5 deletions Sources/Fluid/Persistence/SettingsStore.swift
Original file line number Diff line number Diff line change
Expand Up @@ -235,7 +235,7 @@ final class SettingsStore: ObservableObject {
let systemPrompt: String
}

/// User-defined dictation prompt profiles (named system prompts for dictation cleanup).
/// User-defined dictation prompt profiles (named system prompts for dictation enhancement).
/// The built-in default prompt is not stored here.
var dictationPromptProfiles: [DictationPromptProfile] {
get {
Expand Down Expand Up @@ -886,6 +886,15 @@ final class SettingsStore: ObservableObject {
)
}

if self.promptRoutingScope(for: normalizedMode) == .selectedAppsOnly {
return self.defaultPromptResolution(
for: normalizedMode,
source: .builtInDefault,
appBinding: nil,
allowDefaultOverride: false
)
}

if let profile = self.selectedPromptProfile(for: normalizedMode) {
let body = Self.stripBasePrompt(for: normalizedMode, from: profile.prompt)
if !body.isEmpty {
Expand All @@ -907,6 +916,11 @@ final class SettingsStore: ObservableObject {
}

func effectiveDictationPromptBody(for slot: DictationShortcutSlot, appBundleID: String? = nil) -> String {
if self.promptRoutingScope(for: .dictate) == .selectedAppsOnly {
guard self.dictationPromptSelection(for: slot) != .off else { return "" }
return self.effectivePromptBody(for: .dictate, appBundleID: appBundleID)
}

switch self.dictationPromptSelection(for: slot) {
case .off:
return ""
Expand All @@ -925,6 +939,11 @@ final class SettingsStore: ObservableObject {
}

func effectiveDictationSystemPrompt(for slot: DictationShortcutSlot, appBundleID: String? = nil) -> String {
if self.promptRoutingScope(for: .dictate) == .selectedAppsOnly {
guard self.dictationPromptSelection(for: slot) != .off else { return "" }
return self.effectiveSystemPrompt(for: .dictate, appBundleID: appBundleID)
}

switch self.dictationPromptSelection(for: slot) {
case .off, .default:
return self.effectiveSystemPrompt(for: .dictate, appBundleID: appBundleID)
Expand Down Expand Up @@ -953,10 +972,10 @@ final class SettingsStore: ObservableObject {
}

/// Literal placeholder that gets substituted with the raw transcription
/// when composing the user message for a dictation cleanup call.
/// when composing the user message for a dictation enhancement call.
static let transcriptPlaceholder = "${transcript}"

/// Compose the user-turn string for a dictation cleanup call by folding
/// Compose the user-turn string for a dictation enhancement call by folding
/// the transcript into the prompt template. If the template contains the
/// `${transcript}` placeholder, the placeholder is replaced; otherwise
/// the transcript is appended after a blank line, matching the pre-PR
Expand All @@ -973,9 +992,10 @@ final class SettingsStore: ObservableObject {
private func defaultPromptResolution(
for mode: PromptMode,
source: PromptResolutionSource,
appBinding: AppPromptBinding?
appBinding: AppPromptBinding?,
allowDefaultOverride: Bool = true
) -> PromptResolution {
if let override = self.defaultPromptOverride(for: mode) {
if allowDefaultOverride, let override = self.defaultPromptOverride(for: mode) {
let trimmedOverride = override.trimmingCharacters(in: .whitespacesAndNewlines)
if trimmedOverride.isEmpty {
return PromptResolution(
Expand Down Expand Up @@ -2265,7 +2285,9 @@ final class SettingsStore: ObservableObject {
customDictionaryEntries: self.customDictionaryEntries,
selectedDictationPromptID: self.selectedDictationPromptID,
dictationPromptOff: self.isDictationPromptOff,
dictationPromptRoutingScope: self.dictationPromptRoutingScope,
selectedEditPromptID: self.selectedEditPromptID,
editPromptRoutingScope: self.editPromptRoutingScope,
defaultDictationPromptOverride: self.defaultDictationPromptOverride,
defaultEditPromptOverride: self.defaultEditPromptOverride
)
Expand Down Expand Up @@ -2340,6 +2362,8 @@ final class SettingsStore: ObservableObject {
self.appPromptBindings = appPromptBindings
self.selectedDictationPromptID = payload.selectedDictationPromptID
self.isDictationPromptOff = payload.dictationPromptOff ?? self.isDictationPromptOff
self.dictationPromptRoutingScope = payload.dictationPromptRoutingScope ?? .allApps
self.editPromptRoutingScope = payload.editPromptRoutingScope ?? .allApps
self.selectedEditPromptID = payload.selectedEditPromptID
self.defaultDictationPromptOverride = payload.defaultDictationPromptOverride
self.defaultEditPromptOverride = payload.defaultEditPromptOverride
Expand Down
10 changes: 8 additions & 2 deletions Sources/Fluid/Services/DictationAIPostProcessingGate.swift
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,18 @@ enum DictationAIPostProcessingGate {
/// - Requires dictation prompt selection to not be `Off`
/// - Requires the selected provider connection to still be verified
static func isConfigured() -> Bool {
self.isConfigured(for: .primary)
self.isConfigured(for: .primary, appBundleID: nil)
}

static func isConfigured(for slot: SettingsStore.DictationShortcutSlot) -> Bool {
static func isConfigured(for slot: SettingsStore.DictationShortcutSlot, appBundleID: String? = nil) -> Bool {
let settings = SettingsStore.shared
guard settings.dictationPromptSelection(for: slot) != .off else { return false }
if let appBundleID,
settings.promptRoutingScope(for: .dictate) == .selectedAppsOnly,
!settings.hasAppPromptBinding(for: .dictate, appBundleID: appBundleID)
{
return false
}

return self.isProviderConfigured()
}
Expand Down
2 changes: 1 addition & 1 deletion Sources/Fluid/Services/NotificationService.swift
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ enum NotificationService {

private static func deliverAIProcessingFallback(error: String, using center: UNUserNotificationCenter) {
let content = UNMutableNotificationContent()
content.title = "AI cleanup failed"
content.title = "AI Enhancement failed"
content.body = "Typed raw transcription instead."
content.subtitle = error
content.sound = nil
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ struct AIEnhancementSettingsView: View {
@State var selectedPromptMode: SettingsStore.PromptMode = .dictate
@State var hoveredPromptModeKey: String? = nil
@State var hoveredCleanupControlKey: String? = nil
@State var hoveredPromptScopeKey: String? = nil

var body: some View {
self.aiConfigurationCard
Expand Down
20 changes: 18 additions & 2 deletions Sources/Fluid/UI/AISettings/AIEnhancementSettingsViewModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -1429,7 +1429,12 @@ final class AIEnhancementSettingsViewModel: ObservableObject {
let trimmedName = appName.trimmingCharacters(in: .whitespacesAndNewlines)
let resolvedName = trimmedName.isEmpty ? normalizedBundleID : trimmedName
let existingPromptID = self.settings.appPromptBinding(for: mode, appBundleID: normalizedBundleID)?.promptID
let resolvedPromptID = existingPromptID ?? self.selectedPromptID(for: mode)
let resolvedPromptID: String?
if self.settings.promptRoutingScope(for: mode) == .selectedAppsOnly {
resolvedPromptID = existingPromptID
} else {
resolvedPromptID = existingPromptID ?? self.selectedPromptID(for: mode)
}

self.appPromptBindingErrorMessage = ""
self.settings.upsertAppPromptBinding(
Expand Down Expand Up @@ -1497,13 +1502,24 @@ final class AIEnhancementSettingsViewModel: ObservableObject {
$0.mode.normalized == mode.normalized
})
else {
return "Default"
return "Built-in Default"
}

let trimmed = profile.name.trimmingCharacters(in: .whitespacesAndNewlines)
return trimmed.isEmpty ? "Untitled Prompt" : trimmed
}

func promptRoutingScope(for mode: SettingsStore.PromptMode) -> SettingsStore.PromptRoutingScope {
self.settings.promptRoutingScope(for: mode)
}

func setPromptRoutingScope(_ scope: SettingsStore.PromptRoutingScope, for mode: SettingsStore.PromptMode) {
self.settings.setPromptRoutingScope(scope, for: mode)
self.selectedDictationPromptID = self.settings.selectedDictationPromptID
self.selectedEditPromptID = self.settings.selectedEditPromptID
self.isDictationPromptOff = self.settings.isDictationPromptOff
}

func isPrimaryDictationPromptSelectionOff() -> Bool {
self.settings.isDictationPromptOff
}
Expand Down
8 changes: 4 additions & 4 deletions Sources/Fluid/UI/AISettingsView+AIConfiguration.swift
Original file line number Diff line number Diff line change
Expand Up @@ -126,11 +126,11 @@ extension AIEnhancementSettingsView {
.frame(width: 34, height: 34)

VStack(alignment: .leading, spacing: 2) {
Text("AI Enhancements")
Text("AI Enhancement")
.font(.title3)
.fontWeight(.semibold)
.foregroundStyle(self.theme.palette.primaryText)
Text("Choose the model used for AI Cleanup.")
Text("Choose the model used for AI Enhancement.")
.font(.caption)
.foregroundStyle(self.theme.palette.secondaryText)
}
Expand All @@ -146,13 +146,13 @@ extension AIEnhancementSettingsView {
self.aiSetupSummaryDivider
self.aiSetupSummaryItem(icon: "cloud", text: "Cloud models use provider APIs")
self.aiSetupSummaryDivider
self.aiSetupSummaryItem(icon: "slider.horizontal.3", text: "AI Cleanup enables dictation prompts")
self.aiSetupSummaryItem(icon: "slider.horizontal.3", text: "AI Enhancement enables dictation prompts")
}

VStack(alignment: .leading, spacing: 7) {
self.aiSetupSummaryItem(icon: "cpu", text: "Local models run on Mac")
self.aiSetupSummaryItem(icon: "cloud", text: "Cloud models use provider APIs")
self.aiSetupSummaryItem(icon: "slider.horizontal.3", text: "AI Cleanup enables dictation prompts")
self.aiSetupSummaryItem(icon: "slider.horizontal.3", text: "AI Enhancement enables dictation prompts")
}
}
.padding(.horizontal, 2)
Expand Down
Loading
Loading