Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions app/MindWork AI Studio/Assistants/I18N/allTexts.lua
Original file line number Diff line number Diff line change
Expand Up @@ -1732,6 +1732,9 @@ UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T4188329028"] = "No, kee
-- Export Chat to Microsoft Word
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T861873672"] = "Export Chat to Microsoft Word"

-- The selected model '{0}' is no longer available from '{1}' (provider={2}). Please adapt your provider settings.
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTTEXT::T3267850764"] = "The selected model '{0}' is no longer available from '{1}' (provider={2}). Please adapt your provider settings."

-- The local image file does not exist. Skipping the image.
UI_TEXT_CONTENT["AISTUDIO::CHAT::IIMAGESOURCEEXTENSIONS::T255679918"] = "The local image file does not exist. Skipping the image."

Expand Down
76 changes: 75 additions & 1 deletion app/MindWork AI Studio/Chat/ContentText.cs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@

using AIStudio.Provider;
using AIStudio.Settings;
using AIStudio.Tools;
using AIStudio.Tools.PluginSystem;
using AIStudio.Tools.RAG.RAGProcesses;

namespace AIStudio.Chat;
Expand All @@ -13,6 +15,7 @@ namespace AIStudio.Chat;
public sealed class ContentText : IContent
{
private static readonly ILogger<ContentText> LOGGER = Program.LOGGER_FACTORY.CreateLogger<ContentText>();
// Looks up the localized text for the given English fallback, scoped to this
// type's namespace and name (matches the generated keys in allTexts.lua).
private static string TB(string fallbackEN) => I18N.I.T(fallbackEN, typeof(ContentText).Namespace, nameof(ContentText));

/// <summary>
/// The minimum time between two streaming events, when the user
Expand Down Expand Up @@ -48,11 +51,21 @@ public sealed class ContentText : IContent
public async Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastUserPrompt, ChatThread? chatThread, CancellationToken token = default)
{
if(chatThread is null)
{
await this.CompleteWithoutStreaming();
return new();
}

if(!chatThread.IsLLMProviderAllowed(provider))
{
LOGGER.LogError("The provider is not allowed for this chat thread due to data security reasons. Skipping the AI process.");
await this.CompleteWithoutStreaming();
return chatThread;
}

if(!await this.CheckSelectedModelAvailability(provider, chatModel, token))
{
await this.CompleteWithoutStreaming();
return chatThread;
}

Expand Down Expand Up @@ -137,6 +150,67 @@ await Task.Run(async () =>
return chatThread;
}

/// <summary>
/// Resets the streaming-related state of this content block and signals that
/// streaming is done, so the chat UI does not wait for a response that will
/// never arrive (used when the AI request is skipped before it starts).
/// </summary>
private async Task CompleteWithoutStreaming()
{
    this.InitialRemoteWait = false;
    this.IsStreaming = false;

    // NOTE(review): StreamingDone is declared outside this view — presumably
    // notifies the UI that this block finished; confirm against IContent.
    await this.StreamingDone();
}

/// <summary>
/// Checks whether two models refer to the same model, comparing their IDs
/// while ignoring surrounding whitespace and letter casing.
/// </summary>
/// <param name="modelA">The first model to compare.</param>
/// <param name="modelB">The second model to compare.</param>
/// <returns>True when both model IDs match, false otherwise.</returns>
private static bool ModelsMatch(Model modelA, Model modelB)
    => string.Equals(modelA.Id.Trim(), modelB.Id.Trim(), StringComparison.OrdinalIgnoreCase);

/// <summary>
/// Verifies that the selected chat model is still offered by the given provider
/// before sending the AI request. Shows an error message to the user when the
/// model has disappeared from the provider's model list.
/// </summary>
/// <param name="provider">The provider that will serve the request.</param>
/// <param name="chatModel">The model the user selected for this chat.</param>
/// <param name="token">Token to cancel the model-list request.</param>
/// <returns>True when the request may proceed, false when it must be skipped.</returns>
private async Task<bool> CheckSelectedModelAvailability(IProvider provider, Model chatModel, CancellationToken token = default)
{
    // System models are managed by AI Studio itself; no availability check needed.
    if (chatModel.IsSystemModel)
        return true;

    if (string.IsNullOrWhiteSpace(chatModel.Id))
    {
        LOGGER.LogWarning("Skipping AI request because model ID is null or white space.");
        return false;
    }

    IEnumerable<Model> models;
    try
    {
        models = await provider.GetTextModels(token: token);
    }
    catch (OperationCanceledException)
    {
        // The request was canceled; abort silently without contacting the provider.
        return false;
    }
    catch (Exception e)
    {
        // When the model list cannot be loaded, give the provider the benefit of
        // the doubt and let the request proceed instead of blocking the user.
        LOGGER.LogWarning(e, "Skipping selected model availability check for '{ProviderInstanceName}' (provider={ProviderType}) because the model list could not be loaded.", provider.InstanceName, provider.Provider);
        return true;
    }

    // Ignore entries without a usable ID; they cannot match anything.
    var knownModels = models.Where(model => !string.IsNullOrWhiteSpace(model.Id)).ToList();
    if (knownModels.Count is 0)
    {
        LOGGER.LogWarning("Skipping AI request because there are no models available from '{ProviderInstanceName}' (provider={ProviderType}).", provider.InstanceName, provider.Provider);
        return false;
    }

    foreach (var knownModel in knownModels)
    {
        if (ModelsMatch(knownModel, chatModel))
            return true;
    }

    // The selected model vanished from the provider: inform the user and skip the request.
    var errorMessage = string.Format(
        TB("The selected model '{0}' is no longer available from '{1}' (provider={2}). Please adapt your provider settings."),
        chatModel.Id,
        provider.InstanceName,
        provider.Provider);

    await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, errorMessage));
    LOGGER.LogWarning("Skipping AI request because model '{ModelId}' is not available from '{ProviderInstanceName}' (provider={ProviderType}).", chatModel.Id, provider.InstanceName, provider.Provider);
    return false;
}

/// <inheritdoc />
public IContent DeepClone() => new ContentText
{
Expand Down Expand Up @@ -214,4 +288,4 @@ public async Task<string> PrepareTextContentForAI()
/// The text content.
/// </summary>
public string Text { get; set; } = string.Empty;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -1734,6 +1734,9 @@ UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T4188329028"] = "Nein, b
-- Export Chat to Microsoft Word
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T861873672"] = "Chat in Microsoft Word exportieren"

-- The selected model '{0}' is no longer available from '{1}' (provider={2}). Please adapt your provider settings.
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTTEXT::T3267850764"] = "Das ausgewählte Modell '{0}' ist bei '{1}' (Anbieter={2}) nicht mehr verfügbar. Bitte passen Sie Ihre Anbietereinstellungen an."

-- The local image file does not exist. Skipping the image.
UI_TEXT_CONTENT["AISTUDIO::CHAT::IIMAGESOURCEEXTENSIONS::T255679918"] = "Die lokale Bilddatei existiert nicht. Das Bild wird übersprungen."

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1734,6 +1734,9 @@ UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T4188329028"] = "No, kee
-- Export Chat to Microsoft Word
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T861873672"] = "Export Chat to Microsoft Word"

-- The selected model '{0}' is no longer available from '{1}' (provider={2}). Please adapt your provider settings.
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTTEXT::T3267850764"] = "The selected model '{0}' is no longer available from '{1}' (provider={2}). Please adapt your provider settings."

-- The local image file does not exist. Skipping the image.
UI_TEXT_CONTENT["AISTUDIO::CHAT::IIMAGESOURCEEXTENSIONS::T255679918"] = "The local image file does not exist. Skipping the image."

Expand Down
1 change: 1 addition & 0 deletions app/MindWork AI Studio/wwwroot/changelog/v26.3.1.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
- Added the ability to format your user prompt in the chat using icons instead of typing Markdown directly.
- Added the ability to load a system prompt from a file when creating or editing chat templates.
- Added a start-page setting, so AI Studio can now open directly on your preferred page when the app starts. Configuration plugins can also provide and optionally lock this default for organizations.
- Added pre-call validation to check if the selected model exists for the provider before making the request.
- Added math rendering in chats for LaTeX display formulas, including block formats such as `$$ ... $$` and `\[ ... \]`.
- Released the document analysis assistant after an intense testing phase.
- Improved the profile selection for assistants and the chat. You can now explicitly choose between the app default profile, no profile, or a specific profile.
Expand Down