diff --git a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs
index 36aa6140..d3a4dcba 100644
--- a/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs
+++ b/shell/agents/AIShell.Ollama.Agent/OllamaAgent.cs
@@ -1,10 +1,28 @@
using System.Diagnostics;
+using System.Text;
+using System.Text.Json;
using AIShell.Abstraction;
namespace AIShell.Ollama.Agent;
public sealed class OllamaAgent : ILLMAgent
{
+ private bool _reloadSettings;
+ private bool _isDisposed;
+ private string _configRoot;
+ private Settings _settings;
+ private FileSystemWatcher _watcher;
+
+ /// <summary>
+ /// The name of the setting file.
+ /// </summary>
+ private const string SettingFileName = "ollama.config.json";
+
+ /// <summary>
+ /// Gets the settings.
+ /// </summary>
+ internal Settings Settings => _settings;
+
/// <summary>
/// The name of the agent
/// </summary>
@@ -13,7 +31,7 @@ public sealed class OllamaAgent : ILLMAgent
/// <summary>
/// The description of the agent to be shown at startup
/// </summary>
- public string Description => "This is an AI assistant to interact with a language model running locally by utilizing the Ollama CLI tool. Be sure to follow all prerequisites in aka.ms/aish/ollama";
+ public string Description => "This is an AI assistant to interact with a language model running locally using the Ollama CLI tool. Be sure to follow all the prerequisites at https://github.com/PowerShell/AIShell/tree/main/shell/agents/AIShell.Ollama.Agent";
/// <summary>
/// This is the company added to /like and /dislike verbiage for who the telemetry helps.
@@ -42,7 +60,15 @@ public sealed class OllamaAgent : ILLMAgent
/// </summary>
public void Dispose()
{
+ if (_isDisposed)
+ {
+ return;
+ }
+
+ GC.SuppressFinalize(this);
_chatService?.Dispose();
+ _watcher?.Dispose();
+ _isDisposed = true;
}
/// <summary>
@@ -51,12 +77,31 @@ public void Dispose()
/// <param name="config">Agent configuration for any configuration file and other settings</param>
public void Initialize(AgentConfig config)
{
- _chatService = new OllamaChatService();
+ _configRoot = config.ConfigurationRoot;
+
+ SettingFile = Path.Combine(_configRoot, SettingFileName);
+ _settings = ReadSettings();
+
+ if (_settings is null)
+ {
+ // Create the setting file with examples to serve as a template for the user to update.
+ NewExampleSettingFile();
+ _settings = ReadSettings();
+ }
+
+ _chatService = new OllamaChatService(_settings);
+
+ _watcher = new FileSystemWatcher(_configRoot, SettingFileName)
+ {
+ NotifyFilter = NotifyFilters.LastWrite,
+ EnableRaisingEvents = true,
+ };
+ _watcher.Changed += OnSettingFileChange;
LegalLinks = new(StringComparer.OrdinalIgnoreCase)
{
["Ollama Docs"] = "https://github.com/ollama/ollama",
- ["Prerequisites"] = "https://aka.ms/ollama/readme"
+ ["Prerequisites"] = "https://github.com/PowerShell/AIShell/tree/main/shell/agents/AIShell.Ollama.Agent"
};
}
@@ -68,7 +113,7 @@ public void Initialize(AgentConfig config)
/// <summary>
/// Gets the path to the setting file of the agent.
/// </summary>
- public string SettingFile { private set; get; } = null;
+ public string SettingFile { private set; get; }
/// <summary>
/// Gets a value indicating whether the agent accepts a specific user action feedback.
@@ -87,7 +132,16 @@ public void OnUserAction(UserActionPayload actionPayload) {}
/// Refresh the current chat by starting a new chat session.
/// This method allows an agent to reset chat states, interact with the user for authentication, print a welcome message, and more.
/// </summary>
- public Task RefreshChatAsync(IShell shell, bool force) => Task.CompletedTask;
+ public Task RefreshChatAsync(IShell shell, bool force)
+ {
+ if (force)
+ {
+ // Reload the setting file if needed.
+ ReloadSettings();
+ }
+
+ return Task.CompletedTask;
+ }
/// <summary>
/// Main chat function that takes the user's input, passes it to the LLM, and renders the response.
@@ -103,23 +157,126 @@ public async Task ChatAsync(string input, IShell shell)
// get the cancellation token
CancellationToken token = shell.CancellationToken;
+ // Reload the setting file if needed.
+ ReloadSettings();
+
if (Process.GetProcessesByName("ollama").Length is 0)
{
- host.RenderFullResponse("Please be sure the Ollama is installed and server is running. Check all the prerequisites in the README of this agent are met.");
+ host.MarkupWarningLine($"[[{Name}]]: Please make sure Ollama is installed and the server is running. Check that all the prerequisites in the README of this agent are met.");
+ return false;
+ }
+
+ if (!SelfCheck(host))
+ {
+ return false;
+ }
+
+ try
+ {
+ ResponseData ollamaResponse = await host.RunWithSpinnerAsync(
+ status: "Thinking ...",
+ func: async context => await _chatService.GetChatResponseAsync(context, input, token)
+ ).ConfigureAwait(false);
+
+ if (ollamaResponse is not null)
+ {
+ // render the content
+ host.RenderFullResponse(ollamaResponse.response);
+ }
+ }
+ catch (HttpRequestException)
+ {
+ host.WriteErrorLine($"[{Name}]: Cannot serve the query because the Endpoint or Model is misconfigured. Please update the setting file accordingly.");
return false;
}
- ResponseData ollamaResponse = await host.RunWithSpinnerAsync(
- status: "Thinking ...",
- func: async context => await _chatService.GetChatResponseAsync(context, input, token)
- ).ConfigureAwait(false);
+ return true;
+ }
- if (ollamaResponse is not null)
+ internal void ReloadSettings()
+ {
+ if (_reloadSettings)
{
- // render the content
- host.RenderFullResponse(ollamaResponse.response);
+ _reloadSettings = false;
+ var settings = ReadSettings();
+ if (settings is null)
+ {
+ return;
+ }
+
+ _settings = settings;
+ _chatService.RefreshSettings(_settings);
}
-
+ }
+
+ private Settings ReadSettings()
+ {
+ Settings settings = null;
+ FileInfo file = new(SettingFile);
+
+ if (file.Exists)
+ {
+ try
+ {
+ using var stream = file.OpenRead();
+ var data = JsonSerializer.Deserialize(stream, SourceGenerationContext.Default.ConfigData);
+ settings = new Settings(data);
+ }
+ catch (Exception e)
+ {
+ throw new InvalidDataException($"Parsing settings from '{SettingFile}' failed with the following error: {e.Message}", e);
+ }
+ }
+
+ return settings;
+ }
+
+ private void OnSettingFileChange(object sender, FileSystemEventArgs e)
+ {
+ if (e.ChangeType is WatcherChangeTypes.Changed)
+ {
+ _reloadSettings = true;
+ }
+ }
+
+ internal bool SelfCheck(IHost host)
+ {
+ var settings = new (string settingValue, string settingName)[]
+ {
+ (_settings?.Model, "Model"),
+ (_settings?.Endpoint, "Endpoint")
+ };
+
+ foreach (var (settingValue, settingName) in settings)
+ {
+ if (string.IsNullOrWhiteSpace(settingValue))
+ {
+ host.WriteErrorLine($"[{Name}]: {settingName} is undefined. Please declare it in the setting file.");
+ return false;
+ }
+ }
+
return true;
}
+
+ private void NewExampleSettingFile()
+ {
+ string sampleContent = """
+ {
+ // To use Ollama API service:
+ // 1. Install Ollama:
+ // winget install Ollama.Ollama
+ // 2. Start Ollama API server:
+ // ollama serve
+ // 3. Install Ollama model:
+ // ollama pull phi3
+
+ // Declare Ollama model
+ "Model": "phi3",
+ // Declare Ollama endpoint
+ "Endpoint": "http://localhost:11434"
+ }
+ """;
+ File.WriteAllText(SettingFile, sampleContent, Encoding.UTF8);
+ }
}
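
The reload path above is deliberately lazy: the `FileSystemWatcher` callback only flips `_reloadSettings`, and the file is actually re-read at the next safe point (`ChatAsync` or a forced `RefreshChatAsync`). That avoids re-parsing a half-written file while an editor is still saving it. Below is a minimal standalone sketch of the same pattern; the `SettingsReloader` class, its plain-text payload, and the `IOException` handling are illustrative assumptions, not the agent's code.

```csharp
using System;
using System.IO;

// Sketch: flag-based lazy reload. The watcher callback never parses the file;
// consumers call ReloadIfNeeded() at a point where re-reading is safe.
internal sealed class SettingsReloader : IDisposable
{
    private readonly string _path;
    private readonly FileSystemWatcher _watcher;
    private volatile bool _dirty;

    public string CurrentText { get; private set; }

    public SettingsReloader(string directory, string fileName)
    {
        _path = Path.Combine(directory, fileName);
        CurrentText = File.Exists(_path) ? File.ReadAllText(_path) : null;

        // Watch exactly one file; LastWrite fires when an editor saves it.
        _watcher = new FileSystemWatcher(directory, fileName)
        {
            NotifyFilter = NotifyFilters.LastWrite,
            EnableRaisingEvents = true,
        };
        _watcher.Changed += (_, e) =>
        {
            if (e.ChangeType is WatcherChangeTypes.Changed) { _dirty = true; }
        };
    }

    // Call at the start of each operation, e.g. each chat turn.
    public void ReloadIfNeeded()
    {
        if (!_dirty) { return; }
        _dirty = false;

        try { CurrentText = File.ReadAllText(_path); }
        catch (IOException) { /* keep the previous content on a transient failure */ }
    }

    public void Dispose() => _watcher.Dispose();
}
```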
diff --git a/shell/agents/AIShell.Ollama.Agent/OllamaChatService.cs b/shell/agents/AIShell.Ollama.Agent/OllamaChatService.cs
index 8809eff3..746e5e4d 100644
--- a/shell/agents/AIShell.Ollama.Agent/OllamaChatService.cs
+++ b/shell/agents/AIShell.Ollama.Agent/OllamaChatService.cs
@@ -7,11 +7,7 @@ namespace AIShell.Ollama.Agent;
internal class OllamaChatService : IDisposable
{
- /// <summary>
- /// Ollama endpoint to call to generate a response
- /// </summary>
- internal const string Endpoint = "http://localhost:11434/api/generate";
-
+ private Settings _settings;
/// <summary>
/// Http client
/// </summary>
@@ -20,11 +16,21 @@ internal class OllamaChatService : IDisposable
/// <summary>
/// Initializes the HTTP client.
/// </summary>
- internal OllamaChatService()
+ internal OllamaChatService(Settings settings)
{
+ _settings = settings;
_client = new HttpClient();
}
+ /// <summary>
+ /// Refresh settings.
+ /// </summary>
+ /// <param name="settings">The new settings to use.</param>
+ internal void RefreshSettings(Settings settings)
+ {
+ _settings = settings;
+ }
+
/// <summary>
/// Dispose of the http client
/// </summary>
@@ -43,7 +49,7 @@ private HttpRequestMessage PrepareForChat(string input)
// Main data to send to the endpoint
var requestData = new Query
{
- model = "phi3",
+ model = _settings.Model,
prompt = input,
stream = false
};
@@ -51,7 +57,7 @@ private HttpRequestMessage PrepareForChat(string input)
var json = JsonSerializer.Serialize(requestData);
var data = new StringContent(json, Encoding.UTF8, "application/json");
- var request = new HttpRequestMessage(HttpMethod.Post, Endpoint) { Content = data };
+ var request = new HttpRequestMessage(HttpMethod.Post, $"{_settings.Endpoint}/api/generate") { Content = data };
return request;
}
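
For context, `PrepareForChat` now composes the endpoint from settings and posts a single JSON document to Ollama's `/api/generate` route. A self-contained sketch of the full round trip follows; the record shapes mirror the `model`/`prompt`/`stream` request fields and the `response` field this diff relies on, while the demo class and method names are assumptions.

```csharp
using System;
using System.Net.Http;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;

// Request/response shapes matching the fields used by the agent.
internal sealed record Query(string model, string prompt, bool stream);
internal sealed record ResponseData(string response);

internal static class OllamaGenerateDemo
{
    private static readonly HttpClient s_client = new();

    public static async Task<string> AskAsync(string endpoint, string model, string input)
    {
        // Settings trim the trailing '/', so plain concatenation yields a clean URL.
        var json = JsonSerializer.Serialize(new Query(model, input, stream: false));
        using var content = new StringContent(json, Encoding.UTF8, "application/json");
        using var response = await s_client.PostAsync($"{endpoint}/api/generate", content);
        response.EnsureSuccessStatusCode();

        // With stream = false, Ollama returns one JSON object whose
        // "response" property holds the full completion.
        using var stream = await response.Content.ReadAsStreamAsync();
        ResponseData data = await JsonSerializer.DeserializeAsync<ResponseData>(stream);
        return data?.response;
    }
}

// Usage (assumes a local Ollama server with the phi3 model pulled):
//   string answer = await OllamaGenerateDemo.AskAsync("http://localhost:11434", "phi3", "Hello!");
```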
diff --git a/shell/agents/AIShell.Ollama.Agent/README.md b/shell/agents/AIShell.Ollama.Agent/README.md
index 559d504a..87375aec 100644
--- a/shell/agents/AIShell.Ollama.Agent/README.md
+++ b/shell/agents/AIShell.Ollama.Agent/README.md
@@ -5,18 +5,30 @@ this agent you need to have Ollama installed and running.
## Pre-requisites to using the agent
-- Install [Ollama](https://github.com/ollama/ollama)
-- Install a [Ollama model](https://github.com/ollama/ollama?tab=readme-ov-file#model-library), we
- suggest using the `phi3` model as it is set as the default model in the code
+- Install [Ollama](https://github.com/ollama/ollama)
+- Install an [Ollama model](https://github.com/ollama/ollama?tab=readme-ov-file#model-library); we suggest the `phi3` model, as it is the default in the sample setting file
- [Start the Ollama API server](https://github.com/ollama/ollama?tab=readme-ov-file#start-ollama)
## Configuration
-Currently to change the model you will need to modify the query in the code in the
-`OllamaChatService` class. The default model is `phi3`.
-
-The default endpoint is `http://localhost:11434/api/generate` with `11434` being the default port. This can be changed in the code
-and eventually will be added to a configuration file.
+To configure the agent, run `/agent config ollama` to open the setting file in your default editor, and then update the file based on the following example.
+
+```json
+{
+ // To use Ollama API service:
+ // 1. Install Ollama:
+ // winget install Ollama.Ollama
+ // 2. Start Ollama API server:
+ // ollama serve
+ // 3. Install Ollama model:
+ // ollama pull phi3
+
+ // Declare Ollama model
+ "Model": "phi3",
+ // Declare Ollama endpoint
+ "Endpoint": "http://localhost:11434"
+}
+```
## Known Limitations
@@ -24,5 +36,3 @@ and eventually will be added to a configuration file.
queries
- Streaming is currently not supported; if you change the stream value to `true` in the data sent
to the API, it will not work
-- Configuration is currently hard coded in the code and will be moved to a configuration file in the
- future
\ No newline at end of file
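
On the streaming limitation above: the agent hard-codes `stream = false`, because with `"stream": true` Ollama replies with one JSON object per line rather than a single document, which the current single-object deserialization cannot handle. A hedged sketch of what consuming the streamed form could look like (the class and method names are hypothetical, not part of this PR):

```csharp
using System;
using System.IO;
using System.Net.Http;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;

internal static class StreamingDemo
{
    public static async Task StreamAsync(string endpoint, string model, string prompt)
    {
        using var client = new HttpClient();
        var body = new StringContent(
            JsonSerializer.Serialize(new { model, prompt, stream = true }),
            Encoding.UTF8,
            "application/json");

        using var request = new HttpRequestMessage(HttpMethod.Post, $"{endpoint}/api/generate") { Content = body };
        // ResponseHeadersRead lets us consume the body incrementally.
        using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
        response.EnsureSuccessStatusCode();

        using var reader = new StreamReader(await response.Content.ReadAsStreamAsync());
        string line;
        while ((line = await reader.ReadLineAsync()) is not null)
        {
            if (string.IsNullOrWhiteSpace(line)) { continue; }

            // Each line is a complete JSON object; "response" carries the next fragment.
            using var doc = JsonDocument.Parse(line);
            Console.Write(doc.RootElement.GetProperty("response").GetString());
            if (doc.RootElement.TryGetProperty("done", out var done) && done.GetBoolean()) { break; }
        }
    }
}
```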
diff --git a/shell/agents/AIShell.Ollama.Agent/Settings.cs b/shell/agents/AIShell.Ollama.Agent/Settings.cs
new file mode 100644
index 00000000..ad7dccef
--- /dev/null
+++ b/shell/agents/AIShell.Ollama.Agent/Settings.cs
@@ -0,0 +1,38 @@
+using System.Text.Json;
+using System.Text.Json.Serialization;
+
+namespace AIShell.Ollama.Agent;
+
+internal class Settings
+{
+ private string _model;
+ private string _endpoint;
+
+ public string Model => _model;
+ public string Endpoint => _endpoint;
+
+ public Settings(ConfigData configData)
+ {
+ _model = configData?.Model;
+ _endpoint = configData?.Endpoint?.TrimEnd('/');
+ }
+}
+
+internal class ConfigData
+{
+ public string Model { get; set; }
+ public string Endpoint { get; set; }
+}
+
+/// <summary>
+/// Use source generation to serialize and deserialize the setting file.
+/// Both metadata-based and serialization-optimization modes are used to gain the best performance.
+/// </summary>
+[JsonSourceGenerationOptions(
+ WriteIndented = true,
+ AllowTrailingCommas = true,
+ PropertyNameCaseInsensitive = true,
+ ReadCommentHandling = JsonCommentHandling.Skip,
+ UseStringEnumConverter = true)]
+[JsonSerializable(typeof(ConfigData))]
+internal partial class SourceGenerationContext : JsonSerializerContext { }
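
Because `ReadCommentHandling = JsonCommentHandling.Skip` and `AllowTrailingCommas = true` are baked into the source-generated options, the commented sample setting file parses without any per-call configuration. A small sketch reusing the `ConfigData`, `Settings`, and `SourceGenerationContext` types above (the demo harness itself is an assumption):

```csharp
using System;
using System.Text.Json;

internal static class SettingsDemo
{
    private static void Main()
    {
        // Same shape as the generated sample file, comments included.
        const string json = """
            {
              // Declare Ollama model
              "Model": "phi3",
              // Declare Ollama endpoint
              "Endpoint": "http://localhost:11434/"
            }
            """;

        // The source-generated type info carries the lenient read options,
        // so the comments above do not break deserialization.
        ConfigData data = JsonSerializer.Deserialize(json, SourceGenerationContext.Default.ConfigData);

        // The Settings constructor trims the trailing '/', so the composed
        // request URL "{Endpoint}/api/generate" never has a double slash.
        var settings = new Settings(data);
        Console.WriteLine(settings.Endpoint); // prints: http://localhost:11434
    }
}
```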